From b1481606377a4bf665495a44dc13df07a0052cbd Mon Sep 17 00:00:00 2001
From: mageed
Date: Sat, 23 Jan 2016 22:21:45 -0500
Subject: [PATCH 01/36] update

---
 python_tutorial_part_1.ipynb | 4643 ++++++++++++++++++++++++++++++++++
 python_tutorial_part_2.ipynb |  977 +++++++
 2 files changed, 5620 insertions(+)
 create mode 100644 python_tutorial_part_1.ipynb
 create mode 100644 python_tutorial_part_2.ipynb

diff --git a/python_tutorial_part_1.ipynb b/python_tutorial_part_1.ipynb
new file mode 100644
index 0000000..ce15fa0
--- /dev/null
+++ b/python_tutorial_part_1.ipynb
@@ -0,0 +1,4643 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "# Quick Python Tutorial"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This tutorial should grow over time.\n",
+    "Python has a number of types. You need to be familiar with some of them as a start, and you will learn about more as you go. Let's quickly investigate some of these here:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Integers and floats"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 231,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "22\n",
+      "33.33\n",
+      "I am an integer/int: 22\n",
+      "I am a float: 33.33\n"
+     ]
+    }
+   ],
+   "source": [
+    "#---------------------\n",
+    "# Integers and floats:\n",
+    "#---------------------\n",
+    "# You can use Python as a calculator; and when you do, you are interacting with numbers that may have \n",
+    "# \"int\" or \"float\" types. Let's print these, with a \"print\" statement.\n",
+    "print(22) # an integer\n",
+    "print(33.33) # a float\n",
+    "\n",
+    "# You can print more than one object with the same print statement, if you use a \",\" (comma) in between.\n",
+    "# (Hint: Both the integer 22 and the float 33.33 are 'objects' in the Python language.\n",
+    "# They are objects of type 'int' and type 'float,' respectively)\n",
+    "print \"I am an integer/int:\", 22 # an integer\n",
+    "print \"I am a float:\", 33.33 # a float"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1 2 3\n"
+     ]
+    }
+   ],
+   "source": [
+    "# If you use a comma after the print statement/function, it will suppress\n",
+    "# the new line character \"\\n\". You can see it better in a \"for loop\".\n",
+    "l=[1, 2, 3]\n",
+    "for i in l:\n",
+    "    print i,"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I am an int division: 0\n",
+      "I am a float division: 0.666666666667\n"
+     ]
+    }
+   ],
+   "source": [
+    "# You can perform operations on ints and floats, \n",
+    "# but be cautious as to the difference between int division and float division\n",
+    "my_int= 22\n",
+    "my_new_int=33\n",
+    "my_float= 33.0\n",
+    "print \"I am an int division:\", my_int/my_new_int\n",
+    "print \"I am a float division:\", my_int/my_float\n",
+    "# (Hint: We assigned the numbers to some variables above; more about 'assignment' below.\n",
+    "# You can think about this just as storing something in another. It's like you put something in a box and \n",
+    "# you are now just looking at the box from outside. Another metaphor is simply that you called each of the numbers a name\n",
+    "# and can now interact with the numbers using these names.)"
+   ]
+  },
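+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A minimal sketch of two other standard ways around integer division in Python 2.\n",
+    "# Note: the __future__ import must come before any other code in the cell.\n",
+    "from __future__ import division\n",
+    "print 22//7   # \"//\" is floor division: it always rounds down, here to 3\n",
+    "print 22/7    # with the import above, \"/\" is true division, as in Python 3"
+   ]
+  },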
+  {
+   "cell_type": "code",
+   "execution_count": 234,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "2.0"
+      ]
+     },
+     "execution_count": 234,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "#You can use the \"float\" function:\n",
+    "20/float(10)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 256,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "22 S. Walnut St.\n",
+      "('This is the list', [1, 2, 3, 3, 4])\n",
+      "('This is the set', set([1, 2, 3, 4]))\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Side note:\n",
+    "# There are also other built-in functions for type-casting \n",
+    "# Casting an int into a string:\n",
+    "address= str(22)+\" S. Walnut St.\"\n",
+    "print(address)\n",
+    "# Casting a list into a set:\n",
+    "num_list=[1, 2, 3, 3, 4]\n",
+    "print(\"This is the list\", num_list)\n",
+    "num_set=set(num_list)\n",
+    "print(\"This is the set\", num_set)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "# Strings"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Hello world\n"
+     ]
+    }
+   ],
+   "source": [
+    "#--------\n",
+    "# String:\n",
+    "#--------\n",
+    "# The string type is for characters like \"Hello world\". We can print this string:\n",
+    "print \"Hello world\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Hello world\n"
+     ]
+    }
+   ],
+   "source": [
+    "# The above is called a print statement. We can assign the string to a variable\n",
+    "greeting = \"Hello world\"\n",
+    "# We call the word \"greeting\" a \"variable\" and the string \"Hello world\" a value. \n",
+    "# What we did is \"assign\" the value \"Hello world\" to the variable \"greeting\".\n",
+    "# The \"=\" is called an operator and we use it for \"assignment\". (This is important!)\n",
+    "# We can now print \"greeting\":\n",
+    "print greeting"
+   ]
+  },
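+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A short, hedged aside: strings come with handy built-in methods.\n",
+    "# These are all standard Python string methods you can try yourself:\n",
+    "greeting = \"Hello world\"\n",
+    "print greeting.upper()                      # HELLO WORLD\n",
+    "print greeting.lower()                      # hello world\n",
+    "print greeting.replace(\"world\", \"Python\")   # Hello Python\n",
+    "print greeting[0], greeting[-1]             # H d  (strings can be indexed like lists)"
+   ]
+  },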
\n", + "# Similarly, the \"len()\" function opertates in some objects, like strings, \n", + "# and tells us about their length in characters:\n", + "print \"The type of the variable: \", type(feeling)\n", + "print \"The type of the value: \", len(feeling)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Lists" + ] + }, + { + "cell_type": "code", + "execution_count": 247, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I'm an empty list []\n", + "I'm list of string items ['Python', 'Lua', 'Java']\n", + "I'm list of numbers [44, 11, 55]\n", + "I'm list of string items and numbers ['Hello', 88, 4.0, 'Hey there!']\n", + "I'm list of string items and numbers, and an internal list ['Hello', 88, 4.0, 'Hey there!', ['a', 'z']]\n" + ] + } + ], + "source": [ + "#--------\n", + "# List:\n", + "#--------\n", + "# A list is another Python data type where you can store and access your data with a lot of flexibility.\n", + "# The list is a square-bracketed, comma-separated sequence of items.\n", + "# So, to create a list, you use square brakets.\n", + "# This is an empty list:\n", + "my_first_list=[]\n", + "print \"I'm an empty list\", my_first_list\n", + "# Items in a list can be strings, or numbers, or a mixture\n", + "words=[\"Python\", \"Lua\", \"Java\"]\n", + "numbs= [44, 11, 55] \n", + "words_and_numbs= [\"Hello\", 88, 4.0, \"Hey there!\"]\n", + "print \"I'm list of string items\", words\n", + "print \"I'm list of numbers\", numbs\n", + "print \"I'm list of string items and numbers\", words_and_numbs\n", + "words_and_numbs_and_internal_list= [\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]]\n", + "print \"I'm list of string items and numbers, and an internal list\",\\\n", + " words_and_numbs_and_list" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'z'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Accessing an item from an internal list, directly:\n", + "[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]][-1][-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 272, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 272, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Here's the same as above, a bit unfolded:\n", + "x=[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]]\n", + "internal=x[-1]\n", + "internal[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 275, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 275, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# One more example:\n", + "[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]][4][-2]" + ] + }, + { + "cell_type": "code", + "execution_count": 278, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3\n", + "This is the first item in the list, and its index is zero Apple\n", + "This is the last item in the list Facebook\n", + "Pay him 3000 $\n" + ] + } + ], + "source": [ + "# Length of a list, and more on indexing and slicing:\n", + "#----------------------------------------------------\n", + "# Similar to a string, you can get the length of a list:\n", + 
"tech_comp=[\"Apple\", \"Google\", \"Facebook\"]\n", + "print len(tech_comp)\n", + "# You can also slice from a list, using the bractets with an integer index.\n", + "# Notice: we start from index \"zero\".\n", + "print \"This is the first item in the list, and its index is zero\", tech_comp[0]\n", + "# You can also access a list from the end, with a minus index\n", + "print \"This is the last item in the list\", tech_comp[-1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is my first tuple: ('Tiger', 'Lion', 'Monkey')\n" + ] + } + ], + "source": [ + "# Tuples:\n", + "#--------\n", + "# A tuple is like a list, but its items are immutable/unchangeable.\n", + "# The syntax is different in that the tuple employs the parathenses \"()\"\n", + "my_animals_tuple=(\"Tiger\", \"Lion\", \"Monkey\")\n", + "print \"This is my first tuple: \", my_tuple" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "My list before changes: ['Tiger', 'Lion', 'Monkey']\n", + "My list after changes: ['Goat', 'Lion', 'Monkey']\n" + ] + } + ], + "source": [ + "# So you can change an item in a list, but not in a tuple:\n", + "my_animals_list=[\"Tiger\", \"Lion\", \"Monkey\"]\n", + "print \"My list before changes: \", my_animals_list\n", + "my_animals_list[0]=\"Goat\"\n", + "print \"My list after changes: \", my_animals_list\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This will give an error!!!\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'my_tuple' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Trying to change this will give an error:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!!!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmy_tuple\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"Goat\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'my_tuple' is not defined" + ] + } + ], + "source": [ + "# Trying to change this will give an error:\n", + "print \"This will give an error!!!\"\n", + "my_tuple[0]=\"Goat\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('z', 'b')\n" + ] + } + ], + "source": [ + "# You can cast a tuple to a list and make the change, and cast back to a tuple!\n", + "t=(\"a\", \"b\")\n", + "l=list(t)\n", + "l[0]=\"z\"\n", + "t=tuple(l)\n", + "print t" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Dictionaries" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": 
"stream", + "text": [ + "Printing the 'students' dict: {777: 'Mary', 1111: 'John'}\n", + "The length of the 'students' dict is: 2\n", + "The value of the key 777 in the 'students' dict is: Mary\n", + "{777: 'Mary', 555: 'Maggi', 1111: 'John'}\n", + "Maggi\n" + ] + } + ], + "source": [ + "# A Ptthon dictionary is a \"mapping\" type. We map a \"key\" to a \"value\".\n", + "# For example, we can map a \"student_id\" to the \"name\" of a student.\n", + "# The sytax is simple: We use the curly braces, and delimit each key:value pair by the \"colon\"\n", + "students={1111: \"John\", 777: \"Mary\"}\n", + "print \"Printing the 'students' dict: \", students\n", + "print \"The length of the 'students' dict is: \", len(students)\n", + "# This is how you access the value of the key 777\n", + "print \"The value of the key 777 in the 'students' dict is: \", students[777]\n", + "students[555]=\"Maggi\"\n", + "print students\n", + "print students[555]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{777: 'Mary', 555: 'Maggi', 1111: 'John'}\n" + ] + } + ], + "source": [ + "my_dict={777: 'Mary', 555: 'Maggi', 1111: 'John'}\n", + "my_dict[\"XYZ\"]=\"___\"\n", + "#delete an item in the dict\n", + "del my_dict[\"XYZ\"]\n", + "\n", + "print my_dict" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['a', 'b', 'c']\n" + ] + }, + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l=[\"a\", \"b\"]\n", + "l.append(\"c\")\n", + "print l\n", + "l[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The value of the key 'CS' in the 'students' dict is: ['John', 'Alex', 'Amanda']\n" + ] + } + ], + "source": [ + "# A value in a Python dict can be a string, a list, another dict, etc.\n", + "# So, if \"Alex\" and \"Amanda\" are also students in CS, then we can have the value for the key 'CS' as a list:\n", + "students={\"CS\": [\"John\", \"Alex\", \"Amanda\"] , \"Business\": \"Mary\"}\n", + "# And now when we print, we get all the students in CS as a full list:\n", + "print \"The value of the key 'CS' in the 'students' dict is: \", students['CS']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Files: Just a quick note!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Python is very efficient with files and text processing. 
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Files: Just a quick note!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Python is very efficient with files and text processing. Let's see how we open and interact with a file:\n",
+    "my_file=open(\"path_to_my_read_file\", \"r\").read() # Opens for reading and gets you the file content as a string\n",
+    "my_file=open(\"path_to_my_read_file\", \"r\").readlines() # Opens for reading and gets you the file content as a list of lines\n",
+    "out_file=open(\"path_to_my_write_file\", \"w\") # Opens for writing\n",
+    "\n",
+    "# This is how to print/write to a file\n",
+    "s=\"This is a line\"\n",
+    "print>>out_file, s\n",
+    "out_file.write(s)\n",
+    "out_file.close() # Close the file when you are done, so the data is flushed to disk"
+   ]
+  },
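+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Hedged aside: the \"with\" statement (standard Python) opens a file and\n",
+    "# closes it for you automatically, even if an error happens in between.\n",
+    "# \"path_to_my_write_file\" is the same placeholder path as above:\n",
+    "with open(\"path_to_my_write_file\", \"w\") as out_file:\n",
+    "    out_file.write(\"This is a line\\n\")\n",
+    "# out_file is already closed here"
+   ]
+  },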
is $\", 2 \n", + "\n", + "price = coke_vending_machine()\n", + "print type(price), price" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vikas ---> 105000.0\n", + "Revathi ---> 73500.0\n" + ] + } + ], + "source": [ + "# Create a function that calculates a raise for a database of employees\n", + "# key is name of an employee, value is base_salary\n", + "employees={\"Revathi\": 70000, \"Vikas\": 100000} \n", + "\n", + "def yearly_raise(base_salary):\n", + " return base_salary *1.05\n", + "\n", + "\n", + "for k in employees:\n", + " print k,\"--->\" , yearly_raise(employees[k])\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12\n", + "54\n" + ] + } + ], + "source": [ + "# Parallel processing of two lists:\n", + "list_a=[10, 50, 4]\n", + "list_b=[2, 4]\n", + "for i in range(len(list_a)):\n", + " try:\n", + " print list_a[i]+list_b[i]\n", + " except:\n", + " pass " + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(10, 2), (50, 4)]" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Python gives you a function \"zip\" that returns a list of tuples when passed two lists:\n", + "x=[10, 50]\n", + "y=[2, 4]\n", + "zip(x,y)\n", + "# (Hint: There is also \"izip\". What does it do? Can you tell?)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12\n", + "54\n" + ] + } + ], + "source": [ + "# Process with \"zip\"\n", + "x=[10, 50]\n", + "y=[2, 4]\n", + "my_list_of_tuples=zip(x,y)\n", + "\n", + "for pair in my_list_of_tuples:\n", + " print pair[0]+pair[1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[12, 54]" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# On the fly, with list comprehension:\n", + "[i[0]+i[1] for i in zip(x,y)]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[2, 3, 4, 5, 6, 7, 8, 9]" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Above, we used \"range\". 
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[2, 3, 4, 5, 6, 7, 8, 9]"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Above, we used \"range\". It returns a list, starting from a given point\n",
+    "# (2 below), up to but not including the end point:\n",
+    "range(2, 10)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# More on functions"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I\n",
+      "am\n",
+      "happy\n"
+     ]
+    }
+   ],
+   "source": [
+    "# First look at this:\n",
+    "s=\"I am happy\"\n",
+    "words= s.split()\n",
+    "for w in words:\n",
+    "    print w"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'and': 4, 'learning.': 1, '(the': 1, 'family': 1, 'be': 1, 'other.': 1, 'experience,': 1, 'unknown.Artificial': 1, 'number': 1, 'numeric': 1, 'connections': 1, 'as': 1, 'brain)': 1, 'are': 4, 'learning': 1, 'in': 1, 'based': 1, 'tuned': 1, 'nets': 1, 'networks': 3, '(ANNs)': 1, 'functions': 1, 'depend': 1, 'capable': 1, 'nervous': 1, 'exchange': 1, 'generally': 2, 'approximate': 1, 'artificial': 1, 'machine': 1, 'to': 2, 'systems': 2, 'which': 1, 'between': 1, 'adaptive': 1, '\"neurons\"': 1, 'inputs': 2, 'used': 1, 'that': 2, 'models': 1, 'each': 1, 'animals,': 1, 'particular': 1, 'The': 1, 'estimate': 1, 'by': 1, 'a': 2, 'on': 2, 'central': 1, 'cognitive': 1, 'neural': 4, 'of': 5, 'inspired': 1, 'presented': 1, 'messages': 1, 'science,': 1, 'interconnected': 1, 'large': 1, 'weights': 1, 'can': 2, 'have': 1, 'In': 1, 'biological': 1, 'the': 1, 'or': 1, 'making': 1}\n"
+     ]
+    }
+   ],
+   "source": [
+    "# A function can \"return\" an object.\n",
+    "# We provide an example here\n",
+    "\n",
+    "# text below is from https://en.wikipedia.org/wiki/Artificial_neural_network\n",
+    "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n",
+    "    are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n",
+    "    in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n",
+    "    number of inputs and are generally unknown.\"\n",
+    "    \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n",
+    "    exchange messages between each other. The connections have numeric weights that can be tuned based \\\n",
+    "    on experience, making neural nets adaptive to inputs and capable of learning.\"]\n",
+    "def get_dict(sentences):\n",
+    "    \"\"\"\n",
+    "    arguments:\n",
+    "        input: @sentences: a list of sentences\n",
+    "    returns: a dictionary of the words in the sentences.\n",
+    "        dict key is a word and value is word frequency\n",
+    "    \"\"\"\n",
+    "    word_freq={}\n",
+    "    for sent in sentences:\n",
+    "        words=sent.split()\n",
+    "        for w in words:\n",
+    "            if w in word_freq:\n",
+    "                word_freq[w]+=1\n",
+    "            else:\n",
+    "                word_freq[w]=1\n",
+    "    return word_freq\n",
+    "    \n",
+    "    \n",
+    "my_word_freq_dict=get_dict(sentences)\n",
+    "print my_word_freq_dict"
+   ]
+  },
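+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Hedged follow-up: a common next step is sorting a frequency dict by value.\n",
+    "# sorted() takes a \"key\" function; here we sort by the count, descending.\n",
+    "# (The small dict below is just illustrative data.)\n",
+    "word_freq={\"the\": 5, \"neural\": 4, \"of\": 5, \"brain\": 1}\n",
+    "for word, count in sorted(word_freq.items(), key=lambda pair: pair[1], reverse=True):\n",
+    "    print word, count"
+   ]
+  },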
+  {
+   "cell_type": "code",
+   "execution_count": 57,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "set(['it.', 'sure', 'like', 'People', 'I', 'people', 'am', 'do!'])\n",
+      "set(['it.', 'sure', 'like', 'people', 'i', 'do!', 'am'])\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Sidenote on lowercasing and splitting\n",
+    "text=\"People like it. I am sure people do!\"\n",
+    "x1=set(text.split())\n",
+    "print(x1)\n",
+    "x2=set(text.lower().split())\n",
+    "print(x2)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 66,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "People like it. I am sure people do\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Cleaning with regex\n",
+    "text=\"People like it. I am sure people do!\"\n",
+    "import re\n",
+    "text=re.sub(\"!\", \"\", text)\n",
+    "print text\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 70,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "('', 'Ooops! There was text here!')\n",
+      "(' ', 'Ooops! There was text here!')\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Cleaning with regex, more..\n",
+    "import re\n",
+    "# Remove *all* chars (any character, including white space)\n",
+    "text=\"People like it. I am sure people do!\"\n",
+    "text=re.sub(\".\", \"\", text)\n",
+    "print(text, \"Ooops! There was text here!\")\n",
+    "# Remove all *non-white space* chars\n",
+    "text=\"People like it. I am sure people do!\"\n",
+    "text=re.sub(\"\\S+\", \"\", text)\n",
+    "print (text, \"Ooops! There was text here!\")"
+   ]
+  },
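+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Hedged aside: regex character classes let you target just some characters.\n",
+    "# Here we strip the punctuation marks \".\" and \"!\" but keep the words:\n",
+    "import re\n",
+    "text=\"People like it. I am sure people do!\"\n",
+    "print re.sub(r\"[.!]\", \"\", text)   # People like it I am sure people do"
+   ]
+  },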
+  {
+   "cell_type": "code",
+   "execution_count": 74,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Here are your keys with values > 2:\n",
+      "****************************************\n",
+      "and 4\n",
+      "are 4\n",
+      "networks 3\n",
+      "neural 4\n",
+      "of 5\n",
+      "\n",
+      "Here are your keys with values > 1 and keys of more than 5 chars:\n",
+      "**********************************************************************\n",
+      "networks 3\n",
+      "generally 2\n",
+      "systems 2\n",
+      "inputs 2\n",
+      "neural 4\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Here's the same function as above, but using python's \"defaultdict\"\n",
+    "from collections import defaultdict\n",
+    "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n",
+    "    are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n",
+    "    in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n",
+    "    number of inputs and are generally unknown.\"\n",
+    "    \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n",
+    "    exchange messages between each other. The connections have numeric weights that can be tuned based \\\n",
+    "    on experience, making neural nets adaptive to inputs and capable of learning.\"]\n",
+    "\n",
+    "def get_dict(sentences):\n",
+    "    \"\"\"\n",
+    "    arguments:\n",
+    "        input: @sentences: a list of sentences\n",
+    "    returns: a dictionary of the words in the sentences.\n",
+    "        dict key is a word and value is word frequency\n",
+    "    \"\"\"\n",
+    "    word_freq=defaultdict(int)\n",
+    "    for sent in sentences:\n",
+    "        words=sent.split()\n",
+    "        for w in words:\n",
+    "            word_freq[w]+=1\n",
+    "    return word_freq\n",
+    "    \n",
+    "my_word_freq_dict=get_dict(sentences)\n",
+    "# Let's print only keys with values > 2 this time\n",
+    "print \"Here are your keys with values > 2:\\n\", \"*\"*40\n",
+    "for k in my_word_freq_dict:\n",
+    "    if my_word_freq_dict[k] > 2:\n",
+    "        print k, my_word_freq_dict[k]\n",
+    "\n",
+    "# Let's print only keys whose length > 5 (so keys that have at least 6 characters/letters) and values > 1 \n",
+    "print \"\\nHere are your keys with values > 1 and keys of more than 5 chars:\\n\", \"*\"*70\n",
+    "for k in my_word_freq_dict:\n",
+    "    if my_word_freq_dict[k] > 1 and len(k) > 5:\n",
+    "        print k, my_word_freq_dict[k]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Conditionals"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "This is such a short list!\n"
+     ]
+    }
+   ],
+   "source": [
+    "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n",
+    "if len(fruits) < 10:\n",
+    "    print \"This is such a short list!\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 56,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I need an apple!\n"
+     ]
+    }
+   ],
+   "source": [
+    "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n",
+    "if \"apple\" not in fruits:\n",
+    "    print \"No apples?!\"\n",
+    "else:\n",
+    "    print \"I need an apple!\"\n",
+    "#---------------\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 57,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "No apples?!\n"
+     ]
+    }
+   ],
+   "source": [
+    "fruits=[]\n",
+    "if \"apple\" not in fruits:\n",
+    "    print \"No apples?!\"\n",
+    "elif \"banana\" in fruits:\n",
+    "    print \"I need a banana!\"\n",
+    "else:\n",
+    "    print \"I need an apple!\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# List Comprehension"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['s', 't', 'r', 'a', 'w', 'b', 'e', 'r', 'r', 'y']\n"
+     ]
+    }
+   ],
+   "source": [
+    "dessert=\"strawberry\"\n",
+    "chars=[char for char in dessert]\n",
+    "print chars"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['s', 't', 'r', 'w', 'b', 'r', 'r']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# With a condition\n",
+    "dessert=\"strawberry\"\n",
+    "vowels=[\"a\", \"e\", \"y\"]\n",
+    "chars=[char for char in dessert if char not in vowels]\n",
+    "print chars"
+   ]
+  },
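+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Hedged aside: the same comprehension idea also works for dicts\n",
+    "# (standard in Python 2.7 and later):\n",
+    "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n",
+    "lengths={fruit: len(fruit) for fruit in fruits}\n",
+    "print lengths   # maps each fruit to the length of its name"
+   ]
+  },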
"output_type": "stream", + "text": [ + "k\n", + "i\n", + "n\n", + "g\n", + "['i', 'n', 'g']\n" + ] + } + ], + "source": [ + "for c in \"king\":\n", + " print c\n", + " \n", + "x=[c for c in \"king\" if c !=\"k\"]\n", + "print x" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# NLTK Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "*** Introductory Examples for the NLTK Book ***\n", + "Loading text1, ..., text9 and sent1, ..., sent9\n", + "Type the name of the text or sentence to view it.\n", + "Type: 'texts()' or 'sents()' to list the materials.\n", + "text1: Moby Dick by Herman Melville 1851\n", + "text2: Sense and Sensibility by Jane Austen 1811\n", + "text3: The Book of Genesis\n", + "text4: Inaugural Address Corpus\n", + "text5: Chat Corpus\n", + "text6: Monty Python and the Holy Grail\n", + "text7: Wall Street Journal\n", + "text8: Personals Corpus\n", + "text9: The Man Who Was Thursday by G . K . Chesterton 1908\n" + ] + } + ], + "source": [ + "# Take a look at the preface here: http://www.nltk.org/book/ch00.html\n", + "# This tutorial is based on Python 2.7, but it shouldn't be an issue to write the same code for Python 3 as the differences\n", + "# are minimal so long as the tutorial is concerned\n", + "import nltk\n", + "from nltk.book import *" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 253, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Displaying 10 of 66 matches:\n", + "r occupations may come . The Negroes are now Americans . Their ancestors came here years ago agains\n", + "e it so or not . And yet we are not the less Americans on that account . We shall be the more Ameri\n", + "we find them now secure ; and there comes to Americans the profound assurance that our representati\n", + "have called me . I am certain that my fellow Americans expect that on my induction into the Preside\n", + " and the hurricanes of disaster . In this we Americans were discovering no wholly new truth ; we we\n", + " and that freedom is an ebbing tide . But we Americans know that this is not true . Eight years ago\n", + "eat . We are not content to stand still . As Americans , we go forward , in the service of our coun\n", + "uguration be simple and its words brief . We Americans of today , together with our allies , are pa\n", + "in the discharge of this responsibility , we Americans know and we observe the difference between w\n", + "cked bargain of trading honor for security . 
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[u'AS', u'Abandonment', u'Abhorring', u'About', u'Above', u'Abraham', u'Abroad', u'Accept', u'Across', u'Act', u'Acting', u'Action', u'Actual', u'Adams', u'Additional', u'Address', u'Administered', u'Administration', u'Administrations', u'Advance']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# sorted set of words\n",
+    "print(sorted(set(text4))[100:120])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "None\n",
+      "['father', 'man', 'mother', 'woman']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Pay attention to the difference between these!\n",
+    "tokens=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+    "print tokens.sort() # Returns \"None\", but sorts the list in place\n",
+    "print sorted(tokens) # Returns the sorted list\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 81,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "('man', 102)\n",
+      "('woman', 3)\n",
+      "('father', 4)\n",
+      "('mother', 4)\n"
+     ]
+    }
+   ],
+   "source": [
+    "words=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+    "for w in words:\n",
+    "    print(w, text4.count(w))"
+   ]
+  },
"execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + " # Stop here:=====" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "****************************************************************************************************\n", + "[(u'institutions', 76), (u'come', 75), (u'party', 75), (u'better', 75), (u'always', 74), (u'today', 74), (u'office', 73), (u'still', 73), (u'need', 73), (u'others', 73), (u'strength', 72), (u'Let', 72), (u'nor', 72), (u'itself', 72), (u'means', 70), (u'believe', 70), (u'themselves', 70), (u'place', 70), (u'land', 69), (u'could', 69), (u'then', 69), (u'.\"', 69), (u'home', 69), (u'equal', 69), (u'together', 68), (u'might', 68), (u'things', 67), (u'secure', 67), (u'Nation', 67), (u'whose', 66), (u'find', 66), (u'given', 66), (u'prosperity', 66), (u'Americans', 66), (u'old', 65), (u'am', 65), (u'full', 65), (u'give', 65), (u'here', 64), (u'Federal', 64), (u'action', 64), (u'order', 64), (u'yet', 64), (u'proper', 64), (u'found', 63), (u'up', 63), (u'important', 63), (u'responsibility', 63), (u'take', 62), (u'where', 62), (u'being', 62), (u'change', 62), (u'Executive', 62), (u'even', 62), (u'subject', 62), (u'administration', 61), (u'revenue', 61), (u'State', 61), (u'see', 60), (u'security', 60), (u'ought', 60), (u'trust', 60), (u'These', 60), (u'A', 59), (u'self', 59), (u'true', 59), (u'business', 59), (u'seek', 59), (u'character', 59), (u'honor', 59), (u'question', 59), (u'called', 59), (u'respect', 59), (u'commerce', 58), (u'cause', 58), (u'toward', 58), (u'principle', 58), (u'again', 58), (u'century', 58), (u'influence', 57), (u'become', 56), (u'protection', 56), (u'done', 56), (u'stand', 56), (u'course', 55), (u'another', 55), (u'very', 55), (u'help', 55), (u'like', 55), (u'citizen', 54), (u'authority', 54), (u'also', 53), (u'Republic', 53), (u'live', 53), (u'civil', 53), (u'past', 52), (u'sense', 52), (u'constitutional', 52), (u'meet', 52), (u'democracy', 52)]\n", + "****************************************************************************************************\n", + "14\n", + "312\n", + "****************************************************************************************************\n", + "[u'than', u'country', u'.', u'has', u'people', u'for', u'citizens', u'time', u'so', u'nation']\n" + ] + } + ], + "source": [ + "# Frequency distribution\n", + "freq_dist = FreqDist(text4) \n", + "print(\"*\"*100)\n", + "print(freq_dist.most_common(1000))[200:300]\n", + "print(\"*\"*100)\n", + "print(freq_dist[\"European\"])\n", + "print(freq_dist[\"world\"])\n", + "print(\"*\"*100)\n", + "#------------------------------------------\n", + "# Vocabulary\n", + "V=set(text4)\n", + "words=[w for w in V if freq_dist[w] > 200][:10]\n", + "print(words)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "United States; fellow citizens; four years; years ago; Federal\n", + "Government; General Government; American people; Vice President; Old\n", + "World; Almighty God; Fellow citizens; Chief Magistrate; Chief Justice;\n", + "God bless; every citizen; Indian tribes; public debt; one another;\n", + "foreign nations; political parties\n", + "None\n" + ] + } + ], + "source": [ + "# Collocations\n", + "print(text4.collocations())" + ] + }, + { + "cell_type": "code", + 
"execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'zodiac', u'zodiac', u'zogranda', u'zone', u'zoned', u'zoned', u'zones', u'zoology', u'zoology', u'zoroaster']\n", + "[u'zephyr', u'zeuglodon', u'zig', u'zodiac', u'zogranda', u'zone', u'zoned', u'zones', u'zoology', u'zoroaster']\n" + ] + } + ], + "source": [ + "# Could you tell the difference?\n", + "print(sorted(w.lower() for w in set(text1))[-10:])\n", + "print(sorted(set(w.lower() for w in text1))[-10:])" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "Hamlet: Entire Play\n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "The Tragedy of Hamlet, Prince of Denmark\n", + "\n", + "Shakespeare homepage \n", + " | Hamlet \n", + " | Entire play\n", + "\n", + "ACT I\n", + "SCENE I. Elsinore. A platform before the castle.\n", + "\n", + "FRANCISCO at his post. Enter to him BERNARDO\n", + "\n", + "BERNARDO\n", + "\n", + "Who's there?\n", + "\n", + "FRANCISCO\n", + "\n", + "Nay, answer me: stand, and unfold you\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'nltk' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;31m#print(\"*\"*100)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;31m#------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m \u001b[0mtokens\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnltk\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mword_tokenize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mraw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0;31m#print(tokens[100:200])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'nltk' is not defined" + ] + } + ], + "source": [ + "from urllib import urlopen\n", + "from bs4 import BeautifulSoup\n", + "url=\"http://shakespeare.mit.edu/hamlet/full.html\"\n", + "page = urlopen(url)\n", + "soup = BeautifulSoup(page.read())\n", + "#print type(soup)\n", + "#html = urlopen(url).read() \n", + "raw = BeautifulSoup.get_text(soup) \n", + "\n", + "print(raw[:300])\n", + "#print(\"*\"*100)\n", + "#------------------------------------------\n", + "tokens=nltk.word_tokenize(raw)\n", + "#print(tokens[100:200])\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['I', 'am', 'happy']\n" + ] + } + ], + "source": [ + "import nltk\n", + "raw=\"I am happy\"\n", + "tokens=nltk.word_tokenize(raw)\n", + "print tokens" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + 
"cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# More on Files" + ] + }, + { + "cell_type": "code", + "execution_count": 230, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at']\n", + "**************************************************\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at', u'no', u'cost', u'and', u'with', u'almost', u'no', u'restrictions', u'whatsoever', u'.', u'You', u'may', u'copy', u'it', u',', u'give', u'it', u'away', u'or', u're-use', u'it', u'under', u'the', u'terms', u'of', u'the', u'Project', u'Gutenberg', u'License', u'included', u'with']\n", + "Project Gutenberg-tm; _1st Clo._; Project Gutenberg; _Crosses to_;\n", + "Literary Archive; Gutenberg-tm electronic; Archive Foundation;\n", + "electronic works; Gutenberg Literary; United States; _2nd Clo._;\n", + "ROSENCRANTZ _and_; public domain; _and_ GUILDENSTERN; Dr. 
Johnson;\n", + "_1st Play._; electronic work; _and_ Attendants; the_ KING; set forth\n", + "None\n" + ] + } + ], + "source": [ + "import codecs\n", + "from nltk import word_tokenize, Text\n", + "text_string=codecs.open(\"hamlet.txt\", \"r\", \"utf-8\").read() # Opens for reading and gets you the file content as a list\n", + "tokens = word_tokenize(text_string)\n", + "print(type(tokens))\n", + "print(tokens[:20])\n", + "text = Text(tokens)\n", + "print(\"*\"*50)\n", + "print(text[:50])\n", + "print(text.collocations())\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 90, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'[', u'The', u'Tragedie', u'of', u'Hamlet', u'by', u'William', u'Shakespeare', u'1599', u']']\n", + "[u'Actus', u'Primus', u'.']\n", + "[u'Scoena', u'Prima', u'.']\n", + "[u'Enter', u'Barnardo', u'and', u'Francisco', u'two', u'Centinels', u'.']\n", + "[u'Barnardo', u'.']\n" + ] + } + ], + "source": [ + "#sentence splitting\n", + "from nltk.corpus import gutenberg\n", + "hamlet_sent=gutenberg.sents('shakespeare-hamlet.txt')\n", + "for sent in hamlet_sent[:5]:\n", + " print(sent)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'1789-Washington.txt', u'1793-Washington.txt', u'1797-Adams.txt', u'1801-Jefferson.txt', u'1805-Jefferson.txt']\n", + "**************************************************\n", + "[u'1789', u'1793', u'1797', u'1801', u'1805', u'1809', u'1813', u'1817', u'1821', u'1825', u'1829', u'1833', u'1837', u'1841', u'1845', u'1849', u'1853', u'1857', u'1861', u'1865', u'1869', u'1873', u'1877', u'1881', u'1885', u'1889', u'1893', u'1897', u'1901', u'1905', u'1909', u'1913', u'1917', u'1921', u'1925', u'1929', u'1933', u'1937', u'1941', u'1945', u'1949', u'1953', u'1957', u'1961', u'1965', u'1969', u'1973', u'1977', u'1981', u'1985', u'1989', u'1993', u'1997', u'2001', u'2005', u'2009']\n" + ] + } + ], + "source": [ + "from nltk.corpus import inaugural\n", + "print(inaugural.fileids()[:5])\n", + "print(\"*\"*50)\n", + "#print([fileid[:4] for fileid in inaugural.fileids()])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Generate text" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 122, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "living creature that he said , and the land of the land of the land of the land of the land of the land of the land of the land None\n" + ] + } + ], + "source": [ + "# From the NLTK book: http://www.nltk.org/book/ch02.html\n", + "def generate_model(cfdist, word, num=15):\n", + " for i in range(num):\n", + " print(word),\n", + " word = cfdist[word].max()\n", + "\n", + "\n", + "text = nltk.corpus.genesis.words('english-kjv.txt')\n", + "bigrams = nltk.bigrams(text)\n", + "cfd = 
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Generate text"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 122,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "living creature that he said , and the land of the land of the land of the land of the land of the land of the land of the land None\n"
+     ]
+    }
+   ],
+   "source": [
+    "# From the NLTK book: http://www.nltk.org/book/ch02.html\n",
+    "def generate_model(cfdist, word, num=15):\n",
+    "    for i in range(num):\n",
+    "        print(word),\n",
+    "        word = cfdist[word].max()\n",
+    "\n",
+    "\n",
+    "text = nltk.corpus.genesis.words('english-kjv.txt')\n",
+    "bigrams = nltk.bigrams(text)\n",
+    "cfd = nltk.ConditionalFreqDist(bigrams)\n",
+    "print(generate_model(cfd, 'living', num=30))\n",
+    "#print(cfd[\"living\"].max())"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# WordNet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 133,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[Synset('nice.n.01'), Synset('nice.a.01'), Synset('decent.s.01'), Synset('nice.s.03'), Synset('dainty.s.04'), Synset('courteous.s.01')]\n",
+      "**************************************************\n",
+      "done with delicacy and skill\n",
+      "[u'nice', u'skillful']\n",
+      "**************************************************\n",
+      "exhibiting courtesy and politeness\n",
+      "[u'courteous', u'gracious', u'nice']\n",
+      "**************************************************\n",
+      "excessively fastidious and easily disgusted\n",
+      "[u'dainty', u'nice', u'overnice', u'prissy', u'squeamish']\n"
+     ]
+    }
+   ],
+   "source": [
+    "from nltk.corpus import wordnet as wn\n",
+    "print(wn.synsets('nice'))\n",
+    "print(\"*\"*50)\n",
+    "print(wn.synset('nice.s.03').definition())\n",
+    "print(wn.synset('nice.s.03').lemma_names())\n",
+    "print(\"*\"*50)\n",
+    "print(wn.synset('courteous.s.01').definition())\n",
+    "print(wn.synset('courteous.s.01').lemma_names())\n",
+    "print(\"*\"*50)\n",
+    "print(wn.synset('dainty.s.04').definition())\n",
+    "print(wn.synset('dainty.s.04').lemma_names())\n",
+    "print(\"*\"*50)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 138,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "**************************************************\n",
+      "the act of drilling\n",
+      "[u'drilling', u'boring']\n",
+      "**************************************************\n",
+      "the act of drilling a hole in the earth in the hope of producing petroleum\n",
+      "[u'boring', u'drilling', u'oil_production']\n",
+      "**************************************************\n",
+      "cause to be bored\n",
+      "[u'bore', u'tire']\n",
+      "**************************************************\n",
+      "make a hole, especially with a pointed power or hand tool\n",
+      "[u'bore', u'drill']\n",
+      "**************************************************\n",
+      "so lacking in interest as to cause mental weariness\n",
+      "[u'boring', u'deadening', u'dull', u'ho-hum', u'irksome', u'slow', u'tedious', u'tiresome', u'wearisome']\n"
+     ]
+    }
+   ],
+   "source": [
+    "for synset in wn.synsets('boring'):\n",
+    "    print(\"*\"*50)\n",
+    "    print(synset.definition())\n",
+    "    print(synset.lemma_names())\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 145,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[Lemma('drilling.n.01.boring'), Lemma('boring.n.02.boring'), Lemma('boring.s.01.boring')]\n",
+      "[Lemma('fantastic.s.02.wonderful')]\n",
+      "[Lemma('dazzling.s.01.dazzling'), Lemma('blazing.s.01.dazzling')]\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Also\n",
+    "print(wn.lemmas('boring'))\n",
+    "print(wn.lemmas('wonderful'))\n",
+    "print(wn.lemmas('dazzling'))"
+   ]
+  },
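+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Hedged aside: WordNet can also score how related two senses are.\n",
+    "# path_similarity gives a score between 0 and 1 (1 means the same sense):\n",
+    "dog = wn.synset('dog.n.01')\n",
+    "cat = wn.synset('cat.n.01')\n",
+    "print(dog.path_similarity(cat))   # dogs and cats sit fairly close in the hierarchy"
+   ]
+  },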
+  {
+   "cell_type": "code",
+   "execution_count": 153,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[Lemma('dish.n.01.dish'), Lemma('dish.n.02.dish'), Lemma('dish.n.03.dish'), Lemma('smasher.n.02.dish'), Lemma('dish.n.05.dish'), Lemma('cup_of_tea.n.01.dish'), Lemma('serve.v.06.dish'), Lemma('dish.v.02.dish')]\n",
+      "= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = \n",
+      "**************************************************\n",
+      "a piece of dishware normally used as a container for holding or serving food\n",
+      "[u'dish']\n",
+      "**************************************************\n",
+      "a particular item of prepared food\n",
+      "[u'dish']\n",
+      "**************************************************\n",
+      "the quantity that a dish will hold\n",
+      "[u'dish', u'dishful']\n",
+      "**************************************************\n",
+      "a very attractive or seductive looking woman\n",
+      "[u'smasher', u'stunner', u'knockout', u'beauty', u'ravisher', u'sweetheart', u'peach', u'lulu', u'looker', u'mantrap', u'dish']\n",
+      "**************************************************\n",
+      "directional antenna consisting of a parabolic reflector for microwave or radio frequency radiation\n",
+      "[u'dish', u'dish_aerial', u'dish_antenna', u'saucer']\n",
+      "**************************************************\n",
+      "an activity that you like or at which you are superior\n",
+      "[u'cup_of_tea', u'bag', u'dish']\n",
+      "**************************************************\n",
+      "provide (usually but not necessarily food)\n",
+      "[u'serve', u'serve_up', u'dish_out', u'dish_up', u'dish']\n",
+      "**************************************************\n",
+      "make concave; shape like a dish\n",
+      "[u'dish']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# dish!\n",
+    "print(wn.lemmas('dish'))\n",
+    "print(\"= \"*50)\n",
+    "for synset in wn.synsets('dish'):\n",
+    "    print(\"*\"*50)\n",
+    "    print(synset.definition())\n",
+    "    print(synset.lemma_names())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 198,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Happy.a.01: enjoying or showing or marked by joy or pleasure\n",
+      "Felicitous.s.02: marked by good fortune\n",
+      "Glad.s.02: eagerly disposed to act or to be of service\n",
+      "Happy.s.04: well expressed and to the point\n",
+      "Gladiolus.n.01: any of numerous plants of the genus Gladiolus native chiefly to tropical and South Africa having sword-shaped leaves and one-sided spikes of brightly colored funnel-shaped flowers; widely cultivated\n",
+      "Glad.a.01: showing or causing joy and pleasure; especially made happy\n",
+      "Glad.s.02: eagerly disposed to act or to be of service\n",
+      "Glad.s.03: feeling happy appreciation\n",
+      "Beaming.s.01: cheerful and bright\n",
+      "Joyful.a.01: full of or producing joy\n",
+      "Elated.s.02: full of high-spirited delight\n",
+      "Joyous.a.01: full of or characterized by joy\n"
+     ]
+    }
+   ],
+   "source": [
+    "def get_definitions(word):\n",
+    "    for synset in wn.synsets(word):\n",
+    "        try:\n",
+    "            print synset.name().capitalize() + ':', synset.definition() # capitalizing to give the feel of a dict entry\n",
+    "        except:\n",
+    "            continue\n",
+    "    \n",
+    "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n",
+    "for w in happy_words:\n",
+    "    get_definitions(w)\n",
+    "\n",
+    "#for synset in wn.synsets('mint', wn.NOUN):\n",
+    "#... 
print(synset.name() + ':', synset.definition())\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 157,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[u'happy']\n",
+      "[u'felicitous', u'happy']\n",
+      "[u'glad', u'happy']\n",
+      "[u'happy', u'well-chosen']\n",
+      "[u'gladiolus', u'gladiola', u'glad', u'sword_lily']\n",
+      "[u'glad']\n",
+      "[u'glad', u'happy']\n",
+      "[u'glad']\n",
+      "[u'beaming', u'glad']\n",
+      "[u'joyful']\n",
+      "[u'elated', u'gleeful', u'joyful', u'jubilant']\n",
+      "[u'joyous']\n"
+     ]
+    }
+   ],
+   "source": [
+    "def get_lemma_names(word):\n",
+    "    for synset in wn.synsets(word):\n",
+    "        try:\n",
+    "            print(synset.lemma_names())\n",
+    "        except:\n",
+    "            continue\n",
+    "    \n",
+    "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n",
+    "for w in happy_words:\n",
+    "    get_lemma_names(w)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 185,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "set([u'felicitous', u'well-chosen', u'glad', u'happy'])\n",
+      "set([u'gladiolus', u'beaming', u'sword_lily', u'gladiola', u'glad', u'happy'])\n",
+      "set([u'elated', u'jubilant', u'joyful', u'gleeful'])\n",
+      "set([u'joyous'])\n",
+      "set([])\n",
+      "**************************************************\n",
+      "\n",
+      "Here's a single unique list/set:\n",
+      "\n",
+      "set([u'elated', u'gladiolus', u'beaming', u'joyous', u'sword_lily', u'well-chosen', u'felicitous', u'jubilant', u'gleeful', u'gladiola', u'joyful', u'glad', u'happy'])\n"
+     ]
+    }
+   ],
+   "source": [
+    "def get_unique_lemma_names(word):\n",
+    "    l=[]\n",
+    "    for synset in wn.synsets(word):\n",
+    "        try:\n",
+    "            l.extend(synset.lemma_names())\n",
+    "        except:\n",
+    "            continue\n",
+    "    l=set(l)\n",
+    "    return l\n",
+    "\n",
+    "# Note: \"exhuberant\" is a misspelling of \"exuberant\", which is why it yields the empty set([]) above.\n",
+    "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n",
+    "for w in happy_words:\n",
+    "    l=get_unique_lemma_names(w)\n",
+    "    print(l)\n",
+    "\n",
+    "# To get a set\n",
+    "print(\"*\"*50)\n",
+    "print(\"\\nHere's a single unique list/set:\\n\")\n",
+    "uniq_list=[]\n",
+    "for w in happy_words:\n",
+    "    l=get_unique_lemma_names(w)\n",
+    "    uniq_list.extend(l)\n",
+    "print(set(uniq_list))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 190,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Synset('ambulance.n.01')\n",
+      "**************************************************\n",
+      "[u'Model_T', u'S.U.V.', u'SUV', u'Stanley_Steamer', u'ambulance', u'beach_waggon', u'beach_wagon', u'bus', u'cab', u'compact', u'compact_car', u'convertible', u'coupe', u'cruiser', u'electric', u'electric_automobile', u'electric_car', u'estate_car', u'gas_guzzler', u'hack', u'hardtop', u'hatchback', u'heap', u'horseless_carriage', u'hot-rod', u'hot_rod', u'jalopy', u'jeep', u'landrover', u'limo', u'limousine', u'loaner', u'minicar', u'minivan', u'pace_car', 
u'patrol_car', u'phaeton', u'police_car', u'police_cruiser', u'prowl_car', u'race_car', u'racer', u'racing_car', u'roadster', u'runabout', u'saloon', u'secondhand_car', u'sedan', u'sport_car', u'sport_utility', u'sport_utility_vehicle', u'sports_car', u'squad_car', u'station_waggon', u'station_wagon', u'stock_car', u'subcompact', u'subcompact_car', u'taxi', u'taxicab', u'tourer', u'touring_car', u'two-seater', u'used-car', u'waggon', u'wagon']\n",
+      "**************************************************\n",
+      "ambulance beach_wagon station_wagon wagon estate_car beach_waggon station_waggon waggon bus jalopy heap cab hack taxi taxicab compact compact_car convertible coupe cruiser police_cruiser patrol_car police_car prowl_car squad_car electric electric_automobile electric_car gas_guzzler hardtop hatchback horseless_carriage hot_rod hot-rod jeep landrover limousine limo loaner minicar minivan Model_T pace_car racer race_car racing_car roadster runabout two-seater sedan saloon sport_utility sport_utility_vehicle S.U.V. SUV sports_car sport_car Stanley_Steamer stock_car subcompact subcompact_car touring_car phaeton tourer used-car secondhand_car\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Nice example from the book (http://www.nltk.org/book/ch02.html)\n",
+    "motorcar = wn.synset('car.n.01')\n",
+    "types_of_motorcar = motorcar.hyponyms()\n",
+    "print(types_of_motorcar[0]) # prints: Synset('ambulance.n.01')\n",
+    "print(\"*\"*50)\n",
+    "print(sorted(lemma.name() for synset in types_of_motorcar for lemma in synset.lemmas()))\n",
+    "print(\"*\"*50)\n",
+    "\n",
+    "# Remember, the generator expression above can be broken down as follows (with no sorting):\n",
+    "for synset in types_of_motorcar:\n",
+    "    for lemma in synset.lemmas():\n",
+    "        print(lemma.name()),"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 193,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[Synset('motor_vehicle.n.01')]\n",
+      "2\n",
+      "\n",
+      "Path 1\n",
+      "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'container.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n",
+      "\n",
+      "Path 2\n",
+      "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'conveyance.n.03', u'vehicle.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Another useful example, this time on hypernyms:\n",
+    "motorcar = wn.synset('car.n.01')\n",
+    "print(motorcar.hypernyms()) # prints: [Synset('motor_vehicle.n.01')]\n",
+    "\n",
+    "paths = motorcar.hypernym_paths()\n",
+    "print(len(paths)) # prints 2 as there are two paths, as the book states, between car.n.01 and entity.n.01\n",
+    "                  # because wheeled_vehicle.n.01 can be classified as both a vehicle and a container.\n",
+    "                  # Take a look at the output below\n",
+    "\n",
+    "print(\"\\nPath 1 between car.n.01 and entity.n.01\")\n",
+    "print([synset.name() for synset in paths[0]])\n",
+    "print(\"\\nPath 2 between car.n.01 and entity.n.01\")\n",
+    "print([synset.name() for synset in paths[1]])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Try the graphical WordNet browser from your command line:\n",
+    "nltk.app.wordnet()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": 
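{
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Added aside (a hedged sketch, not part of the original notebook run):\n",
+    "# the hypernym hierarchy above also gives us simple similarity scores.\n",
+    "# path_similarity scores a pair of synsets in (0, 1] from the shortest\n",
+    "# path connecting them, so two vehicles should score higher than a\n",
+    "# vehicle/plant pair.\n",
+    "from nltk.corpus import wordnet as wn\n",
+    "car = wn.synset('car.n.01')\n",
+    "truck = wn.synset('truck.n.01')\n",
+    "tree = wn.synset('tree.n.01')\n",
+    "print(car.path_similarity(truck))  # related vehicles -> relatively high\n",
+    "print(car.path_similarity(tree))   # unrelated concepts -> much lower"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": 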
{ + "collapsed": true + }, + "outputs": [], + "source": [ + "# Similarity" + ] + }, + { + "cell_type": "code", + "execution_count": 227, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Synset('lilac.n.01'), Synset('lavender.s.01')]\n", + "[Synset('tulip.n.01')]\n", + "[Synset('flower.n.01'), Synset('flower.n.02'), Synset('flower.n.03'), Synset('bloom.v.01')]\n", + "[Synset('tree.n.01'), Synset('tree.n.02'), Synset('tree.n.03'), Synset('corner.v.02'), Synset('tree.v.02'), Synset('tree.v.03'), Synset('tree.v.04')]\n", + "[Synset('daffodil.n.01')]\n", + "**************************************************\n", + "[Synset('flower.n.01')]\n", + "[Synset('orchid.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n" + ] + } + ], + "source": [ + "from nltk.corpus import wordnet as wn\n", + "print(wn.synsets('lilac'))\n", + "print(wn.synsets('tulip'))\n", + "print(wn.synsets('flower'))\n", + "print(wn.synsets('tree'))\n", + "print(wn.synsets('daffodil'))\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "african = wn.synset('african_daisy.n.01')\n", + "orchid = wn.synset('orchid.n.01')\n", + "scarlet = wn.synset('scarlet_musk_flower.n.01')\n", + "aster = wn.synset('white-topped_aster.n.01')\n", + "tree = wn.synset('tree.n.01')\n", + "daffodil = wn.synset('daffodil.n.01')\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "print(african.lowest_common_hypernyms(orchid))\n", + "print(orchid.lowest_common_hypernyms(orchid))\n", + "print(scarlet.lowest_common_hypernyms(tree))\n", + "print(aster.lowest_common_hypernyms(daffodil))\n", + "#print(wn.synset('flower.n.01').hypernyms())\n", + "#print(wn.synset('flower.n.01').hyponyms())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 
null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
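"collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Added sketch (not in the original notebook): synonyms live on synsets,\n",
+    "# but antonym links in WordNet live on lemmas, which complements the\n",
+    "# lemma_names() lookups above.\n",
+    "from nltk.corpus import wordnet as wn\n",
+    "good = wn.lemma('good.a.01.good')\n",
+    "print(good.antonyms())  # expect something like [Lemma('bad.a.01.bad')]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {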
"collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "hi\n" + ] + } + ], + "source": [ + "print \"hi\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "\n", + "import matplotlib\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from nltk.draw.dispersion import dispersion_plot\n", + "print(text4.dispersion_plot([\"citizens\", \"democracy\", \"freedom\", \"duties\", \"America\"]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", + "\n", + "x = np.linspace(0, 3*np.pi, 500)\n", + "plt.plot(x, np.sin(x**2))\n", + "plt.title('A simple 
chirp');\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "#!/usr/bin/python\n", + "%matplotlib inline\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "#################################\n", + "\n", + "###################\n", + "# Classification confidence per class:\n", + "#--------------------------------------\n", + "Account=[0.50597769529107606, 0.61137631750300769, 0.67732439371970943, 0.74335210285266851, 0.78045128083006687, 0.85889268848391032, 0.86004645511688793, 0.86034494338992484, 0.86110627662083916, 0.86385229563374299, 0.8652039704758846, 0.86629669936492792, 0.86714993874468849, 0.86951104057901096, 0.87107760241792387, 0.87433969633042807, 0.87731372225510584, 0.87786812374991918, 0.87810590629205709, 0.87892633430246814, 0.87915317999299925, 0.88066216034760669, 0.88408934521227001, 0.88469154406025774, 0.88796777230531254, 0.89044129384145887, 0.90267024623065706, 0.92524363229460704]\n", + "Alert=[0.47637448264043625, 0.66454777829823419, 0.83611243185409068, 0.84343541279259093, 0.84407692188080419, 0.84691874602073758, 0.84846838467167951, 0.84885499324244018, 0.84892085388116045, 0.85662976488218612, 0.85748220725433, 0.85766843771151779, 0.86282314473652266, 0.88306412005817692, 0.90953193806965249]\n", + "EventUpdate=[0.60620179952905806, 0.81414035159064524, 0.83977384250581455, 0.84197728312163245, 0.84665030060369317, 0.86063170708443371, 0.86556282358318515, 0.86670240801689957, 0.86730292811655896, 0.87129628595308339, 0.87179895532552454, 0.87237623417151511]\n", + "purchasesAndPayments= [0.70209823840705454, 0.87763882598773824, 0.87853436781994654, 0.88111290254442187, 0.88188179857430626, 0.88427555688939874, 0.88575543579520666, 0.89021547578541182, 0.89111298147392382, 0.89325185353073655, 0.89699979308063715, 0.89711891518041997, 0.90022327360644228, 0.90038922055635651, 0.90352512667392848, 0.91167136191589293, 0.91258912510512546, 0.91292772595692206, 0.91385158396318156, 0.91514777255816948, 0.91598257367845204, 0.91620376194631981, 0.91624196881789599, 0.91629208488490144, 0.91682304964064509, 0.91685424758210343, 0.91773130126105062, 0.91859239740198162, 0.91885115720464194, 0.9189813690872739, 0.91942897543635649, 0.92005261642510894, 0.92031278031339603, 0.9206131773956312, 0.92162770964147711, 0.92333172011600717, 0.92368736632287529, 0.92445312588899153, 0.92487782965565768, 0.92585645404696637, 0.92736029109282003, 0.92745082086828889, 0.92853691341173528, 0.92974148319851113, 0.93038737495076018, 0.93268166532934882, 0.93485981648392313, 0.93560689873538494, 0.93792467408291125, 0.93990750368079101, 0.94138345097080245, 0.94171127507598984, 0.94257827570703179, 0.94278726547269032, 0.94591753032286208, 0.94684854929169837, 0.94778523908566703, 0.94938466710678737, 0.9501448612157134, 0.95149247851897123, 0.95239002506270776, 0.95375471665360612, 0.95562783008800667, 0.95780240099868053, 0.9578653435017056, 0.95787837810691678, 0.95826883828494502, 0.96152066593986663, 0.96162681902834768, 0.9642282884732325, 0.96529873521893783, 0.96552974107677436, 0.96639428810283956, 0.96678961265384455, 0.96687982068407863, 0.96873869536512358, 0.96880029214978958, 0.96947401140396405, 0.97052647680045956, 0.97057034041651968]\n", + "TrackedInfo=[0.57856804762622893, 0.84336621528774514, 0.84726630715050089, 0.84865439463077963, 0.85259413447559007, 0.85367437726360995, 0.85572529471292957, 0.85595201321671999, 0.86019342820522748, 
0.86280650298103134, 0.87695710743248556, 0.88755323066381953, 0.89051154523294096, 0.89159321691228055]\n", + "Travel=[0.83341902931726042, 0.84109694886055264, 0.84915529812154522, 0.84965845310243482, 0.85351817819423192, 0.86268823869067024, 0.87021845881088733]\n", + "\n", + "def createHistogram(x, cat=\"Purchases and Payments\", color='green'):\n", + " \"\"\"\n", + " Plots an individual histogram.\n", + " \"\"\"\n", + " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", + " plt.xlabel('Confidence')\n", + " #plt.ylabel('Frequency')\n", + " plt.title(cat)\n", + " plt.grid(True)\n", + " plt.show()\n", + "\n", + "def createHist(x, cat=\"Purchases and Payments\", color='green'):\n", + " \"\"\"\n", + " Used as a helper function for creating individual histograms \n", + " inside the subplotter\n", + " \"\"\"\n", + " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", + " plt.xlabel('Confidence')\n", + " plt.title(cat)\n", + " plt.grid(True)\n", + " \n", + "def subplotter():\n", + " \"\"\"\n", + " Subplots several histograms...\n", + " \"\"\"\n", + " plt.figure(figsize=(15,15))\n", + " plt.subplot(3,3 , 1 )\n", + " createHist(Account, cat=\"Account\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 2 )\n", + " createHist(Alert, cat=\"Alert\", color='magenta')\n", + " plt.subplot( 3,3, 3 )\n", + " createHist(EventUpdate, cat=\"Event Update\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 4 )\n", + " createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=np.random.rand(3)) \n", + " plt.subplot( 3,3, 5 )\n", + " createHist(TrackedInfo, cat=\"Tracked Info\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 6 )\n", + " createHist(Travel, cat=\"Travel\", color='y')\n", + " plt.show()\n", + "\n", + "def main():\n", + " subplotter()\n", + " # createHist(Account, cat=\"Account\")\n", + " # createHist(Alert, cat=\"Alert\", color='magenta')\n", + " # createHist(EventUpdate, cat=\"EventUpdate\", color='blue')\n", + " # createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=\"red\")\n", + " # createHist(TrackedInfo, cat=\"TrackedInfo\", color='brown')\n", + " # createHist(Travel, cat=\"Travel\", color='y')\n", + " \n", + " \n", + "if __name__ == \"__main__\":\n", + " main()\n", + "\n", + "######################################\n", + "# This is useful: http://cs.smith.edu/dftwiki/index.php/MatPlotLib_Tutorial_1\n", + "#---------------------------------\n", + "# This is about color maps: http://matplotlib.org/examples/color/colormaps_reference.html\n", + "\n", + "# cmaps = [('Sequential', ['Blues', 'BuGn', 'BuPu',\n", + "# 'GnBu', 'Greens', 'Greys', 'Oranges', 'OrRd',\n", + "# 'PuBu', 'PuBuGn', 'PuRd', 'Purples', 'RdPu',\n", + "# 'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']),\n", + "# ('Sequential (2)', ['afmhot', 'autumn', 'bone', 'cool', 'copper',\n", + "# 'gist_heat', 'gray', 'hot', 'pink',\n", + "# 'spring', 'summer', 'winter']),\n", + "# ('Diverging', ['BrBG', 'bwr', 'coolwarm', 'PiYG', 'PRGn', 'PuOr',\n", + "# 'RdBu', 'RdGy', 'RdYlBu', 'RdYlGn', 'Spectral',\n", + "# 'seismic']),\n", + "# ('Qualitative', ['Accent', 'Dark2', 'Paired', 'Pastel1',\n", + "# 'Pastel2', 'Set1', 'Set2', 'Set3']),\n", + "# ('Miscellaneous', ['gist_earth', 'terrain', 'ocean', 'gist_stern',\n", + "# 'brg', 'CMRmap', 'cubehelix',\n", + "# 'gnuplot', 'gnuplot2', 'gist_ncar',\n", + "# 'nipy_spectral', 'jet', 'rainbow',\n", + "# 'gist_rainbow', 'hsv', 'flag', 'prism'])]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + 
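"collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Added aside (a hedged sketch, not from the original notebook):\n",
+    "# np.random.rand(3) above picks a random RGB color on every run, so the\n",
+    "# subplot colors change each time. Seeding NumPy's generator makes them\n",
+    "# reproducible, and plt.savefig() writes the figure to disk (call it\n",
+    "# before plt.show(), which clears the current figure).\n",
+    "%matplotlib inline\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "np.random.seed(42)  # any fixed seed gives the same colors every run\n",
+    "color = np.random.rand(3)\n",
+    "plt.hist([1, 2, 2, 3, 3, 3], bins=3, facecolor=color, alpha=0.75)\n",
+    "plt.savefig('confidence_hist.png')  # hypothetical output filename\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {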
"collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_2.ipynb b/python_tutorial_part_2.ipynb new file mode 100644 index 0000000..50a53f5 --- /dev/null +++ b/python_tutorial_part_2.ipynb @@ -0,0 +1,977 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# NLTK Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "*** Introductory Examples for the NLTK Book ***\n", + "Loading text1, ..., text9 
and sent1, ..., sent9\n",
+      "Type the name of the text or sentence to view it.\n",
+      "Type: 'texts()' or 'sents()' to list the materials.\n",
+      "text1: Moby Dick by Herman Melville 1851\n",
+      "text2: Sense and Sensibility by Jane Austen 1811\n",
+      "text3: The Book of Genesis\n",
+      "text4: Inaugural Address Corpus\n",
+      "text5: Chat Corpus\n",
+      "text6: Monty Python and the Holy Grail\n",
+      "text7: Wall Street Journal\n",
+      "text8: Personals Corpus\n",
+      "text9: The Man Who Was Thursday by G . K . Chesterton 1908\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Take a look at the preface here: http://www.nltk.org/book/ch00.html\n",
+    "# This tutorial is based on Python 2.7, but it shouldn't be an issue to write the same code for Python 3,\n",
+    "# as the differences are minimal as far as this tutorial is concerned\n",
+    "import nltk\n",
+    "from nltk.book import *"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 253,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Displaying 10 of 66 matches:\n",
+      "r occupations may come . The Negroes are now Americans . Their ancestors came here years ago agains\n",
+      "e it so or not . And yet we are not the less Americans on that account . We shall be the more Ameri\n",
+      "we find them now secure ; and there comes to Americans the profound assurance that our representati\n",
+      "have called me . I am certain that my fellow Americans expect that on my induction into the Preside\n",
+      " and the hurricanes of disaster . In this we Americans were discovering no wholly new truth ; we we\n",
+      " and that freedom is an ebbing tide . But we Americans know that this is not true . Eight years ago\n",
+      "eat . We are not content to stand still . As Americans , we go forward , in the service of our coun\n",
+      "uguration be simple and its words brief . We Americans of today , together with our allies , are pa\n",
+      "in the discharge of this responsibility , we Americans know and we observe the difference between w\n",
+      "cked bargain of trading honor for security . 
Americans , indeed all free men , remember that in the\n",
+      "None\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Let's look at the text4: Inaugural Address Corpus\n",
+    "# NLTK can show a word in context, called a concordance (with a given text window size)\n",
+    "# width: a parameter for the window size of surrounding character context\n",
+    "# lines: a parameter for the number of lines returned\n",
+    "print(text4.concordance(\"Americans\", width=100, lines=10))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "the free power opportunity fellow opinions colleges peace gangs\n",
+      "judgments consent noblest ideas colors fidelity unquestionable worship\n",
+      "discipline industrious just\n",
+      "None\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Other words that appear in a similar range of contexts as a given word\n",
+    "print(text4.similar(\"patriotic\"))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "every_citizen our_citizens\n",
+      "None\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Let's look at common contexts of two words:\n",
+    "print(text4.common_contexts([\"patriotic\", \"free\"])) "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.0669\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Lexical diversity shows the richness of a text's vocabulary:\n",
+    "from __future__ import division # in Python 3 you don't need to do the import\n",
+    "def lexical_diversity(text):\n",
+    "    return len(set(text))/len(text)\n",
+    "\n",
+    "lex_div=lexical_diversity(text4)\n",
+    "print(round(lex_div, 4))\n",
+    "\n",
+    "# What interesting uses can you find for \"lexical_diversity\"?\n",
+    "# Can you play with some texts, say from presidential candidates, and tell us what you find?"
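+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Added sketch (an aside following up on the question above; not part of\n",
+    "# the original notebook run): lexical_diversity is easy to compare across\n",
+    "# the NLTK book texts loaded earlier with from nltk.book import *.\n",
+    "print(round(lexical_diversity(text4), 4))  # inaugural addresses\n",
+    "print(round(lexical_diversity(text5), 4))  # chat corpus, expected to differ"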
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[u'AS', u'Abandonment', u'Abhorring', u'About', u'Above', u'Abraham', u'Abroad', u'Accept', u'Across', u'Act', u'Acting', u'Action', u'Actual', u'Adams', u'Additional', u'Address', u'Administered', u'Administration', u'Administrations', u'Advance']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# sorted set of words\n",
+    "print(sorted(set(text4))[100:120])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "None\n",
+      "['father', 'man', 'mother', 'woman']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Pay attention to the difference between these!\n",
+    "tokens=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+    "print tokens.sort() # Returns \"None\", but sorts the list in place\n",
+    "print sorted(tokens) # Returns the sorted list\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 81,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "('man', 102)\n",
+      "('woman', 3)\n",
+      "('father', 4)\n",
+      "('mother', 4)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Counting word frequencies:\n",
+    "words=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+    "for w in words:\n",
+    "    print(w, text4.count(w))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# We stopped here:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 61,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "****************************************************************************************************\n",
+      "[(u'institutions', 76), (u'come', 75), (u'party', 75), (u'better', 75), (u'always', 74), (u'today', 74), (u'office', 73), (u'still', 73), (u'need', 73), (u'others', 73), (u'strength', 72), (u'Let', 72), (u'nor', 72), (u'itself', 72), (u'means', 70), (u'believe', 70), (u'themselves', 70), (u'place', 70), (u'land', 69), (u'could', 69), (u'then', 69), (u'.\"', 69), (u'home', 69), (u'equal', 69), (u'together', 68), (u'might', 68), (u'things', 67), (u'secure', 67), (u'Nation', 67), (u'whose', 66), (u'find', 66), (u'given', 66), (u'prosperity', 66), (u'Americans', 66), (u'old', 65), (u'am', 65), (u'full', 65), (u'give', 65), (u'here', 64), (u'Federal', 64), (u'action', 64), (u'order', 64), (u'yet', 64), (u'proper', 64), (u'found', 63), (u'up', 63), (u'important', 63), (u'responsibility', 63), (u'take', 62), (u'where', 62), (u'being', 62), (u'change', 62), (u'Executive', 62), (u'even', 62), (u'subject', 62), (u'administration', 61), (u'revenue', 61), (u'State', 61), (u'see', 60), (u'security', 60), (u'ought', 60), (u'trust', 60), (u'These', 60), (u'A', 59), (u'self', 59), (u'true', 59), (u'business', 59), (u'seek', 59), (u'character', 59), (u'honor', 59), (u'question', 59), (u'called', 59), (u'respect', 59), (u'commerce', 58), (u'cause', 58), (u'toward', 58), (u'principle', 58), (u'again', 58), (u'century', 58), (u'influence', 57), (u'become', 56), (u'protection', 56), (u'done', 56), (u'stand', 56), (u'course', 55), (u'another', 55), (u'very', 55), (u'help', 55), (u'like', 55), (u'citizen', 54), (u'authority', 54), (u'also', 53), (u'Republic', 53), (u'live', 53), (u'civil', 53), (u'past', 52), (u'sense', 52), 
(u'constitutional', 52), (u'meet', 52), (u'democracy', 52)]\n", + "****************************************************************************************************\n", + "14\n", + "312\n", + "****************************************************************************************************\n", + "[u'than', u'country', u'.', u'has', u'people', u'for', u'citizens', u'time', u'so', u'nation']\n" + ] + } + ], + "source": [ + "# Frequency distribution\n", + "freq_dist = FreqDist(text4) \n", + "print(\"*\"*100)\n", + "print(freq_dist.most_common(1000))[200:300]\n", + "print(\"*\"*100)\n", + "print(freq_dist[\"European\"])\n", + "print(freq_dist[\"world\"])\n", + "print(\"*\"*100)\n", + "#------------------------------------------\n", + "# Vocabulary\n", + "V=set(text4)\n", + "words=[w for w in V if freq_dist[w] > 200][:10]\n", + "print(words)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "United States; fellow citizens; four years; years ago; Federal\n", + "Government; General Government; American people; Vice President; Old\n", + "World; Almighty God; Fellow citizens; Chief Magistrate; Chief Justice;\n", + "God bless; every citizen; Indian tribes; public debt; one another;\n", + "foreign nations; political parties\n", + "None\n" + ] + } + ], + "source": [ + "# Collocations\n", + "print(text4.collocations())" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'zodiac', u'zodiac', u'zogranda', u'zone', u'zoned', u'zoned', u'zones', u'zoology', u'zoology', u'zoroaster']\n", + "[u'zephyr', u'zeuglodon', u'zig', u'zodiac', u'zogranda', u'zone', u'zoned', u'zones', u'zoology', u'zoroaster']\n" + ] + } + ], + "source": [ + "# Could you tell the difference?\n", + "print(sorted(w.lower() for w in set(text1))[-10:])\n", + "print(sorted(set(w.lower() for w in text1))[-10:])" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "Hamlet: Entire Play\n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "The Tragedy of Hamlet, Prince of Denmark\n", + "\n", + "Shakespeare homepage \n", + " | Hamlet \n", + " | Entire play\n", + "\n", + "ACT I\n", + "SCENE I. Elsinore. A platform before the castle.\n", + "\n", + "FRANCISCO at his post. 
Enter to him BERNARDO\n", + "\n", + "BERNARDO\n", + "\n", + "Who's there?\n", + "\n", + "FRANCISCO\n", + "\n", + "Nay, answer me: stand, and unfold you\n" + ] + } + ], + "source": [ + "# Fetching and cleaning a webpage:\n", + "from urllib import urlopen\n", + "from bs4 import BeautifulSoup\n", + "url=\"http://shakespeare.mit.edu/hamlet/full.html\"\n", + "page = urlopen(url)\n", + "soup = BeautifulSoup(page.read()) \n", + "raw = soup.get_text() # the idiomatic way to strip the markup and keep the text\n", + "print(raw[:300])\n", + "tokens=nltk.word_tokenize(raw)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['I', 'am', 'happy']\n" + ] + } + ], + "source": [ + "# Word tokenization with NLTK:\n", + "import nltk\n", + "raw=\"I am happy\"\n", + "tokens=nltk.word_tokenize(raw)\n", + "print tokens" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# More on files, this time with NLTK:" + ] + }, + { + "cell_type": "code", + "execution_count": 230, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at']\n", + "**************************************************\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at', u'no', u'cost', u'and', u'with', u'almost', u'no', u'restrictions', u'whatsoever', u'.', u'You', u'may', u'copy', u'it', u',', u'give', u'it', u'away', u'or', u're-use', u'it', u'under', u'the', u'terms', u'of', u'the', u'Project', u'Gutenberg', u'License', u'included', u'with']\n", + "Project Gutenberg-tm; _1st Clo._; Project Gutenberg; _Crosses to_;\n", + "Literary Archive; Gutenberg-tm electronic; Archive Foundation;\n", + "electronic works; Gutenberg Literary; United States; _2nd Clo._;\n", + "ROSENCRANTZ _and_; public domain; _and_ GUILDENSTERN; Dr. Johnson;\n", + "_1st Play._; electronic work; _and_ Attendants; the_ KING; set forth\n", + "None\n" + ] + } + ], + "source": [ + "import codecs\n", + "from nltk import word_tokenize, Text\n", + "text_string=codecs.open(\"hamlet.txt\", \"r\", \"utf-8\").read() # Opens for reading and gets you the file content as a single string\n", + "tokens = word_tokenize(text_string)\n", + "print(type(tokens))\n", + "print(tokens[:20])\n", + "text = Text(tokens)\n", + "print(\"*\"*50)\n", + "print(text[:50])\n", + "print(text.collocations())\n" + ] + },
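 + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# A quick sketch (left unexecuted): once the tokens are wrapped in an NLTK \"Text\",\n", + "# the same exploration functions we used on text4 above work on our own file too.\n", + "# (\"text\" here is the Text object built from hamlet.txt in the previous cell.)\n", + "text.concordance(\"Denmark\", width=80, lines=5)\n", + "text.similar(\"king\")" + ] + },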
+ { + "cell_type": "code", + "execution_count": 90, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'[', u'The', u'Tragedie', u'of', u'Hamlet', u'by', u'William', u'Shakespeare', u'1599', u']']\n", + "[u'Actus', u'Primus', u'.']\n", + "[u'Scoena', u'Prima', u'.']\n", + "[u'Enter', u'Barnardo', u'and', u'Francisco', u'two', u'Centinels', u'.']\n", + "[u'Barnardo', u'.']\n" + ] + } + ], + "source": [ + "# Sentence splitting\n", + "from nltk.corpus import gutenberg\n", + "# This will return each sentence as a list of words\n", + "hamlet_sent=gutenberg.sents('shakespeare-hamlet.txt')\n", + "for sent in hamlet_sent[:5]:\n", + " print(sent)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'1789-Washington.txt', u'1793-Washington.txt', u'1797-Adams.txt', u'1801-Jefferson.txt', u'1805-Jefferson.txt']\n", + "**************************************************\n", + "[u'1789', u'1793', u'1797', u'1801', u'1805', u'1809', u'1813', u'1817', u'1821', u'1825', u'1829', u'1833', u'1837', u'1841', u'1845', u'1849', u'1853', u'1857', u'1861', u'1865', u'1869', u'1873', u'1877', u'1881', u'1885', u'1889', u'1893', u'1897', u'1901', u'1905', u'1909', u'1913', u'1917', u'1921', u'1925', u'1929', u'1933', u'1937', u'1941', u'1945', u'1949', u'1953', u'1957', u'1961', u'1965', u'1969', u'1973', u'1977', u'1981', u'1985', u'1989', u'1993', u'1997', u'2001', u'2005', u'2009']\n" + ] + } + ], + "source": [ + "# NLTK fileids:\n", + "from nltk.corpus import inaugural\n", + "print(inaugural.fileids()[:5])\n", + "print(\"*\"*50)\n", + "#print([fileid[:4] for fileid in inaugural.fileids()])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Generate text" + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "living creature that he said , and the land of the land of the land of the land of the land of the land of the land of the land None\n" + ] + } + ], + "source": [ + "# A function from the NLTK book: http://www.nltk.org/book/ch02.html\n", + "def generate_model(cfdist, word, num=15):\n", + " for i in range(num):\n", + " print(word),\n", + " word = cfdist[word].max()\n", + "\n", + "text = nltk.corpus.genesis.words('english-kjv.txt')\n", + "bigrams = nltk.bigrams(text)\n", + "cfd = nltk.ConditionalFreqDist(bigrams)\n", + "print(generate_model(cfd, 'living', num=30))\n", + "#print(cfd[\"living\"].max())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# WordNet" + ] + }, + { + "cell_type": "code", + "execution_count": 133, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Synset('nice.n.01'), Synset('nice.a.01'), 
Synset('decent.s.01'), Synset('nice.s.03'), Synset('dainty.s.04'), Synset('courteous.s.01')]\n", + "**************************************************\n", + "done with delicacy and skill\n", + "[u'nice', u'skillful']\n", + "**************************************************\n", + "exhibiting courtesy and politeness\n", + "[u'courteous', u'gracious', u'nice']\n", + "**************************************************\n", + "excessively fastidious and easily disgusted\n", + "[u'dainty', u'nice', u'overnice', u'prissy', u'squeamish']\n" + ] + } + ], + "source": [ + "# WordNet is a very useful resource.\n", + "# You should get familiar with its structure, and with ways to navigate it.\n", + "# NLTK provides many off-the-shelf useful functions\n", + "from nltk.corpus import wordnet as wn\n", + "print(wn.synsets('nice'))\n", + "print(\"*\"*50)\n", + "print(wn.synset('nice.s.03').definition())\n", + "print(wn.synset('nice.s.03').lemma_names())\n", + "print(\"*\"*50)\n", + "print(wn.synset('courteous.s.01').definition())\n", + "print(wn.synset('courteous.s.01').lemma_names())\n", + "print(\"*\"*50)\n", + "print(wn.synset('dainty.s.04').definition())\n", + "print(wn.synset('dainty.s.04').lemma_names())\n", + "print(\"*\"*50)" + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "**************************************************\n", + "the act of drilling\n", + "[u'drilling', u'boring']\n", + "**************************************************\n", + "the act of drilling a hole in the earth in the hope of producing petroleum\n", + "[u'boring', u'drilling', u'oil_production']\n", + "**************************************************\n", + "cause to be bored\n", + "[u'bore', u'tire']\n", + "**************************************************\n", + "make a hole, especially with a pointed power or hand tool\n", + "[u'bore', u'drill']\n", + "**************************************************\n", + "so lacking in interest as to cause mental weariness\n", + "[u'boring', u'deadening', u'dull', u'ho-hum', u'irksome', u'slow', u'tedious', u'tiresome', u'wearisome']\n" + ] + } + ], + "source": [ + "# Printing the definition and lemma names/lemmas of a given word is easily done in a \"for\" loop\n", + "for synset in wn.synsets('boring'):\n", + " print(\"*\"*50)\n", + " print(synset.definition())\n", + " print(synset.lemma_names())\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 145, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Lemma('drilling.n.01.boring'), Lemma('boring.n.02.boring'), Lemma('boring.s.01.boring')]\n", + "[Lemma('fantastic.s.02.wonderful')]\n", + "[Lemma('dazzling.s.01.dazzling'), Lemma('blazing.s.01.dazzling')]\n" + ] + } + ], + "source": [ + "# You can access lemmas of a word directly, using the \"lemmas\" function:\n", + "print(wn.lemmas('boring'))\n", + "print(wn.lemmas('wonderful'))\n", + "print(wn.lemmas('dazzling'))" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Lemma('dish.n.01.dish'), Lemma('dish.n.02.dish'), Lemma('dish.n.03.dish'), Lemma('smasher.n.02.dish'), Lemma('dish.n.05.dish'), Lemma('cup_of_tea.n.01.dish'), Lemma('serve.v.06.dish'), Lemma('dish.v.02.dish')]\n", + "= = = = = = = = = = = = = = = = = = = = = = = = = = = 
= = = = = = = = = = = = = = = = = = = = = = = \n", + "**************************************************\n", + "a piece of dishware normally used as a container for holding or serving food\n", + "[u'dish']\n", + "**************************************************\n", + "a particular item of prepared food\n", + "[u'dish']\n", + "**************************************************\n", + "the quantity that a dish will hold\n", + "[u'dish', u'dishful']\n", + "**************************************************\n", + "a very attractive or seductive looking woman\n", + "[u'smasher', u'stunner', u'knockout', u'beauty', u'ravisher', u'sweetheart', u'peach', u'lulu', u'looker', u'mantrap', u'dish']\n", + "**************************************************\n", + "directional antenna consisting of a parabolic reflector for microwave or radio frequency radiation\n", + "[u'dish', u'dish_aerial', u'dish_antenna', u'saucer']\n", + "**************************************************\n", + "an activity that you like or at which you are superior\n", + "[u'cup_of_tea', u'bag', u'dish']\n", + "**************************************************\n", + "provide (usually but not necessarily food)\n", + "[u'serve', u'serve_up', u'dish_out', u'dish_up', u'dish']\n", + "**************************************************\n", + "make concave; shape like a dish\n", + "[u'dish']\n" + ] + } + ], + "source": [ + "# Play with the word \"dish\"\n", + "print(wn.lemmas('dish'))\n", + "print(\"= \"*50)\n", + "for synset in wn.synsets('dish'):\n", + " print(\"*\"*50)\n", + " print(synset.definition())\n", + " print(synset.lemma_names())" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Happy.a.01: enjoying or showing or marked by joy or pleasure\n", + "Felicitous.s.02: marked by good fortune\n", + "Glad.s.02: eagerly disposed to act or to be of service\n", + "Happy.s.04: well expressed and to the point\n", + "Gladiolus.n.01: any of numerous plants of the genus Gladiolus native chiefly to tropical and South Africa having sword-shaped leaves and one-sided spikes of brightly colored funnel-shaped flowers; widely cultivated\n", + "Glad.a.01: showing or causing joy and pleasure; especially made happy\n", + "Glad.s.02: eagerly disposed to act or to be of service\n", + "Glad.s.03: feeling happy appreciation\n", + "Beaming.s.01: cheerful and bright\n", + "Joyful.a.01: full of or producing joy\n", + "Elated.s.02: full of high-spirited delight\n", + "Joyous.a.01: full of or characterized by joy\n" + ] + } + ], + "source": [ + "# A function that prints the synsets and definitions of a given word:\n", + "def get_definitions(word):\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " print synset.name().capitalize() + ':', synset.definition() # capitalizing to give the feel of a dict entry\n", + " except:\n", + " continue\n", + " \n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " get_definitions(w)\n", + "\n", + "# You can condition by a part of speech (POS), see the book!\n", + "#for synset in wn.synsets('mint', wn.NOUN):\n", + "#... 
print(synset.name() + ':', synset.definition())\n" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'happy']\n", + "[u'felicitous', u'happy']\n", + "[u'glad', u'happy']\n", + "[u'happy', u'well-chosen']\n", + "[u'gladiolus', u'gladiola', u'glad', u'sword_lily']\n", + "[u'glad']\n", + "[u'glad', u'happy']\n", + "[u'glad']\n", + "[u'beaming', u'glad']\n", + "[u'joyful']\n", + "[u'elated', u'gleeful', u'joyful', u'jubilant']\n", + "[u'joyous']\n" + ] + } + ], + "source": [ + "# A function to print the lemma names of a passed word\n", + "def get_lemma_names(word):\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " print(synset.lemma_names())\n", + " except:\n", + " continue\n", + " \n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " get_lemma_names(w)" + ] + }, + { + "cell_type": "code", + "execution_count": 185, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "set([u'felicitous', u'well-chosen', u'glad', u'happy'])\n", + "set([u'gladiolus', u'beaming', u'sword_lily', u'gladiola', u'glad', u'happy'])\n", + "set([u'elated', u'jubilant', u'joyful', u'gleeful'])\n", + "set([u'joyous'])\n", + "set([])\n", + "**************************************************\n", + "\n", + "Here's a single unique list/set:\n", + "\n", + "set([u'elated', u'gladiolus', u'beaming', u'joyous', u'sword_lily', u'well-chosen', u'felicitous', u'jubilant', u'gleeful', u'gladiola', u'joyful', u'glad', u'happy'])\n" + ] + } + ], + "source": [ + "# As above, but we uniqify using a set.\n", + "def get_unique_lemma_names(word):\n", + " l=[]\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " l.extend(synset.lemma_names())\n", + " except:\n", + " continue\n", + " l=set(l)\n", + " return l\n", + "\n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " l=get_unique_lemma_names(w)\n", + " print(l)\n", + "\n", + "# To get a set\n", + "print(\"*\"*50)\n", + "print(\"\\nHere's a single unique list/set:\\n\")\n", + "uniq_list=[]\n", + "for w in happy_words:\n", + " l=get_unique_lemma_names(w)\n", + " uniq_list.extend(l)\n", + "print(set(uniq_list))" + ] + }, + { + "cell_type": "code", + "execution_count": 190, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synset('ambulance.n.01')\n", + "**************************************************\n", + "[u'Model_T', u'S.U.V.', u'SUV', u'Stanley_Steamer', u'ambulance', u'beach_waggon', u'beach_wagon', u'bus', u'cab', u'compact', u'compact_car', u'convertible', u'coupe', u'cruiser', u'electric', u'electric_automobile', u'electric_car', u'estate_car', u'gas_guzzler', u'hack', u'hardtop', u'hatchback', u'heap', u'horseless_carriage', u'hot-rod', u'hot_rod', u'jalopy', u'jeep', u'landrover', u'limo', u'limousine', u'loaner', u'minicar', u'minivan', u'pace_car', u'patrol_car', u'phaeton', u'police_car', u'police_cruiser', u'prowl_car', u'race_car', u'racer', u'racing_car', u'roadster', u'runabout', u'saloon', u'secondhand_car', u'sedan', u'sport_car', u'sport_utility', u'sport_utility_vehicle', u'sports_car', u'squad_car', u'station_waggon', u'station_wagon', u'stock_car', u'subcompact', u'subcompact_car', u'taxi', u'taxicab', u'tourer', u'touring_car', u'two-seater', 
u'used-car', u'waggon', u'wagon']\n", + "**************************************************\n", + "ambulance beach_wagon station_wagon wagon estate_car beach_waggon station_waggon waggon bus jalopy heap cab hack taxi taxicab compact compact_car convertible coupe cruiser police_cruiser patrol_car police_car prowl_car squad_car electric electric_automobile electric_car gas_guzzler hardtop hatchback horseless_carriage hot_rod hot-rod jeep landrover limousine limo loaner minicar minivan Model_T pace_car racer race_car racing_car roadster runabout two-seater sedan saloon sport_utility sport_utility_vehicle S.U.V. SUV sports_car sport_car Stanley_Steamer stock_car subcompact subcompact_car touring_car phaeton tourer used-car secondhand_car\n" + ] + } + ], + "source": [ + "# Nice example from the book (http://www.nltk.org/book/ch02.html)\n", + "motorcar = wn.synset('car.n.01')\n", + "types_of_motorcar = motorcar.hyponyms()\n", + "print(types_of_motorcar[0]) # prints: Synset('ambulance.n.01')\n", + "print(\"*\"*50)\n", + "print(sorted(lemma.name() for synset in types_of_motorcar for lemma in synset.lemmas()))\n", + "print(\"*\"*50)\n", + "\n", + "# Remember, the generator expression above can be broken down as follows (with no sorting):\n", + "for synset in types_of_motorcar:\n", + " for lemma in synset.lemmas():\n", + " print(lemma.name())," + ] + }, + { + "cell_type": "code", + "execution_count": 193, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Synset('motor_vehicle.n.01')]\n", + "2\n", + "\n", + "Path 1\n", + "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'container.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n", + "\n", + "Path 2\n", + "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'conveyance.n.03', u'vehicle.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n" + ] + } + ], + "source": [ + "# Another useful example, this time on hypernyms:\n", + "motorcar = wn.synset('car.n.01')\n", + "print(motorcar.hypernyms()) # prints: [Synset('motor_vehicle.n.01')]\n", + "\n", + "paths = motorcar.hypernym_paths()\n", + "print(len(paths)) # prints 2 as there are two paths, as the book states, between car.n.01 and entity.n.01 \n", + " # because wheeled_vehicle.n.01 can be classified as both a vehicle and a container.\n", + " # Take a look at the output below\n", + "\n", + "print(\"\\nPath 1 between car.n.01 and entity.n.01\")\n", + "print([synset.name() for synset in paths[0]])\n", + "print(\"\\nPath 2 between car.n.01 and entity.n.01\")\n", + "print([synset.name() for synset in paths[1]])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Try the graphical WordNet browser from your command line:\n", + "nltk.app.wordnet()" + ] + }, + { + "cell_type": "code", + "execution_count": 227, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Synset('lilac.n.01'), Synset('lavender.s.01')]\n", + "[Synset('tulip.n.01')]\n", + "[Synset('flower.n.01'), Synset('flower.n.02'), Synset('flower.n.03'), Synset('bloom.v.01')]\n", + "[Synset('tree.n.01'), Synset('tree.n.02'), Synset('tree.n.03'), Synset('corner.v.02'), Synset('tree.v.02'), Synset('tree.v.03'), Synset('tree.v.04')]\n", + "[Synset('daffodil.n.01')]\n", + "**************************************************\n", + "[Synset('flower.n.01')]\n", + "[Synset('orchid.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n" + ] + } + ], + "source": [ + "# Similarity\n", + "from nltk.corpus import wordnet as wn\n", + "print(wn.synsets('lilac'))\n", + "print(wn.synsets('tulip'))\n", + "print(wn.synsets('flower'))\n", + "print(wn.synsets('tree'))\n", + "print(wn.synsets('daffodil'))\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "african = wn.synset('african_daisy.n.01')\n", + "orchid = wn.synset('orchid.n.01')\n", + "scarlet = wn.synset('scarlet_musk_flower.n.01')\n", + "aster = wn.synset('white-topped_aster.n.01')\n", + "tree = wn.synset('tree.n.01')\n", + "daffodil = wn.synset('daffodil.n.01')\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "print(african.lowest_common_hypernyms(orchid))\n", + "print(orchid.lowest_common_hypernyms(orchid))\n", + "print(scarlet.lowest_common_hypernyms(tree))\n", + "print(aster.lowest_common_hypernyms(daffodil))\n", + "#print(wn.synset('flower.n.01').hypernyms())\n", + "#print(wn.synset('flower.n.01').hyponyms())" + ] + },
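 + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# A minimal sketch (left unexecuted): WordNet can also score relatedness directly.\n", + "# path_similarity is based on the shortest hypernym path and falls in (0, 1];\n", + "# a synset compared with itself scores 1.0, and we would expect two flowers to\n", + "# score higher than a flower compared with a tree. (Variables are from the cell above.)\n", + "print(orchid.path_similarity(orchid))\n", + "print(african.path_similarity(orchid))\n", + "print(scarlet.path_similarity(tree))" + ] + }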
+ ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From c45077db62c79bef69b4cd139ad77dcf305e8764 Mon Sep 17 00:00:00 2001 From: mageed Date: Sat, 23 Jan 2016 22:23:34 -0500 Subject: [PATCH 02/36] update --- python_tutorial_part_1.ipynb | 3265 ---------------------------------- 1 file changed, 3265 deletions(-) diff --git a/python_tutorial_part_1.ipynb b/python_tutorial_part_1.ipynb index ce15fa0..de6659c 100644 --- a/python_tutorial_part_1.ipynb +++ b/python_tutorial_part_1.ipynb @@ -1352,3271 +1352,6 @@ "x=[c for c in \"king\" if c !=\"k\"]\n", "print x" ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "# NLTK Tutorial" - ] - }, - { - "cell_type": "code", - "execution_count": 80, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "*** Introductory Examples for the NLTK Book ***\n", - "Loading text1, ..., text9 and sent1, ..., sent9\n", - "Type the name of the text or sentence to view it.\n", - "Type: 'texts()' or 'sents()' to list the materials.\n", - "text1: Moby Dick by Herman Melville 1851\n", - "text2: Sense and Sensibility by Jane Austen 1811\n", - "text3: The Book of Genesis\n", - "text4: Inaugural Address Corpus\n", - "text5: Chat Corpus\n", - "text6: Monty Python and the Holy Grail\n", - "text7: Wall Street Journal\n", - "text8: Personals Corpus\n", - "text9: The Man Who Was Thursday by G . K . 
Chesterton 1908\n" - ] - } - ], - "source": [ - "# Take a look at the preface here: http://www.nltk.org/book/ch00.html\n", - "# This tutorial is based on Python 2.7, but it shouldn't be an issue to write the same code for Python 3 as the differences\n", - "# are minimal so long as the tutorial is concerned\n", - "import nltk\n", - "from nltk.book import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 253, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Displaying 10 of 66 matches:\n", - "r occupations may come . The Negroes are now Americans . Their ancestors came here years ago agains\n", - "e it so or not . And yet we are not the less Americans on that account . We shall be the more Ameri\n", - "we find them now secure ; and there comes to Americans the profound assurance that our representati\n", - "have called me . I am certain that my fellow Americans expect that on my induction into the Preside\n", - " and the hurricanes of disaster . In this we Americans were discovering no wholly new truth ; we we\n", - " and that freedom is an ebbing tide . But we Americans know that this is not true . Eight years ago\n", - "eat . We are not content to stand still . As Americans , we go forward , in the service of our coun\n", - "uguration be simple and its words brief . We Americans of today , together with our allies , are pa\n", - "in the discharge of this responsibility , we Americans know and we observe the difference between w\n", - "cked bargain of trading honor for security . 
Americans , indeed all free men , remember that in the\n", - "None\n" - ] - } - ], - "source": [ - "print(text4.concordance(\"Americans\", width=100, lines=10))" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "the free power opportunity fellow opinions colleges peace gangs\n", - "judgments consent noblest ideas colors fidelity unquestionable worship\n", - "discipline industrious just\n", - "None\n" - ] - } - ], - "source": [ - "# Other words that appear in a similar range of contexts as a given word\n", - "print(text4.similar(\"patriotic\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 85, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "No common contexts were found\n", - "None\n" - ] - } - ], - "source": [ - "print(text4.common_contexts([\"patriotic\", \"very\"])) " - ] - }, - { - "cell_type": "code", - "execution_count": 89, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "every_citizen and_and our_citizens\n", - "None\n" - ] - } - ], - "source": [ - "print(text4.common_contexts([\"patriotic\", \"free\"])) " - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0669\n" - ] - } - ], - "source": [ - "from __future__ import division # in Python 3 you don't need to do the import\n", - "# Lexical diversity\n", - "def lexical_diversity(text):\n", - " return len(set(text))/len(text)\n", - "\n", - "lex_div=lexical_diversity(text4)\n", - "print(round(lex_div, 4))" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[u'AS', u'Abandonment', u'Abhorring', u'About', u'Above', u'Abraham', u'Abroad', u'Accept', u'Across', u'Act', u'Acting', u'Action', u'Actual', u'Adams', u'Additional', u'Address', u'Administered', u'Administration', u'Administrations', u'Advance']\n" - ] - } - ], - "source": [ - "# sorted set of words\n", - "print(sorted(set(text4))[100:120])" - ] - }, - { - "cell_type": "code", - "execution_count": 78, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "None\n", - "['father', 'man', 'mother', 'woman']\n" - ] - } - ], - "source": [ - "#Pay attenstion to the difference of these!\n", - "tokens=[\"man\", \"woman\", \"father\", \"mother\"]\n", - "print tokens.sort() # Returns \"None\", but sorts the list in place\n", - "print sorted(tokens) # Returns the sorted list\n" - ] - }, - { - "cell_type": "code", - "execution_count": 81, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('man', 102)\n", - "('woman', 3)\n", - "('father', 4)\n", - "('mother', 4)\n" - ] - } - ], - "source": [ - "words=[\"man\", \"woman\", \"father\", \"mother\"]\n", - "for w in words:\n", - " print(w, text4.count(w))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - 
"execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - " # Stop here:=====" - ] - }, - { - "cell_type": "code", - "execution_count": 61, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "****************************************************************************************************\n", - "[(u'institutions', 76), (u'come', 75), (u'party', 75), (u'better', 75), (u'always', 74), (u'today', 74), (u'office', 73), (u'still', 73), (u'need', 73), (u'others', 73), (u'strength', 72), (u'Let', 72), (u'nor', 72), (u'itself', 72), (u'means', 70), (u'believe', 70), (u'themselves', 70), (u'place', 70), (u'land', 69), (u'could', 69), (u'then', 69), (u'.\"', 69), (u'home', 69), (u'equal', 69), (u'together', 68), (u'might', 68), (u'things', 67), (u'secure', 67), (u'Nation', 67), (u'whose', 66), (u'find', 66), (u'given', 66), (u'prosperity', 66), (u'Americans', 66), (u'old', 65), (u'am', 65), (u'full', 65), (u'give', 65), (u'here', 64), (u'Federal', 64), (u'action', 64), (u'order', 64), (u'yet', 64), (u'proper', 64), (u'found', 63), (u'up', 63), (u'important', 63), (u'responsibility', 63), (u'take', 62), (u'where', 62), (u'being', 62), (u'change', 62), (u'Executive', 62), (u'even', 62), (u'subject', 62), (u'administration', 61), (u'revenue', 61), (u'State', 61), (u'see', 60), (u'security', 60), (u'ought', 60), (u'trust', 60), (u'These', 60), (u'A', 59), (u'self', 59), (u'true', 59), (u'business', 59), (u'seek', 59), (u'character', 59), (u'honor', 59), (u'question', 59), (u'called', 59), (u'respect', 59), (u'commerce', 58), (u'cause', 58), (u'toward', 58), (u'principle', 58), (u'again', 58), (u'century', 58), (u'influence', 57), (u'become', 56), (u'protection', 56), (u'done', 56), (u'stand', 56), (u'course', 55), (u'another', 55), (u'very', 55), (u'help', 55), (u'like', 55), (u'citizen', 54), (u'authority', 54), (u'also', 53), (u'Republic', 53), (u'live', 53), (u'civil', 53), (u'past', 52), (u'sense', 52), (u'constitutional', 52), (u'meet', 52), (u'democracy', 52)]\n", - "****************************************************************************************************\n", - "14\n", - "312\n", - "****************************************************************************************************\n", - "[u'than', u'country', u'.', u'has', u'people', u'for', u'citizens', u'time', u'so', u'nation']\n" - ] - } - ], - "source": [ - "# Frequency distribution\n", - "freq_dist = FreqDist(text4) \n", - "print(\"*\"*100)\n", - "print(freq_dist.most_common(1000))[200:300]\n", - "print(\"*\"*100)\n", - "print(freq_dist[\"European\"])\n", - "print(freq_dist[\"world\"])\n", - "print(\"*\"*100)\n", - "#------------------------------------------\n", - "# Vocabulary\n", - "V=set(text4)\n", - "words=[w for w in V if freq_dist[w] > 200][:10]\n", - "print(words)" - ] - }, - { - "cell_type": "code", - "execution_count": 62, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "United States; fellow citizens; four years; years ago; Federal\n", - "Government; General Government; American people; Vice President; Old\n", - "World; Almighty God; Fellow citizens; Chief Magistrate; Chief Justice;\n", - "God bless; every citizen; Indian tribes; public debt; one another;\n", - "foreign nations; political parties\n", - "None\n" - ] - } - ], - "source": [ - "# Collocations\n", - "print(text4.collocations())" - ] - }, - { - "cell_type": "code", - 
"execution_count": 67, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[u'zodiac', u'zodiac', u'zogranda', u'zone', u'zoned', u'zoned', u'zones', u'zoology', u'zoology', u'zoroaster']\n", - "[u'zephyr', u'zeuglodon', u'zig', u'zodiac', u'zogranda', u'zone', u'zoned', u'zones', u'zoology', u'zoroaster']\n" - ] - } - ], - "source": [ - "# Could you tell the difference?\n", - "print(sorted(w.lower() for w in set(text1))[-10:])\n", - "print(sorted(set(w.lower() for w in text1))[-10:])" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\n", - "\n", - "Hamlet: Entire Play\n", - " \n", - "\n", - "\n", - "\n", - "\n", - "\n", - "The Tragedy of Hamlet, Prince of Denmark\n", - "\n", - "Shakespeare homepage \n", - " | Hamlet \n", - " | Entire play\n", - "\n", - "ACT I\n", - "SCENE I. Elsinore. A platform before the castle.\n", - "\n", - "FRANCISCO at his post. Enter to him BERNARDO\n", - "\n", - "BERNARDO\n", - "\n", - "Who's there?\n", - "\n", - "FRANCISCO\n", - "\n", - "Nay, answer me: stand, and unfold you\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'nltk' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;31m#print(\"*\"*100)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;31m#------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m \u001b[0mtokens\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnltk\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mword_tokenize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mraw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0;31m#print(tokens[100:200])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'nltk' is not defined" - ] - } - ], - "source": [ - "from urllib import urlopen\n", - "from bs4 import BeautifulSoup\n", - "url=\"http://shakespeare.mit.edu/hamlet/full.html\"\n", - "page = urlopen(url)\n", - "soup = BeautifulSoup(page.read())\n", - "#print type(soup)\n", - "#html = urlopen(url).read() \n", - "raw = BeautifulSoup.get_text(soup) \n", - "\n", - "print(raw[:300])\n", - "#print(\"*\"*100)\n", - "#------------------------------------------\n", - "tokens=nltk.word_tokenize(raw)\n", - "#print(tokens[100:200])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['I', 'am', 'happy']\n" - ] - } - ], - "source": [ - "import nltk\n", - "raw=\"I am happy\"\n", - "tokens=nltk.word_tokenize(raw)\n", - "print tokens" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - 
"cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# More on Files" - ] - }, - { - "cell_type": "code", - "execution_count": 230, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at']\n", - "**************************************************\n", - "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at', u'no', u'cost', u'and', u'with', u'almost', u'no', u'restrictions', u'whatsoever', u'.', u'You', u'may', u'copy', u'it', u',', u'give', u'it', u'away', u'or', u're-use', u'it', u'under', u'the', u'terms', u'of', u'the', u'Project', u'Gutenberg', u'License', u'included', u'with']\n", - "Project Gutenberg-tm; _1st Clo._; Project Gutenberg; _Crosses to_;\n", - "Literary Archive; Gutenberg-tm electronic; Archive Foundation;\n", - "electronic works; Gutenberg Literary; United States; _2nd Clo._;\n", - "ROSENCRANTZ _and_; public domain; _and_ GUILDENSTERN; Dr. 
Johnson;\n", - "_1st Play._; electronic work; _and_ Attendants; the_ KING; set forth\n", - "None\n" - ] - } - ], - "source": [ - "import codecs\n", - "from nltk import word_tokenize, Text\n", - "text_string=codecs.open(\"hamlet.txt\", \"r\", \"utf-8\").read() # Opens for reading and gets you the file content as a list\n", - "tokens = word_tokenize(text_string)\n", - "print(type(tokens))\n", - "print(tokens[:20])\n", - "text = Text(tokens)\n", - "print(\"*\"*50)\n", - "print(text[:50])\n", - "print(text.collocations())\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 90, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[u'[', u'The', u'Tragedie', u'of', u'Hamlet', u'by', u'William', u'Shakespeare', u'1599', u']']\n", - "[u'Actus', u'Primus', u'.']\n", - "[u'Scoena', u'Prima', u'.']\n", - "[u'Enter', u'Barnardo', u'and', u'Francisco', u'two', u'Centinels', u'.']\n", - "[u'Barnardo', u'.']\n" - ] - } - ], - "source": [ - "#sentence splitting\n", - "from nltk.corpus import gutenberg\n", - "hamlet_sent=gutenberg.sents('shakespeare-hamlet.txt')\n", - "for sent in hamlet_sent[:5]:\n", - " print(sent)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 93, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[u'1789-Washington.txt', u'1793-Washington.txt', u'1797-Adams.txt', u'1801-Jefferson.txt', u'1805-Jefferson.txt']\n", - "**************************************************\n", - "[u'1789', u'1793', u'1797', u'1801', u'1805', u'1809', u'1813', u'1817', u'1821', u'1825', u'1829', u'1833', u'1837', u'1841', u'1845', u'1849', u'1853', u'1857', u'1861', u'1865', u'1869', u'1873', u'1877', u'1881', u'1885', u'1889', u'1893', u'1897', u'1901', u'1905', u'1909', u'1913', u'1917', u'1921', u'1925', u'1929', u'1933', u'1937', u'1941', u'1945', u'1949', u'1953', u'1957', u'1961', u'1965', u'1969', u'1973', u'1977', u'1981', u'1985', u'1989', u'1993', u'1997', u'2001', u'2005', u'2009']\n" - ] - } - ], - "source": [ - "from nltk.corpus import inaugural\n", - "print(inaugural.fileids()[:5])\n", - "print(\"*\"*50)\n", - "#print([fileid[:4] for fileid in inaugural.fileids()])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Generate text" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 122, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "living creature that he said , and the land of the land of the land of the land of the land of the land of the land of the land None\n" - ] - } - ], - "source": [ - "# From the NLTK book: http://www.nltk.org/book/ch02.html\n", - "def generate_model(cfdist, word, num=15):\n", - " for i in range(num):\n", - " print(word),\n", - " word = cfdist[word].max()\n", - "\n", - "\n", - "text = nltk.corpus.genesis.words('english-kjv.txt')\n", - "bigrams = nltk.bigrams(text)\n", - "cfd = 
nltk.ConditionalFreqDist(bigrams)\n", - "print(generate_model(cfd, 'living', num=30))\n", - "#print(cfd[\"living\"].max())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# WordNet" - ] - }, - { - "cell_type": "code", - "execution_count": 133, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Synset('nice.n.01'), Synset('nice.a.01'), Synset('decent.s.01'), Synset('nice.s.03'), Synset('dainty.s.04'), Synset('courteous.s.01')]\n", - "**************************************************\n", - "done with delicacy and skill\n", - "[u'nice', u'skillful']\n", - "**************************************************\n", - "exhibiting courtesy and politeness\n", - "[u'courteous', u'gracious', u'nice']\n", - "**************************************************\n", - "excessively fastidious and easily disgusted\n", - "[u'dainty', u'nice', u'overnice', u'prissy', u'squeamish']\n" - ] - } - ], - "source": [ - "from nltk.corpus import wordnet as wn\n", - "print(wn.synsets('nice'))\n", - "print(\"*\"*50)\n", - "print(wn.synset('nice.s.03').definition())\n", - "print(wn.synset('nice.s.03').lemma_names())\n", - "print(\"*\"*50)\n", - "print(wn.synset('courteous.s.01').definition())\n", - "print(wn.synset('courteous.s.01').lemma_names())\n", - "print(\"*\"*50)\n", - "print(wn.synset('dainty.s.04').definition())\n", - "print(wn.synset('dainty.s.04').lemma_names())\n", - "print(\"*\"*50)" - ] - }, - { - "cell_type": "code", - "execution_count": 138, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "**************************************************\n", - "the act of drilling\n", - "[u'drilling', u'boring']\n", - "**************************************************\n", - "the act of drilling a hole in the earth in the hope of producing petroleum\n", - "[u'boring', u'drilling', u'oil_production']\n", - "**************************************************\n", - "cause to be bored\n", - "[u'bore', u'tire']\n", - "**************************************************\n", - "make a hole, especially with a pointed power or hand tool\n", - "[u'bore', u'drill']\n", - "**************************************************\n", - "so lacking in interest as to cause mental weariness\n", - "[u'boring', u'deadening', u'dull', u'ho-hum', u'irksome', u'slow', u'tedious', u'tiresome', u'wearisome']\n" - ] - } - ], - "source": [ - "for synset in wn.synsets('boring'):\n", - " print(\"*\"*50)\n", - " print(synset.definition())\n", - " print(synset.lemma_names())\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 145, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Lemma('drilling.n.01.boring'), Lemma('boring.n.02.boring'), Lemma('boring.s.01.boring')]\n", - "[Lemma('fantastic.s.02.wonderful')]\n", - "[Lemma('dazzling.s.01.dazzling'), Lemma('blazing.s.01.dazzling')]\n" - ] - } - ], - "source": [ - "# Also\n", - "print(wn.lemmas('boring'))\n", - "print(wn.lemmas('wonderful'))\n", - "print(wn.lemmas('dazzling'))" - ] - }, - { - "cell_type": "code", - "execution_count": 153, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Lemma('dish.n.01.dish'), Lemma('dish.n.02.dish'), Lemma('dish.n.03.dish'), Lemma('smasher.n.02.dish'), Lemma('dish.n.05.dish'), Lemma('cup_of_tea.n.01.dish'), Lemma('serve.v.06.dish'), 
Lemma('dish.v.02.dish')]\n", - "= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = \n", - "**************************************************\n", - "a piece of dishware normally used as a container for holding or serving food\n", - "[u'dish']\n", - "**************************************************\n", - "a particular item of prepared food\n", - "[u'dish']\n", - "**************************************************\n", - "the quantity that a dish will hold\n", - "[u'dish', u'dishful']\n", - "**************************************************\n", - "a very attractive or seductive looking woman\n", - "[u'smasher', u'stunner', u'knockout', u'beauty', u'ravisher', u'sweetheart', u'peach', u'lulu', u'looker', u'mantrap', u'dish']\n", - "**************************************************\n", - "directional antenna consisting of a parabolic reflector for microwave or radio frequency radiation\n", - "[u'dish', u'dish_aerial', u'dish_antenna', u'saucer']\n", - "**************************************************\n", - "an activity that you like or at which you are superior\n", - "[u'cup_of_tea', u'bag', u'dish']\n", - "**************************************************\n", - "provide (usually but not necessarily food)\n", - "[u'serve', u'serve_up', u'dish_out', u'dish_up', u'dish']\n", - "**************************************************\n", - "make concave; shape like a dish\n", - "[u'dish']\n" - ] - } - ], - "source": [ - "# dish!\n", - "print(wn.lemmas('dish'))\n", - "print(\"= \"*50)\n", - "for synset in wn.synsets('dish'):\n", - " print(\"*\"*50)\n", - " print(synset.definition())\n", - " print(synset.lemma_names())" - ] - }, - { - "cell_type": "code", - "execution_count": 198, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Happy.a.01: enjoying or showing or marked by joy or pleasure\n", - "Felicitous.s.02: marked by good fortune\n", - "Glad.s.02: eagerly disposed to act or to be of service\n", - "Happy.s.04: well expressed and to the point\n", - "Gladiolus.n.01: any of numerous plants of the genus Gladiolus native chiefly to tropical and South Africa having sword-shaped leaves and one-sided spikes of brightly colored funnel-shaped flowers; widely cultivated\n", - "Glad.a.01: showing or causing joy and pleasure; especially made happy\n", - "Glad.s.02: eagerly disposed to act or to be of service\n", - "Glad.s.03: feeling happy appreciation\n", - "Beaming.s.01: cheerful and bright\n", - "Joyful.a.01: full of or producing joy\n", - "Elated.s.02: full of high-spirited delight\n", - "Joyous.a.01: full of or characterized by joy\n" - ] - } - ], - "source": [ - "def get_definitions(word):\n", - " for synset in wn.synsets(word):\n", - " try:\n", - " print synset.name().capitalize() + ':', synset.definition() # capitalizing to give the feel of a dict entry\n", - " except:\n", - " continue\n", - " \n", - "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", - "for w in happy_words:\n", - " get_definitions(w)\n", - "\n", - "#for synset in wn.synsets('mint', wn.NOUN):\n", - "#... 
print(synset.name() + ':', synset.definition())\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 157, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[u'happy']\n", - "[u'felicitous', u'happy']\n", - "[u'glad', u'happy']\n", - "[u'happy', u'well-chosen']\n", - "[u'gladiolus', u'gladiola', u'glad', u'sword_lily']\n", - "[u'glad']\n", - "[u'glad', u'happy']\n", - "[u'glad']\n", - "[u'beaming', u'glad']\n", - "[u'joyful']\n", - "[u'elated', u'gleeful', u'joyful', u'jubilant']\n", - "[u'joyous']\n" - ] - } - ], - "source": [ - "def get_lemma_names(word):\n", - " for synset in wn.synsets(word):\n", - " try:\n", - " print(synset.lemma_names())\n", - " except:\n", - " continue\n", - " \n", - "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", - "for w in happy_words:\n", - " get_lemma_names(w)" - ] - }, - { - "cell_type": "code", - "execution_count": 185, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "set([u'felicitous', u'well-chosen', u'glad', u'happy'])\n", - "set([u'gladiolus', u'beaming', u'sword_lily', u'gladiola', u'glad', u'happy'])\n", - "set([u'elated', u'jubilant', u'joyful', u'gleeful'])\n", - "set([u'joyous'])\n", - "set([])\n", - "**************************************************\n", - "\n", - "Here's a single unique list/set:\n", - "\n", - "set([u'elated', u'gladiolus', u'beaming', u'joyous', u'sword_lily', u'well-chosen', u'felicitous', u'jubilant', u'gleeful', u'gladiola', u'joyful', u'glad', u'happy'])\n" - ] - } - ], - "source": [ - "def get_unique_lemma_names(word):\n", - " l=[]\n", - " for synset in wn.synsets(word):\n", - " try:\n", - " l.extend(synset.lemma_names())\n", - " except:\n", - " continue\n", - " l=set(l)\n", - " return l\n", - "\n", - "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", - "for w in happy_words:\n", - " l=get_unique_lemma_names(w)\n", - " print(l)\n", - "\n", - "# To get a set\n", - "print(\"*\"*50)\n", - "print(\"\\nHere's a single unique list/set:\\n\")\n", - "uniq_list=[]\n", - "for w in happy_words:\n", - " l=get_unique_lemma_names(w)\n", - " uniq_list.extend(l)\n", - "print(set(uniq_list))" - ] - }, - { - "cell_type": "code", - "execution_count": 190, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Synset('ambulance.n.01')\n", - "**************************************************\n", - "[u'Model_T', u'S.U.V.', u'SUV', u'Stanley_Steamer', u'ambulance', u'beach_waggon', u'beach_wagon', u'bus', u'cab', u'compact', u'compact_car', u'convertible', u'coupe', u'cruiser', u'electric', u'electric_automobile', u'electric_car', u'estate_car', u'gas_guzzler', u'hack', u'hardtop', u'hatchback', u'heap', u'horseless_carriage', u'hot-rod', u'hot_rod', u'jalopy', u'jeep', u'landrover', u'limo', u'limousine', u'loaner', u'minicar', u'minivan', u'pace_car', 
u'patrol_car', u'phaeton', u'police_car', u'police_cruiser', u'prowl_car', u'race_car', u'racer', u'racing_car', u'roadster', u'runabout', u'saloon', u'secondhand_car', u'sedan', u'sport_car', u'sport_utility', u'sport_utility_vehicle', u'sports_car', u'squad_car', u'station_waggon', u'station_wagon', u'stock_car', u'subcompact', u'subcompact_car', u'taxi', u'taxicab', u'tourer', u'touring_car', u'two-seater', u'used-car', u'waggon', u'wagon']\n", - "**************************************************\n", - "ambulance beach_wagon station_wagon wagon estate_car beach_waggon station_waggon waggon bus jalopy heap cab hack taxi taxicab compact compact_car convertible coupe cruiser police_cruiser patrol_car police_car prowl_car squad_car electric electric_automobile electric_car gas_guzzler hardtop hatchback horseless_carriage hot_rod hot-rod jeep landrover limousine limo loaner minicar minivan Model_T pace_car racer race_car racing_car roadster runabout two-seater sedan saloon sport_utility sport_utility_vehicle S.U.V. SUV sports_car sport_car Stanley_Steamer stock_car subcompact subcompact_car touring_car phaeton tourer used-car secondhand_car\n" - ] - } - ], - "source": [ - "#Nice example from the book (http://www.nltk.org/book/ch02.html)\n", - "motorcar = wn.synset('car.n.01')\n", - "types_of_motorcar = motorcar.hyponyms()\n", - "print(types_of_motorcar[0]) # prints: Synset('ambulance.n.01')\n", - "print(\"*\"*50)\n", - "print(sorted(lemma.name() for synset in types_of_motorcar for lemma in synset.lemmas()))\n", - "print(\"*\"*50)\n", - "\n", - "# Remember, the tuple coprehension can be broken down as follows (with no sorting):\n", - "for synset in types_of_motorcar:\n", - " for lemma in synset.lemmas():\n", - " print(lemma.name())," - ] - }, - { - "cell_type": "code", - "execution_count": 193, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Synset('motor_vehicle.n.01')]\n", - "2\n", - "\n", - "Path 1\n", - "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'container.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n", - "\n", - "Path 2\n", - "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'conveyance.n.03', u'vehicle.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n" - ] - } - ], - "source": [ - "# Another useful example, this time on hypernyms:\n", - "motorcar = wn.synset('car.n.01')\n", - "print(motorcar.hypernyms()) # prints: [Synset('motor_vehicle.n.01')]\n", - "\n", - "paths = motorcar.hypernym_paths()\n", - "print(len(paths)) # prints 2 as there are two paths, as the book states, between car.n.01 and entity.n.01 \n", - " # because wheeled_vehicle.n.01 can be classified as both a vehicle and a container.\n", - " # Take a look at the output below\n", - "\n", - "print(\"\\nPath 1 between car.n.01 and entity.n.01\")\n", - "print([synset.name() for synset in paths[0]])\n", - "print(\"\\nPath 2 between car.n.01 and entity.n.01\")\n", - "print([synset.name() for synset in paths[1]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Try the graphical WordNet browser from your command line:\n", - "nltk.app.wordnet()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": 
{ - "collapsed": true - }, - "outputs": [], - "source": [ - "# Similarity" - ] - }, - { - "cell_type": "code", - "execution_count": 227, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Synset('lilac.n.01'), Synset('lavender.s.01')]\n", - "[Synset('tulip.n.01')]\n", - "[Synset('flower.n.01'), Synset('flower.n.02'), Synset('flower.n.03'), Synset('bloom.v.01')]\n", - "[Synset('tree.n.01'), Synset('tree.n.02'), Synset('tree.n.03'), Synset('corner.v.02'), Synset('tree.v.02'), Synset('tree.v.03'), Synset('tree.v.04')]\n", - "[Synset('daffodil.n.01')]\n", - "**************************************************\n", - "[Synset('flower.n.01')]\n", - "[Synset('orchid.n.01')]\n", - "[Synset('vascular_plant.n.01')]\n", - "[Synset('vascular_plant.n.01')]\n" - ] - } - ], - "source": [ - "from nltk.corpus import wordnet as wn\n", - "print(wn.synsets('lilac'))\n", - "print(wn.synsets('tulip'))\n", - "print(wn.synsets('flower'))\n", - "print(wn.synsets('tree'))\n", - "print(wn.synsets('daffodil'))\n", - "#--------------------------\n", - "print(\"*\"*50)\n", - "african = wn.synset('african_daisy.n.01')\n", - "orchid = wn.synset('orchid.n.01')\n", - "scarlet = wn.synset('scarlet_musk_flower.n.01')\n", - "aster = wn.synset('white-topped_aster.n.01')\n", - "tree = wn.synset('tree.n.01')\n", - "daffodil = wn.synset('daffodil.n.01')\n", - "#--------------------------\n", - "print(\"*\"*50)\n", - "print(african.lowest_common_hypernyms(orchid))\n", - "print(orchid.lowest_common_hypernyms(orchid))\n", - "print(scarlet.lowest_common_hypernyms(tree))\n", - "print(aster.lowest_common_hypernyms(daffodil))\n", - "#print(wn.synset('flower.n.01').hypernyms())\n", - "#print(wn.synset('flower.n.01').hyponyms())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 
null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - 
"execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 164, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "'list' object has no attribute 'lower'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 19\u001b[0m \u001b[0mhappy_words\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"happy\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"glad\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"joyful\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"joyous\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"exhuberant\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 21\u001b[0;31m \u001b[0ml\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mget_unique_lemma_names\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhappy_words\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 22\u001b[0m \u001b[0;32mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ml\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36mget_unique_lemma_names\u001b[0;34m(word)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mget_unique_lemma_names\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mword\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0ml\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0msynset\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mwn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msynsets\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mword\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m 
\u001b[0ml\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msynset\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlemma_names\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/nltk/corpus/reader/wordnet.pyc\u001b[0m in \u001b[0;36msynsets\u001b[0;34m(self, lemma, pos, lang)\u001b[0m\n\u001b[1;32m 1402\u001b[0m \u001b[0mof\u001b[0m \u001b[0mthat\u001b[0m \u001b[0mlanguage\u001b[0m \u001b[0mwill\u001b[0m \u001b[0mbe\u001b[0m \u001b[0mreturned\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1403\u001b[0m \"\"\"\n\u001b[0;32m-> 1404\u001b[0;31m \u001b[0mlemma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlemma\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlower\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1405\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1406\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlang\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'en'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'list' object has no attribute 'lower'" - ] - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] 
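The AttributeError above comes from handing wn.synsets() a whole list: it calls .lower() on its argument, which only works for a single word. A hedged reconstruction of the intended cell, with the loop moved inside the function (the function name, happy_words list, and lemma_names() call are taken from the traceback itself):

```python
# A hedged reconstruction of the cell behind the traceback above:
# wn.synsets() expects one word, so we loop over the list ourselves.
from nltk.corpus import wordnet as wn

def get_unique_lemma_names(words):
    l = []
    for word in words:                   # one word at a time to wn.synsets()
        for synset in wn.synsets(word):
            l.extend(synset.lemma_names())
    return sorted(set(l))                # de-duplicate and sort

happy_words = ["happy", "glad", "joyful", "joyous", "exhuberant"]
print get_unique_lemma_names(happy_words)
```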
- }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - 
"collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "hi\n" - ] - } - ], - "source": [ - "print \"hi\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "%matplotlib inline\n", - "\n", - "import matplotlib\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from nltk.draw.dispersion import dispersion_plot\n", - "print(text4.dispersion_plot([\"citizens\", \"democracy\", \"freedom\", \"duties\", \"America\"]))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "%matplotlib inline\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "\n", - "\n", - "x = np.linspace(0, 3*np.pi, 500)\n", - "plt.plot(x, np.sin(x**2))\n", - "plt.title('A simple 
chirp');\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "#!/usr/bin/python\n", - "%matplotlib inline\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "#################################\n", - "\n", - "###################\n", - "# Classification confidence per class:\n", - "#--------------------------------------\n", - "Account=[0.50597769529107606, 0.61137631750300769, 0.67732439371970943, 0.74335210285266851, 0.78045128083006687, 0.85889268848391032, 0.86004645511688793, 0.86034494338992484, 0.86110627662083916, 0.86385229563374299, 0.8652039704758846, 0.86629669936492792, 0.86714993874468849, 0.86951104057901096, 0.87107760241792387, 0.87433969633042807, 0.87731372225510584, 0.87786812374991918, 0.87810590629205709, 0.87892633430246814, 0.87915317999299925, 0.88066216034760669, 0.88408934521227001, 0.88469154406025774, 0.88796777230531254, 0.89044129384145887, 0.90267024623065706, 0.92524363229460704]\n", - "Alert=[0.47637448264043625, 0.66454777829823419, 0.83611243185409068, 0.84343541279259093, 0.84407692188080419, 0.84691874602073758, 0.84846838467167951, 0.84885499324244018, 0.84892085388116045, 0.85662976488218612, 0.85748220725433, 0.85766843771151779, 0.86282314473652266, 0.88306412005817692, 0.90953193806965249]\n", - "EventUpdate=[0.60620179952905806, 0.81414035159064524, 0.83977384250581455, 0.84197728312163245, 0.84665030060369317, 0.86063170708443371, 0.86556282358318515, 0.86670240801689957, 0.86730292811655896, 0.87129628595308339, 0.87179895532552454, 0.87237623417151511]\n", - "purchasesAndPayments= [0.70209823840705454, 0.87763882598773824, 0.87853436781994654, 0.88111290254442187, 0.88188179857430626, 0.88427555688939874, 0.88575543579520666, 0.89021547578541182, 0.89111298147392382, 0.89325185353073655, 0.89699979308063715, 0.89711891518041997, 0.90022327360644228, 0.90038922055635651, 0.90352512667392848, 0.91167136191589293, 0.91258912510512546, 0.91292772595692206, 0.91385158396318156, 0.91514777255816948, 0.91598257367845204, 0.91620376194631981, 0.91624196881789599, 0.91629208488490144, 0.91682304964064509, 0.91685424758210343, 0.91773130126105062, 0.91859239740198162, 0.91885115720464194, 0.9189813690872739, 0.91942897543635649, 0.92005261642510894, 0.92031278031339603, 0.9206131773956312, 0.92162770964147711, 0.92333172011600717, 0.92368736632287529, 0.92445312588899153, 0.92487782965565768, 0.92585645404696637, 0.92736029109282003, 0.92745082086828889, 0.92853691341173528, 0.92974148319851113, 0.93038737495076018, 0.93268166532934882, 0.93485981648392313, 0.93560689873538494, 0.93792467408291125, 0.93990750368079101, 0.94138345097080245, 0.94171127507598984, 0.94257827570703179, 0.94278726547269032, 0.94591753032286208, 0.94684854929169837, 0.94778523908566703, 0.94938466710678737, 0.9501448612157134, 0.95149247851897123, 0.95239002506270776, 0.95375471665360612, 0.95562783008800667, 0.95780240099868053, 0.9578653435017056, 0.95787837810691678, 0.95826883828494502, 0.96152066593986663, 0.96162681902834768, 0.9642282884732325, 0.96529873521893783, 0.96552974107677436, 0.96639428810283956, 0.96678961265384455, 0.96687982068407863, 0.96873869536512358, 0.96880029214978958, 0.96947401140396405, 0.97052647680045956, 0.97057034041651968]\n", - "TrackedInfo=[0.57856804762622893, 0.84336621528774514, 0.84726630715050089, 0.84865439463077963, 0.85259413447559007, 0.85367437726360995, 0.85572529471292957, 0.85595201321671999, 0.86019342820522748, 
0.86280650298103134, 0.87695710743248556, 0.88755323066381953, 0.89051154523294096, 0.89159321691228055]\n", - "Travel=[0.83341902931726042, 0.84109694886055264, 0.84915529812154522, 0.84965845310243482, 0.85351817819423192, 0.86268823869067024, 0.87021845881088733]\n", - "\n", - "def createHistogram(x, cat=\"Purchases and Payments\", color='green'):\n", - " \"\"\"\n", - " Plots an individual histogram.\n", - " \"\"\"\n", - " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", - " plt.xlabel('Confidence')\n", - " #plt.ylabel('Frequency')\n", - " plt.title(cat)\n", - " plt.grid(True)\n", - " plt.show()\n", - "\n", - "def createHist(x, cat=\"Purchases and Payments\", color='green'):\n", - " \"\"\"\n", - " Used as a helper function for creating individual histograms \n", - " inside the subplotter\n", - " \"\"\"\n", - " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", - " plt.xlabel('Confidence')\n", - " plt.title(cat)\n", - " plt.grid(True)\n", - " \n", - "def subplotter():\n", - " \"\"\"\n", - " Subplots several histograms...\n", - " \"\"\"\n", - " plt.figure(figsize=(15,15))\n", - " plt.subplot(3,3 , 1 )\n", - " createHist(Account, cat=\"Account\", color=np.random.rand(3))\n", - " plt.subplot( 3,3, 2 )\n", - " createHist(Alert, cat=\"Alert\", color='magenta')\n", - " plt.subplot( 3,3, 3 )\n", - " createHist(EventUpdate, cat=\"Event Update\", color=np.random.rand(3))\n", - " plt.subplot( 3,3, 4 )\n", - " createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=np.random.rand(3)) \n", - " plt.subplot( 3,3, 5 )\n", - " createHist(TrackedInfo, cat=\"Tracked Info\", color=np.random.rand(3))\n", - " plt.subplot( 3,3, 6 )\n", - " createHist(Travel, cat=\"Travel\", color='y')\n", - " plt.show()\n", - "\n", - "def main():\n", - " subplotter()\n", - " # createHist(Account, cat=\"Account\")\n", - " # createHist(Alert, cat=\"Alert\", color='magenta')\n", - " # createHist(EventUpdate, cat=\"EventUpdate\", color='blue')\n", - " # createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=\"red\")\n", - " # createHist(TrackedInfo, cat=\"TrackedInfo\", color='brown')\n", - " # createHist(Travel, cat=\"Travel\", color='y')\n", - " \n", - " \n", - "if __name__ == \"__main__\":\n", - " main()\n", - "\n", - "######################################\n", - "# This is useful: http://cs.smith.edu/dftwiki/index.php/MatPlotLib_Tutorial_1\n", - "#---------------------------------\n", - "# This is about color maps: http://matplotlib.org/examples/color/colormaps_reference.html\n", - "\n", - "# cmaps = [('Sequential', ['Blues', 'BuGn', 'BuPu',\n", - "# 'GnBu', 'Greens', 'Greys', 'Oranges', 'OrRd',\n", - "# 'PuBu', 'PuBuGn', 'PuRd', 'Purples', 'RdPu',\n", - "# 'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']),\n", - "# ('Sequential (2)', ['afmhot', 'autumn', 'bone', 'cool', 'copper',\n", - "# 'gist_heat', 'gray', 'hot', 'pink',\n", - "# 'spring', 'summer', 'winter']),\n", - "# ('Diverging', ['BrBG', 'bwr', 'coolwarm', 'PiYG', 'PRGn', 'PuOr',\n", - "# 'RdBu', 'RdGy', 'RdYlBu', 'RdYlGn', 'Spectral',\n", - "# 'seismic']),\n", - "# ('Qualitative', ['Accent', 'Dark2', 'Paired', 'Pastel1',\n", - "# 'Pastel2', 'Set1', 'Set2', 'Set3']),\n", - "# ('Miscellaneous', ['gist_earth', 'terrain', 'ocean', 'gist_stern',\n", - "# 'brg', 'CMRmap', 'cubehelix',\n", - "# 'gnuplot', 'gnuplot2', 'gist_ncar',\n", - "# 'nipy_spectral', 'jet', 'rainbow',\n", - "# 'gist_rainbow', 'hsv', 'flag', 'prism'])]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - 
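One maintenance note on the histogram helpers above: the normed=1 argument was current for the matplotlib of this era, but later releases deprecate it in favor of density=True. A minimal sketch of the same helper for newer matplotlib (the version cutoff is an assumption about your install, not part of the patch):

```python
# A minimal sketch for newer matplotlib (roughly 2.1+), where "density=True"
# replaces the deprecated "normed=1"; everything else matches createHist above.
import matplotlib.pyplot as plt

def create_hist(x, cat="Purchases and Payments", color="green"):
    plt.hist(x, bins=len(x) + 20, density=True, facecolor=color, alpha=0.75)
    plt.xlabel('Confidence')
    plt.title(cat)
    plt.grid(True)
    plt.show()
```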
"collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 78, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] } ], "metadata": { From f6fa49ec2af651deac225db4df1e0e56b5307f1a Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sat, 23 Jan 2016 22:25:13 -0500 Subject: [PATCH 03/36] Delete python_tutorial.ipynb --- python_tutorial.ipynb | 776 ------------------------------------------ 1 file changed, 776 deletions(-) delete mode 100644 python_tutorial.ipynb diff --git a/python_tutorial.ipynb b/python_tutorial.ipynb deleted file mode 100644 index c941d1c..0000000 --- a/python_tutorial.ipynb +++ /dev/null @@ -1,776 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "collapsed": false - }, - "source": [ - "# Quick Python Tutorial" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This tutorial should grow over time.\n", - "Python has a number of types. You need to be familiar with some of them as a start, then you will learn about more as you go. 
Let's quickly investigate some of these here:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#Integers and floats" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22\n", - "33.33\n", - "I am an integer/int: 22\n", - "I am a float: 33.33\n" - ] - } - ], - "source": [ - "#---------------------\n", - "# Integers and floats:\n", - "#---------------------\n", - "# You can use Python as a calculator; and when you do, you are interacting with numbers that may have \n", - "# \"int\" or \"float\" types. Let's print these, with a \"print\" statement.\n", - "print 22 # an integer\n", - "print 33.33 # a float\n", - "\n", - "# You can print more than one object with the same print statement if you use a \",\" (comma) in between\n", - "# (Hint: Both the integer 22 and the float 33.33 are 'objects' in the Python language.\n", - "# They are objects of type 'int' and type 'float,' respectively)\n", - "print \"I am an integer/int:\", 22 # an integer\n", - "print \"I am a float:\",33.33 # a float" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I am an int division: 0\n", - "I am a float division: 0.660066006601\n" - ] - } - ], - "source": [ - "# You can perform operations on ints and floats, \n", - "# but be cautious as to the difference between int division and float division\n", - "my_int= 22\n", - "my_new_int=33\n", - "my_float= 33.33\n", - "print \"I am an int division:\", my_int/my_new_int\n", - "print \"I am a float division:\", my_int/my_float\n", - "# (Hint: We assigned the numbers to some variables above; more about 'assignment' below.\n", - "# You can think of this as storing one value inside a named container: you put something in a box and \n", - "# now look at the box from outside. Another metaphor is that you gave each number a name\n", - "# and can now interact with the numbers using these names.)" - ] - },
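The int-division surprise in the cell above is Python 2 behavior; besides casting with float(), there are two standard escape hatches. A minimal sketch, not part of the original cells (still Python 2, like the rest of this notebook):

```python
# A minimal sketch: in Python 2, "/" on two ints floors the result (22/33 == 0).
from __future__ import division  # make "/" behave as in Python 3

print 22 / 33    # now true division: 0.666...
print 22 // 33   # "//" is explicit floor division in both Python 2 and 3: 0
```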
 - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "# Strings" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello world\n" - ] - } - ], - "source": [ - "#--------\n", - "# String:\n", - "#--------\n", - "# The string type is for characters like \"Hello world\". We can print this string:\n", - "print \"Hello world\"" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello world\n" - ] - } - ], - "source": [ - "# The above is called a print statement. We can assign the string to a variable\n", - "greeting = \"Hello world\"\n", - "# We call the word \"greeting\" a \"variable\" and the string \"Hello world\" a value. \n", - "# What we did is \"assign\" the value \"Hello world\" to the variable \"greeting\".\n", - "# The \"=\" is called an operator and we use it for \"assignment\". (This is important!)\n", - "# We can now print \"greeting\"\n", - "print greeting" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The type of the variable: <type 'str'>\n", - "The length of the value: 14\n" - ] - } - ], - "source": [ - "# For another example, we can assign another string value to another variable:\n", - "feeling=\"I love Python!\"\n", - "# Since you love Python, it loves you back and so gives you a number of \"built-in\" functions to work with. \n", - "# For more about these take a look here: https://docs.python.org/2/library/functions.html\n", - "# For example, the \"type()\" function tells us about the type of an object. \n", - "# Similarly, the \"len()\" function operates on some objects, like strings, \n", - "# and tells us about their length in characters:\n", - "print \"The type of the variable: \", type(feeling)\n", - "print \"The length of the value: \", len(feeling)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Lists" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I'm an empty list []\n", - "I'm list of string items ['Python', 'Lua', 'Java']\n", - "I'm list of numbers [44, 11, 55]\n", - "I'm list of string items and numbers ['Hello', 88, 4.0, 'Hey there!']\n" - ] - } - ], - "source": [ - "#--------\n", - "# List:\n", - "#--------\n", - "# A list is another Python data type where you can store and access your data with a lot of flexibility.\n", - "# The list is a square-bracketed, comma-separated sequence of items.\n", - "# So, to create a list, you use square brackets.\n", - "# This is an empty list:\n", - "my_first_list=[]\n", - "print \"I'm an empty list\", my_first_list\n", - "# Items in a list can be strings, or numbers, or a mixture\n", - "words=[\"Python\", \"Lua\", \"Java\"]\n", - "numbs= [44, 11, 55] \n", - "words_and_numbs= [\"Hello\", 88, 4.0, \"Hey there!\"]\n", - "print \"I'm list of string items\", words\n", - "print \"I'm list of numbers\", numbs\n", - "print \"I'm list of string items and numbers\", words_and_numbs\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "3\n", - "This is the first item in the list, and its index is zero Apple\n", - "This is the last item in the list Facebook\n" - ] - } - ], - "source": [ - "# Similar to a string, you can get the length of a list:\n", - "tech_comp=[\"Apple\", \"Google\", \"Facebook\"]\n", - "print len(tech_comp)\n", - "# You can also index into a list, using the brackets with an integer index.\n", - "# Notice: we start from index \"zero\".\n", - "print \"This is the first item in the list, and its index is zero\", tech_comp[0]\n", - "# You can also access a list from the end, with a minus index\n", - "print \"This is the last item in the list\", tech_comp[-1]" - ] - },
 - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "This is my first tuple: ('Tiger', 'Lion', 'Monkey')\n" - ] - } - ], - "source": [ - "# Tuples:\n", - "#--------\n", - "# A tuple is like a list, but its items are immutable/unchangeable.\n", - "# The syntax is different in that the tuple employs the parentheses \"()\"\n", - "my_animals_tuple=(\"Tiger\", \"Lion\", \"Monkey\")\n", - "print \"This is my first tuple: \", my_animals_tuple" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "My list before changes: ['Tiger', 'Lion', 'Monkey']\n", - "My list after changes: ['Goat', 'Lion', 'Monkey']\n" - ] - } - ], - "source": [ - "# So you can change an item in a list, but not in a tuple:\n", - "my_animals_list=[\"Tiger\", \"Lion\", \"Monkey\"]\n", - "print \"My list before changes: \", my_animals_list\n", - "my_animals_list[0]=\"Goat\"\n", - "print \"My list after changes: \", my_animals_list\n" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "This will give an error!\n" - ] - }, - { - "ename": "TypeError", - "evalue": "'tuple' object does not support item assignment", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Trying to change this will give an error:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmy_animals_tuple\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"Goat\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m: 'tuple' object does not support item assignment" - ] - } - ], - "source": [ - "# Trying to change this will give an error:\n", - "print \"This will give an error!\"\n", - "my_animals_tuple[0]=\"Goat\"" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "# Dictionaries" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Printing the 'students' dict:  {777: 'Mary', 1111: 'John'}\n", - "The length of the 'students' dict is:  2\n", - "The value of the key 777 in the 'students' dict is:  Mary\n" - ] - } - ], - "source": [ - "# A Python dictionary is a \"mapping\" type. We map a \"key\" to a \"value\".\n", - "# For example, we can map a \"student_id\" to the \"name\" of a student.\n", - "# The syntax is simple: we use the curly braces, and separate each key from its value with a \"colon\"\n", - "students={1111: \"John\", 777: \"Mary\"}\n", - "print \"Printing the 'students' dict: \", students\n", - "print \"The length of the 'students' dict is: \", len(students)\n", - "# This is how you access the value of the key 777\n", - "print \"The value of the key 777 in the 'students' dict is: \", students[777]" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The value of the key 'CS' in the 'students' dict is: John\n" - ] - } - ], - "source": [ - "# The keys don't have to be integers; they can be strings too. 
\n", - "# Lets have keys represent the school of a student:\n", - "students={\"CS\": \"John\", \"Business\": \"Mary\"}\n", - "print \"The value of the key 'CS' in the 'students' dict is: \", students['CS']" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The value of the key 'CS' in the 'students' dict is: ['John', 'Alex', 'Amanda']\n" - ] - } - ], - "source": [ - "# A value in a Python dict can be a string, a list, another dict, etc.\n", - "# So, if \"Alex\" and \"Amanda\" are also students in CS, then we can have the value for the key 'CS' as a list:\n", - "students={\"CS\": [\"John\", \"Alex\", \"Amanda\"] , \"Business\": \"Mary\"}\n", - "# And now when we print, we get all the students in CS as a full list:\n", - "print \"The value of the key 'CS' in the 'students' dict is: \", students['CS']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Python is very efficient with files and text processing. Let's see how we open and interact with a file\n", - "my_file=open(\"path_to_my_file\", \"r\").read() # Opens for reading and gets you the file content as a string\n", - "my_file=open(\"path_to_my_file\", \"r\").readlines() # Opens for reading and gets you the file content as a list\n", - "out_file=open(\"path_to_my_file\", \"w\") # Opens for writing\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Loops" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "apple \t--> is a fruit!\n", - "strawberry \t--> is a fruit!\n", - "grapes \t--> is a fruit!\n" - ] - } - ], - "source": [ - "# Loops\n", - "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", - "for fruit in fruits:\n", - " print fruit, \"\\t--> is a fruit!\"\n", - "# \"\\t\" is the tab characters. Also be aware of \"\\n\", \"\\r\", and \"\\r\\n\" and how these work across different platforms." 
 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Functions" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello Dane !\n", - "Hello Chris !\n", - "Hello Lubna !\n", - "Hello Nora !\n" - ] - } - ], - "source": [ - "def greet(name):\n", - " print \"Hello\", name, \"!\"\n", - " \n", - "# This is how you call the function:\n", - "greet(\"Dane\")\n", - "greet(\"Chris\")\n", - "greet(\"Lubna\")\n", - "greet(\"Nora\")" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'and': 4, 'learning.': 1, '(the': 1, 'family': 1, 'be': 1, 'other.': 1, 'experience,': 1, 'unknown.': 1, 'Artificial': 1, 'number': 1, 'numeric': 1, 'connections': 1, 'as': 1, 'brain)': 1, 'are': 4, 'learning': 1, 'in': 1, 'based': 1, 'tuned': 1, 'nets': 1, 'networks': 3, '(ANNs)': 1, 'functions': 1, 'depend': 1, 'capable': 1, 'nervous': 1, 'exchange': 1, 'generally': 2, 'approximate': 1, 'artificial': 1, 'machine': 1, 'to': 2, 'systems': 2, 'which': 1, 'between': 1, 'adaptive': 1, '\"neurons\"': 1, 'inputs': 2, 'used': 1, 'that': 2, 'models': 1, 'each': 1, 'animals,': 1, 'particular': 1, 'The': 1, 'estimate': 1, 'by': 1, 'a': 2, 'on': 2, 'central': 1, 'cognitive': 1, 'neural': 4, 'of': 5, 'inspired': 1, 'presented': 1, 'messages': 1, 'science,': 1, 'interconnected': 1, 'large': 1, 'weights': 1, 'can': 2, 'have': 1, 'In': 1, 'biological': 1, 'the': 1, 'or': 1, 'making': 1}\n" - ] - } - ], - "source": [ - "# A function can \"return\" an object.\n", - "# We provide an example here\n", - "\n", - "# text below is from https://en.wikipedia.org/wiki/Artificial_neural_network\n", - "# (Note the comma between the two strings below; without it, Python silently\n", - "# concatenates adjacent string literals into one string, which is why an\n", - "# earlier run produced the merged token 'unknown.Artificial'.)\n", - "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n", - " are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n", - " in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n", - " number of inputs and are generally unknown.\",\n", - " \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n", - " exchange messages between each other. 
The connections have numeric weights that can be tuned based \\\n", - " on experience, making neural nets adaptive to inputs and capable of learning.\"]\n", - "def get_dict(sentences):\n", - " \"\"\"\n", - " arguments:\n", - " input: @sentences: a list of sentences\n", - " returns: a dictionary of the words in the sentences.\n", - " dict key is a word and value is word frequency\n", - " \"\"\"\n", - " word_freq={}\n", - " for sent in sentences:\n", - " words=sent.split()\n", - " for w in words:\n", - " if w in word_freq:\n", - " word_freq[w]+=1\n", - " else:\n", - " word_freq[w]=1\n", - " return word_freq\n", - " \n", - " \n", - "my_word_freq_dict=get_dict(sentences)\n", - "print my_word_freq_dict" - ] - }, - { - "cell_type": "code", - "execution_count": 74, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Here're are your keys with values > 2:\n", - "****************************************\n", - "and 4\n", - "are 4\n", - "networks 3\n", - "neural 4\n", - "of 5\n", - "\n", - "Here're are your keys with values > 1 and keys of more than 5 chars:\n", - "**********************************************************************\n", - "networks 3\n", - "generally 2\n", - "systems 2\n", - "inputs 2\n", - "neural 4\n" - ] - } - ], - "source": [ - "# Here's the same function as above, but using python's \"defaultdict\"\n", - "from collections import defaultdict\n", - "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n", - " are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n", - " in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n", - " number of inputs and are generally unknown.\"\n", - " \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n", - " exchange messages between each other. 
The connections have numeric weights that can be tuned based \\\n", - " on experience, making neural nets adaptive to inputs and capable of learning.\"]\n", - "\n", - "def get_dict(sentences):\n", - " \"\"\"\n", - " arguments:\n", - " input: @sentences: a list of sentences\n", - " returns: a dictionary of the words in the sentences.\n", - " dict key is a word and value is word frequency\n", - " \"\"\"\n", - " word_freq=defaultdict(int)\n", - " for sent in sentences:\n", - " words=sent.split()\n", - " for w in words:\n", - " word_freq[w]+=1\n", - " return word_freq\n", - " \n", - "my_word_freq_dict=get_dict(sentences)\n", - "# Let's print only keys with values > 2 this time\n", - "print \"Here're are your keys with values > 2:\\n\", \"*\"*40\n", - "for k in my_word_freq_dict:\n", - " if my_word_freq_dict[k] > 2:\n", - " print k, my_word_freq_dict[k]\n", - "\n", - "# Let's print only keys whose length > 5 (so keys that have at least 6 characters/letters) and values > 1 \n", - "print \"\\nHere're are your keys with values > 1 and keys of more than 5 chars:\\n\", \"*\"*70\n", - "for k in my_word_freq_dict:\n", - " if my_word_freq_dict[k] > 1 and len(k) > 5:\n", - " print k, my_word_freq_dict[k]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Conditionals" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "This is such a short list!\n" - ] - } - ], - "source": [ - "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", - "if len(fruits) < 10:\n", - " print \"This is such a short list!\"" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I need an apple!\n" - ] - } - ], - "source": [ - "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", - "if \"apple\" not in fruits:\n", - " print \"No apples?!\"\n", - "else:\n", - " print \"I need an apple!\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# List Comprehension" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['s', 't', 'r', 'a', 'w', 'b', 'e', 'r', 'r', 'y']\n" - ] - } - ], - "source": [ - "dessert=\"strawberry\"\n", - "chars=[char for char in dessert]\n", - "print chars" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['s', 't', 'r', 'w', 'b', 'r', 'r']\n" - ] - } - ], - "source": [ - "# With a condition\n", - "dessert=\"strawberry\"\n", - "vowels=[\"a\", \"e\", \"y\"]\n", - "chars=[char for char in dessert if char not in vowels]\n", - "print chars" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 2", - "language": 
"python", - "name": "python2" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} From 0a833492c93b05ac3760db9ffe2464a19df60080 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sat, 23 Jan 2016 22:29:13 -0500 Subject: [PATCH 04/36] Update README.md --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 83b19dc..ab64afd 100644 --- a/README.md +++ b/README.md @@ -1 +1,6 @@ -# python_tutorial +# A Python Tutorial: +This is code I wrote for some courses I teach at Indiana University. +The code in this tutorial is meant for Python beginners, and is written primarily in Python 2.7. +A migration to Python 3 shoul be straightforward. +A lot of the code I write and run during class sessions and so it is shared without much polishing. +I provide some comments, before I push here, as much as I can. From 357b0dadbe60a6ff3f388d36f88b6f8de64f97d3 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sat, 23 Jan 2016 22:32:23 -0500 Subject: [PATCH 05/36] Update README.md --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ab64afd..83270f2 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ # A Python Tutorial: -This is code I wrote for some courses I teach at Indiana University. -The code in this tutorial is meant for Python beginners, and is written primarily in Python 2.7. +This is code I wrote for courses I teach at Indiana University. +The code in this tutorial is meant for Python beginners, although some of it will be useful for intermediate level learners. +In later parts, I should be able to push more advanced parts including ones that can be used for natural language processing and machine learning. + +The code is written primarily in Python 2.7. A migration to Python 3 shoul be straightforward. A lot of the code I write and run during class sessions and so it is shared without much polishing. I provide some comments, before I push here, as much as I can. 
From 837a9bd192af0bc40cecbe139a5b5b9dc1a478d1 Mon Sep 17 00:00:00 2001 From: mageed Date: Sat, 23 Jan 2016 23:17:44 -0500 Subject: [PATCH 06/36] update --- python_tutorial_part_4.ipynb | 402 +++++++++++++++++++++++++++++++++++ 1 file changed, 402 insertions(+) create mode 100644 python_tutorial_part_4.ipynb diff --git a/python_tutorial_part_4.ipynb b/python_tutorial_part_4.ipynb new file mode 100644 index 0000000..7ce55b3 --- /dev/null +++ b/python_tutorial_part_4.ipynb @@ -0,0 +1,402 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Numpy Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "a --> [2 3 4 5]\n", + "b --> [5 6 7 8]\n", + "a+b --> [ 7 9 11 13]\n" + ] + } + ], + "source": [ + "# Here's a video tutorial: https://www.youtube.com/watch?v=1zmV8lZsHF4\n", + "# Here's the matplot page: http://matplotlib.org/gallery.html#lines_bars_and_markers\n", + "from numpy import *\n", + "#from numpy import array\n", + "import numpy as np\n", + "a= array([2,3,4,5])\n", + "b=array((5,6,7,8))\n", + "print type(a)\n", + "print \"a -->\", a\n", + "print \"b -->\", b\n", + "print \"a+b -->\", a+b\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This will give an error!!!\n", + "a+c -->" + ] + }, + { + "ename": "ValueError", + "evalue": "operands could not be broadcast together with shapes (4,) (6,) ", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m9\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!!!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"a+c -->\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mValueError\u001b[0m: operands could not be broadcast together with shapes (4,) (6,) " + ] + } + ], + "source": [ + "# You can only add arrays of the same shape / equal length:\n", + "c=array([5,8,8,9,5,2])\n", + "print \"This will give an error!!!\"\n", + "print \"a+c -->\", a+c" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a+1 --> [3 4 5 6]\n" + ] + } + ], + "source": [ + "# broadcasting\n", + "# If you add an array to a scalar, the scalar gets broadcast across all the array elements\n", + "print \"a+1 -->\", a+1\n", + "# Now you can broadcast arrays and so you can add arrays of different shapes..." 
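+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# A small sketch of that array-to-array broadcasting (the values here are made up for illustration):\n",
+    "# a (2, 3) matrix plus a length-3 vector works, because numpy stretches the vector over each row.\n",
+    "m = np.array([[1, 2, 3], [4, 5, 6]])\n",
+    "row = np.array([10, 20, 30])\n",
+    "print \"m + row -->\\n\", m + row"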
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Printing array x: [[ 1. 2. 3. 4.]\n", + " [ 5. 6. 7. 8.]] \n", + "\n", + "\"Shape of array x is:\" (2, 4) \n", + "\n", + "\"Value at x[0][1] is:\" 2.0\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "x= np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.float32)\n", + "print \"Printing array x: \", x,\"\\n\"\n", + "print \"\\\"Shape of array x is:\\\" \", x.shape,\"\\n\"\n", + "print \"\\\"Value at x[0][1] is:\\\" \", x[0][1] # gives row0, c1 --> we start index from zero!" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 1 1 -2]\n" + ] + } + ], + "source": [ + "x=np.array([1, 3, 5, 6])\n", + "y=np.array([1,2,3,1])\n", + "d=y[1:]-y[:-1]\n", + "print d\n", + "# This runs in C, the loop happens in C, so it's fast.\n", + "# It doesn't matter what shape y is. So, it can be a very big array." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "14\n", + "[ 2 5 9 14]\n" + ] + } + ], + "source": [ + "print sum(a)\n", + "# cumsum adds every emelement to the previous element\n", + "print cumsum(a)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-------------------\n", + "[0 1 2]\n", + "[ 0. 1. 2.]\n", + "-------------------\n", + "[2 3 4 5 6]\n", + "-------------------\n", + "[2 4 6]\n", + "-------------------\n", + "[ 100. 215.443469 464.15888336 1000. ]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#numpy.arange: http://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html\n", + "\"\"\"\n", + "numpy.arange([start, ]stop, [step, ]dtype=None)\n", + " Return evenly spaced values within a given interval.\n", + " Values are generated within the half-open interval [start, stop) (in other words, the interval including\n", + " start but excluding stop). For integer arguments the function is equivalent to the Python built-in range\n", + " function, but returns an ndarray rather than a list.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.arange(3)\n", + "print np.arange(3.0)\n", + "print \"-------------------\"\n", + "print np.arange(2,7)\n", + "print \"-------------------\"\n", + "print np.arange(2,7, 2)\n", + "print \"-------------------\"" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " numpy.zeros\n", + "-------------------\n", + "[ 0. 0. 0. 0. 0.]\n", + "-------------------\n", + "[0 0 0 0 0 0 0 0 0 0]\n", + "-------------------\n", + "[[ 0.]\n", + " [ 0.]\n", + " [ 0.]]\n", + "-------------------\n", + "numpy.ones\n", + "-------------------\n", + "[ 1. 1. 1. 1. 1.]\n", + "-------------------\n", + "[ 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]\n", + "-------------------\n", + "[[ 1.]\n", + " [ 1.]\n", + " [ 1.]]\n", + "-------------------\n", + "numpy.identity\n", + "-------------------\n", + "[[ 1. 0. 0. 0. 0.]\n", + " [ 0. 1. 0. 0. 0.]\n", + " [ 0. 0. 1. 0. 0.]\n", + " [ 0. 0. 0. 1. 0.]\n", + " [ 0. 0. 0. 0. 
1.]]\n", + "-------------------\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#------------------\n", + "print \"numpy.zeros\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.zeros(shape, dtype=float, order='C')¶\n", + " Return a new array of given shape and type, filled with zeros.\n", + " \n", + "shape : int or sequence of ints\n", + " Shape of the new array, e.g., (2, 3) or 2.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.zeros(5)\n", + "print \"-------------------\"\n", + "print np.zeros((10,), dtype=np.int)\n", + "print \"-------------------\"\n", + "print np.zeros((3, 1))\n", + "print \"-------------------\"\n", + "#------------------\n", + "print \"numpy.ones\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.ones(shape, dtype=None, order='C')\n", + " Return a new array of given shape and type, filled with ones.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.ones(5)\n", + "print \"-------------------\"\n", + "print np.ones((10,), dtype=np.float128)\n", + "print \"-------------------\"\n", + "print np.ones((3, 1))\n", + "print \"-------------------\"\n", + "\n", + "#------------------\n", + "print \"numpy.identity\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.identity(n, dtype=None)\n", + " Return the identity array.\n", + " The identity array is a square array with ones on the main diagonal.\n", + "n : int\n", + " Number of rows (and columns) in n x n output.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.identity(5)\n", + "print \"-------------------\"\n" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " numpy.linspace\n", + "[ 2. 2.25 2.5 2.75 3. ]\n", + "-------------------\n", + "[ 2. 2.2 2.4 2.6 2.8]\n", + "-------------------\n", + "(array([ 2. , 2.25, 2.5 , 2.75, 3. ]), 0.25)\n", + "-------------------\n", + "\n", + "\n", + " numpy.logspace\n", + "---------------------------------------------------------\n", + "[ 100. 215.443469 464.15888336 1000. ]\n", + "---------------------------------------------------------\n", + "[ 4. 5.0396842 6.34960421 8. ]\n", + "---------------------------------------------------------\n", + "[ 4. 
4.75682846 5.65685425 6.72717132]\n", + "---------------------------------------------------------\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#------------------\n", + "print \"\\n numpy.linspace\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)[source]¶\n", + " Return evenly spaced numbers over a specified interval.\n", + " Returns num evenly spaced samples, calculated over the interval [start, stop].\n", + " The endpoint of the interval can optionally be excluded.\n", + " \n", + "retstep : bool, optional\n", + " If True, return (samples, step), where step is the spacing between samples.\n", + "\n", + "http://docs.scipy.org/doc/numpy-1.10.1/reference/generated/numpy.linspace.html#numpy.linspace\n", + "\"\"\"\n", + "print np.linspace(2.0, 3.0, num=5)\n", + "print \"-------------------\"\n", + "print np.linspace(2.0, 3.0, num=5, endpoint=False)\n", + "print \"-------------------\"\n", + "print np.linspace(2.0, 3.0, num=5, retstep=True)\n", + "print \"-------------------\\n\"\n", + "#------------------\n", + "print \"\\n numpy.logspace\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None)\n", + " Return numbers spaced evenly on a log scale.\n", + " In linear space, the sequence starts at base ** start (base to the power of start) \n", + " and ends with base ** stop (see endpoint below).\n", + "\"\"\"\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, num=4)\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, base=2.0, num=4)\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, base=2.0, num=4, endpoint=False)\n", + "print \"-------------------\"*3" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 3cd3f78440adf50af1dea0435fae46a1ad900805 Mon Sep 17 00:00:00 2001 From: mageed Date: Sat, 23 Jan 2016 23:21:58 -0500 Subject: [PATCH 07/36] update --- python_tutorial_part_4.ipynb | 2 -- 1 file changed, 2 deletions(-) diff --git a/python_tutorial_part_4.ipynb b/python_tutorial_part_4.ipynb index 7ce55b3..ad848ce 100644 --- a/python_tutorial_part_4.ipynb +++ b/python_tutorial_part_4.ipynb @@ -28,8 +28,6 @@ } ], "source": [ - "# Here's a video tutorial: https://www.youtube.com/watch?v=1zmV8lZsHF4\n", - "# Here's the matplot page: http://matplotlib.org/gallery.html#lines_bars_and_markers\n", "from numpy import *\n", "#from numpy import array\n", "import numpy as np\n", From 06a8f5b5ad7d5da21ddb63bf40684d25430cd2ac Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 24 Jan 2016 17:08:18 -0500 Subject: [PATCH 08/36] update --- python_tutorial_part_5.ipynb | 801 +++++++++++++++++++++++++++++++++++ 1 file changed, 801 insertions(+) create mode 100644 python_tutorial_part_5.ipynb diff --git a/python_tutorial_part_5.ipynb b/python_tutorial_part_5.ipynb new file mode 100644 index 0000000..6eb48a3 --- /dev/null +++ b/python_tutorial_part_5.ipynb @@ -0,0 +1,801 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Gensim Tutorial" + ] + }, + { + 
"cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "10\n" + ] + } + ], + "source": [ + "# Corpora and Vector Spaces: https://radimrehurek.com/gensim/tut1.html\n", + "#----------------------------------------------------------------------\n", + "import logging\n", + "logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)\n", + "from gensim import corpora, models, similarities\n", + "# Let's create a small corpus where each document is one sentence as in the gensim tutorial from the link above.\n", + "# Let's take text from Wikipedia article on deep learning: https://en.wikipedia.org/wiki/Deep_learning\n", + "# We have 10 documents (each doc is a sentence).\n", + "documents = [\n", + " \"Deep learning (deep structured learning, hierarchical learning or deep machine learning)\\\n", + " is a branch of machine learning based on a set of algorithms that attempt to model \\\n", + " high-level abstractions in data by using multiple processing layers with complex \\\n", + " structures, or otherwise composed of multiple non-linear transformations.[1][2][3][4][5][6]\",\n", + " \"Deep learning is part of a broader family of machine learning methods based on learning representations of data.\",\n", + " \"An observation (e.g., an image) can be represented in many ways such as a vector of intensity values per pixel,\\\n", + " or in a more abstract way as a set of edges, regions of particular shape, etc. Some representations make it \\\n", + " easier to learn tasks (e.g., face recognition or facial expression recognition[7]) \\\n", + " from examples. One of the promises of deep learning is replacing handcrafted features \\\n", + " with efficient algorithms for unsupervised or semi-supervised feature learning and hierarchical \\\n", + " feature extraction.[8]\",\n", + " \"Deep learning is part of a broader family of machine learning methods based on learning \\\n", + " representations of data.\",\n", + " \"An observation (e.g., an image) can be represented in many ways such as a vector of intensity\\\n", + " values per pixel, or in a more abstract way as a set of edges, regions of particular shape, etc.\",\n", + " \"Some representations make it easier to learn tasks (e.g., face recognition or facial expression recognition[7]) from examples.\",\n", + " \"One of the promises of deep learning is replacing handcrafted features with efficient algorithms for unsupervised or semi-supervised \\\n", + " feature learning and hierarchical feature extraction.[8]\",\n", + " \"Research in this area attempts to make better representations and create models to learn these representations\\\n", + " from large-scale unlabeled data.\",\n", + " \"Some of the representations are inspired by advances in neuroscience and are loosely based on interpretation of information processing\\\n", + " and communication patterns in a nervous system, such as neural coding which attempts to define a relationship between various stimuli \\\n", + " and associated neuronal responses in the brain.[9]\",\n", + " \"Various deep learning architectures such as deep neural networks, convolutional deep neural networks, \\\n", + " deep belief networks and recurrent neural networks have been applied to fields like computer vision, automatic\\\n", + " speech recognition, natural language processing, audio recognition and bioinformatics where they have been shown to produce state-of-the-art\\\n", + " 
results on various tasks.\"]\n", + "\n", + "print len(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'i', u'me', u'my', u'myself', u'we', u'our', u'ours', u'ourselves', u'you', u'your', u'yours', u'yourself', u'yourselves', u'he', u'him', u'his', u'himself', u'she', u'her', u'hers', u'herself', u'it', u'its', u'itself', u'they', u'them', u'their', u'theirs', u'themselves', u'what', u'which', u'who', u'whom', u'this', u'that', u'these', u'those', u'am', u'is', u'are', u'was', u'were', u'be', u'been', u'being', u'have', u'has', u'had', u'having', u'do', u'does', u'did', u'doing', u'a', u'an', u'the', u'and', u'but', u'if', u'or', u'because', u'as', u'until', u'while', u'of', u'at', u'by', u'for', u'with', u'about', u'against', u'between', u'into', u'through', u'during', u'before', u'after', u'above', u'below', u'to', u'from', u'up', u'down', u'in', u'out', u'on', u'off', u'over', u'under', u'again', u'further', u'then', u'once', u'here', u'there', u'when', u'where', u'why', u'how', u'all', u'any', u'both', u'each', u'few', u'more', u'most', u'other', u'some', u'such', u'no', u'nor', u'not', u'only', u'own', u'same', u'so', u'than', u'too', u'very', u's', u't', u'can', u'will', u'just', u'don', u'should', u'now']\n" + ] + } + ], + "source": [ + "# Let's remove common words like \"a\" \"the\", etc. in English.\n", + "# These are called stop words and we can use nltk for a list of these in English\n", + "import nltk\n", + "from nltk.corpus import stopwords\n", + "stopwords= stopwords.words('english')\n", + "print stopwords" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[['deep', 'learning', 'deep', 'structured', 'learning', 'hierarchical', 'learning', 'deep', 'machine', 'learning', 'branch', 'machine', 'learning', 'based', 'set', 'algorithms', 'attempt', 'model', 'high', 'level', 'abstractions', 'data', 'using', 'multiple', 'processing', 'layers', 'complex', 'structures', 'otherwise', 'composed', 'multiple', 'non', 'linear', 'transformations', '1', '2', '3', '4', '5', '6']]\n" + ] + } + ], + "source": [ + "#We will need to lowercase text (some times we shouldn't do that naively if we care for things like\\\n", + "#named entities (which start with uppercase)')\n", + "# NLTK has a number of tokenization options here: http://www.nltk.org/api/nltk.tokenize.html\n", + "# Especially note that NLTK also supports Twitter tokenization, which will be useful for us\n", + "# Look at this line from the link above:\n", + "# from nltk.tokenize import TweetTokenizer\n", + "#-----------------------------------------\n", + "from nltk.tokenize import RegexpTokenizer\n", + "tokenizer = RegexpTokenizer(r'\\w+')\n", + "texts= [[w for w in tokenizer.tokenize(document.lower()) if w not in stopwords] for document in documents]\n", + "print texts[:1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "defaultdict(, {'interpretation': 1, 
'results': 1, 'brain': 1, 'attempts': 2, 'broader': 2, 'networks': 4, 'layers': 1, 'machine': 4, 'based': 4, 'nervous': 1, 'state': 1, 'better': 1, '4': 1, '8': 2, 'pixel': 2, 'non': 1, 'advances': 1, 'facial': 2, 'using': 1, 'like': 1, 'semi': 2, 'level': 1, 'fields': 1, 'loosely': 1, 'shape': 2, 'large': 1, 'vector': 2, 'neuronal': 1, 'automatic': 1, 'vision': 1, 'set': 3, 'art': 1, 'methods': 2, 'intensity': 2, 'computer': 1, 'examples': 2, 'recognition': 6, 'responses': 1, 'shown': 1, 'scale': 1, 'ways': 2, 'per': 2, 'research': 1, 'replacing': 2, '3': 1, 'various': 3, '7': 2, 'linear': 1, 'processing': 3, 'represented': 2, 'g': 4, 'many': 2, 'inspired': 1, 'abstractions': 1, 'etc': 2, 'produce': 1, 'supervised': 2, 'expression': 2, 'otherwise': 1, 'composed': 1, 'tasks': 3, 'features': 2, 'family': 2, 'communication': 1, 'image': 2, 'coding': 1, 'natural': 1, 'one': 2, 'learning': 16, 'neuroscience': 1, 'transformations': 1, 'area': 1, 'create': 1, 'structured': 1, 'system': 1, 'extraction': 2, '2': 1, 'way': 2, '6': 1, 'structures': 1, 'define': 1, 'convolutional': 1, 'relationship': 1, 'hierarchical': 3, 'particular': 2, 'e': 4, 'applied': 1, 'language': 1, 'neural': 4, 'easier': 2, 'regions': 2, 'values': 2, 'learn': 3, 'promises': 2, 'associated': 1, 'abstract': 2, 'speech': 1, 'deep': 11, 'high': 1, 'information': 1, 'efficient': 2, 'make': 3, 'recurrent': 1, 'feature': 4, '1': 1, 'belief': 1, 'complex': 1, '5': 1, 'branch': 1, '9': 1, 'handcrafted': 2, 'multiple': 2, 'unlabeled': 1, 'models': 1, 'edges': 2, 'architectures': 1, 'bioinformatics': 1, 'representations': 7, 'data': 4, 'attempt': 1, 'observation': 2, 'unsupervised': 2, 'stimuli': 1, 'face': 2, 'patterns': 1, 'part': 2, 'algorithms': 3, 'model': 1, 'audio': 1})\n" + ] + } + ], + "source": [ + "from collections import defaultdict\n", + "word_freq=defaultdict(int)\n", + "from itertools import groupby\n", + "for text in texts:\n", + " for w in text:\n", + " word_freq[w]+=1\n", + "print word_freq" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('1', 1), ('2', 1), ('3', 1), ('4', 1), ('5', 1), ('6', 1), ('7', 2), ('8', 2), ('9', 1), ('abstract', 2), ('abstractions', 1), ('advances', 1), ('algorithms', 3), ('applied', 1), ('architectures', 1), ('area', 1), ('art', 1), ('associated', 1), ('attempt', 1), ('attempts', 2), ('audio', 1), ('automatic', 1), ('based', 4), ('belief', 1), ('better', 1), ('bioinformatics', 1), ('brain', 1), ('branch', 1), ('broader', 2), ('coding', 1), ('communication', 1), ('complex', 1), ('composed', 1), ('computer', 1), ('convolutional', 1), ('create', 1), ('data', 4), ('deep', 11), ('define', 1), ('e', 4), ('easier', 2), ('edges', 2), ('efficient', 2), ('etc', 2), ('examples', 2), ('expression', 2), ('extraction', 2), ('face', 2), ('facial', 2), ('family', 2), ('feature', 4), ('features', 2), ('fields', 1), ('g', 4), ('handcrafted', 2), ('hierarchical', 3), ('high', 1), ('image', 2), ('information', 1), ('inspired', 1), ('intensity', 2), ('interpretation', 1), ('language', 1), ('large', 1), ('layers', 1), ('learn', 3), ('learning', 16), ('level', 1), ('like', 1), ('linear', 1), ('loosely', 1), ('machine', 4), ('make', 3), ('many', 2), ('methods', 2), ('model', 1), ('models', 1), ('multiple', 2), ('natural', 1), ('nervous', 1), ('networks', 4), ('neural', 4), ('neuronal', 1), ('neuroscience', 1), ('non', 1), ('observation', 2), ('one', 2), 
('otherwise', 1), ('part', 2), ('particular', 2), ('patterns', 1), ('per', 2), ('pixel', 2), ('processing', 3), ('produce', 1), ('promises', 2), ('recognition', 6), ('recurrent', 1), ('regions', 2), ('relationship', 1), ('replacing', 2), ('representations', 7), ('represented', 2), ('research', 1), ('responses', 1), ('results', 1), ('scale', 1), ('semi', 2), ('set', 3), ('shape', 2), ('shown', 1), ('speech', 1), ('state', 1), ('stimuli', 1), ('structured', 1), ('structures', 1), ('supervised', 2), ('system', 1), ('tasks', 3), ('transformations', 1), ('unlabeled', 1), ('unsupervised', 2), ('using', 1), ('values', 2), ('various', 3), ('vector', 2), ('vision', 1), ('way', 2), ('ways', 2)])\n" + ] + } + ], + "source": [ + "# Side note: OrderedDict in Python\n", + "# Take a look at the documentation of the Python collections module: \n", + "# https://docs.python.org/2/library/collections.html\n", + "from collections import OrderedDict\n", + "# dictionary sorted by key\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: t[0]))" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('learning', 16), ('deep', 11), ('representations', 7), ('recognition', 6), ('networks', 4), ('machine', 4), ('based', 4), ('g', 4), ('e', 4), ('neural', 4), ('feature', 4), ('data', 4), ('set', 3), ('various', 3), ('processing', 3), ('tasks', 3), ('hierarchical', 3), ('learn', 3), ('make', 3), ('algorithms', 3), ('attempts', 2), ('broader', 2), ('8', 2), ('pixel', 2), ('facial', 2), ('semi', 2), ('shape', 2), ('vector', 2), ('methods', 2), ('intensity', 2), ('examples', 2), ('ways', 2), ('per', 2), ('replacing', 2), ('7', 2), ('represented', 2), ('many', 2), ('etc', 2), ('supervised', 2), ('expression', 2), ('features', 2), ('family', 2), ('image', 2), ('one', 2), ('extraction', 2), ('way', 2), ('particular', 2), ('easier', 2), ('regions', 2), ('values', 2), ('promises', 2), ('abstract', 2), ('efficient', 2), ('handcrafted', 2), ('multiple', 2), ('edges', 2), ('observation', 2), ('unsupervised', 2), ('face', 2), ('part', 2), ('interpretation', 1), ('results', 1), ('brain', 1), ('layers', 1), ('nervous', 1), ('state', 1), ('better', 1), ('4', 1), ('non', 1), ('advances', 1), ('using', 1), ('like', 1), ('level', 1), ('fields', 1), ('loosely', 1), ('large', 1), ('neuronal', 1), ('automatic', 1), ('vision', 1), ('art', 1), ('computer', 1), ('responses', 1), ('shown', 1), ('scale', 1), ('research', 1), ('3', 1), ('linear', 1), ('inspired', 1), ('abstractions', 1), ('produce', 1), ('otherwise', 1), ('composed', 1), ('communication', 1), ('coding', 1), ('natural', 1), ('neuroscience', 1), ('transformations', 1), ('area', 1), ('create', 1), ('structured', 1), ('system', 1), ('2', 1), ('6', 1), ('structures', 1), ('define', 1), ('convolutional', 1), ('relationship', 1), ('applied', 1), ('language', 1), ('associated', 1), ('speech', 1), ('high', 1), ('information', 1), ('recurrent', 1), ('1', 1), ('belief', 1), ('complex', 1), ('5', 1), ('branch', 1), ('9', 1), ('unlabeled', 1), ('models', 1), ('architectures', 1), ('bioinformatics', 1), ('attempt', 1), ('stimuli', 1), ('patterns', 1), ('model', 1), ('audio', 1)])\n" + ] + } + ], + "source": [ + "# dictionary sorted by value, in reverse order\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: t[1], reverse=True))" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false + 
}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('transformations', 1), ('representations', 7), ('interpretation', 1), ('bioinformatics', 1), ('communication', 1), ('convolutional', 1), ('architectures', 1), ('abstractions', 1), ('neuroscience', 1), ('relationship', 1), ('hierarchical', 3), ('unsupervised', 2), ('recognition', 6), ('represented', 2), ('information', 1), ('handcrafted', 2), ('observation', 2), ('processing', 3), ('supervised', 2), ('expression', 2), ('structured', 1), ('extraction', 2), ('structures', 1), ('particular', 2), ('associated', 1), ('algorithms', 3), ('automatic', 1), ('intensity', 2), ('responses', 1), ('replacing', 2), ('otherwise', 1), ('efficient', 2), ('recurrent', 1), ('unlabeled', 1), ('attempts', 2), ('networks', 4), ('advances', 1), ('neuronal', 1), ('computer', 1), ('examples', 2), ('research', 1), ('inspired', 1), ('composed', 1), ('features', 2), ('learning', 16), ('language', 1), ('promises', 2), ('abstract', 2), ('multiple', 2), ('patterns', 1), ('results', 1), ('broader', 2), ('machine', 4), ('nervous', 1), ('loosely', 1), ('methods', 2), ('various', 3), ('produce', 1), ('natural', 1), ('applied', 1), ('regions', 2), ('feature', 4), ('complex', 1), ('attempt', 1), ('stimuli', 1), ('layers', 1), ('better', 1), ('facial', 2), ('fields', 1), ('vector', 2), ('vision', 1), ('linear', 1), ('family', 2), ('coding', 1), ('create', 1), ('system', 1), ('define', 1), ('neural', 4), ('easier', 2), ('values', 2), ('speech', 1), ('belief', 1), ('branch', 1), ('models', 1), ('brain', 1), ('based', 4), ('state', 1), ('pixel', 2), ('using', 1), ('level', 1), ('shape', 2), ('large', 1), ('shown', 1), ('scale', 1), ('tasks', 3), ('image', 2), ('learn', 3), ('edges', 2), ('model', 1), ('audio', 1), ('like', 1), ('semi', 2), ('ways', 2), ('many', 2), ('area', 1), ('deep', 11), ('high', 1), ('make', 3), ('data', 4), ('face', 2), ('part', 2), ('non', 1), ('set', 3), ('art', 1), ('per', 2), ('etc', 2), ('one', 2), ('way', 2), ('4', 1), ('8', 2), ('3', 1), ('7', 2), ('g', 4), ('2', 1), ('6', 1), ('e', 4), ('1', 1), ('5', 1), ('9', 1)])\n" + ] + } + ], + "source": [ + "# dictionary sorted by length of the key string, in reverse order (So you get longer keys first)\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: len(t[0]), reverse=True))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[set(['algorithms',\n", + " 'based',\n", + " 'data',\n", + " 'deep',\n", + " 'hierarchical',\n", + " 'learning',\n", + " 'machine',\n", + " 'multiple',\n", + " 'processing',\n", + " 'set'])]\n" + ] + } + ], + "source": [ + "# Let's remove words of freq < 2 and keep only unique words, using a set\n", + "texts = [set([w for w in text if word_freq[w] > 1]) for text in texts]\n", + "from pprint import pprint\n", + "pprint(texts[:1])" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dictionary(60 unique tokens: [u'promises', u'set', u'features', u'family', u'image']...)\n" + ] + } + ], + "source": [ + "# Let's represent each document as a bag-of-words, where each word is assigned a unique integer id\\\n", + "dictionary = corpora.Dictionary(texts)\n", + "print dictionary\n", + "# You can save this dictionary to desk for future reference, using gensim:\n", + "# 
dictionary.save('/tmp/word_freq.dict') # " + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{u'promises': 15, u'set': 0, u'features': 17, u'family': 12, u'image': 18, u'deep': 3, u'one': 19, u'shape': 20, u'tasks': 16, u'examples': 22, u'broader': 13, u'networks': 59, u'recognition': 23, u'methods': 14, u'regions': 28, u'based': 1, u'etc': 49, u'efficient': 21, u'make': 25, u'feature': 26, u'per': 27, u'machine': 4, u'extraction': 29, u'vector': 51, u'various': 57, u'supervised': 52, u'7': 30, u'8': 32, u'abstract': 34, u'handcrafted': 35, u'attempts': 56, u'multiple': 8, u'way': 36, u'replacing': 37, u'processing': 2, u'g': 44, u'hierarchical': 5, u'facial': 39, u'particular': 40, u'represented': 41, u'representations': 10, u'data': 9, u'values': 46, u'e': 42, u'observation': 43, u'semi': 31, u'unsupervised': 45, u'many': 33, u'edges': 38, u'neural': 58, u'intensity': 47, u'face': 48, u'ways': 24, u'easier': 50, u'part': 11, u'algorithms': 6, u'learning': 7, u'learn': 53, u'expression': 54, u'pixel': 55}\n" + ] + } + ], + "source": [ + "# You can get each word and its token id:\n", + "print(dictionary.token2id)" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(3, 2), (7, 2)]\n" + ] + } + ], + "source": [ + "# Let's add a new document and get a sparse vector of it using gensim's \"doc2bow\" \\\n", + "# dictionary attribute:\n", + "new_doc= \"Deep learning? I like deep learning a lot.\"\n", + "tokenized_and_split_doc =tokenizer.tokenize(new_doc.lower())\n", + "new_vec = dictionary.doc2bow(tokenized_and_split_doc) \n", + "# Only the words deep (id 3) and learning (id 7)\n", + "# occur in our previous dictionary, and each of these occur twice in this new document\n", + "print new_vec" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# What does the sparse vector [(3, 2), (7, 2)] mean?\n", + "# Well, all it means is that it has two words, \"deep\" and \"learning\", ids 3 and 7, respectively\n", + "# and that each of them occurs twice in this new_vec vector. This should be clear to you by now." 
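+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# A small sketch (assumes \"dictionary\" and \"new_vec\" from the cells above are still in memory):\n",
+    "# map each (token_id, count) pair in the sparse vector back to a readable (word, count) pair.\n",
+    "readable=[(dictionary[token_id], count) for (token_id, count) in new_vec]\n",
+    "print readable # e.g., [(u'deep', 2), (u'learning', 2)]"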
+ ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[(0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1)], [(1, 1), (3, 1), (4, 1), (7, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1)], [(0, 1), (3, 1), (5, 1), (6, 1), (7, 1), (10, 1), (15, 1), (16, 1), (17, 1), (18, 1), (19, 1), (20, 1), (21, 1), (22, 1), (23, 1), (24, 1), (25, 1), (26, 1), (27, 1), (28, 1), (29, 1), (30, 1), (31, 1), (32, 1), (33, 1), (34, 1), (35, 1), (36, 1), (37, 1), (38, 1), (39, 1), (40, 1), (41, 1), (42, 1), (43, 1), (44, 1), (45, 1), (46, 1), (47, 1), (48, 1), (49, 1), (50, 1), (51, 1), (52, 1), (53, 1), (54, 1), (55, 1)], [(1, 1), (3, 1), (4, 1), (7, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1)], [(0, 1), (18, 1), (20, 1), (24, 1), (27, 1), (28, 1), (33, 1), (34, 1), (36, 1), (38, 1), (40, 1), (41, 1), (42, 1), (43, 1), (44, 1), (46, 1), (47, 1), (49, 1), (51, 1), (55, 1)], [(10, 1), (16, 1), (22, 1), (23, 1), (25, 1), (30, 1), (39, 1), (42, 1), (44, 1), (48, 1), (50, 1), (53, 1), (54, 1)], [(3, 1), (5, 1), (6, 1), (7, 1), (15, 1), (17, 1), (19, 1), (21, 1), (26, 1), (29, 1), (31, 1), (32, 1), (35, 1), (37, 1), (45, 1), (52, 1)], [(9, 1), (10, 1), (25, 1), (53, 1), (56, 1)], [(1, 1), (2, 1), (10, 1), (56, 1), (57, 1), (58, 1)], [(2, 1), (3, 1), (7, 1), (16, 1), (23, 1), (57, 1), (58, 1), (59, 1)]]\n" + ] + } + ], + "source": [ + "# Then you get a sparse vector representation for each document.\n", + "# Remember, each word is represented as an integer and the code \n", + "corpus = [dictionary.doc2bow(text) for text in texts]\n", + "print corpus" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is how you save the dict to desk for later use, using gensim:\n", + "# corpora.MmCorpus.serialize('/tmp/dictionary.mm', corpus) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + 
}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + 
"collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "#corpus = [[(0, 1.0), (1, 1.0), (2, 1.0)],\n", + " [(2, 1.0), (3, 1.0), (4, 1.0), (5, 1.0), (6, 1.0), (8, 1.0)],\n", + " [(1, 1.0), (3, 1.0), (4, 1.0), (7, 1.0)],\n", + " [(0, 1.0), (4, 2.0), (7, 1.0)],\n", + " [(3, 1.0), (5, 1.0), (6, 1.0)],\n", + " [(9, 1.0)],\n", + " [(9, 1.0), (10, 1.0)],\n", + " [(9, 1.0), (10, 1.0), (11, 1.0)],\n", + " [(8, 1.0), (10, 1.0), (11, 1.0)]]\n", + "tfidf = models.TfidfModel(corpus)\n", + "print tfidf\n", + "\n", + "vec = [(0, 1), (4, 1)]\n", + "print(tfidf[vec])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# To be continued" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From e90266b9104621852d4729745d4853b89aaca104 Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 24 Jan 2016 17:10:29 -0500 Subject: [PATCH 09/36] update --- python_tutorial_part_5.ipynb | 411 ----------------------------------- 1 file changed, 411 deletions(-) diff --git a/python_tutorial_part_5.ipynb b/python_tutorial_part_5.ipynb index 6eb48a3..42b118b 100644 --- a/python_tutorial_part_5.ipynb +++ b/python_tutorial_part_5.ipynb @@ -65,15 +65,6 @@ "print len(documents)" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": 14, @@ -98,15 +89,6 @@ "print stopwords" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": 23, @@ -372,399 +354,6 @@ "# corpora.MmCorpus.serialize('/tmp/dictionary.mm', corpus) " ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": 
[] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - 
"source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "#corpus = [[(0, 1.0), (1, 1.0), (2, 1.0)],\n", - " [(2, 1.0), (3, 1.0), (4, 1.0), (5, 1.0), (6, 1.0), (8, 1.0)],\n", - " [(1, 1.0), (3, 1.0), (4, 1.0), (7, 1.0)],\n", - " [(0, 1.0), (4, 2.0), (7, 1.0)],\n", - " [(3, 1.0), (5, 1.0), (6, 1.0)],\n", - " [(9, 1.0)],\n", - " [(9, 1.0), (10, 1.0)],\n", - " [(9, 1.0), (10, 1.0), (11, 1.0)],\n", - " [(8, 1.0), (10, 1.0), (11, 1.0)]]\n", - "tfidf = models.TfidfModel(corpus)\n", - "print tfidf\n", - "\n", - "vec = [(0, 1), (4, 1)]\n", - "print(tfidf[vec])" - ] - }, { "cell_type": "code", "execution_count": null, From 7b0122059aeb8d31eab901ac9f1dec66d1ad04ed Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 24 Jan 2016 18:13:24 -0500 Subject: [PATCH 10/36] update --- python_tutorial_part_6.ipynb | 59 ++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 python_tutorial_part_6.ipynb diff --git a/python_tutorial_part_6.ipynb b/python_tutorial_part_6.ipynb new file mode 100644 index 0000000..a16403b --- /dev/null +++ b/python_tutorial_part_6.ipynb @@ -0,0 +1,59 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Scikit-learn Tutorial" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Coming up!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 895fd513044598d7af1c20f63e13a4ea2f0a4f97 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sun, 24 Jan 2016 18:36:27 -0500 Subject: [PATCH 11/36] Update README.md --- README.md | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 83270f2..a97e86f 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,16 @@ # A Python Tutorial: This is code I wrote for courses I teach at Indiana University. -The code in this tutorial is meant for Python beginners, although some of it will be useful for intermediate level learners. 
-In later parts, I should be able to push more advanced parts including ones that can be used for natural language processing and machine learning.
+The first parts of the code in this tutorial are meant for Python beginners, and it grows more advanced in the later parts.
+
+I plan to include sections covering the Natural Language Toolkit (NLTK), gensim, scikit-learn, visualization, numpy, etc.
+
+In addition, I plan to add more advanced code covering practical machine learning topics, such as using vector space models for tasks like sentiment analysis.
+
+Finally, I also plan to introduce some deep learning tools and provide some relevant code.
+
+The courses teach skills at the intersection of fields like natural language processing, machine learning, social media mining, text mining, and data science.
 The code is written primarily in Python 2.7. A migration to Python 3 should be straightforward.
-A lot of the code I write and run during class sessions and so it is shared without much polishing.
+Some of the code is written and run during class sessions and so it is shared without much polishing.
+I add as many comments as I can before pushing here.

From 39ed5ddcd601ce04429c49cd19af39617f0da963 Mon Sep 17 00:00:00 2001
From: mageed
Date: Sun, 24 Jan 2016 22:54:34 -0500
Subject: [PATCH 12/36] update

---
 python_tutorial_part_6.ipynb | 412 ++++++++++++++++++++++++++++++++++-
 1 file changed, 409 insertions(+), 3 deletions(-)

diff --git a/python_tutorial_part_6.ipynb b/python_tutorial_part_6.ipynb
index a16403b..88592d0 100644
--- a/python_tutorial_part_6.ipynb
+++ b/python_tutorial_part_6.ipynb
@@ -6,14 +6,20 @@
    "collapsed": true
   },
   "source": [
-   "# Scikit-learn Tutorial"
+   "# A Vector Space Model, with scikit-learn"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "Coming up!"
+   "This code will be cleaned further and more notes will be added.\n",
+   "The purpose is to build a vector space model for multi-class text classification.\n",
+   "We use scikit-learn, but build our own code to vectorize the data.\n",
+   "The example is emotion classification with the six basic emotion types proposed by Paul Ekman: Anger, Fear, Happiness, Sadness, Disgust, and Surprise. Other theories posit other emotion types, but the purpose here is to show how to build a vector space model, not to settle what the emotion inventory should be.\n",
+   "\n",
+   "There are a number of things I will change in the code, including the names of some functions.\n",
+   "For example, the function with the string \"OneHotVectors\" in its name is a misnomer. A lot of the code was written and run in a couple of class sessions, to teach these ideas interactively, so expect some rough edges."
+  ]
+ },
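Before the full pipeline below, here is a tiny standalone sketch of the core idea, a binary bag-of-words ("one per word present") document matrix; the two mini-documents and labels are invented for illustration:

    # Build the feature space (word -> column index), then vectorize each document.
    import numpy as np

    docs = [("HAPPINESS", "i love this movie"),
            ("SADNESS", "i hate this weather")]
    space = {}
    for _, text in docs:
        for w in text.split():
            if w not in space:
                space[w] = len(space)
    X = np.zeros((len(docs), len(space)))
    for row, (_, text) in enumerate(docs):
        for w in text.split():
            X[row, space[w]] = 1.0   # mark presence of the word
    print space
    print X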
30, 2015\"\n", + "__author__ = \"Muhammad Abdul-Mageed\"\n", + "####################################\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "n_jobs = 25\n", + "\n", + "def getListOfLines():\n", + " \"\"\"\n", + " Just takes a file and returns a list of its line\n", + " \"\"\"\n", + " # Change path to file\n", + " return codecs.open(\"PathToFile\", \"r\", \"utf-8\").readlines()\n", + " \n", + "def getThreeColumnFormat():\n", + " \"\"\"\n", + " \"\"\"\n", + " infileObject=codecs.open(\"PathToFile\", \"r\", \"utf-8\")\n", + " listOfLines= infileObject.readlines() \n", + " dataTuples=[(line.split(\"\\t\")[1], line.split(\"\\t\")[2].lower()) for line in listOfLines if line.split(\"\\t\")[1] !=\"NO-EMOTION\"]\n", + " return dataTuples\n", + "#####################################\n", + "\n", + "def tagInSecondHalf(tag, tweet):\n", + " \"\"\"\n", + " Conditioning position of tag in tweet.\n", + " P.S. Won't consider a tag like #happyday.\n", + " \"\"\"\n", + " tags= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\"] #\"#scared\"\n", + " tweet=tweet.split()\n", + " if tag not in tweet:\n", + " return False\n", + " midPoint=(len(tweet)/2)\n", + " tagIndex=tweet.index(tag)\n", + " if tagIndex > midPoint:\n", + " return True\n", + " return False\n", + "\n", + "def tagInLastThird(tag, tweet):\n", + " \"\"\"\n", + " Conditioning position of tag in tweet.\n", + " P.S. 
+    "def tagInLastThird(tag, tweet):\n",
+    "    \"\"\"\n",
+    "    True if the tag occurs late in the tweet.\n",
+    "    (Note: thirdPoint is len(tweet)/4, so tagIndex > thirdPoint*3 actually tests\n",
+    "    the last quarter; use len(tweet)/3 and 2*thirdPoint for a true last third.)\n",
+    "    P.S. Won't consider a tag like #happyday.\n",
+    "    \"\"\"\n",
+    "    tweet=tweet.split()\n",
+    "    if tag not in tweet:\n",
+    "        return False\n",
+    "    thirdPoint=(len(tweet)/4)\n",
+    "    tagIndex=tweet.index(tag)\n",
+    "    if tagIndex > thirdPoint*3:\n",
+    "        return True\n",
+    "    return False\n",
+    "\n",
+    "def pure(tag, tweet):\n",
+    "    tagList= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\", \"#scared\"]\n",
+    "    tagList.remove(tag)\n",
+    "    for t in tagList:\n",
+    "        if t in tweet: \n",
+    "            return False\n",
+    "    return True\n",
+    "\n",
+    "def removeSeed(seed, tweet):\n",
+    "    \"\"\"\n",
+    "    Removes the seed hashtag(s) from the tweet and normalizes whitespace.\n",
+    "    \"\"\"\n",
+    "    if type(seed)==str:\n",
+    "        tweet= re.sub(seed, \" \", tweet)\n",
+    "    elif type(seed)==list:\n",
+    "        for t in seed:\n",
+    "            tweet= re.sub(t, \" \", tweet)\n",
+    "    else:\n",
+    "        print type(seed)\n",
+    "        print \"arg1/seed must be a string or list, you provided \", type(seed), \".\"\n",
+    "        exit()\n",
+    "    # clean\n",
+    "    tweet=re.sub(\"\\s+\", \" \", tweet)\n",
+    "    # strip leading/trailing whitespace (Python strings have no trim(); use rstrip/lstrip)\n",
+    "    tweet=tweet.rstrip()\n",
+    "    tweet=tweet.lstrip()\n",
+    "    return tweet\n",
+    "\n",
+    "def clean(tweet):\n",
+    "    \"\"\"\n",
+    "    Replaces literal periods with spaces. The dot is escaped because an\n",
+    "    unescaped \".\" in a regex matches any character.\n",
+    "    \"\"\"\n",
+    "    tweet= re.sub(\"\\.\", \" \", tweet)\n",
+    "    return tweet\n",
+    "\n",
+    "def longTweet(tweet):\n",
+    "    \"\"\"\n",
+    "    True if the tweet has more than 10 words.\n",
+    "    \"\"\"\n",
+    "    if len(tweet.split()) > 10:\n",
+    "        return True\n",
+    "    return False\n",
+    "    \n",
+    "#----------------------------------------------\n",
+    "def getDataDict(emotionLines):\n",
+    "    shuffle(emotionLines)\n",
+    "    #emotionLines=emotionLines[:10000]\n",
+    "    tagLexicon= [\"happy\", \"sad\", \"disgusted\", \"fearful\" , \"surprised\", \"angry\", \"scared\"] #\"#scared\"\n",
+    "    tagDict= {\"happy\": \"HAPPINESS\", \"sad\": \"SADNESS\", \"disgusted\": \"DISGUST\", \"fearful\": \"FEAR\" , \"surprised\": \"SURPRISE\", \"angry\": \"ANGER\", \"scared\": \"FEAR\"} #\"#scared\"\n",
+    "    myData={}\n",
+    "    for cat in tagLexicon:\n",
+    "        tag=\"#\"+cat\n",
+    "        myData[tagDict[cat]]=[tweet for tweet in emotionLines if tag in tweet.split() and pure(tag, tweet)\n",
+    "                              and tagInSecondHalf(tag, tweet) and len(tweet.split()) > 4\n",
+    "                              and removeSeed(tag, tweet) and clean(tweet) and longTweet(tweet)]\n",
+    "    return myData\n",
+    "\n",
+    "def getThreeColumnDataDict(emotionLines):\n",
+    "    shuffle(emotionLines)\n",
+    "    #emotionLines=emotionLines[:10000]\n",
+    "    classes= [\"HAPPINESS\", \"SADNESS\", \"DISGUST\", \"FEAR\" , \"SURPRISE\", \"ANGER\"]\n",
+    "    myData={pair[0]: [] for pair in emotionLines}\n",
+    "    for cat in classes:\n",
+    "        for pair in emotionLines:\n",
+    "            if pair[0]==cat:\n",
+    "                myData[pair[0]].append(pair[1])\n",
+    "    return myData\n",
+    "\n",
+    "def getDataStats(myData):\n",
+    "    # Print some stats:\n",
+    "    ##########################\n",
+    "    majorClass=max([len(myData[k]) for k in myData])\n",
+    "    totalCount=sum([len(myData[k]) for k in myData])\n",
+    "    print \"Majority class count: \", majorClass\n",
+    "    print \"Total data point count: \", totalCount\n",
+    "    print \"Majority class % in train data: \", round((majorClass/float(totalCount))*100, 2), \"%\"\n",
+    "    print \"*\"*50, \"\\n\"\n",
+    "\n",
+    "def getLabeledDataTuples(myData):\n",
+    "    # At this point \"myData\" is a dict, with each emotion class as a key, and related tweet lines as a list of lines\n",
+    "    ###############################################################\n",
+    "    # The below gets me tweet body only (and filters out the rest of each tweet line [e.g., tweetId])\n",
+    "    # dataTuples will be a list of tuples, each tuple has 0 as an emotion class and 1 as the string/unicode of the 
tweet body\n", + " dataTuples=[(k, \"\".join(myData[k][i]).split(\"\\t\")[-1]) for k in myData for i in range(len(myData[k]))]\n", + " #shuffle(dataTuples)\n", + " #######################################################################\n", + " # See it: \n", + " #print \"The type of newData[0][0] is a: \", type(newData[0][0]), newData[0][0] # --> newData[0] is a string\n", + " #print \"The type of newData[0][1] is a: \", type(newData[0][1]), newData[0][1] # --> newData[1] is a unicode of tweet body\n", + " #######################################################################\n", + " return dataTuples\n", + " \n", + "def getFeatures(dataPoint):\n", + " features=defaultdict()\n", + " # label is class name, of course, and feats is just a list of words in this case.\n", + " label, feats=dataPoint[0], dataPoint[1].split()\n", + " # I could also add some code to remove the seeds from the feature dict instead of the heavy computation in\n", + " # the tweet cleaning in removeSeed\n", + " ###########################################\n", + " # Beautify the below, building \"has(word): True/False\" dict\n", + " for i in feats:\n", + " features[i]=i\n", + " if \"#fearful\" in features:\n", + " del features[\"#fearful\"]\n", + " if \"#scared\" in features:\n", + " del features[\"#scared\"]\n", + " return features, label\n", + "\n", + "#featuresets=[getFeatures(i) for i in newData]\n", + "\n", + "def getLabelsAndVectors(dataTuples):\n", + " \"\"\" \n", + " Input:\n", + " dataTuples is a list of tuples\n", + " Each tuple in the list has\n", + " 0=label\n", + " 1= tweet body as unicode/string\n", + " Returns an array of labels and another array for words \n", + " \"\"\"\n", + " labels=[]\n", + " vectors=[]\n", + " ids=[]\n", + " c=0\n", + " for dataPoint in dataTuples:\n", + " ids.append(c)\n", + " c+=1\n", + " label, vector=dataPoint[0], dataPoint[1].split()\n", + " labels.append(label)\n", + " vectors.append(vector)\n", + "\n", + " return ids, labels, vectors\n", + "\n", + "def getSpace(vectors):\n", + " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", + " space={}\n", + " for dataPoint in vectors:\n", + " words=dataPoint\n", + " for w in words:\n", + " if w not in space:\n", + " space[w]=len(space)\n", + " return space\n", + "\n", + "def augmentSpace(space, featuresList):\n", + " \"\"\"\n", + " Adds a list of features to the bag-of-words dictionary, we named \"space\".\n", + " \"\"\"\n", + " for f in featuresList:\n", + " if f not in space:\n", + " space[f]=len(space) \n", + " return space\n", + "\n", + "def getReducedSpace(vectors, space):\n", + " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", + " reducedSpace=defaultdict(int)\n", + " for dataPoint in vectors:\n", + " words=dataPoint\n", + " for w in words:\n", + " reducedSpace[w]+=1\n", + " for w in space:\n", + " # could parameterize with the threshold, instead of the following\n", + " if reducedSpace[w] < 3:\n", + " del reducedSpace[w]\n", + " reducedSpace={w: reducedSpace[w] for w in reducedSpace}\n", + " return reducedSpace\n", + "\n", + "\n", + "#-------------------------------------------------\n", + "def getOneHotVectors(ids, labels, vectors, space):\n", + " oneHotVectors={}\n", + " triples=zip(ids, labels, vectors)\n", + " vec = np.zeros((len(space)))\n", + " #for dataPoint in vectors:\n", + " for triple in triples:\n", + " idd, label, dataPoint= triple[0], triple[1], triple[2]\n", + " #for t in 
xrange(len(space)):\n",
+    "        # populate a one-dimensional array of zeros of length len(space)\n",
+    "        vec=np.zeros((len(space)))\n",
+    "        for w in dataPoint:\n",
+    "            try:\n",
+    "                vec[space[w]]=1\n",
+    "            except:\n",
+    "                continue\n",
+    "        # add emotion lexicon features\n",
+    "        # NOTE: addEmotionLexiconFeatures is not defined anywhere in this notebook,\n",
+    "        # so the call is commented out here to keep the code runnable\n",
+    "        #vec=addEmotionLexiconFeatures(vec, dataPoint, space)\n",
+    "        oneHotVectors[idd]=(vec, array(label))\n",
+    "    return oneHotVectors\n",
+    "\n",
+    "def getOneHotVectorsAndLabels(oneHotVectorsDict):\n",
+    "    vectors= array([oneHotVectorsDict[k][0] for k in oneHotVectorsDict])\n",
+    "    labels= array([oneHotVectorsDict[k][1] for k in oneHotVectorsDict])\n",
+    "    print \"labels.shape\", labels.shape \n",
+    "    print \"vectors.shape\", vectors.shape \n",
+    "    return vectors, labels\n",
+    "###############################\n",
+    "# try:\n",
+    "#     vectors.shape[0]\n",
+    "# except:\n",
+    "#     vectors=zeros(len(vectors))\n",
+    "\n",
+    "# Do grid search\n",
+    "#######################################\n",
+    "def SVM_gridSearch(trainVectors, trainLabels, kernel):\n",
+    "    C_range = 10.0 ** arange(-2, 2)\n",
+    "    gamma_range = 10.0 ** arange(-2, 2)\n",
+    "    param_grid = dict(gamma=gamma_range, C=C_range)\n",
+    "    cv = StratifiedKFold(y=trainLabels, n_folds=2)\n",
+    "    grid = GridSearchCV(SVC(kernel=kernel), param_grid=param_grid, cv=cv, n_jobs=n_jobs) #GridSearchCV(SVC(kernel=kernel, class_weight='auto')\n",
+    "    grid.fit(trainVectors, trainLabels)\n",
+    "    ##################################\n",
+    "    ## Estimated best parameters\n",
+    "    C = grid.best_estimator_.C\n",
+    "    gamma = grid.best_estimator_.gamma\n",
+    "    ##################################\n",
+    "    return C, gamma\n",
+    "#######################################\n",
+    "\n",
+    "def getCAndGamma(trainVectors, trainLabels, kernel = 'rbf'):\n",
+    "    C, gamma = SVM_gridSearch(trainVectors, trainLabels, kernel)\n",
+    "    print C\n",
+    "    print gamma\n",
+    "    return C, gamma\n",
+    "\n",
+    "def isRetweet(tweet):\n",
+    "    if tweet.lower().split()[0] ==\"re\":\n",
+    "        return True\n",
+    "    return False\n",
+    "\n",
+    "\n",
+    "\n",
+    "emotionFeatures=[\"hasAngerWord\", \"hasDisgustWord\", \"hasFearWord\", \"hasHappinessWord\", \"hasSadnessWord\", \"hasSurpriseWord\"]\n",
+    "\n",
+    "def main():\n",
+    "    #######################################\n",
+    "    # Saima Aman emotion blog data\n",
+    "    dataTuples=getThreeColumnFormat()\n",
+    "    print \"Length of dataTuples is: \", len(dataTuples)\n",
+    "    #shuffle(dataTuples)\n",
+    "    print \"First dataTuple: \", dataTuples[0]\n",
+    "    trainTuples=dataTuples#[:1000]\n",
+    "    #testTuples=saimaDataTuples[1000:]\n",
+    "\n",
+    "#    #######################################\n",
+    "    myData=getThreeColumnDataDict(dataTuples)\n",
+    "    totalCount=sum([len(myData[k]) for k in myData])\n",
+    "    print totalCount\n",
+    "#    del trainLines\n",
+    "#    print\"*\"*50\n",
+    "    getDataStats(myData)\n",
+    "#    dataTuples=getLabeledDataTuples(myData)\n",
+    "#    ####################################\n",
+    "#    # Add first 1000 Saima tuples\n",
+    "#    #dataTuples=dataTuples+saimaDataTuples[:1000]\n",
+    "#    print dataTuples[0]\n",
+    "#    del myData\n",
+    "    ids, labels, vectors= getLabelsAndVectors(trainTuples)\n",
+    "    space=getSpace(vectors)\n",
+    "    print \"Total # of features in your space is: \", len(space)\n",
+    "    # augment space with emotion features...\n",
+    "    space= augmentSpace(space, emotionFeatures)\n",
+    "    #reducedSpace=getReducedSpace(vectors, space)\n",
+    "    print \"Total # of features in your augmented space is: \", len(space)\n",
+    "    #print \"Total # of 
features in your reducedSpace is: \", len(reducedSpace)\n", + " oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", + " vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", + " del oneHotVectors\n", + " trainVectors = vectors\n", + " trainLabels = labels\n", + " del vectors\n", + " del labels\n", + " #C, gamma = getCAndGamma(trainVectors, trainLabels, kernel = 'rbf')\n", + " # Train classifier\n", + " #clf = OneVsOneClassifier(SVC(C=C, kernel=kernel, class_weight='auto', gamma=gamma, verbose= True, probability=True))\n", + " clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + " clf.fit(trainVectors, trainLabels)\n", + " print \"\\nDone fitting classifier on training data...\\n\"\n", + " #del trainVectors\n", + " #del trainLabels\n", + "# dataTuples=getSAIMAThreeColumnFormat()\n", + "# print \"Length of dataTuples is: \", len(dataTuples)\n", + "# shuffle(dataTuples)\n", + "# print \"saimaDataTuples\", dataTuples[0]\n", + "# ids, labels, vectors= getLabelsAndVectors(testTuples)\n", + "# oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", + "# vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", + "# del oneHotVectors\n", + "# testVectors = vectors\n", + "# testLabels = labels\n", + "# predicted_testLabels = clf.predict(testVectors)\n", + " #------------------------------------------------------------------------------------------\n", + " print \"=\"*50, \"\\n\"\n", + " print \"Results with 5-fold cross validation:\\n\"\n", + " print \"=\"*50, \"\\n\"\n", + " #------------------------------------------------------------------------------------------\n", + " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=5)\n", + " print \"*\"*20\n", + " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", + " print \"*\"*20\n", + " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", + " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", + " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", + " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", + " \n", + " #\"------------------------------------------------------------------------------------------\n", + " print \"=\"*50, \"\\n\"\n", + " print \"Results with 10-fold cross validation:\\n\"\n", + " print \"=\"*50, \"\\n\"\n", + " #------------------------------------------------------------------------------------------\n", + " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=10)\n", + " print \"*\"*20\n", + " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", + " print \"*\"*20\n", + " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", + " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", + " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", + " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", + " \n", + " #------------------------------------------------------------------------------------------\n", + " # Take a look at the metrics module at: http://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics\n", + " #------------------------------------------------------------------------------------------\n", + "\n", + "if __name__ == 
\"__main__\":\n", + " print \"Hello!!\"\n", + " main()" + ] }, { "cell_type": "code", From 74d05bf3b64fcf138fd614f6ccbefe9d5d151c51 Mon Sep 17 00:00:00 2001 From: mageed Date: Tue, 26 Jan 2016 19:01:52 -0500 Subject: [PATCH 13/36] update --- python_tutorial_part_7.ipynb | 160 +++++++++++++++++++++++++++++++++++ 1 file changed, 160 insertions(+) create mode 100644 python_tutorial_part_7.ipynb diff --git a/python_tutorial_part_7.ipynb b/python_tutorial_part_7.ipynb new file mode 100644 index 0000000..3890771 --- /dev/null +++ b/python_tutorial_part_7.ipynb @@ -0,0 +1,160 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Python's collections module" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Documentation: https://docs.python.org/2/library/collections.html\n", + "# Per documentation, \"this module implements specialized container datatypes\\\n", + "# providing alternatives to Python’s general purpose built-in containers, dict, list, set, and tuple\"." + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'like', 'apple'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling'])\n", + "DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code'])\n", + "DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late'])\n", + "DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired'])\n", + "DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])\n" + ] + } + ], + "source": [ + "# namedtuple(): factory function for creating tuple subclasses with named fields\n", + "# Named tuples assign a name to each position in a tuple, thus enabling accessing\n", + "# fields by name instead of position index.\n", + "#-----------------------------------------------\n", + "# namedtuple(typename, field_names[, verbose=False][, rename=False])\n", + "# Returns a new tuple subclass named typename. \n", + "# The new subclass is used to create tuple-like objects that have fields accessible \n", + "# by attribute lookup as well as being indexable and iterable. \n", + "\n", + "from collections import namedtuple\n", + "# We create a named tuple with two fields, tags and words.\n", + "# tags will be a string\n", + "# words will be a list of words\n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "# we create a list and each item in the list will be a namedtuple element with the two fields \"tags\" and \"words\"\n", + "my_data=[]\n", + "# We have a list of document. Each document has a single sentence. \n", + "# The first word in each sentence/document is a tag from the set {POSITIVE, NEGATIVE}, so a sentiment analysis task. \n", + "documents = [\"POSITIVE I love pizza\", \"POSITIVE I like Apple\", \"POSITIVE I enjoy hiking\",\\\n", + " \"POSITIVE I am passionate about traveling\", \"POSITIVE We had fun writing this code\",\\\n", + " \"NEGATIVE I don't like to stay up late\", \"NEGATIVE I am tired\", \"NEGATIVE He feels sick\"]\n", + "\n", + "# Now we loop over the documents and populate the list of allsent, which is basically our container for the \n", + "# instances and their labels. 
From each document/sentence, we get the tag and the list of words\n", + "for line_no, doc in enumerate(documents):\n", + " label=doc.split()[0]\n", + " word_list=doc.lower().split()[1:]\n", + " my_data.append(DataDoc(label, word_list))\n", + " print my_data[line_no]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza']), DataDoc(tag='POSITIVE', words=['i', 'like', 'apple']), DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking']), DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling']), DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code']), DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late']), DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired']), DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])]\n" + ] + } + ], + "source": [ + "print my_data" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "POSITIVE\n" + ] + } + ], + "source": [ + "# Now you can access the tag of each instance\n", + "print my_data[0].tag" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['i', 'love', 'pizza']\n" + ] + } + ], + "source": [ + "# You can also access the instance word list itself\n", + "print my_data[0].words" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 55644c62755b50b5289bdeb478b6bb9435ff2499 Mon Sep 17 00:00:00 2001 From: mageed Date: Fri, 29 Jan 2016 16:03:42 -0500 Subject: [PATCH 14/36] resuflling, updating --- .../numpyTutorial-checkpoint.ipynb | 1057 +++ .../python_tutorial_part_1-checkpoint.ipynb | 0 .../python_tutorial_part_2-checkpoint.ipynb | 1039 +++ .../python_tutorial_part_3-checkpoint.ipynb | 170 + ...t_3_rule_based_classifier-checkpoint.ipynb | 342 + .../python_tutorial_part_4-checkpoint.ipynb | 1463 ++++ .../python_tutorial_part_5-checkpoint.ipynb | 159 + .../python_tutorial_part_6-checkpoint.ipynb | 236 +- .../python_tutorial_part_7-checkpoint.ipynb | 0 .../python_tutorial_part_8-checkpoint.ipynb | 388 + .../python_tutorial_part_9-checkpoint.ipynb | 278 + hamlet.txt | 6496 +++++++++++++++++ python_tutorial_part_1_intro.ipynb | 1378 ++++ ...ipynb => python_tutorial_part_2_nltk.ipynb | 57 +- ...utorial_part_3_rule_based_classifier.ipynb | 343 + ...pynb => python_tutorial_part_4_numpy.ipynb | 0 python_tutorial_part_5_gensim.ipynb | 549 ++ python_tutorial_part_6_vector_space.ipynb | 836 +++ ...n_tutorial_part_8_collections_module.ipynb | 388 + 19 files changed, 15176 insertions(+), 3 deletions(-) create mode 100644 .ipynb_checkpoints/numpyTutorial-checkpoint.ipynb rename python_tutorial_part_1.ipynb => .ipynb_checkpoints/python_tutorial_part_1-checkpoint.ipynb (100%) create mode 100644 .ipynb_checkpoints/python_tutorial_part_2-checkpoint.ipynb create mode 100644 
.ipynb_checkpoints/python_tutorial_part_3-checkpoint.ipynb create mode 100644 .ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb create mode 100644 .ipynb_checkpoints/python_tutorial_part_4-checkpoint.ipynb rename python_tutorial_part_5.ipynb => .ipynb_checkpoints/python_tutorial_part_5-checkpoint.ipynb (84%) rename python_tutorial_part_6.ipynb => .ipynb_checkpoints/python_tutorial_part_6-checkpoint.ipynb (70%) rename python_tutorial_part_7.ipynb => .ipynb_checkpoints/python_tutorial_part_7-checkpoint.ipynb (100%) create mode 100644 .ipynb_checkpoints/python_tutorial_part_8-checkpoint.ipynb create mode 100644 .ipynb_checkpoints/python_tutorial_part_9-checkpoint.ipynb create mode 100644 hamlet.txt create mode 100644 python_tutorial_part_1_intro.ipynb rename python_tutorial_part_2.ipynb => python_tutorial_part_2_nltk.ipynb (97%) create mode 100644 python_tutorial_part_3_rule_based_classifier.ipynb rename python_tutorial_part_4.ipynb => python_tutorial_part_4_numpy.ipynb (100%) create mode 100644 python_tutorial_part_5_gensim.ipynb create mode 100644 python_tutorial_part_6_vector_space.ipynb create mode 100644 python_tutorial_part_8_collections_module.ipynb diff --git a/.ipynb_checkpoints/numpyTutorial-checkpoint.ipynb b/.ipynb_checkpoints/numpyTutorial-checkpoint.ipynb new file mode 100644 index 0000000..b97c1bd --- /dev/null +++ b/.ipynb_checkpoints/numpyTutorial-checkpoint.ipynb @@ -0,0 +1,1057 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Numpy Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "a --> [2 3 4 5]\n", + "b --> [5 6 7 8]\n", + "a+b --> [ 7 9 11 13]\n" + ] + } + ], + "source": [ + "# Here's a video tutorial: https://www.youtube.com/watch?v=1zmV8lZsHF4\n", + "# Here's the matplot page: http://matplotlib.org/gallery.html#lines_bars_and_markers\n", + "from numpy import *\n", + "#from numpy import array\n", + "import numpy as np\n", + "a= array([2,3,4,5])\n", + "b=array((5,6,7,8))\n", + "print type(a)\n", + "print \"a -->\", a\n", + "print \"b -->\", b\n", + "print \"a+b -->\", a+b\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "ValueError", + "evalue": "operands could not be broadcast together with shapes (4,) (6,) ", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m9\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mprint\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + 
"\u001b[0;31mValueError\u001b[0m: operands could not be broadcast together with shapes (4,) (6,) " + ] + } + ], + "source": [ + "# You can only add arrays of the same shape / equal length:\n", + "c=array([5,8,8,9,5,2])\n", + "print \"a+c -->\", a+c" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a+1 --> [3 4 5 6]\n" + ] + } + ], + "source": [ + "# broadcasting\n", + "# If you add an array to a scalar, the scalar gets broadcast across all the array elements\n", + "print \"a+1 -->\", a+1\n", + "# Now you can broadcast arrays and so you can add arrays of different shapes..." + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-------------------\n", + "[0 1 2]\n", + "[ 0. 1. 2.]\n", + "-------------------\n", + "[2 3 4 5 6]\n", + "-------------------\n", + "[2 4 6]\n", + "-------------------\n", + "[ 100. 215.443469 464.15888336 1000. ]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#numpy.arange: http://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html\n", + "\"\"\"\n", + "numpy.arange([start, ]stop, [step, ]dtype=None)\n", + " Return evenly spaced values within a given interval.\n", + " Values are generated within the half-open interval [start, stop) (in other words, the interval including\n", + " start but excluding stop). For integer arguments the function is equivalent to the Python built-in range\n", + " function, but returns an ndarray rather than a list.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.arange(3)\n", + "print np.arange(3.0)\n", + "print \"-------------------\"\n", + "print np.arange(2,7)\n", + "print \"-------------------\"\n", + "print np.arange(2,7, 2)\n", + "print \"-------------------\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "numpy.zero\n", + "-------------------\n", + "[ 0. 0. 0. 0. 0.]\n", + "-------------------\n", + "[0 0 0 0 0 0 0 0 0 0]\n", + "-------------------\n", + "[[ 0.]\n", + " [ 0.]\n", + " [ 0.]]\n", + "-------------------\n", + "numpy.ones\n", + "-------------------\n", + "[ 1. 1. 1. 1. 1.]\n", + "-------------------\n", + "[ 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]\n", + "-------------------\n", + "[[ 1.]\n", + " [ 1.]\n", + " [ 1.]]\n", + "-------------------\n", + "numpy.identity\n", + "-------------------\n", + "[[ 1. 0. 0. 0. 0.]\n", + " [ 0. 1. 0. 0. 0.]\n", + " [ 0. 0. 1. 0. 0.]\n", + " [ 0. 0. 0. 1. 0.]\n", + " [ 0. 0. 0. 0. 
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "-------------------\n",
+      "[0 1 2]\n",
+      "[ 0.  1.  2.]\n",
+      "-------------------\n",
+      "[2 3 4 5 6]\n",
+      "-------------------\n",
+      "[2 4 6]\n",
+      "-------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "#numpy.arange: http://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html\n",
+    "\"\"\"\n",
+    "numpy.arange([start, ]stop, [step, ]dtype=None)\n",
+    "    Return evenly spaced values within a given interval.\n",
+    "    Values are generated within the half-open interval [start, stop) (in other words, the interval including\n",
+    "    start but excluding stop). For integer arguments the function is equivalent to the Python built-in range\n",
+    "    function, but returns an ndarray rather than a list.\n",
+    "\"\"\"\n",
+    "print \"-------------------\"\n",
+    "print np.arange(3)\n",
+    "print np.arange(3.0)\n",
+    "print \"-------------------\"\n",
+    "print np.arange(2,7)\n",
+    "print \"-------------------\"\n",
+    "print np.arange(2,7, 2)\n",
+    "print \"-------------------\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "numpy.zero\n",
+      "-------------------\n",
+      "[ 0.  0.  0.  0.  0.]\n",
+      "-------------------\n",
+      "[0 0 0 0 0 0 0 0 0 0]\n",
+      "-------------------\n",
+      "[[ 0.]\n",
+      " [ 0.]\n",
+      " [ 0.]]\n",
+      "-------------------\n",
+      "numpy.ones\n",
+      "-------------------\n",
+      "[ 1.  1.  1.  1.  1.]\n",
+      "-------------------\n",
+      "[ 1.0  1.0  1.0  1.0  1.0  1.0  1.0  1.0  1.0  1.0]\n",
+      "-------------------\n",
+      "[[ 1.]\n",
+      " [ 1.]\n",
+      " [ 1.]]\n",
+      "-------------------\n",
+      "numpy.identity\n",
+      "-------------------\n",
+      "[[ 1.  0.  0.  0.  0.]\n",
+      " [ 0.  1.  0.  0.  0.]\n",
+      " [ 0.  0.  1.  0.  0.]\n",
+      " [ 0.  0.  0.  1.  0.]\n",
+      " [ 0.  0.  0.  0.  1.]]\n",
+      "-------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "#------------------\n",
+    "print \"numpy.zero\"\n",
+    "#------------------\n",
+    "\"\"\"\n",
+    "    numpy.zeros(shape, dtype=float, order='C')\n",
+    "    Return a new array of given shape and type, filled with zeros.\n",
+    "    \n",
+    "shape : int or sequence of ints\n",
+    "    Shape of the new array, e.g., (2, 3) or 2.\n",
+    "\"\"\"\n",
+    "print \"-------------------\"\n",
+    "print np.zeros(5)\n",
+    "print \"-------------------\"\n",
+    "print np.zeros((10,), dtype=np.int)\n",
+    "print \"-------------------\"\n",
+    "print np.zeros((3, 1))\n",
+    "print \"-------------------\"\n",
+    "#------------------\n",
+    "print \"numpy.ones\"\n",
+    "#------------------\n",
+    "\"\"\"\n",
+    "    numpy.ones(shape, dtype=None, order='C')\n",
+    "    Return a new array of given shape and type, filled with ones.\n",
+    "\"\"\"\n",
+    "print \"-------------------\"\n",
+    "print np.ones(5)\n",
+    "print \"-------------------\"\n",
+    "print np.ones((10,), dtype=np.float128)\n",
+    "print \"-------------------\"\n",
+    "print np.ones((3, 1))\n",
+    "print \"-------------------\"\n",
+    "\n",
+    "#------------------\n",
+    "print \"numpy.identity\"\n",
+    "#------------------\n",
+    "\"\"\"\n",
+    "    numpy.identity(n, dtype=None)\n",
+    "    Return the identity array.\n",
+    "    The identity array is a square array with ones on the main diagonal.\n",
+    "n : int\n",
+    "    Number of rows (and columns) in n x n output.\n",
+    "\"\"\"\n",
+    "print \"-------------------\"\n",
+    "print np.identity(5)\n",
+    "print \"-------------------\""
+   ]
+  },
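A quick aside (my example): the defining property of the identity matrix is that multiplying by it leaves a vector unchanged.

    import numpy as np
    I = np.identity(3)
    v = np.array([1.0, 2.0, 3.0])
    print I.dot(v)   # -> [ 1.  2.  3.]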
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 52,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      " numpy.linspace\n",
+      "[ 2.    2.25  2.5   2.75  3.  ]\n",
+      "-------------------\n",
+      "[ 2.   2.2  2.4  2.6  2.8]\n",
+      "-------------------\n",
+      "(array([ 2.  ,  2.25,  2.5 ,  2.75,  3.  ]), 0.25)\n",
+      "-------------------\n",
+      "\n",
+      "\n",
+      " numpy.logspace\n",
+      "---------------------------------------------------------\n",
+      "[  100.           215.443469     464.15888336  1000.        ]\n",
+      "---------------------------------------------------------\n",
+      "[ 4.          5.0396842   6.34960421  8.        ]\n",
+      "---------------------------------------------------------\n",
+      "[ 4.          4.75682846  5.65685425  6.72717132]\n",
+      "---------------------------------------------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "#------------------\n",
+    "print \"\\n numpy.linspace\"\n",
+    "#------------------\n",
+    "\"\"\"\n",
+    "    numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)\n",
+    "    Return evenly spaced numbers over a specified interval.\n",
+    "    Returns num evenly spaced samples, calculated over the interval [start, stop].\n",
+    "    The endpoint of the interval can optionally be excluded.\n",
+    "    \n",
+    "retstep : bool, optional\n",
+    "    If True, return (samples, step), where step is the spacing between samples.\n",
+    "\n",
+    "http://docs.scipy.org/doc/numpy-1.10.1/reference/generated/numpy.linspace.html#numpy.linspace\n",
+    "\"\"\"\n",
+    "print np.linspace(2.0, 3.0, num=5)\n",
+    "print \"-------------------\"\n",
+    "print np.linspace(2.0, 3.0, num=5, endpoint=False)\n",
+    "print \"-------------------\"\n",
+    "print np.linspace(2.0, 3.0, num=5, retstep=True)\n",
+    "print \"-------------------\\n\"\n",
+    "#------------------\n",
+    "print \"\\n numpy.logspace\"\n",
+    "#------------------\n",
+    "\"\"\"\n",
+    "    numpy.logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None)\n",
+    "    Return numbers spaced evenly on a log scale.\n",
+    "    In linear space, the sequence starts at base ** start (base to the power of start) \n",
+    "    and ends with base ** stop (see endpoint below).\n",
+    "\"\"\"\n",
+    "print \"-------------------\"*3\n",
+    "print np.logspace(2.0, 3.0, num=4)\n",
+    "print \"-------------------\"*3\n",
+    "print np.logspace(2.0, 3.0, base=2.0, num=4)\n",
+    "print \"-------------------\"*3\n",
+    "print np.logspace(2.0, 3.0, base=2.0, num=4, endpoint=False)\n",
+    "print \"-------------------\"*3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Printing array x:  [[ 1.  2.  3.  4.]\n",
+      " [ 5.  6.  7.  8.]] \n",
+      "\n",
+      "\"Shape of array x is:\" (2, 4) \n",
+      "\n",
+      "\"Value at x[0][1] is:\" 2.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "x= np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.float32)\n",
+    "print \"Printing array x: \", x,\"\\n\"\n",
+    "print \"\\\"Shape of array x is:\\\" \", x.shape,\"\\n\"\n",
+    "print \"\\\"Value at x[0][1] is:\\\" \", x[0][1] # gives row 0, column 1 --> we start index from zero!"
+   ]
+  },
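As a side note (my addition): NumPy also supports a comma-based index that is usually preferred over chained brackets, and it extends to slices:

    import numpy as np
    x = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.float32)
    print x[0, 1]    # 2.0 -- the same element as x[0][1]
    print x[:, 1]    # the whole second column: [ 2.  6.]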
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[ 1  1 -2]\n"
+     ]
+    }
+   ],
+   "source": [
+    "x=np.array([1, 3, 5, 6])\n",
+    "y=np.array([1,2,3,1])\n",
+    "d=y[1:]-y[:-1]\n",
+    "print d\n",
+    "# This runs in C, the loop happens in C, so it's fast.\n",
+    "# It doesn't matter what shape y is. So, it can be a very big array."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "45\n",
+      "[ 0  1  3  6 10 15 21 28 36 45]\n"
+     ]
+    }
+   ],
+   "source": [
+    "# (note: the outputs above come from a run where a = arange(10);\n",
+    "# with a = array([2,3,4,5]) you would get 14 and [ 2  5  9 14])\n",
+    "print sum(a)\n",
+    "# cumsum returns the running total: each output element is the sum of all\n",
+    "# elements up to and including that position\n",
+    "print cumsum(a)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
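Since these notes stress that vectorized operations run in C, a rough timing comparison can make the point concrete (my sketch; exact numbers will vary by machine):

    # Compare a vectorized difference against an equivalent Python-level loop.
    import time
    import numpy as np

    big = np.arange(1000000)
    t0 = time.time()
    d_fast = big[1:] - big[:-1]                               # loop runs in C
    t1 = time.time()
    d_slow = [big[i+1] - big[i] for i in range(len(big)-1)]   # loop runs in Python
    t2 = time.time()
    print "vectorized:", t1 - t0, "s; python loop:", t2 - t1, "s"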
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# matplotlib\n",
+    "# (the original version of this cell passed data directly to add_subplot and raised\n",
+    "# a TypeError; add_subplot takes a subplot position such as 111, and the plotting\n",
+    "# itself is done on the returned axes)\n",
+    "import matplotlib.pyplot as plt\n",
+    "fig=plt.figure()\n",
+    "axes=fig.add_subplot(111)\n",
+    "axes.plot(sin(a))\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
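Before the recurrent network code in the next cell, here is the softmax + cross-entropy pattern that its lossFun uses, pulled out as a standalone sketch with made-up numbers:

    # Softmax turns unnormalized scores into probabilities; cross-entropy is the
    # negative log-probability assigned to the true class.
    import numpy as np
    scores = np.array([2.0, 1.0, 0.1])               # unnormalized log-probabilities
    probs = np.exp(scores) / np.sum(np.exp(scores))  # softmax
    target = 0                                       # index of the true class
    loss = -np.log(probs[target])                    # cross-entropy loss
    print probs, loss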
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "\n",
+    "def get_dataset(infile):\n",
+    "    # Data I/O\n",
+    "    data = open(infile, 'r').read()\n",
+    "    words = list(set(data.split()))\n",
+    "    data_size, vocab_size = len(data.split()), len(words)  # token count, vocabulary size\n",
+    "    print 'data has %d words, %d unique.' % (data_size, vocab_size)\n",
+    "    word_to_idx = { w:i for i,w in enumerate(words) }\n",
+    "    idx_to_word = { i:w for i,w in enumerate(words) }\n",
+    "    return word_to_idx, idx_to_word\n",
+    "\n",
+    "# vocab_size is needed below to shape the weight matrices, so build the vocabulary\n",
+    "# first (assuming the words.txt file that is read in a later cell)\n",
+    "word_to_idx, idx_to_word = get_dataset('words.txt')\n",
+    "vocab_size = len(word_to_idx)\n",
+    "\n",
+    "# hyperparameters\n",
+    "hidden_size = 100 # size of hidden layer of neurons\n",
+    "seq_length = 25 # number of steps to unroll the RNN for\n",
+    "learning_rate = 1e-1\n",
+    "\n",
+    "# model parameters\n",
+    "Wxh = np.random.randn(hidden_size, vocab_size)*0.01 # input to hidden\n",
+    "Whh = np.random.randn(hidden_size, hidden_size)*0.01 # hidden to hidden\n",
+    "Why = np.random.randn(vocab_size, hidden_size)*0.01 # hidden to output\n",
+    "bh = np.zeros((hidden_size, 1)) # hidden bias\n",
+    "by = np.zeros((vocab_size, 1)) # output bias\n",
+    "\n",
+    "def lossFun(inputs, targets, hprev):\n",
+    "    \"\"\"\n",
+    "    inputs,targets are both list of integers.\n",
+    "    hprev is Hx1 array of initial hidden state\n",
+    "    returns the loss, gradients on model parameters, and last hidden state\n",
+    "    \"\"\"\n",
+    "    xs, hs, ys, ps = {}, {}, {}, {}\n",
+    "    hs[-1] = np.copy(hprev)\n",
+    "    loss = 0\n",
+    "    # forward pass\n",
+    "    for t in xrange(len(inputs)):\n",
+    "        xs[t] = np.zeros((vocab_size,1)) # encode in 1-of-k representation\n",
+    "        xs[t][inputs[t]] = 1\n",
+    "        hs[t] = np.tanh(np.dot(Wxh, xs[t]) + np.dot(Whh, hs[t-1]) + bh) # hidden state\n",
+    "        ys[t] = np.dot(Why, hs[t]) + by # unnormalized log probabilities for next chars\n",
+    "        ps[t] = np.exp(ys[t]) / np.sum(np.exp(ys[t])) # probabilities for next chars\n",
+    "        loss += -np.log(ps[t][targets[t],0]) # softmax (cross-entropy loss)\n",
+    "    # backward pass: compute gradients going backwards\n",
+    "    dWxh, dWhh, dWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)\n",
+    "    dbh, dby = np.zeros_like(bh), np.zeros_like(by)\n",
+    "    dhnext = np.zeros_like(hs[0])\n",
+    "    for t in reversed(xrange(len(inputs))):\n",
+    "        dy = np.copy(ps[t])\n",
+    "        dy[targets[t]] -= 1 # backprop into y\n",
+    "        dWhy += np.dot(dy, hs[t].T)\n",
+    "        dby += dy\n",
+    "        dh = np.dot(Why.T, dy) + dhnext # backprop into h\n",
+    "        dhraw = (1 - hs[t] * hs[t]) * dh # backprop through tanh nonlinearity\n",
+    "        dbh += dhraw\n",
+    "        dWxh += np.dot(dhraw, xs[t].T)\n",
+    "        dWhh += np.dot(dhraw, hs[t-1].T)\n",
+    "        dhnext = np.dot(Whh.T, dhraw)\n",
+    "    for dparam in [dWxh, dWhh, dWhy, dbh, dby]:\n",
+    "        np.clip(dparam, -5, 5, out=dparam) # clip to mitigate exploding gradients\n",
+    "    return loss, dWxh, dWhh, dWhy, dbh, dby, hs[len(inputs)-1]\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "with open('words.txt','r') as f:\n",
+    "    for line in f:\n",
+    "        for word in line.split():\n",
+    "            print(word)  "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Implementing a Simple Neural Network"
+   ]
+  },
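The next cell of the notebook shows only an output image, so as a complement here is a minimal one-hidden-layer network trained on XOR (my sketch, not the notebook's hidden source; it follows the classic bare-numpy pattern):

    # Tiny feed-forward network with sigmoid units, trained by backpropagation.
    import numpy as np
    np.random.seed(1)
    X = np.array([[0,0,1],[0,1,1],[1,0,1],[1,1,1]])  # inputs; third column acts as a bias
    y = np.array([[0],[1],[1],[0]])                  # XOR of the first two columns
    W1 = 2*np.random.random((3,4)) - 1               # input -> hidden weights
    W2 = 2*np.random.random((4,1)) - 1               # hidden -> output weights
    for step in xrange(20000):
        h = 1/(1+np.exp(-X.dot(W1)))                 # forward pass: hidden activations
        out = 1/(1+np.exp(-h.dot(W2)))               # forward pass: prediction
        d_out = (y - out) * out * (1 - out)          # backprop through output sigmoid
        d_h = d_out.dot(W2.T) * h * (1 - h)          # backprop through hidden sigmoid
        W2 += h.T.dot(d_out)                         # weight updates
        W1 += X.T.dot(d_h)
    print np.round(out, 2)                           # close to [[0],[1],[1],[0]]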
"iVBORw0KGgoAAAANSUhEUgAAAX4AAAEACAYAAAC08h1NAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnWd4HdXRgN/Z3dtlSZYsuUnuHdu4YMAUF3qopkMICSUE\nSCGUhIQPCBBIQgu9JwFCr4FA6M1ADNgYjHvBvUu2Javcvrvz/dCVI8myLVuy5LLv8yhEe8+emV3r\nzp6dM0VUFQ8PDw+PvQejrRXw8PDw8GhdPMPv4eHhsZfhGX4PDw+PvQzP8Ht4eHjsZXiG38PDw2Mv\nwzP8Hh4eHnsZzTb8IvK4iJSIyMwtfD5ORCpEZFrm57rmyvTw8PDw2HGsFpjjCeB+4KmtjPlUVU9s\nAVkeHh4eHs2k2St+Vf0cKN/GMGmuHA8PDw+PlqE1fPwKHCQi00XkbREZ1AoyPTw8PDy2QEu4erbF\nt0CxqsZE5AfA60C/VpDr4eHh4dEIO93wq2pVnf//jog8JCJ5qlpWd5yIeEWDPDw8PHYAVd0ud/pO\nN/wi0hEoVVUVkf0BaWj0a9le5Xc2InKjqt7Y1nrUxdOp6eyKenk6NQ1Pp6azI4vmZht+EXkeGAt0\nEJEVwA2AD0BVHwVOAy4VERuIAWc1V6aHh4eHx47TbMOvqmdv4/MHgQebK8fDw8PDo2XwMne3zsS2\nVqARJra1Ao0wsa0V2AIT21qBRpjY1go0wsS2VqARJra1Ao0wsa0VaClkV2nEIiK6q/n4PTw8PHZ1\ndsR2eit+Dw8Pj72M1ojj9/DYIiISAA4AbGCKqtptrJKHxx6PZ/g92gxD5FQfxuMdCOKgbCSZFpGz\nVPXDttbNw2NPxvPxe7QJIjIkiPnVbxke7inZAMzRMu5jRjSFO0BVV7aiLiZwNDAKWAO8qKoVrSXf\nw6M5eD5+j92GIOZlR9MtUGv0AQZJHgfRybKQn7aWHiKSE8T8tjPhF4+nxw3D6HCXD2OFiBzUWjp4\neLQ2nqvHo02wMPoUkWU2PN6NdgEfpX1bS48A5u0jKOh/IQMDIgIQmaHreYhZ/xaRzt6eg8eeiLfi\n92gTEjiTZrIhWfdYXG3eZXnawZ3gF7M6LNarItJrZ+rhoOdMoGet0QdgqHQgn6AfOHRnyvbwaCs8\nw+/RJti4D3zF2vgHukKT6hDVNDcyhe60893AqPCtjI4cTbcJfoypItJVRAosMa7PEt+HQbEeFZEh\nLaGHixuM1FQYqUfmWFZLyPDw2NXwNnc92gwRGRjCeiiJfagCnQlzMweYdVffz+j81OeseVrg5P0o\nDO9Lh+Bqos57LE8mcc53VV9qjg5h8X1yKr3GHiZFm4SWaYJr+CqRxu2iqttqMuTh0absiO30DL9H\nmyMiPhO59SR6Xnm89Kj32Vwt4yFmVR1Dt8hx0mPTG+oyreLPfFOdxi1U1XgzZA/1Y0w6im7Bfcm3\n1hLjVRZFo9g3p9S5bYcvysOjldgR2+lt7nq0OaqaFpFVa4klgGDdz9aRwMbNOoyien/Y3aUdnTTs\nrqD6YGCH4/5VdYaIDP+AFdd8zMoxwMoY9u2q+s6OzunhsavjGX6PXYVnv6b05nHalT6SA0C5JnmN\nxdEUbtBFN4sActGa/zQTVV0IXNjceTw8dhc8V4/HLoOI/MCH8VI3stwQlsxno1/gZhMZdThFx58i\nvTcZ/4VawR1Mq8i4elJtqbeHR1vi+fg9dntEJAQcBYSBj1S1VESK/BhfD6B9uxEURFZSnfqM1XYK\n9wxVfauNVfbwaFM8w++xxyIi7YBzw1hj07iL0rh/U9UlbaBHF2AAsKQt5Ht4NMQz/HswmXoyfYBq\nVV3VBvLbAecEMfdP4S5w0SdUtaS19WgrRCQQwnzSQSd0JpIsIeYXmBTHOW1n1vWRmtjWdkBcVdM7\nS47H7otn+PdQDJHT/RgPBTCDCRzLxJgRxz5TVZe2hnwR6eHH+Ko/7bOGkh9ZQmXia0rtNO4xqjqp\nNXRoa0JiPdCHnAsuZXAoJBZpdXiK+clvWfdBTO0TdoZMQ+SEIOa9KdxiAcfCeD6B8ytVrd4Z8jx2\nTzzDvwciIoeGsd69nH3DfSQHW13eZ4XzJkvXJnF6tsYqMCy+D46h2/gTpMemzdVpuo7HmLM6iVOs\nqs2OrNmVEZGghVF2G6ND7SWw6XhSHX7N54kUbi9VXdPCMg8PY71xCfuE9yGPSlK8wMLEDNZPjant\nlZLw2IRXnXMPJIx1/en0DtWGOFpicKx0NzsTzgZO3NnyRSSUwhl3FMX1wimH0YEwVjaw787WYRcg\n10Koa/QBAmKSSyAFdG1pgWGsP/2IfuHBko+IkCMBLmJQ0IcxXERGtrQ8j70Lz/Dv+vTvRc5mT/MB\ntA8B/VpBvlnzP/VVEBEsxIVGCt3seaxz0cSKBh6WCk1STtIPfN/SAtO4+wykfb1jhggDao4NbWl5\nHnsXnuHf9Zm9kIrN/HFzKIsD83a2cFWtDmBO+4K19XRYpBVUkHKAaS0hR0RGB8X6R0R8/xKRG/1i\n3hcQ834RGSd1i/c0T4YpIhdkiW9Klvhm+8T4o4jkbes8VXVcuPF+ZkQXayUAq7Sau5geVfQd4EgR\n6VBHTueQWC9YYiQsMZJhsV4TkR7bo6uFsWoF9R80qsoyqlxg2fbM5eHREM/Hv4sjIqNDmB/+kqHh\nAeSSxuVtltnvsWJ1EqdPa/j4RWSYH+OzMXQJ7EOefxlVzrssTyZwzlHV15s7f0DM6/2Yvz+absGF\nbDQWU8k4umBh6ERWx+LYbyVwzm7OXoKISBDz1UJCR51Ez0gIi09ZnfiO9aVJnGHbKsYmImIiF1sY\nN9m4eQIJQXxdiCTDWCykIiBwUwr3/gDmvPF07Xg03SwT4SNWOu+wrDzTWWxDU/Q1RM7tQPDhqxkR\nyZcgrirvstx5k6XLkjh99/R9FY+m423u7qEYIif5MR82kZw0rmlhfBXH/lErtyfs5sf4tR/zQBt3\nfgLnHlWd0QLz9gpgzrqVA0PrSfAIs7mBUWRJjQcpqQ43MqW6hPiPVfW1Zsg5OAf/e7czOuKT/21X\nPKqzE1Mpvc1W98YmziNAkQ9j7pXsG+kvNe6Yck3yR76OVpB6fDB5F1wpwyJ1z3tEZ8W/Yd3Ntrp/\naaocP8Z1CtcUEEpVkPS56II4zgRV9Vb8HpvwNnf3UFzVfydximLY+6Rxi2OaHteaRh9AVZcn1bmq\nSlMHx9W+oCWMfoaT9qfQyJEAX1PKGDpvMvpQs4F6LN2zwljNqqUjcPQhdA7XNfoAY+gcDGKe2tR5\ntGaldPJwOhi1Rh+gvQQ4hV6RMNYZ+1EYaXjeCApCIazDtkdOUp2b07gdVxM9Loo9Iqb2cM/oe7QE\nnuHfTVBVV1WXquq6ttZlZ2Hj4mOzWmz4MBAINHJKk1GIVpPezC0WwwZoNC5eRPqHxXo1IGZ5WKzl\nlhjXiIjfgE5diIQaji8kBGCuJrqZnDXEHBt3u422qlap
6iRVnb+953p4bAnP8O/FSA3dRaRzG6rx\nxhRK3QpN0oN2fMJK0nXc164qH7IyFsV+tplyXvyStW6JxjYdSKrDv1kSjWI/0nCwiPT1Y3x9LN0n\n3MIBuVcyrLgPOdcFMV9z4csplFY3dJN+y7q0jfvORFall2Q2gQFWajXv1eyJ3N/Ma/DwaBGa7eMX\nkceB44BSVW20HZ6I3Af8AIgB56nqZpEgno+/dRGR8UHMJwQpdHDFxJgTxz5HVXd6pFBDAmL+wY/5\nuxBmOIIPQTiCIiwMPmIFy6muSOB0VNXktmfbMpYYPzORew+koxHBZ01ibTyF83YC56yGm6UhsZ45\nmm5nnSQ9N72C2OpyFZNiVaQPC2I+ti8d+k+gZyCExees5k2Wqo17pcJSC+OpLkTUQFhJteXgXuyo\nPtMc/T08GqNNNndF5FBqXpWfaszwi8ixwC9V9VgROQC4V1UPbGScZ/hbCREZ6Mf4+hIGR/YlHwfl\nU1bryywsy2ShVm57lhbX6dQQ5ov3cIj5DeuYyjocXAaTxwt8n3KgsCVq4ohId+AMoAswE3ilsesN\nibX6WkZ27ir12+4+pfNTE1n1e+BxP8YSC6O9izKUfEZSyDPMj1WRPj+AebyDe6aDWkHMz+I4v1TV\n2c3V38OjIW3SgUtVP99GjPKJwD8zYyeLSK6IdNybCnztagQwrzqa4sCwTOi5hXA4RTJLNwSns+GH\nwGauj60hIv1DmLfZ6OEGEnfRf6Rx/7idLRG/D2LFfGK2O5BOHEgnoCZ2/RUWuw5OGGiJYmiJEOZJ\nCiNyCaQ2kHggKNZdSZzrtc4qyEA2bCDZuWuDfuvriKWADcCIHPy+WxlN3TQDW93w08z/5ygKjZPo\n6Q9h8V/WjH2FRV+KyNDWqq/k4bE1WqMDV1dgRZ3fVwJFgGf42wgfxr59yd3s334A7SNzKW/UXbcl\nRKSnH2PKcfTIGk0nI0o6618svnwe5YeKyKHa9FfKuVHSzjKtoru023RwNmVIzd/K2u3Rawu6ShDz\n3bF03ecUevksMUJlmuCvfHdFCfFVwMO1Y2PY97zMwnv7ak4kJDW3aq6WsaDm2fMacE4/co2GuWUp\nHDoQDJzHgE15Z0dSLGWaCH7Cqt8Cv9iGjkFgPDUZ0RPb4u3LY8+ntTZ3G76G7BrJA3spNu7MRVQ4\nDY/PZ2M0hbtd7ogA5rVHUBw+Vrob7SVAkWTxS4YEI/j2BcY1dR5VTdu4l93JtNjnupoVWs3HulIf\nYlYsgfPz7XiAbI0RAcy+p9HbZ0nNn36eBDmfgWE/xv81GPvEBhLPX8Wk+MM6K/oX/abyHmZUpXFP\nUNUqYP4CKtyGas1gA6PouFmy8TA6+HwYY7amnIgc68Mo6U7WC33IedrCWGuJcXGzr9rDowGtseJf\nBRTX+b0oc2wzROTGOr9OVNWJO0+tvZcEzl/fYfmZPTU7PJg8XJTPWaNzKEsB2xU9YyLjR1JQ7+/I\nEGF/LQy/w/KDgU+aOpej+rSIrHyRhdcq9BOYkcC5WVUnb49OW6FnMVmO0cAoF5NFCqdT3WOZzd6L\nROTWrykdR42b6a067qtPq0itfJlFvU/Snr6AmCzTKuZQljIRpUH46RpiaP0333qISLEf4+XfMDxc\nW5CvRGPcwtSHTZFRLlymWickyWOvRUTGsR2LqsZoDcP/BvBL4AURORDYuCX/vqre2Ar67PWo6mwR\nmfAwsx43kTwHNQS+T+GevQMbqKXriPfqSXa9g2uIxYG4iARVNbEdun3CdjwstpOZi6j02epSu+IH\nmE85QayFW9BnEbCokeOuiIz7lNXPfsTKQ4Jq2kmclI173XQ23DZLNwQGSz4A6zTOayyOxbD/uiXF\nTOS80XQya40+QEcJc7z2kI9ZeV4V6SEicpCqbvam5rF3kVkQT6z9XURu2N45WiKq53lgLNCBGl/s\nDWQqNqrqo5kxDwDHAFHgfFX9tpF5vKieVibjj+gNJFV1i6vRbcxxegHBJ65jv0g78QMwV8u5h+kq\naMoFtTCeTeDsEivWsFjvDCRv3I/oF8zBz/dU8CAzY1Wkf9RYSQgR6S1wliBZbk1Bts8bup1EpADI\npaYdoy0iY/0Yr3UgZIYwWU61T+GatDr3bkmvoFiPnUyvi46S4nrHp+t6PmIlG0hUryF2jqq+0TJ3\nwmNPwavV49HqiIgEMG9V9LKBtE9XkrJWEQ2dTh8OlyIqNMlTzE/Mo/yjmNrH7wL6Hh3E/LuN21UQ\nNZANaZwrHNXNXFw+MS41kbsOorORhc/3X9bE4tgfJXBO2dbKW0R8wKHUNI3/fFtvUiJybl9yHvo9\nI7Lq7g88rfNphw8fBm+w9P6UOpft2JV77Kl4tXo8dhoism9YrJci4lsUEd/7IjIeamrKJNT+XQq3\nz3Q2XLqS6opLGczhUgRAjgSYQM+ggx4hIhNCYv29nfgnBcS8e3tLFTcXS4yfRrD+dSZ9ul7FMDmS\nYnVxwy4sbuR6exrIXTdxQPBc6e8/WXrJrYyOdCR8OHD+tmSpalpVP1bV/zTRffbycqpLn2a+lmmC\nak3zli5lGusYT1c2kEw56PoduW4Pj4Z4K/49ABHJNuDCENbxLloax3lYVT/LfGYB/ua4WTKui7dP\nomdwAO2NFVTzMgtjCZxf2uo+UXesIeI8yjjDEoNSjfMYsyknSQCTDSQYQK47niJjHuXpiaxKpnAP\nU9Wvm3kLmnINIR9G6R8YldVV/ldD7Utdy7Ms+C6q6eENxl89hi43nycD/HWPf6Ol/J05C5K4twJT\nVXVmC+pY4Md4TuEIAxhGASfTkzQuNzM1nsId5OUBeDSkTRK4PNoWEekQwJw6kPYFo+kU3khS/8PS\nE/1i/tVEik3kbAUrLNaieE2j7ve3V0YI65HzGRDeTwoB6Ek2vTQ7fDNT7xWR5+qWUghgliyjqnMP\nbcfdfMd4ijiCIgwRSjTG3Uw3HJSzpK+vm2b5nuX7v9M67RtHdiDo1jX6AKMo5B/MHSIi4QYPx1AE\nq973Y63GeJ7vySPYp4is++ZQZoTE+iyBc3LDDWwRCQA/jmD9CHCi2E8Az6uqvSUFVXWdiBzlx/gz\nyOWA+zzfu3MoNx3cizyj79FSeK6e3Rw/xvX7U9j5MhkaHiWFHCnFchP7hwWuG0j7s+/koMBjjDN/\nyqB+QczXROTgxuYRkaBPzJtDYq0JiLkxLNbLItJHRHLSOL1HUFBvfJFkkUdAgXor5TTuX55gbvRL\n1pKFj6OkmNrwyY4S5nR681EmqvEAOpLGHSCSCX/ZuUTj2EbDN9wEDgIu0NAgvzOJNfFkxpWvqjzE\nTI6lO3+SA41LZXDW3RwS7kvuWD/GzXVPFBF/EPOT3uTc/WMGjDmHfuOLyXo4iPmmiGz1O5cpx3xN\nEmfgZEqums6GX6Vxi2r3IETkiIj4vgiKWRYR37ciMqG5N8Zj78Mz/Ls5BnL6ERTXc0fkSoAh5Mtg\n8gI5EsAQYbg
[... base64-encoded PNG data omitted: scatter plot of the two-moon dataset generated by the source below ...]\n",
+      "text/plain": [
+       ""
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import sklearn\n",
+    "import sklearn.datasets  # explicit import; relying on 'from sklearn import *' to expose submodules is version-dependent\n",
+    "from sklearn import *\n",
+    "###########################\n",
+    "# From here: http://www.wildml.com/2015/09/implementing-a-neural-network-from-scratch/\n",
+    "# Some code from here: https://github.com/adamwalz/mlclass_ipython/blob/master/notebooks/Ex2%20-%20Logistic%20Regression.ipynb\n",
+    "# Also this code: https://github.com/dennybritz/nn-from-scratch/blob/master/nn-from-scratch.ipynb\n",
+    "###########################\n",
+    "# Import numpy for linear algebra and numerical computing functions, and matplotlib for plotting graphs\n",
+    "import numpy as np\n",
+    "from numpy import ones, zeros, newaxis, r_, c_, mat, dot, e, size, log\n",
+    "from numpy.linalg import pinv\n",
+    "from scipy import optimize\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "from IPython.html import widgets\n",
+    "from IPython.html.widgets import *\n",
+    "from IPython.display import display\n",
+    "\n",
+    "# Enable matplotlib inline plotting for this notebook\n",
+    "%matplotlib inline\n",
+    "###############################\n",
+    "# Generate a dataset and plot it\n",
+    "np.random.seed(0)\n",
+    "X, y = sklearn.datasets.make_moons(200, noise=0.20)\n",
+    "plt.scatter(X[:,0], X[:,1], s=40, c=y, cmap=plt.cm.Spectral)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 26,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEKCAYAAADpfBXhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz
veBTP\nkPOvJHV/qeA+iBptFt6rK+JpORN3oWeRdLCyfSeHWutIP6UWitXhYEtzBS8xB73QEosXR6hnV3M1\ni3wjRvAdDD+d0PBs1ETuz1nDtv2v4G9w54nI8cy5XMP0rFZyPtmGpkCHe1AsKde1oDtlA1PJng8p\n+eotokImUV7/OVVZmxl7za9Pe6BWuut9kqMuJiPZeafv6R7Ega2r8IubRFDSbLbs+RvpCVdQ01BE\nafUhJsb+YNg/g97SGd2IXfST074WkrGIkIxFQzquRmfAYj0ZpNstTUSHTibIL4EgvwQq63Opzd+F\nX/wUdmW9yfS0W2htr+Pwsc9IvPKeIZ1bT6pzvmbJzAfwdA/ExyuchJrZ1OTuuKCCu3qsPIjapR0T\n2q6SBFqhwRsDbd8oy3pi7bQV59ellLRiw3CBFkGKM3ly7IsfYbFuYNzP3uJ/y3/DxAPxVO4sZdml\nz7P8shfw0/iS/+k/u17jsFsp+PxlFs18gGlp/8eSWQ/QUVlEfVHmadd22CyYDCeXu4wGTxxW55JP\nwpK7ICyCL7NeI7d+P2k3PobRy/Waooy08OnXsWXv3zict4HdWW+RV7yFyNCJXd+32trQaHTEXvxd\nRHA4a796lM0H/kHMxd/BN3r8iMxZqzfS1l7XddxqqUc7yPs3XJ26cx9EAToTwQYzb3fkMVeGcYg6\nKjStjP1GGWCtEKzwj+O5un3MdYRRKJpo1lqY4RnY7XWllFRY23EgCdWbB73Oe2ZrHfV2C2PNPkNW\nS6Y7b/3tppO55quB1ScfBraUHmVM+Ez0nSmPSdHz+PLwa13ft1vaEUKDh5szGGs0Orw8QrC2nlpm\nCwLGzuHAB3/A0z0Yg97MjkOvEzDhUudrdAbiF3x/CN/h6BCcOh+9mxclR7ai8TYTkrGYrQf+xdiY\nBdQ1FVPbUkp04gw0Oj1jFv2EMd/47WIkRM++mU2fvsTYmEtobK2ivD6XCWl3jPS0htWAg7sQYhHw\nR0AL/ENK+WQ35/wJWAy0ArdKKfcOdFxXpBGCZ2Kn8FRxJs+27yNU78bzEdO67exze0gikSZ39jXX\nEqd354HAtG6zQCwOOw8e28PB1no0QIzJkydjJveqkca5OKTkkeJ9HGyuJxgzBTTyWNQkJnr0fht9\nX5zxEPQsueZG32BKC7JIiJ6LEBpKqw5h9D7ZJ1Rn8sDsE8qBnPdIiV9MZe0RKmtyCA8/PbD4xU4g\n7tIfsGvbGqTdRkDGJYRPuWrQ39tgOjXP31X4xU3GL875QFJKSWXIZxwr3I8uwIfxl/9hWEsPS4ed\nysNf0F5fgUdIPP7xU884J2jcPAyeflTn7kQbEMWEK39wRtZO5eHNVGdtRqM3Ej7tWjyD44frLQyL\nAfVQFUJogSPAAqAE2AmskFIePuWcJcAdUsolQohpwPNSyundXOuC6KHaVy9X5LCnuo4fyVQ0CF4W\nhwnzMfGz8IH3zdzSWM5fj+fwa8dE9EJLpqzhTV0ObycPPFvk1K5D/WG3tJP55oOItlb0ejPNHXWk\n3/R7TN4nM23aG6s48v7TNJbnYHL3Z8ySu865DFB3bD+V+zaARhAyYQneESn9mt9QqMndQf5Hz9Pe\n3ohfSBJjrr0fYx9LGpxgt7RTuOEvNB07gN7dl6jLftTromauTEpJ9ru/x15TSahfMscqduOXOpeY\n2TcD0HD8EO0NFbgHxeFxlkba5Qc2cHzLG0xIupb2jiYO5H5A+k2/H7Hm267YQ3UqkCulLOwc5E3g\nKuDwKedcCbwKIKXcLoTwEUIESykrBjj2BeFoWxNTZXBXOuUMGcInbUWDcu1yaxvx0gu9cK71J+ND\npa2933eOZzSqGACtwUT6t56gsSQLh92GV1jyGfnnJq9Axt/8VK+uJx128je9TOX+T5g49nocDgf7\n3n6ElOt/6xIBvrXmOHnvPcXdtrHE4Mm7FUXsfPtRUm99rl/Xy33vKcKLivm+LYZjzU38940HGH/b\ni+d9u7qm0iO0luVx1bzH0Wp0jI2/jDWf3EPE1Gs4tuW/1B3Zhr9vLAVV/yB63i09Vocs2/Uhs8Z/\nj5AA52Y5i7WF8gOfEH/JbcP5dobUQIN7OFB8yvFx4JvFtrs7JwJQwb0XIo1uZLbUMEUGIYBMUXNa\nqd6BSDZ78xp5XCaj8MfEp5SQaPTudWCfmfkL4JSCW71sVNEb0mHH2tqAV9hYNLq+lR22NNdis7Rh\n9glBaLTOu733n6a56BBTU28mLnKm80QBBdvfOaXZw4xh37pvaamj8vBmGo5nkYA3iZ1lhK93xLCh\n8gscdiuaPpZdlg47lQU7WSlnYxBaIvBgP03UFe4jNP3SoXgbg0ZKx1nLB1jbm3F380fbuYRpMnii\n15tpKDlMzeEvuWre7zDo3WhsLufDTx8iKGUu2m6fI51eddL5d9fsxdBfAw3uvV0S+Wa0GNVLKYPp\nlqAEftaynZWWHegQaHSCP4UOTrOKNDc/bg6O57cVOzCgwU9n5KnoKed8XVdJ3H4U3OqNpvKjZK1+\nDGmzYndYSVx0F4FjZ5/zdVJK8j/5OxUHP0WvN6MxuTFu+SPY2hppPp6Nj2c42lOeVWg1ehqKMvG1\nmzHqPTj41m9JvureXvXxdNgs5H/2T+rydqEzeRA971b8YvvW/7OjsZoD/7qbNKs7EQ4NWx1V5NNA\nnPCmjBZ0WgOim+cw5yQ0aDQ6mu1W/HD+cGvAiu4cjUhGUkt1EUfee5KmmmOYPYNIWvoLvCPOXHr0\nCk3gaFMJ+cVfExqUSk7hZ+jcvJAOG95e4V37Frw8QtDpTFjbmroN7sETFvPV1peZlHw9HZYmDhdu\nJO3G3w35+xxOAw3uJUDkKceROO/Mz3ZOROfXziCEWHnK4SYp5aYBzu+8567V8ef4GWS3NeCQkmSz\n96CmTC4PiOUqvyia7VZ8dUY0p9y1Zyy2kX3vcuCUh6BDTDrsZK1+jKnJNxITPpXahiI2bHgSj9AE\nzD5nn0NV9haa8/dz3YJn0OvcOJDzHrnrXiBi5nJMJh8Souey8+DrgMAh7ezOeosg/ySmp98CgJ93\nFPs3/7dXwT1v418RVZUsnPxTGlsq+Or9J0lb8Xs8gnqul/9NJdtWcVGHNzdK54O8WNx5QRxivCaI\n3aKa2Et/1K/lMSEEUTNu4Int73OJNZB8bSuVHnrSEgbnpmCwOew2Dq1aSXrsEhJmzKG0MpMvVz/G\npNv/isHN+7Rz9W7ejFv2MAfW/Ynth/6DZ1A845Y/jEBwtL6gq9FLXtGXCJ2+x2cWYROWoNEZOJS1\nBY3ewLhlK/E4Tx6oCiHmAfPOdd5Ag/suIEEIEQOUAjcAK75xzvvAHcCbQojpQH1P6+1SypUDnM+o\npBMaUt18h+z6Ro2zzO6JrkM/taaeXDd/ZsiG7ZalpQ5psxIT7syA8POOwt83npaqwq7g3lJdRHtD\nJe6BUaetIbdUFhAdMgmD3rlsFR95EdlfbcIjKI
7WjjraOppIS1jKvuw1tFub8QhPIsT9ZOcsd7M/\ndmt7r+ZZnbOVK+c8gpvZD2/PMOKrZ1Gbv7NPwd3R2kiYw9z1e20IbuDuTcm0RSSFpwzoAWjkrBup\nCojk68J96Dz9SZ10ZQ/LEyOvo7ESYbeTFHMxABEhGXgXRNBckd/tb0OeIWOY8J0/nfH1xCt+zicf\nPovDZsHo7kfKsofO2kA7JG0BIWkLBu+NDJPOm95NJ46FEA91d96AgruU0iaEuANYjzMV8p9SysNC\niB90fv9vUsqPhBBLhBC5QAvwnYGMqQy+rmWWbroODTe92Ru73UpdQxG+3lF0WJqpbygmzMu5B+DY\nl69TtmctPl6R5DQUMmbRnV0NL8y+YZTkriN1zOVotXqOV+zH7BOKzuhG6g2PkrPuRdrqS/EIjCFj\nyWO01pRw6MNn8feOwWjwZEfWG/j3cvu+Vm+itb2+q858S0cdBn1Mn96rV+J0Psj/K/FWL0zo+J+u\nCL+0hURMHliq5okMuG82A3FVOrMnFksLrW21uHUWj2tuLsfg7n3uF5/CP34qM+56HbulHa1hcPeD\nnI8GlAo5mFQq5PA44yGoC6rM+oK8jX/F3zeO+oYigtIXEjP32zRXFXLojQdZOvdRzEYvauoL2bD1\nSabf+R80OgPSYSf7vadoLs3BbPKhpb2W1BsePWvP0YqDn3F86yocNgsBY+cQM+fms97tnVCe+QlF\nm14lOXo+DS0VlDfkMuHWP3ab712Z9QVFX76Ow9pBQNIsYi7+DprOtf+S7Wso3fY2DruNoNT5xCy4\nvVfjd0c67BRu/DulmRsQQNiEy4me/12XrW9ubWuiNn8XIGmrLaNq/wbCAtOoqM3BKy6DMZf9+JzX\nGC2GIhVSBfdR7ESuudtT97l0MO9OW30ZLVWFGL2CujaXVB/dRu3WD1g49edd57214S4ybv0jps47\neyklzZX52Dta8QiOH9LyvXXH9lOXvxudyZ3QjCXd1i4v2v42JVveIDJkIgmx89mb8w6GmGTiLv7u\noM/n+Ndv4dj6EXfbnA22n9Mfxjj7+jM2bUmHndI9a2mtLsItIIqwiZf36weK3dqBrb0Zg4dvn3+A\ndDRWs+8/9+DvGYUAqhuLiFtwO5aWOsw+ofjGTuz2zttht5H/yd+pzNqE0OqJnLGMiClX93nursYV\n89wVF/TW324C4P4BtJEbaWafUMw+p/fCdA+I5mhtHvWNx/HxiqC4bA9oNBjcTz6PEEIM205D3+jx\nZ900VZ75CVWbX+cqRwRVpUV8XfUi06f8mK8OvTokwb0pdzcrbGF4CQMAV1jDeDdvN5wS3KWUZL/3\nJKK+nqjgCRRlfsnhokzGXnN/n5YxSveuI//zf6DTGtEa3Ri37CHc/CPP/cJOx758nTGh05k4dhkA\n+7LXUHl0O0lX/Lzb86XDTsGmf1G272P8PCO46uLfY7G28NnO5zF6BhCYfFGvx75QqOA+CpzWqAIG\npYWcKzL7hhK38Ad8tP4x9HozDhykXPtg1xKHqyn94jXudqQSL5xrxx22I+QXfzlkv03oPHwoFhWc\n+HFTLFrQepyeYdRWW0Lj8cNcN/9ptFo9idEXs/qzX9JWe7zH4CylxNJSh1ZvRGd0p6kij+It/+XK\nuY/i6R7MkYLPOPTO75l02597PVdrcy0BASefbwT4xFJS/kWP5xdve5u2goN4mv2ZNO5G3Ew+uJl8\nSIm9lOL8PSq4d8M1/1UoZ5Wx2Hb6tn4XeBA6XILHXUxAwgzqCvdRdfgLSravwZJ68ZDUMR8ou82C\nJ4auYy+Hlh0l2xl7zf1DMl7ExbeyrugXFDnacADZuhbS5nzrtHMctg70OjOazvx5jUaHQW/G3kMH\nJ0tLPVmrHqatvgy73ULYhMsx+UcQGpSKp7uzFERizMXsyHwNh83a6w1nXlFpHDq0nuCAsQghOJi/\nHq/knptp1OftZnLiNRw8upbG5jIC/Zylexuay9EFqE5P3VHB/TwwM/MX7KkuGLZcc1fXVlfC0XV/\nZPyYqzCY3diz/i84bB0Epcwb6amdJmDsbF4+uJObbNFU0c4mTTkJi+8acEegumP7Kd22BofdSmDq\nxYSkLwSc2UIZt71Ede52BILxiTPOyBN3C4hC6rXszV5NbNhUCkp3YNdqsbbUc+h/K5F2G4Fp8wlO\nddYXyv34JcI84pg87QEs1hY+3voEQqenri4Pq60Dvc5IZc0RDGZPRB9+g4qYejV5DRWs2nAXSElI\n2kIip1/f4/lakwcNzeVMGHsdn257lsrao3RYW6hqOkbGkmf78SmOfuqBqgvKWGzj11d/e1BqtIxG\nuev/TGCHG+mJSwEoqdjP7mMfMf7bw5yUfw4Ou42iz1+hPmcrOqM74Zd8t1cbpM6moeQwWW8/wtSU\nFej1buzMeoOwmcsI7UPDjo6mGvI2/JW2mmLM/pEEpc3n6Lo/MSVlBXqdiZ1ZbxAx51uEpC1gx0u3\nsmj6vV136Zk5H1DlbsNhaaMhfw9enmHU1OWTtPQe/OIm9fn9yM5eB+d6oNtckUfmmw8SEzqFtvYG\nSmuyiJx2HWETloyKHq3qgeooduIhaFdAH6Xr5oNDojmtLogWhvAmRUoHZfs+prnsKEafYCKmXN2r\nDUEarY6YBbfDgtsHbS6VmZ+SFn85cZHO/HWd1sDO/e/2KbgbPf1Jue6BruOcj/5E+pgriO+8plaj\nZ/fetYSkLUBn8uSz7c8hEIQEjKOupQTPiDmETbicprIjWFrqiAoe05Wt1Fe9zdLxCI4n45Znqc7Z\nikGjZWry3Rj6WTHzQqGC+wiY8XI6YorzV+muFEUVzHstKO0SDq56GJPBE4PejZ2H3yJi9tA1Yc5d\n/2csx3OJD59FWX4WB/N+Q9pNvx+RB7lCo8XuONnn1W63wgDz2IVGg8N6sluYw2FDaDS01pbQ3lTJ\ntNSb8fEMZ/ehN2lsryJx/GUIIfAKSx7QuH1l9gklcuq1wzrm+UwF92EyM/MXJwP5amD1+Zee6Cq8\nwpIZe+0DHN22BofdRuTcm7vWiAebtb2JikOfs/zS59HrzSTGzOODzQ/RWJKFT1T6kIx5NiEZi8h8\n4360Gj0GvRt7c94hboDdpEIyFnHwrQfRanTodCbnNRf+gNr8XcSETSUuwllFc/akH7Pm03v6XKWy\nJw67jY6magxu3hdcC7zhoIL7EDmj69B5mGvuynwiU/GJTO067mispuDzl+loqMQjLIGYObegNXS/\ndNJYku3clWrtwD9lNiHpl/aY4y1tVjRaHdrOiopCaDAY3HH0kF3SG2315eR9/CItNcW4+0cyZvGd\npzUhORuPoFhSb3yM0p3v42i3MmbJnd12IuoLz5AxpC5/lOJd7yPbbF3XLN23jvaOpq7z2i2NaHSG\ns1yp95rKc8l6+xGEBKu1jfgFtxPi4uWIzzfqgeogGWjnIaX/bB2t7H3lLuJDphIakMKRY5/TrLcx\nbtnKM
4K288HcA0xMuh6T0Yvd2asInnol4ZOWdnttKSWZr/8aP60/SdHzKK08SFbBejQ6PTqDO1Fz\n/69P9VscNgu7//FjksPnEB02hcKS7Rwp+4pJ33tp0ALnYLF1tLD3lZ8S5pOIj2cYhws/IXTaNYRP\nvnJA15VSsuMv32FK0jJiw6fT2FzGuq8eJ+1bT5y1VMRoph6oupAzcs2VEdNYkoW7wYcJydcBEOSX\nwFvr78Ta1nhGKmDloU0kRy8gsbMCocngyVf7/ttjcBdCkHLdg+R/+g+2ZL2K3dpBgG8801JvpqW1\nmi8+fgmjhz9e4b1bf26pLkIvdKQmXA5AWuJS8kq30lpTjHtQXNcPI4fNSnnmBjqaavGOSOlXJspA\n6YzuZNzyB0p2vU9ZWyMxl/6AgIQzOmSeRjrslO5dS3NpDkbvYCKmXXfGpi1bWyP2jlZiw53X8vII\nJdA/kZaqwgs2uA8FFdx76UTBrcFqI6cMDktrA03ludg62wMCZBd8ipCQ8+GzRF204owHf1Ke7Ljj\nkA7O7CVzOp3Jg8TLfwrA9pduYVrqzXi6B+HpHkRi1Fxq8naeFty7Gjg3VOIRPOa0vHatwUxHRzM2\nuwWd1oDN1kF7RxNFW16npnAPWp2ByBk3UHt0O2a7lkDvWPIyX6R1ypVETL1moB9Xn+nNXl39SXsj\nd/1LWEsLGRNxEWXHs8nMv5/xNz992uYmnckDhKCqNo9Av3g6LM3U1hcQ2MulKaV3VHA/hzO7DqnA\n7ioay3LIWvUw3p7hNDdX8OXevyMddmrqC7ho0g9pbatlz6qHT2t8HJy+kAP/vQ+D3g2z0Zu9OWuI\nmP3NFgQ90xnMNLdW4+nurCPf3F6DzhjQ9X0pHRx+9wkctVWE+CVRuO8vNKdfQvRFzlRXs28YPjEZ\nbNj6JJFBGRRV7sPg5oOpzcbyy16gvaOBdV/9DnezHwtmP4QQGsZEzeHdz35F+OQr+10xcjjYOlqo\nyNrE8kv/hF5vZkzUHNZ++TANxw+elt8vNFqSrvgFn659Fg+3EJqaSpE4OPbFv0m4/O7zvs+rq1DB\n/RQzXk5nb+wYtRP0PJH70fNMHXcTseHTaWtvYO3mldjsHSyc8Uv8fZxNM5pbq6nI2kTc3FsBcA+I\nIm3F7yjZvgZHUxHR87/bqxZ+J0TP/Tab171IYtRcmtpqqGzMJyP9zq7vN5Zk015RyJVzH0Or0ZES\ndxlrPr2H8ClXozO6IYQgcenPqTj4KVVVRQTEXsHxr/9HRtI1GPRmDHozYf5jsTqsXZUWnTXjJQ67\nDa0LB3dptwEa2i1NSBwY9O7odCYcdtsZ5/qPmUrkjBsp/uoNPNwCSI5dSGt7LYf+9xATv/uiS/8Q\nO19csMH9RNchMWXh6SmKynmjraGSsEBnxozZ5E1MxHTyS7fhcJwMJg5pO6McrUdQHElL7+nXmAGJ\nMzF4+FGbtwutMZCM9DvRm06W+rW2NmA2ep9s4Gz0QqczYbe0dq09C6EhJG1h12sq9q2nrrEYXy9n\n4S6b3UJ51SGOle4gwDeezNy1+ISnoNWPbA9Uu7WDY1v+S3PpEYzegUTMvIHm8qM47Db84iYjHXa0\nOj1rv3gIh8OGv28cje3VJHTTC7U6Zysl295mxnhn754dmf9h+vhbySn6gvbGyjMqgip9d8EF9zO6\nDql88/OWZ3A8Rwo/Iy1hKe0djRSV78Y/aRab9/6djMSraW2rJff4V2TM/8OgjusVltztBh6H3Urx\n1rdprSuk4Pg2QgJTOFLwKQYP37Pupoyd/z12vP0wpdVZtLRU0dRczpjoeWzd9zJotPhEpZF8xa8G\n9T30x5H3n8bUamNS9BLKa7LZ96+fEuCfiNHgzt4vXsXsG0FS1Dwykq+jw9rMR5sfJvKiG7qtglmx\nfwNTUlYQHeZsyG6zdXC0cBNWays6w9DV4L+QjPrgftrmIRfW3lhFS9UxTF6BuAdGj/R0zguJS3/O\nof89xOGCjdhs7UROX0b0rBVUhidz9MhWtAYT6d96ErNv2LDMp+rIV5jsgotm3se2/f9i2/5XQGiY\neNtLZ21m4RWeTMYtz5Kz7nk8dGaumv8Eer2Z8OA0duV/wNhrH+jxtcU736F062qkw4bJP5LUGx5B\nNwQbgmztzdQV7uWGRS+h1eoJC0qlovowKTGXEBEygcP5G9ibtYqkS25DCIHJ4Elc+EzqGrptl4wQ\nmtN32jqsVDfkO5ueuPWtvZ7SvVEV3LvNNT8PAnt1ztfkffAckVovCu1NBE5eStTc/xvpabk8k1cQ\nE7/7Ih3NteiM7l13iEEpcwlKmTvs87G2NuLlHkpdYzGe7kH4eUeTW/wVRs+Ac77W7BOKu380QR1u\n6PXO4KzVGJDS3uNrKrO3ULT5P8yacBtGgxfb9r/C3ld/ypTb/zZo76mL0CCROKQDLc5cdYfD3lX6\nwMcjHISgpDKThOi5OBw2SioPYPLqfhdv6JQr2f3uk9jsHSAle7NXEz79OqJm3DD4c79AnffB3fT5\ntbya49yJeP95mKLosFs5+sEf+JUtlRi7F03SwoO7PsAveSYew9RR6HwmNNp+F63qC+mw09Fci8HN\nu8fNRj5RqRzY/BrVNTmkJS6lvqkEnc6ApaUOYy+KXAWlXszBVQ/jZvLFYPBgZ9YbhEzruYVc2e61\npCUsJTrMuUN11oTb+WTr03S01GN0H9x/CzqjG4FJF/HZzudIjJxHWXUWTS3l+HiEYrW2kZm3FofD\nxr7Dqyk4vpXWtlpsDivB4WO7vZ5v9HiSr72fon0fA4Jxyx/GOyJlUOd8oTvvg/v5ntlibW1Eh4YY\n4Sxb6ikMRGq8aW+oUMHdRTSWHiFrzWMIh8Rm7yBh0Z0EjZ1zxnkeQXGg0TBv6t14eThztpvb6bFo\n3wAAEx5JREFUa6nK/pKIXu3qFBg9AtiZ9RY6g5mwadcQOmHJWc6XWKwtXUcWWysgaK0qwOg+sNLC\n3UlccjfHd7xDdsleDAFBBAVeyruf3w9IgpJn423U4C09CPZLoLG5kryybfhEpfV4vW+WkFAG13kf\n3M93Bncf0OnZbatikgikVLZQ4KgnNUCtu7sCh91G1prHmJFyM1Fhk6ltKGLDhifxDE3E7NPdjYVA\nc0oan1ajO23TVE/a6ss4tGolU8beiI9XOHuPvENz2VHExJ43WEXPuZmDb/4GjUaHyejF/ux3cGBH\nf0pP2cEkNFoip1+PlA6ObfkPZXvXodObCM1YTPSc/8NuaSV/4985VPwZBk9/0m96fFjWzy3NtTSW\nHkFrdMMnMlWlUXbqd3AXQvgBbwHRQCGwXEpZ3815hUAjYAesUsqBVTkaZYRGS9Kyh3h51UpesxfQ\n7rASd+lPcPMLH+mpKYClpRbhkESFOXeZ+nlH4ecTS2t1UbfBPWT8pXyx5y9kJFxFQ3MZRRV7mbD4\nlnOOU5u3i6iQicRHOXuBXjThdlZv/HnXztju+ESmEZyxiKz96z
HozSA0hKQuxKNzw1ZfOOw2bG2N\n6N28zxkcS3Z/QNORnVwx+yGkhE27X6DU04/wiVeQeMXP+jz2QDSW5XBo1Ur8fWJpbauhxDeYcdf/\nVgV4Bnbn/itgo5TyKSHEfZ3H3eVrSWCelLJ2AGONal5hSUy+4990NNVgcPPpsZqhMvz0Zm9s9g7q\nGorw9Y6ivaOJ+oZiwntY54+ddyvHzWvYm7cBndmT9BW/79WOS43OQKuluevYYmkGh4PG0uyz1k1P\nWPhDgsbOoaWqAJNPaL86PdXm7yb7g6cREtBoGXvNr8+6XNKQt5v0MUvxcHN+BuljlnIobzvhE6/o\n89gDlbvuBaaN+xax4dNxOOxs2PYU5Qc/JVRVmBxQcL8SOJGS8Cqwie6DO5yreIeCRqvv4dd8ZSRp\n9UYSLruD9RufxN83jrqGYoIzFuERFNvt+UJoiJx+/Vn7gXYnMGkWu774N1/v/Qe+3lFk528kKmwK\n+Rv/TsYtZ+8R6h2Rcs6HkVJKSna9R1XmZwiNjvDp1xCYPBtrawNHPniG+ZPvItg/iZLKA2x553Gm\n/vCfPdZY15o9aWwu7zpuaC5DZ/bs9tyh1tFUTYi/84efRqMl2DeBxobKEZmLqxlIcA+WUp5IYq0A\neqr6I4FPhBB24G9Syv83gDEVZdgFpczFMyyRlqpjhHkH9xjYpcNOe2MlWp2xzy3gdCYP/JMvork4\nD41Gx8SU5fh6RbF+x9OD8RYo2f0+VbvW4eMewvHyvRx+/ykqDnxC+LRr8XAPJMAnDoDwoHSMBg/a\n6st7fJ9Rs1aw/7/30dBSDkiOVx1g/M2DM8++8gxNJCt/PRPHLqeto4GCsh3EpP9wRObias4a3IUQ\nG4HubidP21UhpZRCiJ7qqs+SUpYJIQKBjUKIbCnllv5NV1FGhtkn9Kxb4i3NtRz830PYWhqw2doJ\nHDuHMZf9pMcmIN3xjZvIsaO7mDXx+5iN3mw98Cpeg5QeWH1wEyF+iTQ0lbB88UsIBJ9se4a8DX+l\nte44b3z0AxKi5jI2fhFtbWdP3XTzj2Dirc9TlfMVABMu/16vcvmHQuKSuzn09iPkfPxjHHYrUTNv\nxD9+yojMxdWcNbhLKRf29D0hRIUQIkRKWS6ECAW6/V1ISlnW+WeVEOIdYCrQbXAXQqw85XCTlHLT\n2aevKK4hd/1fiPQZy6QZy7HZ2vn4699Ttu9jwiYs7vU1AsZMo626mHc+vQ+kA9/oDJIvu3dQ5qfR\nGahrKCJlzGIMeudmLzeTLzpbG1df/lvsDjsbv36CtZsfInberefMcjF6BRAx+apBmdtAGDz8yLjl\nOWxtjWj0phGvvzMchBDzgHnnOm8gyzLvA7cAT3b++W43k3ADtFLKJiGEO3Ap8HBPF5RSrhzAfBRl\nxLRU5jNz8s8QQqDXm4kLn87BLf8leNy8PvUHjZx+PRHTrkXa7afVQLdbOyjetoq2qiLMAZFEzliG\nVt/7B+/hM5Zx5L2nqKrN7arnUttQxPTxt6LVGtBqYWz8ZRyu2UnYxMt7/8ZdgBDigipZ0HnTu+nE\nsRDioe7OG0jb9CeAhUKIHGB+5zFCiDAhxNrOc0KALUKIfcB24EMp5YYBjKkoLsnsG8rx8r0AOBw2\nSiszMRs8KTuwsc/XEkJzWmCX0kHW6kdxFOaS5JWOLMrn0KqHkY6eSxN8k3/8ZBKX/ozc41+y/usn\n+GTHc3TYWqiszek6p7IuF7N/ZJ/nq7imft+5d6Y2Lujm66XA5Z1/zwcy+j07RenU3lBB7scv0VZb\ngltAFGMu+wlGr5FZ5+1O/GU/Zs8/7+BY2U46LM14e4QSEZROU1vjgK/dVltCW1URSy55Bo1GS3T4\nNN757F5aqot6fOjZnYCEGfj8MI3a/N2AJMT/O2S9/SiVDfnY7VZarA2Mv/ypAc9XcQ1qh6ri8uzW\nDjLfeICksIuImrycgpKtZL71GyZ+909otPpzX2AYmH3D8BszFVFby7Tk6zHozXy283mSpgy8VK90\n2NFotF0PZwUCTS93vn6TzuRxWlG1id97gfrCfQiNlsSYCX1aQlJcmwruistrqSpELwykJTqbWI9P\nuob80u201pb0azfmUElYfBe5617g813Poze4EX3xrYNSO8XNPxKdhy9bD7xKbNhUjpXtQpjccB+E\nEhV6kyeByb3vRNUXHU3VFH35OpbmOrwiU4mYerXaOTqMVHBXXJ5Wb8JiacZut6DVGrDbLVitrS6X\nGaEzupF89X2Dfl2h0TLuhocp3PQquwo/xBwQQerSx9Bo+/bPt+LQ51Qf+gKNzkD49GvPuvN1oKzt\nTex/7ZfEhU4jwH8aWYc2ktdQwZjLfjxkYyqnU8FdcXluAVF4Roxlw/Y/EBmYTlHFHnzjJ2HyvnB2\n9OpNniQsuqPfry/bv56SL99i8thldFia2bNqJak3/g7PIao8Wpu/Gz/PCCaNXQZAaGAK//v4TuIX\n/kDdvQ8TFdwVlyeEIPmqeyk7sIHK6mL8piwmJH1hnzYIXejK93zErPTvEBLo3BTV3tFMxYFP8Fw4\nRGWlpeTUqiP1jaVI6cDS2tCr2vbKwKngrpwXhEZLWEbvNwQNFykdCKFBSgfF297uXPbQEz5jGYFJ\nswZxHImlqQYpHRi9Avv1g02e9nfHkP5w9IubTOGmV9m2/9/UNRyjsaUcP59Y9rx8BynXPqgacwwD\nFdwVpR8aS7I58sEztDaW4+EXjXfMeFrzM5mddgsd1ha+Wv8XdCYPfKPHD3gsh91K9ntP0VCUidBo\ncQuIIuW633TbeLonoZOu4OstLzMx6To6LM0cLvyEtBW/G/DceqI3exKcvoC87WswG724duEf0OtM\nHC/fy9YPnmHqj14esrEVJxXcFaWPrG1NZK1+lBmp3yYiJIPc4q/Ytf915k+9mwBf5zJHavxiyrK/\nHJTgXrx9DfqmFpYt/CNCo+Xrff+g8ItXGXPpj3p9jZD0hWh0BrKzNqPRG0ld/oizc9QQaasro2zP\nWtISrqC1vRa9zrmbNiwonbYdf+z6jUcZOurTVZQ+aqkqwNM9iKiwyWg0OhKj56LXmahrKuk6p62j\nEaEbnGye1vJ84sJnoNXq0QgN8REX0VpR0OfrBKXMJeX635B81b14hiYMytx60lZfjo9XBCEBYymp\nOEBrWx0AR4u+wNM/WgX2YaDu3BWlj/RuPjS3VGGxtmHQm2lrb8Bqb2d/zrvYbO10WFvILfmajPnP\nDMp4Jr9QjpdmEhs+HRAcr9yPybfnCpWuwM0/gvqGYvR6N5JiF/Dup/ei1RrQGM2MW95jeSllEAkp\ne6rUO7yEEFJK2ecnPPN+9ZFrvAHlgpK38a805O4hxD+JkqqDBE9YhHdUGtXZXyJ0ekIzFg9a8xW7\npY3MNx9Etrag1eqwYCf9pscxDFGv1MFSmbWJ3PV/xmD0wGJpJW7B9wkaO6fP+fkXgk1PLOn30+2e\nYqcK7orSD1JK6gr20
FZXintQ7KDsRD0bh91GU1kOUjrwCk1EozMM6XiDxdbRiqW5BqNXYJ+qWF5o\nhiK4qx+hitIPQgj84iYBk4ZlPI1Wh1d4MqV7P6Js5/voPXyJnHmDy+eM64xufcrqUQaPeqqhKOeJ\ngs9foXb3BhK90vBqsrP/tXuwtjeN9LQUF6WCu6KcB6R0ULLnQy6Z+jPiImYyZdwK/DwiqMndMdJT\nU1yUCu6Kcj6QEpBoNSdXUrUaPTj6XvZXuTCo4K4o5wGh0RIybj6f73qR0sqDHDy6loq6HPziJ4/0\n1BQXpYK7opwn4i/9MeYx6ewuXsdxWynpNz3h8umQyshR2TKKcp7QaHVEz/7WSE9DOU+oO3dFUbol\npQNra0OfGnErrkPduSuKcobG0iMcXvM77NZ2EIKkpb/EX63vn1dUcFcU5TQOm5WsNY8xI+VmosIm\nU1l7lM8+/AMTv/eSy2+aUk7q97KMEGKZEOKQEMIuhJh4lvMWCSGyhRBHhRCD32BSUZRB1d5YiVbo\niApz3qkH+SXg7RVBa/WxEZ6Z0hcDWXPPBK4BNvd0ghBCC7wILAJSgBVCiLEDGFNRlCFmcPfB0tFE\nY3MFAO0dTTQ2lWL0DBjhmSl90e9lGSllNnCuVl1TgVwpZWHnuW8CVwGH+zuuoihDS2d0J27+baz7\n4jEC/ROpqcsnJGMxbv6RIz01pQ+Ges09HCg+5fg4MG2Ix1QUZYBCMxbhFZFCS9Uxgny/hWfI0Db3\nUAbfWYO7EGIj0F1R6vullB/04vqqHK+inKfcA6JwD4ga6Wko/XTW4C6lXDjA65cAp/4uF4nz7r1b\nQoiVpxxuklJuGuD4iqIoo4oQYh4w71znDdayTE8L77uABCFEDFAK3ACs6OkiUsqVgzQfRVGUUanz\npnfTiWMhxEPdnTeQVMhrhBDFwHRgrRBiXefXw4QQazsnYQPuANYDWcBbUkr1MFVRFGWIDSRb5h3g\nnW6+XgpcfsrxOmBdf8dRFEVR+k7VllEURRmFVHBXFEUZhVRwVxRFGYVUcFcURRmFVHBXFEUZhVRw\nVxRFGYVUcFcURRmFVHBXFEUZhVRwVxRFGYVUcFcURRmFVHBXFEUZhVRwVxRFGYVUcFcURRmFVHBX\nFEUZhVRwVxRFGYVUcFcURRmFVHBXFEUZhVRwVxRFGYVUcFcURRmFVHBXFEUZhVRwVxRFGYVUcFcU\nRRmF+h3chRDLhBCHhBB2IcTEs5xXKIQ4IITYK4TY0d/xFEVRlN4TUsr+vVCIZMAB/A34hZRyTw/n\nFQCTpJS157ielFKKfk1GURTlAtVT7NT194JSyuzOC/dq/P6OoyiKovTdcKy5S+ATIcQuIcTtwzCe\noijKBe+sd+5CiI1ASDfful9K+UEvx5glpSwTQgQCG4UQ2VLKLX2dqKIoitJ7Zw3uUsqFAx1ASlnW\n+WeVEOIdYCrQbXAXQqw85XCTlHLTQMdXFEUZTYQQ84B55zyvvw9UTxnoc+AeKeXubr7nBmillE1C\nCHdgA/CwlHJDN+eqB6qKoih91FPsHEgq5DVCiGJgOrBWCLGu8+thQoi1naeFAFuEEPuA7cCH3QX2\ns4wxr7/zG03U53CS+iyc1OfgpD6HnvU7uEsp35FSRkopzVLKECnl4s6vl0opL+/8e76UMqPzv1Qp\n5e/7OMy8/s5vlJk30hNwIfNGegIuYt5IT8BFzBvpCbgqtUNVURRlFFLBXVEUZRQa8APVwSKEcI2J\nKIqinGe6e6DqMsFdURRFGTxqWUZRFGUUUsFdURRlFHL54C6EeFoIcVgIsV8IsUYI4T3ScxoJvS2x\nPFoJIRYJIbKFEEeFEPeN9HxGihDiZSFEhRAic6TnMpKEEJFCiM87/00cFELcNdJzcjUuH9xx7mod\nJ6UcD+QAvx7h+YyUTOAaYPNIT2S4CSG0wIvAIiAFWCGEGDuysxoxr+D8HC50VuBnUspxODdS/uQC\n/n+iWy4f3KWUG6WUjs7D7UDESM5npEgps6WUOSM9jxEyFciVUhZKKa3Am8BVIzynEdFZdK9upOcx\n0qSU5VLKfZ1/bwYOA2EjOyvX4vLB/Ru+C3w00pNQhl04UHzK8fHOrykKQogYYALOmz+lU7+bdQym\n3pQWFkI8AFiklK8P6+SG0SCVWB6NVL6u0i0hhAfwNnB35x280sklgvu5SgsLIW4FlgCXDMuERshg\nlFgepUqAyFOOI3HevSsXMCGEHlgN/EdK+e5Iz8fVuPyyjBBiEfBL4CopZftIz8dFXGilkXcBCUKI\nGCGEAbgBeH+E56SMIOHs7/lPIEtK+ceRno8rcvngDrwAeODs4rRXCPHnkZ7QSOipxPKFQEppA+4A\n1gNZwFtSysMjO6uRIYR4A/gaSBRCFAshvjPScxohs4CbgYs748LezhtBpZMqP6AoijIKnQ937oqi\nKEofqeCuKIoyCqngriiKMgqp4K4oijIKqeCuKIoyCqngriiKMgqp4K4oijIKqeCuKIoyCv1/rD8G\nashZAScAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Train the logistic rgeression classifier\n", + "clf = sklearn.linear_model.LogisticRegressionCV()\n", + "clf.fit(X, y)\n", + " \n", + "# Helper function to plot a decision boundary.\n", + "# If you don't fully understand this function don't worry, it just generates the contour plot below.\n", + "# see whole code: https://github.com/dennybritz/nn-from-scratch/blob/master/nn-from-scratch.ipynb\n", + "def plot_decision_boundary(pred_func):\n", + " # Set min and max values and give it some padding\n", + " x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5\n", + " y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5\n", + " h = 0.01\n", + " # Generate a grid of points with distance h between them\n", + " xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n", + " # Predict the function value for the whole gid\n", + " Z = 
pred_func(np.c_[xx.ravel(), yy.ravel()])\n",
+    "    Z = Z.reshape(xx.shape)\n",
+    "    # Plot the contour and training examples\n",
+    "    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)\n",
+    "    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)\n",
+    "#######################################\n",
+    "\n",
+    "# def plot_decision_boundary(theta, X, y):\n",
+    "#     plot_data(X[:, 1:3], y)\n",
+    " \n",
+    "#     if X.shape[1] <= 3:\n",
+    "#         plot_x = r_[X[:,2].min()-2, X[:,2].max()+2]\n",
+    "#         plot_y = (-1./theta[2]) * (theta[1]*plot_x + theta[0])\n",
+    " \n",
+    "#         plt.plot(plot_x, plot_y)\n",
+    "#         plt.legend(['Admitted', 'Not admitted', 'Decision Boundary'])\n",
+    "#         plt.axis([30, 100, 30, 100])\n",
+    "#     else:\n",
+    "#         pass\n",
+    "#############################\n",
+    "# Plot the decision boundary\n",
+    "plot_decision_boundary(lambda x: clf.predict(x))\n",
+    "plt.title(\"Logistic Regression\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Implementation\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "raw",
+   "metadata": {},
+   "source": [
+    "Now we are ready for our implementation. We start by defining some useful variables and parameters for gradient descent:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Loss after iteration 0: 0.432387\n",
+      "Loss after iteration 1000: 0.068947\n",
+      "Loss after iteration 2000: 0.068936\n",
+      "Loss after iteration 3000: 0.071218\n",
+      "Loss after iteration 4000: 0.071253\n",
+      "Loss after iteration 5000: 0.071278\n",
+      "Loss after iteration 6000: 0.071293\n",
+      "Loss after iteration 7000: 0.071303\n",
+      "Loss after iteration 8000: 0.071308\n",
+      "Loss after iteration 9000: 0.071312\n",
+      "Loss after iteration 10000: 0.071314\n",
+      "Loss after iteration 11000: 0.071315\n",
+      "Loss after iteration 12000: 0.071315\n",
+      "Loss after iteration 13000: 0.071316\n",
+      "Loss after iteration 14000: 0.071316\n",
+      "Loss after iteration 15000: 0.071316\n",
+      "Loss after iteration 16000: 0.071316\n",
+      "Loss after iteration 17000: 0.071316\n",
+      "Loss after iteration 18000: 0.071316\n",
+      "Loss after iteration 19000: 0.071316\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 29,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEKCAYAAADpfBXhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXeUJFXVwH+3w3T39ISePDthZ3Y258CybCAsQXJQFAVU\nRFQQlCAoYfWDBclRCQKiIIICIiBIUIIseXPOYcJOzqGnu6dTve+P6l16ZyfnXep3Tp9T1fWq7q1X\n1bffu++++0QphYGBgYHB4YVpuBUwMDAwMBh4DONuYGBgcBhiGHcDAwODwxDDuBsYGBgchhjG3cDA\nwOAwxDDuBgYGBochhnEfYYjI2yLy/R6Uc4tI/uBrNHyISLGInDhEshaJyK5IvZ49ANf7i4j8tovj\nnT4/EblYRD7p4txlIvKj/urYwXWXishzA33dgURENovIscOtx6GAYdz7QMToeEWkRUQaReQzEblM\nRKS/11ZKna6U6vYHppSKV0oV91dee6LuzS0iDSLypojkDLScHqIin6HgNuDhSL2+MQDX61L3fj6/\nwaqXET/pRSk1TSn18UBeU0Q+FJEaEWkWkfUD8ec+EjCMe99QwJlKqQRgNHA3cAPw52HVamDYd2/x\nwCigGnhkeFXqHSJi6cNpo4GtfZRn7st5I5B+N056LbBvz2qguQrIVkolApcCz4tIxjDr1G8M495P\nlFJupdS/ge8APxCRqQAiYhOR+0WkRESqRORxEbHvO09Ezom0EppFZLeInBz5fn+XW0TGichHItIk\nIrUi8mLU+ZqIFES2E0Xkr5HWR7GI/HpfLyLSxf9URO6LtMQLReTUHt6bH3gFmBIltytZB3TrRSQ/\noqcp6t5ui+jTIiL/FZGUqPLfj9RXnYgsidZFROaJyBeRnlKFiDwiItZ29XGFiOwEdorIoyJyf7tr\nvCEi17S/TxHZAxQA/47oZRWRrEj5+oi75sdR5ZeKyD9F5DkRaQZ+0EkVJkd6Pi0isnzf84rSd9/z\nS4nIahaRFcDYdvp9TUS2R96DR9CNsEQdv0REtkae739EZHQ7OZeJyM5I3T3aia4HISIvi0hlRO5H\nIjIl8v2RkXc6WodzRWR9ZNskIjdG3us6EXlJRJIix/a9E5eISAnwfgdyUyP11hip/4+jjhWLyAmR\n7SbRe5huEWmNXHd05NiZov++9vWsp3d2n0qpTUqpYNRXViC3p/U0YlFKGZ9efoAi4IQOvi8BLots\nPwT8C3ABccAbwJ2RY/OAJuDEyH4WMDGy/SFwSWT7BeCmyHYMsDBKlgYURLb/CrwGOIE8YEfUNS4G\nAsCP0A3CT4Hybu5tn16xwLPAX6KOdyXrFuC5qLL5ET1Nkf1lwC5gHGCP3OtdkWNTADdwdOReHwCC\n++oZmBOpN1NE7lbg6nb18d9IfduAI4FyQCLHUwEPkNaTZwp8DDwa0WUmUAMcHzm2NFKnZ0f27R1c\n7y9AHTAXMAPPAy908vxejHwcwFSgDPg4Su8W4NzIda6J1Mu+Oj8nUqcTI3Xza+CzdnLeABLQDVYN\ncEondbC03fO7OPKcrejv87qoY1uAU6P2XwN+Edm+Gvgc/b22Ak8Af2/3Tvwlcr+2DvS4C3g8cr9m\nYFEPfnt3or9fZmA2eo/zSPR3/qLIeTFdvPdvAr6Ibm8Pt40ZiM+wK3Aofrp4wb4Aboq8UK37fryR\nYwuAwsj2k8ADnVw72rg/Gymb3UE5Db21aQb8wKSoY5cCH0a2LwZ2RR2LjZyb3on8YnQj24huwMqA\naZFj3clqbxz2/ZBNUfe2JOr45cA7ke2b9xmAKD39HdVz5Pg1wKvt6mNxuzJbgZMi2z8H3uzJM0U3\ngiHAGXX8TuCZqPtc1s078gzwx6j904BtnTy/ADAh6tgdwCeR7YuAz9tduzTqHXln33Zk34T+J5Yb\nJSe6UfAScEMnOh/w/Nodc0WuFR/Zvx54PrKdHJGZEVXv0X+UoyL3aIp6J/K7qLtb0RtGY3vy20Pv\nNRcBKZH9x4Hb2pXZDhzbzTMzA6cS+ZM61D+GW2ZgyQEa0FtbscCaSLewEf1HmBpVbk8Prnc9+h/F\nStGjBH7YQZlU9NZRSdR3e4HsqP2qfRtKKW9kM64TmQo4RymVhN4CvhL4SETSeyirO6qitn1RemSh\n/5FE61m/b19EJkS66pURV8gdQAoHUtpu/1nge5Ht7wE9jQTJAhqUUp6o79rfZxndUx21HX2v0aQB\nFg7UfW87XdrLii6bB/w+6j3bV2cdPn/A24keByAiZhG5O+JaaUY3noov3+G/AWeJSCzwbfSexr77\nzQdei9JpK/qfZbQfu/2ziuY+YDfwrojsEZEbutBzNvqY0NeVUvvuPQ+4bp/8iA456H8ynaKUCiul\n/gOcLCJndVX2UMAw7gOEiByJ/kP8FP0H5gOmKKWSIh+X0gdgQX+xx3V3TaVUtVLqUqVUNnAZ8Ido\nv22EOvRuen7Ud6PpmfHpTr5SSr0GhNHdJd3J8qD/qe0jsxfiKojyc0aMRrTxfhzdSIxT+sDXrzn4\n/W0f7fE34BwRmQlMQm8N9lSXZBGJNoLt63SgIktq0Q3f6Kjvorfb14twoD94L3Bp1HuWpJRyKqWW\n91OvC4Gz0V10icAYonz9Sqly9J7quRz8x7kX3WUTrVOsUqoyqkxXkUStSqlfKqXGRnS4VkSOb18u\n0uB4DbhCKbWhnfw72smPU0q91MN7t6L3qg5pDOPed/YNIiaIyJno/vHnlFJblFIa8BTwOxFJi5TL\nlsigKXpUzQ9F5ITI4FO2iEw8SIDIefJlGGIT+g9Ciy6jlAoD/wDuEJE4EckDfoHu4+3vvYmInAMk\nobsUupO1DjhWRHJFJBHdRdXhtTvgFeBM0ePNY9BDE6Pfzzh0d5FXRCahu3S6RClVBqxCHyf4p9IH\niLtFKVWK7jO+S/SB8RnAJfSuTnsUeRKp01eBpSLiiAxa/oAvjd/bwFQR+YbokSVXceCf5hPAkqjB\nzkQROa+/eqHXtx9oEBEnuluqPX9FjxKbFrmHaJ3ujBrcTJNehBeKyBmiBxMI+nhDmHbvfaQu/onu\nGvpnu0s8BfxU9EF4ERFn5JoH9VhEZKKInBape6uIfA84Bviop/qOVAzj3nf+LSIt6K2Em9AHAKPd\nJjegdy2XR7q17wETAJRSqyJlH0I32ss4sLW2j7mR893A68BV6svY6OiWz5XoreZC4BP0FuszUeXa\nt5K6a3X+OyKzGfgtcJFSalt3spRS76P7dDeiG9V/dyN7v25KqS3Az4C/o7dWGziw6/5L9NZkC/BH\n9AHI9tfqiGeB6fTcJbOPC9B7KBXohutmpdT/2uvdBd3Ve/T2z9GNaRXwdOSjF1KqDjgPPdy2Dr3H\n92nU8X8B9wAvRt6zTcApncjpTvfoY39Fd7+VA5vRW+ntz3sN/b19TSnVFvX979EHcd+N/Ea+QB8M\n70yn9oxH/7240f9kH1NKtTe2Oei9yWuiImZaRCRHKbUG
+An6gHgD+oDzRZ3IEvRAgGr0weYrgW8r\npdZ3o+OIZ18kQd9OFslFfwnS0R/YH5VSD3dQ7mH0ASUvcLFSal2fhRoY9ALRZzM+p5TKG25dDkdE\nZDe6W+h/3RY2GFL6O4EgiD6yvD7S5VkjIu9FtfIQkdPR/aTjReQodN/p/H7KNTDoFtHj4K9G76Yb\nDDAi8k1AMwz7yKRfbhmlVNW+7otSqhXYhj6oGM3Z6F1jlFIrAJccBrO/DEY2IjIZPZwzA/jdMKtz\n2CEiy4DH0F1pBiOQAZv6K3oSpNnAinaHsjnQd1qG7i+rxsBgkIj0HrsN+TPoG0qpxcOtg0HXDMiA\nasQl80/0GYOtHRVptz9QYWQGBgYGBh3Q75Z7xK/5CnpIUkdxxOUcGJebE/mu/XUMg29gYGDQB5RS\nB4W49su4R+JQ/wxsVUp15td8Az3U60URmQ80Rc1k61JBEVmqlFraHx0PB4x6+BKjLnSMetAx6qHz\nhnF/W+6L0GenbRSRfeGNS4jEbCulnlRKvS0ip0dCpjwcGAtuYGBgYDAI9Mu4K6U+pQd+e6XUz/sj\nx8DAwMCgd4z0GarLhluBEcKy4VZgBLFsuBUYISwbbgVGCMuGW4GRSr9mqA4kIqI6GhQwMDAwMOic\nzmznSG+5GxgYGBj0AcO4GxgYGByGGMbdwMDA4DBkJKw83i8+n37myBg0MDD4CmP/8Fyuvb83a7MY\nRLPs7tMHfLzRaLkbGBj0G8OwjzwM425gYNAvZp0WGm4VDDrAMO4GBgYGhyGGcTcwMDA4DDGMu4GB\nQb843XTVcKtg0AGGcTcwMDA4DDGMu4GBgcFhiGHcDQwM+oz9w3OHWwWDTjCMu4GBgcFhiGHcDQwM\nDA5DDONuYGBgcBhiGHcDA4M+Y6QdGLkYxt3AwMDgMMQw7gYGBgaHIYZxNzAw6BNLzrhiuFUw6ALD\nuBsYGBgchvTbuIvI0yJSLSKbOjm+WESaRWRd5POb/so0MDAwMOiagViJ6RngEeCvXZT5SCl19gDI\nMjAwMDDoAf1uuSulPgEauyk24EtIGRgYDB9G2oGRz1D43BWwUEQ2iMjbIjJlCGQaGBgYfKUZigWy\n1wK5SimviJwG/AuYMARyDQwMDL6yDLpxV0q5o7bfEZE/iEiyUqqhfVkRWRq1u0wptWyw9TMwMOg9\nxszU4UNEFgOLuys36MZdRDKAGqWUEpF5gHRk2AGUUksHWx8DAwODQ5lIo3fZvn0RuaWjcv027iLy\nAnAckCoipcAtgDWixJPAt4DLRSQEeIHz+yvTwMDAwKBr+m3clVIXdHP8MeCx/soxMDAYGdg/PBfu\nH24tDLrDmKFqYGDQKwx/+6GBYdwNDAwMDkMM424wqLSEAhS1ufFr4eFWxcDgK8VQxLkbfEV5pa6Y\nJ6t3kCAx+Alzd/4RTI1NGm61DPrBwk3XwY2+4VbDoAcYxt1gUNjT1sLT1bu4RR1JKg7WqlqWlKzl\nX5NOQGRws1HUBdt4unoXtcE2ZjiTuCCtAIsYnVSDrxbGG28wKBS1tTJBXKSKA4A5koZXC+EOBwdV\nbms4yOV7viDQBLM86XxUV80D5ZsHVaaBwUjEMO4Gg0K2LZZC1UKLCgCwQzViAl6oK+QPldvY6Wse\nFLkrW+tI1RycJ+OYK+n8XJvB201lBDVtUOR91VhsuGQOGQy3jMGgMNnh4uyUXG6pX0mmOChTHkwI\nNXVBYjBxdcMK7s6fy0xnMrt8LXzaUoXdbOFUVzZJFluf5SrUAS0WifrewOCrhGHcDQaNH2VO4JSk\nbGpDbbxZX4q9JYazZQwAacrBs9W7uSC9gJtL1rJIjcItbl6uLeJP448muY8Gfl5cGo/Jdl7TCikg\ngf9JGV9LyCbGZB7IWzMwGPEYbhmDQSXH5mS2MwVNKVx8abBd2PBpIf5YuYPvq0mcJ+O4hClM0VJ4\ntb64z/LizVaeGLeAUEKIzxwVzEtN4cac6QNwJwYGhxZGy91gSFjsyuSh1q1kqlhiMPFP2c3XE0fz\nWsNe0rDvL5emHLhDoX7JSrc6+M3omf1V2aAdxoLYhxaGcTcYEo5LHEVrOMQLtbsIozg9KZvzUvOp\nDbXxcsMeLlITaSHAB1LKbxINw2xg0F8M424wZJyRnMsZybkHfHdZ5kQe1bZxb/Na7CYzP82YyLy4\ntGHS0MDg8MEw7gbDikVMXJM9lWuypw6p3JDSWO9poE0LMyM2iQRLzJDKP9QwZqYeehjGfQQT1DQ+\ndVfjDgeZ7Uwh1+YcdJlVAS+PV26nOtjGdGcSP86YgO0wizTxa2F+UbiS5kCQeKxUiIdHCuaTZ4sb\nUDlhpdjgacCjhZgW6+pXiKeBQW8xjPsIJaCFuapwBYGAIl05eJzt3JY3hyPjUgdNZktkducibRSz\nVQYf+Mv4bWADt+fNGTSZw8Er9cWY/SZ+o+ZiEuEDrYwHy7bw+7FHDZiMkNK4vmg1FW1ekrFTgpsH\nx8xjoiNxwGQYGHSFYdxHKP9pKge/cL2ahYiwSdXzYPlmXpi4eNBkrmmtY5RychZjQGCcSuRK9yf4\ntBAO0+HzqlQGfExQSZgiOW4m4uLjYPmAyni7sQx3W4ibtSMxi4kvVBX3lm3iz+OPHlA5BgadYcS5\nj1AaQn5yVdz+JFt5xNMYCgyqTLMIQb6cph+KzOs0MbiJvoaaqbEulksVHhVEU4plUs6k2IFtUVcF\nfIzTEjFHEpZNxEV18ND1WRtpBw49Dp/m2GHGLGcyr8o6FqlRpOHgDSliljN5UGUeGZfKE+YdPB/a\nwViVyMdSwRmJOf32ubeEAjxXu4e6QBspMXZCmkaMycTXU/LIiont17Xrgm38oXI7FQEvkxyJXJo5\nkVhz16/1Ka5sdvha+GXDZ1jExNiYeL6ZkMdbjaVMdbjIt8cDUNjm5q7SjZQHvYy1xbMkdwajeqjv\n5FgXD5u2cryWQwJWPqCMyQ5Xv+7VwKA3iFIjI+eGiCilVK+biJ9PP3Nk3MAg8Hp9CY9VbadNhTki\nNoVbR88e9KiO5lCAv9TspjbQxvQ4F99KGYO5Hyl6veEQP979GWNCCViViU+p5HTy8BJiuamKJ8ct\nIruPBr5NC3Pxrk+YHkxlKkl8KpWEHGEeGjOvR2mFPeEQbVqIB8q3UOxtJYc4Nql6bsiZzty4VC7c\n+RFnhccwgxQ+k0pWWap5bsKxPU4f/Ofqnfytdg9WMZET4+Te/LmkWO3dnzgCMSYwDS7L7j69zz+y\nzmyn0XIfwZyTksfZyaMJo4YsH3miJYars6YM2PVWtNYSH47hIiZxN2v4EVOYI3ocu2jCq3XFXNlH\neZu9jdjDFr4lYwGYoFxc6/uMupCftB4YUafZwnpPPSVeD7/W5mIRE0WqhXvKN3DH6DmkKDvHShYA\nZ5DPJ+FKKgJeRvcwquZHGRO4MK2ANi2Myxwz6HnsBwvDsB+aGMZ9hCMiWA5hn3dIadjQ3Tp+NOKx\n7j+WgJU2re/
jCCaEEBpKKUSEMAoNhaUTI7rZ28hydw1xZitnJOUSb7ZSHxnb2PfnmUc8Hi2E3WSm\nUfkJqjBWMeNRQVoJ4uzlwLLDZDmsBqMNDh363RwUkadFpFpENnVR5mER2SUiG0Rkdn9lGvSNoKYR\nUkOb13xuXCrF0sJ/1F7smHmW7RSqFjapet6RvZzgGtXna0+PTSLGauIZ2cZyVcVjsomj4tI6jCf/\nX1MFNxStprY2xOrqBn686zPc4SBTY11spJ69yo2mFG9RzARbIpMdLmbGJXOfrONVVch9pnWclZR7\nyLpVDL56DEST4hngEeCvHR0UkdOBcUqp8SJyFPA4MH8A5Br0kJDSuLt0E++16OF+Z7pyuTZ7Wr98\n6T0lyWLjsYIF3Fu2ib0+N8eRzbNs11vcopjjTOnzta0mE48UHMWzNbvZ6W9gUWwaF6QVdFj2yaqd\nXKamMlH0NVyfDG/m7cYyvpM6hl9kT+Ge8rX4VZgYzGRqDqqDPm4ZPYt3m8rY3ebmJ47xHJ/Y9z8i\nA4Ohpt/GXSn1iYjkd1HkbODZSNkVIuISkQylVHV/ZRv0jGdrdlPm9vIIxxJG8WjzRl6yFXJh2the\nXcevhSnxt+I0W3s1CJprc3JO6mjM5Sa+rcbt//7n6mNawkES+zFI7DRbuTxzEl+01rDX72FNaz1H\nxR+cm8arhUjFsX8/VTnwRJb8y7PFEaNM/IrZ5BHP26ESfl2ylmuyp/BY5Q4CKsxbUkacxfqVy3tj\npB04dBmKUbpsoDRqvwzIGQK5BhE2tDZwosrFJmZixcLxKocNrY29ukaZ38N3d37MzUXr+Mmuz7ir\ndCO9ibTKjomlULlxR5bd26masIgQZ7Z2c2b3PFSxld+XbmN7tZv79m7micrtB5VZlJDOi7KLOuVj\nm2rgU6lkQXw6ANt8zcyUVPIlARHhNJXHDn8zNxav4fvaRB7mWC7XpnHL3nU0DfJcAwODgWKoRnra\n9/8P2/DFkUiq1U5RWwsz0F0ghdJMqrV3eU7uLtvEMaEsTpXRtKkQ97vX8V5zBSe7snt0/mSHi7NS\ncrilfiWjJJZyPCzNnd1v19BefysfNFVwh5qPQyy0qiBLGr7gm6n5B0TM/CJrKr9jC/e0rMVptnLT\nqOlMjtXjzlMtNkrETUjT9IgZWogXC06szBQ93cNESSKDWEr8rbgsHc83UErxfO0e3m4owyzCBWkF\nB2XBPNRYW1cEZA63GgZ9YCiMezkQ/YbnRL47CBFZGrW7TCm1bPDU+upwaeYELvd8wV7lJoyizuTj\niYwFvbpGsd/Nd5kIgF0sTNdSKWpz9+oaP8mcyMlJ2dQG2yiwx/d5Kb1omsMBUsSOI/Iqx4mVRLHR\nEg4cYNxtJjM35Mzo8BoL4tN5x1nO7Z7VZONkCw1cnTWVByu2UKd8pIqDZhWgGm+XIZb/qCvindpy\nfqim4CfMYxWb+dxdw5KcmTi7mVg1Urn2fsOwjzREZDGwuLtyQ/HGvQH8HHhRROYDTZ3525VSS4dA\nn68cmTGxPDvhWFa6axGB+XFpOHvpDhkdE8fqtlpOJhe/CrPZVM851lxK/K1kx8T2OA4/zxY3oNkX\nC2zxNOHnC1XFHNJYQTVBU5icmJ5n0DSJcNvo2axqraMx5Oeq2EmMtsXhCYe4s3oN4ySRQlo4P21M\nlzNqP2yu4ltqHHmiz3D9hirgv+69XF24gsfHLsBqMrJ9GPSfSKN32b59Ebmlo3L9Nu4i8gJwHJAq\nIqXALaAHMyulnlRKvS0ip4vIbsAD/LC/Mg16T4LZykmurD6fvyR3BtcUreBzrZIWAqRYbDxRuZ04\nUww2s4mHxszr8dT8gcRptnJf/lxuLd3AM8FtjImJ54HR8zpMmbDV28QLtYUENI1Tk7M4PvHL+jCJ\nHDQQ+83UfGbHpVDsbyUnJpYJ3WR0dJgsNOHfv9+En4m4KAq2sMXXNOjpIwwMohmIaJkLelDm5/2V\nYzB0lPhbqYu4TvbFjOfYnDw/4TiK/G62epv4R2UR97AQp7LydqiEO0o38ujY+TSG/DSE/GTFxA7J\n5J0Sfys3711HYziAVUycnzaGgkhumGh2+Jq5rmglZ6sxxGLh995t+DWNU5O6HtsvsMd3eL2OuDhj\nHDcUr6ZG+fATZjlV3MgRPM1WwkM8v2AgMCJlDm0OTUegAUopNngbaQz5mexwkRnj6P6kHvBk5Q7e\naNhLpsRSoTwH5JC3m8xMdrj4rKWaWaTjFCstKsBe3Oz0NXPprs8o9LtJNtnxEuSe/LlMjU0aEL06\n46biNRwfyuV4yaZcebi/Yh0THIkHGeQ3G0r5msrlBNGNeZyy8kpdCcckZGAVEzEDsCDJTGcyD485\niptK1mAJmzif8XxKJX5zeNDrYTAwMkEe2hjG/RBEKcWtpevZ0trEKGK5V23i1rzZmBH+Ur2bNi3M\n8a5Mzk8t2J+zvCds8TbyTkMZt6mjiMPKdtXI0r3reHPySQfkRcm1xfGxFOJTuTzIesbj4kpm8IW/\nihZC3KzNZQP1/LpkLa9NOmHQcqp4wyEqg14Wo7tXssXJFElip6/5IOPePnWxnzAVAS9nbXsfBVyY\nWsCPMyYcpKtPC/FGQymNIT9HxKV2u1jKhNhEnptwLE9UbedzbyXZtlgeHTUf+2G2mpXByMcw7ocg\nn7tr2Nnawi3akVjFzDbVwG/3biCsNM5XE3ARwz9qdxNUGj9IH3/Q+X4tzGOV21npriXebOVnWZOZ\n5UymLOClQBKIi+R/mSRJtGlhPFrogHj0ryVm8XlzDb92r8CMiQsZj4gwXiVyE8upxMscSeOp8Jb9\n54aUhhkZUEPvMJmxiZli5WYMCfhVmBJxk2bNP6jsGck5XNu0klilhzg+zw5mq1QuYhKtBLm/YR3j\nHAkHzEJt08JcsfsLEoM2slQct9dv4OLMcXwjJa9LvWLNFq7NnnbQ9zVBHw+WbWFvwEOBPZ5rs6cO\nSMSQgUFHGMP3hyDVQR9jSMAqemtwPC4aNT+LVTZHSQYTJYnvaxP5T2NFh+c/UL6ZwqZWfhKayrH+\nbG4qXk2Jv5UCWzw7VRN1Su+Or1I1JJqtByXLMomwdPQsrsuZhghokWkLYRRBdCO+RTUQa7LoywXu\nWc4JW/7DKVvf5e3G0oP06Ssiwk050/m9bOAPsomlspK5CSkdpjSY7HBxT/5cSuKaWR9bg9Vk4lTy\nMImQIDEs0DLZ5DlwYtdHLZXYQhZ+qqZxjozhGjWTJ6t29ElXvxbmyj0rSPY4+GFwMvZWK9cUrhjy\nXD89ZeGm64ZbBYN+YrTcD0EmOVw8o3ZTo7yk4eB9ykg322kLh/eXaSOMtZNskh+2VHGnmk+CxJBD\nHDtpYrm7lu+kjuGHGeNZWr2KeLESMmnckz+3w9a2iLA4MZM3GvbyhG8LM1UqK6mmjRBPy1bqxM/t\no+dwR+lGktscPM5xVCkvD1WsZ7QtjmkD5IM+LnEUY+0J7GxrIdViY3ps
Uqe9gxnOZGY4k9nkbeD6\nwtXspIlMYtGUYo80syjmwGgZTzhEsrLtv14KdnwqtD8LZU/xa2E+aqnCpAnniJ77JlfFsSS0nFK/\nhzE9HLA1MOgNhnE/BJkS6+JHmeNZWrUKC0Kqxc7/Zc/i//auxaqZcRHDO7KXn6VP7PB8m5hoUQES\n0HO6uAns9wl/MzWfryVl0xjyk2l1dLkKk0mEe/Ln8kJtIcVtzSxypHJTwnSawwHybXEkWmK4vmQV\n96mFWMREDnHMI4ON3oYBM+6gR/Lk2HoW114bbOOG4jWcxmheYQ+rVQ2N+EmyxfCN5APdLXPjUnmK\nncxQteQSx7+kiIXO9F4Z9u2+Zq4vXoVJE9pUmBD6LNggGm0q3O9VrgwMOsNYiWmEUNTmZpO3kSSL\njYXx6T2alh/UNFq14P6FIMr8Hv5RV0ybFmKxaxQLI7lTogkpjZdqC/lHTTEnkEuleCgyN/Pn8UcP\nSJ6X9nxr+4d8LzSRyZKEphQPmtZz7qjRnNZNCOJAs9nbyEu1RVQGfcT4zVzNTJqVnx008TTb+Oek\nE0jpwP+9trWe31dspSkc4AhnCtdlT+3xBLDdvmauLlrJydpoTiaXh1hPEI0jyWCtqYaMWDuxJjPL\nW+uIM1tzzXexAAAgAElEQVS4YtRkjknIGOhb7xPGAh1Di7ES02HKsuZK7inbxExJpYxW3nDs5e78\nud0aeKvJRJLpS4OUY3NybfbUTst/0lLF7aUb9p9b7/Qw0R7HkpRplPhbeaNe94eflZI7YC3r67Kn\nctve9cwklWqTF6fNwkmJfZ9M1Re2epv4VdEqzlFjsGFlDbVU0EoFXuyRhUTiO4nJnxOXwrMTjum1\nzBdrC3muZg+jlJP/shcBrmYmd7KGTY5ajk/IpNDrpsYd4CZ1BDWal7tKN5JWMI9J3UyWMjDoCYZx\nHwHcX76Zq9RMCkggrDTu9q3lM3c1xyYMXF6PmqCPO0o3co2aRYEksFKr5mXvbnJtTu4s28haTz1n\nqnwEuL5lFXfmH8GsfuRa38eC+HSeGLeQ9Z4GEsxWjknIGLIlA/fxev1eTlN5HC85aEqxmhpuZw1j\nSaQENzMcSQMS576P2mAbz9TsYqmaR7LYaVR+bmYFU0mmET81Pi87fS0IcDvzSRIbKdhZqDJZ4a41\njLvBgGAY92EmrBTNWpDR6PlWzGIiRzmpD/k7LF/m9/C5W4/2ODFhVI8XzN7d5iZf4ikgAYB5ksHz\n4R1sqWumDj/fZCyLRc/waFcW/lFbPCDGHQY+n0xviY5xD6LRQpDfMJdscdKqgixtW8kuXwvjHQkD\nIq8u1Eaq2ElGTzKWJDbilJU7WEMOTm7iCHyEuIEvqMNHEnrvq07amGwe/sFV+4fnwv3DrYVBfzFC\nIYcZswhT7S7epBhNKfYqN+upZ3oHbpFtviZ+suczNlQ38UlVDT/c/SkNnfwJtCfDaqdceWhV+gIV\nlcqDH41LmEIStv3rnALYMRMcoSF6feGslBzekmI+URV8RiVmhGzRB2DjxEquxFEdHLjZmLkxThoJ\nsEU1ALBVNeCVEPEmC5cwGZMITrEygxQeYROvqD08wWZqLV5O7WEK5cHEyAR5eGC03EcAv82bw29K\n1nJZ2zJixcJ12VMZZz+4FflExQ6+qY3lGMkCBX8P7+TF2kKuGDW5Wxlj7QmckZzDbQ2rGCPxbKOJ\nMSoOm5hZqDL5GztxKH0p7n/KHq5J7v6aI5GwUgeNVUyPTeaOvCN4qbaIgBbG6jexSqvhSEmnRLkp\nooVxAxiOGGe2ckfkmYaUwmIS7hg9h79U72arr5FRONGUwichTk/KJs5sZZzZyRlJOb3O1mlg0BmG\ncR8BpFntPDluYbezOJvDAbL4MuRvlHLSEPL2WM5PR03iuMRMKoJevmcew8171/OpVkkucbiI4UXT\nTjJjHPwsdRLTYpO5qmQt232NpMc4+XXmpP2LW4xEdviaublkHRUhD1mWWJbmzWay40t958SlMCdO\ndzNt9zVzY/Fq/qbtICyKJTkzyBzgjJaznSm8MfkkWsIBEswxmEVIsdi4qmgF61UtbgmSarNxeeak\nAfX3DzR+dz173n0cb91eHMnZjD3lcuwJB0dhGYw8jFDIQ4g/VG5jY0MTP1JT8BLiUdnIZdkT+5zK\nd7uvmUcrtu3Pm/KzUZOwmcwopfhB0XKc2QuZWHAKVXXbWL/xGf4+dtGgTJefdVqI2Htv6DRR1dva\nw6x/p/N2SHMowLe3f4ggxGJhKsmsN9Xx4sTFnS6SEVaKppCfREvMkA7wtoQCbPY14TCZmR6bNOSD\ny92xcNN1+5+DFg6y7pmryE+ZRUH2fEoqV7O7agVzfvQopn6sezsYhIN+wn4PVqcLGWF12hOMUMiv\nOD/JmMiD4S0saV6OVUx8L62gXznaJzkSeXTs/IO+bwwHqAz4+Obk8xARxuTMp7TkQ7Z4mwY0Djva\nkHSVWvZ001VwBsw8u4kfTGij7fhXDzj+u4qtZBHHJUymhQCPs5kYJaxrrSPb5iTX5jzIiJpFSOli\nVaXe0JsZqwmWmA7nH4xEvA3lEAgye9I3ERFcCTkUV63GU1tC/KiDcxYNF2Wr/kXxx89hNluJiUti\n6nlLsSeOjPkCw4lh3A8hrCYTN+RM54ac6T0+p7DNzUctVdhMJk515fSo5R1rshDSQrT5m3HYXWha\niFZfPfHxAxeiZ//w3F6nlN3whotrgdCJF/G6PMWO9/V72exp5KdMI10cpOPgBJXNm6qY28s24hQL\ndrOZhwrmkW4dmLTI+/BrYe4u28SylkrsJjOXpI/nvNQxAypDKUVdyI8AKRbboGXY7AizxUYw5COs\nBbGYYwiHgwQCHky9XH93IFBKEfK3YrbYDug1NJVupnLFa5xz/J04HSls3v0WO16/j5kXGeE+hnEf\nwYSV4uW6IrZ4mhhlc3BR+rhezSJd72ngpuLVLFSj8EqQf9QW89S4RV2uAwp63vbvp43ltU9uIyd7\nPvV12xhntjAjdmBWEnrpyQvZcH/v/fehtlZ2vHwbzVU7yUCRO+dsnve1EW+2Uhv2kRMJJ63GiwML\nd6kFxCgTb2hF3Fe2mfvGHDkg+u/jkYptNLgDPMjRNGt+Hq7eyKiYWI4eoN6NTwuxpHgN233NaMAc\nZzK3jZ4zqMv1Rf/h2l2ZuPJm8t7y+8nLnENpzXrisicSmzK0i34HvM1se+V2WmuL0LQweQu+w+hF\n5wPQWrWL3IzZxMXqqZgnj/kaG7a/2tXlvjIces6prxD3lG3k/dpKClpd7G3w8bM9y/Fr4e5PjPBU\n5Q4uUBP4tozjYiYzO5zGy3VFPTr3krSxLEkbw5HNO7k4No57cmf2Kjd8Z7z05IVseKNvA7PF/3mM\nidXNPK4dzYPafPwbPuTHBVOJ+fov+ats52W1m6fUFtZLHceQhU3MiAjzyaSwl4t594RVrXX6yk5i\nYZQ4OU5ls7q1bsCu/1TVTsR
n4gG1iAfUIlo8IZ6v3TNg1+8OEWHiWdfhOuIkKq0txM88jslfv3FI\new8Au995mExHLhec9jjnnnQ/tRvep373SgBsCenUNO4mHNZDfKvqt2Nvt1ziVxWj5T5CaQ0H+aC5\nkgdZhF0sLFCZ3BVaw3pPw0FrfXZ6DS1EKl+20lOx4w73LC4e9NmlCwbQP7zkjCv05dL7iLt8O6eG\n8zGJEE8MxwZT+Lx0CwWn/AzrDx5gzs7f07DHwd5GD5sDDZyp8rGKibXUktvDxGK9IdFspTzk2d9j\nKJdWplgGZiIUwA5vMyeoXMxiwgzMV5ls9g7cn0dPEJOZrFmnDanM9rRU7ODYo29GxESs3UVB1nya\nKraTMm4eqRMWUL/tU17/6DfEx2VQ31jI5G8sGVZ9RwqGcR+hhJXCJIJF6Z0rEcGqTKxsraXI72a2\nM4WJ3UxTX5SQziv1e7hYTcJDiPeklF8lHLyIxFAw67RQr8q3NddQt+sLREykTTqaGGcStvhUdnia\nySYOTSm2WzxYIwNn8Rlj+TTjYaYd/zSWP1pBrNxQvwqHZsItbTydc/SA39OV2ZO5vmg122mkRQI0\nmNv4v5QZA3b9bJuTzf4GpirdHbZVGsixDd4i5CN1ZqotPpXq+h0U5CxEUxo1TbuJzz8OABETE8+5\nnpbybQR9zeRlTsAWPzAzqw91jFDIEcLq1joeKt9CUzjAbGcK12dP47bS9WheYbHKZisN/I9yRpvi\nyFJxrKGGX2RP7TJaJqQ0Hq/czntNFVjFxA8yxnF28uhe6RVWihJ/K6CnEehJtsqO6E2WQU9tMRtf\nWMLojNmEtRAV9VuZ9f0HCAd8bPnbjeQrJ24CeF3JTPnePZijBvh2vvkgeZbRjM9bTJO7jNqGPVTu\nfJ2/dxAVNBCU+T2saK3FbjKzOGFUp6GXXRFSGv9tKqcq4GNyrGt/NE1jyM/P9yzHHDYRRhFjNfFI\nwVGDkr0TRm4mSHflLja/fAtpSWPx+BowxbuY+u2lmA6jCV+DEQppGPcRwF5/K5ft/pxL1GRyied1\nKSQUG+a2vNk8WbmDrd4mRCDkh1+q2ZhEKFItPGrayJtTvtYjGSGl8XDFNt5uKsWE8J3UMVySPr5L\n/6k3HOKXRauo9PsQgfQYOw+MmddrA2b/8NxeTWnf9uodjLYVMGXsyQCs2/ZP6h1Bxp/6cwLeZlrK\ntmCy2HDlzTjoB1664hW821Zx4pHXYDJZWLvpObKadrG0g2Xvhhq/FsarhfanaAbQlOKG4tU0eAOM\nU4mskVrOTM3h4ozx+8/Z7G3U01Q4kgZ1MHWkGncAf2sDLWVbMdtiScqbiYzgiV99YUTGuYvIqcDv\nADPwJ6XUPe2OLwZeBwojX72ilLq9v3IPJ9Z66plFKjNEH/H/rprAzzwfYxMz10RS+L5cX8SGqub9\ng5rZOHFrwR7HWP+1Zjdbm5u4Uy0gSJhH6zeRbrVzVhct+aerd+H0W7lTzQQFf/Fv40/VO7k6a0qv\n7q+3uUqCPjeu1C/XMk2My6LavRmAmNhEUics7PTc7LnnsL18O//84Dos5hgKMi1ck9F5GuSh4oXa\nQp6q3oFVTGRaY7k3fy4ZMQ42eBvY6/NwizoSs5g4UeVwY+0XnJ9WgN1kxmYyc0QXi3JrStEUDhBn\nsozoma79xRaXTNqkgXetHc70y7iLiBl4FDgJKAdWicgbSqlt7Yp+pJQ6uz+yRjrrPQ08UbkddzjI\n/Pg0Lsuc2OMfW5zJSp20oTTdUNfShl3MByySN9uZwtPsYoHKJAcnr0ghcxypPY5cWOmu4wwtn0TR\nY4RP1nJZ2VLXpXEvanMzV2Xu/0OZo9L5wnfguqzucJC3GktpDQdZEJ/O1HYJzxZuuq7LCUodkVRw\nBOu3vk5ifBbhcIhNe94iY8E3enSuyWxh8jeW0NZUhRYOEpuczen3xvH59Ad6pUM0AS2MIH1uNa9p\nreOlmiLuYD5Jysa/g8XcVrqex8YuoDUcJAU75sgkqwRisIoZnxbavzpWZ5T4W7m+aBXN4SAhNK4e\nNaXL59kVfXlOI42abR9Tu+l/iNlK1rxzcOUOf29tOOlvy30esFspVQwgIi8C5wDtjfvQxk4NMcVt\nbm4qXs2FagIZxPJaYyG/07ZyfQ8nGx2bkMGLtUU8EthEjnLyhVRxReakAwz3OHsCN+bO4KHyLTSH\nA8xxpHDL6Fk91jHRZGUZ5byvyojFElmer+sp5Pn2ONb6apml9JbjWqlhjOPL1L3ucJCf7P6M7FAc\nqcrO9XWr+VXONBYnftnqjo6bbqnYQdkXL6MF/aRMWkTmzFM6/HPKOepcirzNvPHR/yFiInvuOWTO\nOLnH9yoiOJIO1OHBD889aGZrd4SUxj1lm3i3uRyA01w5/DJ7Wq9TBmz3NTOHNJJFj1w6SeXyy7a9\nAEyNTaKETaxQ1UzCxQeUkRUTi8vc/fT+Xxev4fhQDsdLDtXKyz2Va5noSGTCVyAffFtLDb6GCuyu\nTByuTKo3/4+9y/7K3MnfIRjysebV25l63lISsiYNt6rDRn+NezYQvZx9GXBUuzIKWCgiG9Bb979U\nSm3tp9wRxWfuGuaRwTzRIzd+oCZxa/PKHhv3GJOZx8bO5+3GMhpCfpY6Z+9PchXNcQmZHNfHBTzS\nbA6We2s5lwIq8fImxTyWPB9vOIRVTB22Sn+UMYHrvCv5dWC5fo0YOzdnzNx//O3GMrJCTi5lKghM\nVck8Ubljv3F/6ckL94c+ttYWs+XlpcyZeC4Om4s1X7xMONhGzpFfP0iumMwUnPhjCk78cZ/utSOu\nvT+TmU9eyHcu+3uPz3muZg97W7w8zDEo4LHmTbwQU8j308f1SnaG1cG7UkFI09dP3UEj6Rbd0Cdb\nbNyffyT3lG3ihdBOJtld3Jvb8aLk0fi1MKVBD4vRUwRnSCxTJZkdvubD3rhXb/4fhe//kcSEHJpb\nysg77iJqN3/IgukXkR15PwNBL5Ub3jOMez/oyWDmWiBXKeUVkdOAfwET+il3RGE3mXET2L/vJtBr\n/6fNZOYbKXndF+wjHzZXciNzSBc9lK5KebijdCMVQT2r5HdTC/hRxoQDjEqs2cKjYxdQ7HejgDG2\n+AOiZTzhIKnKsb9flooDj9ZxyGPNlg+ZOHoxE/JPAMBuS+ST9c90aNwHiw1vuKAXBn5DawMnqBzs\nov9MjlfZrPPU9Fru8Ymj+KCpgps9K0hSdopo4cq0L1MqT4518ZdeLuUXIyacJiu7tWbG48KvwhRJ\nC0dqyVy66zMaI1FX12RNIbYHA+C9TQUxXAR9bva89wSnHf0bXPHZuD21vPnRLcS6sg4wRkppMMST\nrUYa/R16Lwei5yLnorfe96OUciulvJHtdwCriHQ4j11ElkZ9FvdTtyHja4lZlJpbeY4dvKv28phs\n4uJetu4Gm5DS2EQDlcoDQDFucoPx/IHjuIeFvNdQybKWqoPOM4sw1p7AOHsCmlJER1fN
j0/nM6lk\nm2qkTvl4UXaxKJIOdtZpoQNnoooccK5SYYbDW7fhDVePo0JSrboh3kchLaT2IdmYWYQpDhcaMJZE\nziafJyt39GvWrIjwf7kzeUw28ahsZKlpJZOdLv5ctZNF/iyuCE2nviXAHZE1cw8X/O467I4kXPF6\njyXemUZ83ChcY+eyfNOzFJZ+xo6iD9hc+DaZM08ZZm0HBxFZHG0rOyvX35b7amC8iOQDFcB3gAva\nKZIB1CillIjMQw+/bOjoYkqpThUdySRYYvjjuIW8Ul9McyjI9QnTWBg/8FnpvOEQ95dvZnlrLXEm\nC1dmTeaYHrhpnqjcjlmZWEct/6KQKUpfy/NURmMSIZEYFmiZbPI0cnyUv3wf5QEvvylew+6AmwST\nlSU5M1iUkMGUWBc35E7n8YrteLQQCxPS+UWWHpkSe+8NBwzQZU47iQ3P/4qYGCexNhfrdr5G1sLz\nBq5yesmSM67gzrf+0GWZS0dN4KeeLyhVbjSgzuzj8YwFfZL3VmMZl6tp5Im+KIhHhXi3qZyfZvbN\nbaCUYmpsEn8edzS7/C2kWGzs9rXgb9WYj/5O/EBN4srWjztcwORQxZ6YQVtbM9X1O8hImUh9UzHu\n1iomzjmduIwx7IwMqE791tIRlblyIFFKLQOW7dsXkVs6Ktcv466UConIz4H/oodC/lkptU1ELosc\nfxL4FnC5iIQAL3B+f2SOVJIsNn6cMbHH5f1aGI8WIikq5jma1a11rGmtI8li46zkXBwmC/eWb6LF\nHeJmdSTVmpe7SjeRXuDocqbq7rYW3moo47ccRZxYqVQebmUV+TFx7A40MwonSikKpYUF1oP9/Eop\nbixezdxABtdzBIVaC7eXbuCpcYvIsTmZ6nDxs6zJJJitTI9N2n8v7bv5sSk5TL/wLsqXv4Lm3svo\n4y8ifcriHtfXYNCdgU+3Onh2wjGsbK0F4Ki4tD5PIDKJEOTLpQtDaH02uJu9jfy6ZA0eLYRNzNw2\nejbTYpMoD3hplsD+qKtmAljF1G33PHpsZCSilIanpohw0E9cRgGTzv4l/3vjfmJiYgkEPEw47Spi\nnEmkTljYZZjsV41+x7lHXC3vtPvuyajtx4DH+ivncOKl2kKejMQ8p1sd3Jc/94CVgF6rL+EvVbtZ\npEaxVWp5p6GMJ8Yt5At3Db9V80mUGJKwMV9lsLK1tkvjXhP0kSNO4tCN0qjI9k8yJ3BH2UY2Uk+z\n+LHFmDg3Jf+g8z1aiIqAl1PIRUQYRyKTJIltviYaw35uKF5DHvHU4mNSbAK35c1hzukdJzeLS8tn\n4lnX9a/yBpjuDHy82cqJiX3Pmb+Pb6fm8+eqrZyp8mnCz3JTNX909d4Q+bQQNxav5nvaRGZLGtu0\nBv5v7zpemHAcxyVk8reaPTwV3EquiuNjqeDH6RO6HZztayK3oUALh9j26h34aoqxWp0EtDamX3An\nR13xF/zuWmLiUrAMYkqGQxkjK+QQs97TwN9ririd+fxeHcOsYBpL964/oMwfq3dwjZrJOTKGy9U0\nYkIWlrVUEmeyUsuXLeJa8XXbkiywJVCs3BQp3Xe8UlVjNgnz4tP46/hjODs7h0tzJvBYwQJsHQwC\nO0wWRKAKfeA1qMKU4yHZYuOu0k18T5vINWomt2rzKPV6+bClku3Xf7u/1TSkDMXMzK+n5HFF9iSK\n4ppoSwzyh4L55PQhmVl5wIsTK7NFTx43WZJJw06JvxW7yczjYxcwK82FJIe5Nncq56cVDPStDCkV\na9/E4vHxjePv5uxjb2VC1iL2vPs45hg7sSm5hmHvAiNx2BCz3dfEbFJJicQ8f03l8u+2YmqDbTxX\ns5uGgB+PFiIlks1RREhRNrzhMJePmsRD5ZtYqDKpFh9NVj+nJGZ3KS8zxsFNOTO4vWw9guAwm7k7\nfy4WMZFitXNSN61SswjXZk3lvop1TCeFEpObKXGJzHGmUB3yMRl90pJFTBSoRKoCh0bURXvsfYiD\n7y0nubJ6tXKWphSvN+xlk6eRjBg7300bS4rFRoPy06DaSBY7LSpADb79A71Os7VXoZq9Teg21LQ1\nVJCbNgOTSTdVuZmz2bXui2HW6tDAMO5DTIbVwVtSTlDTsEZinlMtdi7d/RlHhNMpIIm11PMs2zlX\nFVBKK+ukjp/GTWC0LY7MGAerWuuYaI7ndFdOj8Lcjk3M5O2EdNzhIInmmF7nZT89KZfx9kS2+Zo4\nx5rD/Lg0Pde3LZEP/GWcqfJoIsB6qePq4zJ6nW5gJHDt/Zks23Rdv2ay9pYyv4fna/fQGgpxdGI6\np7iyD3Ch/L5iK+uaGjhGZbFHmrm8+XNOS84hz+rk5sAKJpFEibj5TuoYsvq4wHf7ge+RRmx6PkVr\n32d83mLM5hj2lH2OMy1/uNU6JDCM+xBzXEImHzRVcqtnJZnEsptmzkjMobDRw3mit7jyVTx3sobd\n5iZcZhu3Z81htE2fGTotNolp7ab49wSLmEjqx+LW4x0JjHccmKv85tGz+FXxKj4IluJXYX6YNp7F\neSnc22cpw4MWCqCFgyy+kT7NZO0L1QEfP93zOcdp2Ywjnqc9u2kM+bkgbSygD7i/3riXh1hErFg5\nRo3i+uDnvFtTybEqi1RpYpepidtHz2GGc2BWyBqJjJp5Cu7ybfzz/WuxWB2Y7U6mnf/b4VbrkMAw\n7kOMSYTfjp7NBm8DzeEgUxwuPm2pplR92XqKJwYl8K9JJw75qje9ITPGwbPjj6Eh5MdptuAwWTjd\nNHIzC7ZHKUXxx89RtupVREwkjJrIlW1LmNvLmax94f3mCmapNM4Wfc3VXBXHo3Ub9xv3sFIIEIM+\nDhJAo5kAt6mjcIiFhSqTe9RaWjuZNNZTRvrkJTGZmXjmdbS11KAFAziSRh12GSEHC8O4DwMiwizn\nl2GHixLS+VP1Tt7TSsnByVtSwumJOQNq2JVSvFBXyIu1RYRRnJ6Uw08zJx0UjucOB6kNtpFhdfQo\nta9JpE8Te3qKUory1a9Tt+UjTBYr2Qu+RcrYeQNy7drtn9C87XO+9bWHiImJY/nGZ9nz7hNY7b/q\n1UzWvhBGYVGyfx6XBVNUoKQ+O/hIZyp/8mzBpeysoRYrJlZQzWJ0900MZsIjJGX3YGNPGLgVwb4q\nGMa9n+zwNfPv+lI0FGcm5zIltvdhZelWB48WzOfJyh1sDtcxPz6VHwzwDNf/NpXzr5pSfqFmEYOJ\npxq2Em/ew0VRct5rKue+8s24xIZbBbhl9Czm92KZvYHKLOhtKMfXWEFscg71u5ZTv+49jpr6XQKB\nVr5483eYz10yIBn/Wsq2MS5nEXab7m6aUnAy7695GNDDAzf0YKJTXzk+YRR/r/mMGuUjBhPleDgp\ncRRLitew1ddEusXBz7Mm8VTVTtb6armMqYTReJItNCs/JhFqxcusQ9wlo4VD1Gz5EL+7joTsSSTl\nzx5ulQ4bDOPeD7Z6m7iuaCUnq9FYgOuaV3J
X/tw+/eDG2OO5e8zcgVcywuctNZyqRpMlevjdOWoM\n77aU7DfutcE2Hijfwg1qDjnEsUs1ceve9bwy6YQeDdoCrK0rAvo3mFq+5k32fvo3kl15NDSVYLba\nOXbmT8hI0SeIub21VG39eECMuy0hleqdG5iiNERM1NTvxBZ/YO70ruLg/VqYhpCfZIutwzDSrnCY\nzFjFhEvZSMLGVhpZ6a6jIJTIdWo2u8JN3FSyhryYOC5gPONEn8twnhrHa6Y9THcm84dRC4jvx2pE\nC56eAa/0+fR+o7QwW15eisXrJ901lj3rHyZj7lnkHnVup+f43XW0lG/DbIsjKW+G4aLpAsO494OX\nags5S43hRMkBwKmsvFhTyKwxI681lWCOoToSqw5Qje8Aw1Dq95Alzv2LPY8XF06xUh30McYcPyg6\nNRavp+zzlwgH2kiZfAxpk4+h5JPnOOvYW4mLTcPtqeaND3+Nx1sPES9WIOhBHAOzvFrWnDPZtOML\n3vr0Nux2F/VNhUw7/46DynVk4Je7a7i1dD1WZSIoGktzZ/d44XKAfzWUMEtL5Xui/2mNUrH8KbiN\nJczFJEI6DtZRi1cFaYlKStdMgCPiUrm5F+meO+P4V4Z38YvG4vVoLY38f3v3HV91eT7+/3WffbL3\nXiRkkkDYQ5AhKEPcYFG/1dpa+7HWtmrtsK246tZWa639tVqrdVRwIzKUJbJnSAghhJC958k66/79\nkRhAEsjkhHA/Hw8fcJL3eb/vE8x13ue+r/u6Lp+xHI3QkDhiDh9++WvCJ1yFRqvDUpFH8Y4POkpE\nT8foHUTWikcI9IvH0lxFiW8gMbNuo+7EAZwOGwEJl+Dm1/8NZ8OFCu79YJVO3E75EbqhwyqdZ3mG\n69wSFMuPG7+h3mnFgIbdmkr+HHJy7jrMYKZENlEhWwgSZgpkIw1YCezhfHr6AjsLe5EC2VCaQ/bH\nTzE59RbMRh927n+blrpSPNyD8HBrD5Ke7sG4uweyI/Mt2qyNtNmaOFKwkTH/b2C6OGv1Rkbf/AR1\nJw7gsLURHTEKg3vX02qnBvgGu5XlBfu5W6YRL3w44qxleeE+/pc4u8d30i1OBz6czF7yx4wTyWZK\n2C3bK09W0cIy31heLz9KnbTiwMlmTQkvBQ1OP9jzzd7WhIdbIJqO+vhupvbyFU67lZbaEjLeeZC0\nuKLh1DcAACAASURBVEWYPb3Zt+Hf2HEyNe37RIdNwul08Nnmhzj41gNEhU6guamCfVvfxT9+KomL\n70X0sub+cKSCez9c4RvOC01ZeEg9GgQrxTF+4tfz+jLnU4jBjddGTufL+lIcOPmxZ/xpOyRDDG7c\nGZLI42W7CRVulNLMr8PTBq0Zc9XhLSRHX0Z13XHyi7aDEDQf+QbpdFBZc5RAv3jKq4/Qam0kYdG9\nnMjciNNpJ+X6P+Dmd/aNW72h0erxi+3ZdNi3Ab7Q2kSgMBFP+xtBovDFFyNF1iaSzT1bc5nhFcKD\nNXsYIb3wxsAKkUu41o0P7XncRhJOJG/II4QZ3Hgxdgpf1BWhQcsrvlOJMQ3OJ6nzzTs8hWNrX6Gg\nZDeBfvEcOvY5HoGx6IxuFHyznsSoWYwauQAAdzd/vtrxAkEdv18ajRZrm4UZ435CREg6Uko27for\n1ScyKDu4jtBhWhGyN1Rw74dZ3qFYpZMVlflI4EcB8VzuM3CBZ6AF6E3cGDCi2+9f6x/NNM8gSm3N\nRBjce5UFs1BzT6/GIrQ6Sir3o9FouWL6g7RZLXy143mCxy9m/c4X0GmNOJw2Eq/6Fc0V+dTl78PL\nK4zMFY+QsPAXBCT07O5VOh1IKdH0sql3d6Zl3Edl0uNUyBaqZAsBwtz+d1o7G3D0RLq7H7+KSOW1\nsqO0SgezvEPIbWnkSvuIztICzdLOFzXFPBYzjnhz7/rWnour59uhfc0j5fo/sPuLl2k7WI1XWCIp\n1z/Y/k0pT7v71ggNGq2ezLwvGJ+8lObWWqz2Zny82n/fhBD4eIVjszfTVJ7X1eUuOiq499PlPuFD\nOqD3VrDBTLDBPKDnbCw7SmXWZtBoCR1zOWbfMELGXM7+Pau4bMr9eLoH4ekeRFrCVRQ3VTPl7jdp\ns9Rg9PCjpa6MnE+e5aqZj+Jm9qOq9hjrPn8Ov9g30ZylTaCUkhNb3qJw5wcgnQTETyVh0S/R6vu+\nkQva88I3Zj/Ij0PLeKx8NzHCk3wa+b+QJPx7mRI6yzv0tJaEDxzfhZWTRdds/agceaHwjkhh/I/O\nrCsYlDqHjHd+i5vJF5PRiz3Z7xM28WqKj+4gZ/VPcDodeATEsDvzXaal/5Cm5ipyT2zGaPbGz/fM\nstUXIxXclUFVV5DB4Q8fJzlmHnZ7M/v/cx9jbnkaN/9IzP4RWJorCfRr37jT2FyJ1t8Tjc6A2ad9\n/r61rgxfn2jczO2L1AG+cWi1BqxNdZi8u0/TLD/0JXWHv+H6uc+h15vZsu9V8jf+m7h5d/b7Nc36\nTQvPZ9zLxGn/ocjaRKTBg8g+FAH7riWBI3ioeR8t0oETySqRz9OBJ6eM7NLJO5V5HGluINzoxveD\nRnKizcLnNUVoBCz2izpjF3F3XL2Yei4eQSMYtWQ5x7atxFnXRui0GwgZfTmRU5dib7Og1ZmQTjuH\nP36K/63+KRqNDo3OhNnHn7Bxi1w9/CFBBXel30wbroNu1jiLvvkfk1JuIjbyEgB0OiPFuz4mfv7d\nxMy5nR0fPEZ1/XHqLaWU1x4lPuWnSCk7N3C5+UdSU5dPg6UUL49QSioO4ZQODB5nL8HQUJhJUtQc\nzKb2FMK0uIV8nfXmgL3me58NYcy/fzygG50megTweMw4PqsuRCB4JmDiaaUmlhfsp8rSxhQZQmZT\nDXc0bKXW3sYVMgoHknvqtvNC7GSShkkPVa+wpJPTNKfQd6456EldshyHtYXG8mNo9UY8guPUYmoH\nFdyVQeW0tWEyngw2ZqM3TlsdAD6RqYy+6UnyNrxOc10+0SETKNr0Fg1FWYy8/P/aj/cNZcTs2/ns\ny4cxmXyw2ppIvua3aM6x0Kv38KWq5DjfLm9X1R1H/503BCklFZkbaCg4hM7dh4hJ16I393yxsrc9\nWXtirLs/Y93PbJpSZWtll6WK5+Q09ELLFBnMfbatXE8cM0R7+p9Ravlf5fFzpkkOZiXIxvJj1OTu\nRGswEZw6t1c/z77SGswDsu9huFHBXem3s1WB9E+ezq4973KJ/gfY7W0czP2UEfNP1p8xegbQUJTJ\n1bOfwN3sh83Wwkcbf4slfT4eQe2LvyGj5+GfMAVrYw0m72C0hnPPbUdMupYDbz7Amu1PY9C7UV6d\nw+hlfzrtmIKt71B7aDNJ0XOoLjvBgTfvJ/3WF3pVI3ywd7J+y4lEA2g66hUIIRAS3Dn5JueGjjLn\nuVNxe7v4LaWTwm3vU33kG7R6I5HTl3W5k7Qmbw9HPn2W+MgZNLXWsW/3Z4y97QX0PZwqUgaWCu4D\nTE
rJl/WlHG6pI8LgzmK/SHRdfExsdTr4W2k2B5pq8NMZ+VlYMrEXYIrbue4Cw8Yvxmm3sTnj3wiN\nlqhZ3ydg5OTO79taGjAY3HHvmFPX6814eoRgbao77Tx6k+cpH8fPTW/yZOytL1B9bBdtDRUEBARQ\nun81gSkz8Q5PRkpJ4Y6VXDPnyc5rr9/xPNW52wkeNafH1/lWT3qy9kegzkS8yYvXW7O5RIaSSTUa\nDayQuZikFieSj8Vx7vMbNeDXLvj6HRqydzBt1E00t9Sy/eOn8Y2dQEtVAQZ3H6Jn3YZH0AhObHyD\n6WN+RERI+yeHrfv/Scm+z4medv47azrtNo5vfJ2a3J3oDG5Ezfw+/nGDtwN8KFKTUwPspdLDvFZy\nFFu14IuyEh44vrvL4k6PFeynoK6JZdYEEpt9uSdvO1W2VheMeHAJIYiccj3j7/gb4374EsGpl532\nfaNXEFKjIefEJpzSSXH5Aeoaijrv2m0tDbRZapDdFMiqL8qicOeHVGZvQTpPb++nNZjwCI6laMdK\n/Jq0BLeaObziUWry9oB0Ip12DPqTd+kGvRtOu63Pr3UwOzoJIXgiZgKhPia+MObj8HLwevwMbgsd\nySrjcb4wnuCn4Ulceo6G6VNfG93ra1dmbmTa6B8Q5BdPTPgkkqLm0HQik2mJy4jzTCXj3QdpbajE\nbm3Gw/3kLl0vtyDsrU29vt5AyPvynziKjjN33D1MiL2anM+eo7Es1yVjcRV15z6AGuxWPq4t4Bk5\nDXehZ750srx1F5nNtafV3LY5nWyxlPMyl6IXWkbgxRHq2G2pYr5vhAtfQe/19iP+d2m0OkYtWc6h\nj55k+4HXMHkEkHzdg+jNnuR8/iKV2ZvRaHS4B40g5frfozslK6V472cUb32PqJDxlNVtoDJrM8nX\n/va0BbWS3Z+QFDWb9KT2eiWe7kEc3PY+frHjCUqcwZa9rzI6/kqq6wsoqcpk3Ij+ZdMM5h28u1bH\nveGn35kv9otisV/UoFzvWxqdAavtZJButTYSHTqBIL94gvziqajLpSZvN35xE9md9S5T0m6lubWW\nwye+IuGq+wd1bN2pyvmGhdMexNM9EB+vcOKrZ1CduxPPkIEtyDeUqeA+gFqlAxPazpIEWqHBGwMt\n37mj1AiBBkEzDrzRIqWkWdgxXKRFkNwDoxl/xytIp6OzEFTx3s+wleaz5PK/oNUa2HbgdfK+/BcJ\nC9vfTJwOG8c3vMZVs/6Ep3sgTqedTzb/kbqCDHyjx3Se22m3YjKcXEg1Gjxx2toAiF94D8c3/puv\ns95E7+5D2vcew+h1euGwvpj62mi23X6w3+cZDH1JgQyfcj1bNrxKauxCmttqOVa4hXnTft35fZu9\nBYNGx4jZt5O3/h+s2vooWr2JmNk/OO3f4nzS6o20tNbi2fFJotlah9ZwceW/q+A+gAJ0JoINZla0\nHWOmDCOTWso1zSR/pwywVgiW+cfyQu1+ZjrDyBeNWLRWpnZTeEpKSbmtFSeSUL15wOu8ZzTXUuew\nkmz26XEtmcFwaoW/ppKjjAyfhr5j12di9Cy+PnwyldFhbUUIDR5u7cFYo9Hh5RGCrbnhtHMGJF/K\nwU+fw9M9GIPezM7MtwkYe3n7c3QG4ub+eMBfx+yV09mYMe+8tuwbTMGpc9C7eVF8ZBsabzMh6QvY\ndvDfJMfMpbaxkJqmEqITpqLR6Rk5/6eMnP9TVw+Z6Bm3sPHLl0mOuYyG5krK6nIZm3a3q4d1XvU7\nuAsh5gN/BrTAP6WUT3VxzIvAAqAZuE1Kua+/1x2KNELw7IiJPF2YwfOt+wnVu/GXiMl4dZG2d0dI\nApEmd/ZbaojVu/NgYBpmzZn/HFang9+f2Muh5jo0QIzJk6diJvSokca5OKXkkcL9HLLUEYyZ4zTw\nWNR4xnmcmYrXlbPlt/eX0TeYkuNZxEfPRAgNJZWZGL1PzifrTB6YfUI5mPMxKXELqKg5QkV1DuHh\npwcWvxFjib38TnZv/wDpsBOQfhnhE68enEGfYtZvWvrck/XUPP+B9N6rN8EnfXuuX+yEzho8Ukoq\nQr7iRP4BdAE+jFn0HDqTxwCO9Oyk00HF4U201pXjERLXZfOWoFGzMHj6UZW7C21AFGOvuvOMrJ2K\nw5upytqMRm8kfPJ1eAbHna+XcF6I7haqevRkIbTAEWAuUAzsApZJKQ+fcsxC4G4p5UIhxGTgL1LK\nMwqDCCGklLLX/0d/k3blsG5F81p5Dnuravk/mYoGwWviMGE+Jn4Z3v+siC0NZfy9KIffOsehF1oy\nZDXv6nJYkdSzbBHThusGrRm2w9pKxru/R7Q0o9ebsbTVMvqmJzB5B3ce09pQyZFPnqGhLAeTuz8j\nF95zzmmA2hMHqNi/FjSCkLEL8Y4Y2Jot3/X8/WU97sm6taGcZ4oPUetoI83sx/Ko9D53uWpx2nmx\nJIt9lhp8dUbuCU/mzRt+16dzDSVSSrI/egJHdQWhfkmcKN+DX+pMYmbcAkB9USat9eW4B8XicZZG\n2mUH11K05R3GJl5Ha1sjB3M/ZfRNT7is+fbGJxf2+d28u9jZ39u/SUCulDK/4yLvAlcDh0855irg\nDQAp5Q4hhI8QIlhKWd7Pa18UjrY0MkkGd6ZTTpUhrG8pGJBzl9laiJNe6EX7dEgSPlTYW3t85zhY\ngR3aM11G3/wkDcVZOB12vMKSzsg/N3kFMuaWnrXjlk4HeRtfo+LAesYl34DT6WT/ikdIueGPgxrg\n7302hDE92Oh0os3CY4UHuEumEYMnn7Tk84cTe3ll5LQ+XffRggO0Njm5Q47ihL2RB0r3EN9QccG3\nq2ssOUJz6TGunvUntBodyXFX8MH6+4mYdC0ntvyX2iPb8fcdwfHKfxI969Zuq0OW7v6MS8b8kJCA\nZKC9T0DZwfXEXfaj8/lyBlV/g3s4UHjK4yJgcg+OiQBUcO+BSKMbGU3VTJRBCCBDVJ9Wqrc/ksze\nvMkxrpBR+GPiS4pJMHr3KLAP5i5HaA/GtuZ6vMKS0eh6V3bYaqnBbm3B7BOC0LQvWGd/8gyWgkwm\npd5CbGRHwBRwfMeHpzR7mIpnSPyAv5az7WStsbexvq6Eg801jMSbBNG+PnOdjOUnrRuxOZ3oNb3L\nWLZLJ1st5fyVSzEILRF4cMDWSG3+fkJHXz4gr2mwyI6uWN2xtVpwd/NH2zGFaTJ4otebqS8+TPXh\nr7l61uMY9G40WMr47MuHCEqZibbLTz+nV51s//vQ7MXQV/0N7j2dEvlutBjWUykD6dageH7ZtIPl\n1p3oEGh0ghdDv/v+2Tdpbn7cEhzHH8t3YkCDn87I09ETB+Tc/dFYdpSslY8h7TYcThsJ8+8hMHnG\nOZ8npSRv/T8oP/Qler0ZjcmNUUsfwd7SgKUoGx/PcLSnrFVoNXrqCzLwdZgx6j049N4fSbr6gR71\n8XTareR99S9qj+1GZ/IgetZt+I3o/nld7WStsLXw49xvSHD
6YJBaMqkkj3pihTelNGEUWnR9mHvX\nItAJDRZpw68jG6seGzpd/ypiDqamqgKOfPwUjdUnMHsGkbj4Prwjzpx69AqN52hjMXmF3xAalEpO\n/lfo3LyQTjveXuGd+xa8PELQ6UzYWhq7DO7BYxewddtrjE+6gTZrI4fz15HWRReuC1l/g3sxEHnK\n40ja78zPdkxEx9fOIIRYfsrDjVLKjf0c3wXPXavjb3FTyW6pxyklSWbvAU2ZXBowgqv9orA4bPjq\njGh6GEx+e833+7w4dzbS6SBr5WNMSvoeMeGTqKkvYO3ap/AIje+sFNmdyuwtWPIOcP3cZ9Hr3DiY\n8zG5q18iYtpSTCYf4qNnsuvQ24DAKR3syXqPIP9Epoy+FQA/7ygObP5vj4L7sXV/R1RWMG/CL2ho\nKmfrJ0+RtuyJzs1X3fndorvY+KSZb9Ke4+3KPMY7glgqRoKAaOnJy2QwlkD2ikp+EZbSp4VVIQS3\nBo7k+cr9zJBh5GmbqfDQkxY/MDcFA83psJP5/nJGj1hI/NRLKanI4OuVjzH+jr9jcDu9CJrezZtR\nSx7m4OoX2ZH5Fp5BcYxa+jACwdG6452NXo4VfI3Q6TF6dN3yMmzsQjQ6A5lZW9DoDYxashyPC2RB\nVQgxC5h1ruP6G9x3A/FCiBigBLgRWPadYz4B7gbeFUJMAeq6m2+XUi7v53iGJZ3QnFYdcKAZNdpe\nN3g+8EnPOg71lrWpFmm3ERPengHh5x2Fv28cTZX5ncG9qaqA1voK3AOjTptDbqo4TnTIeAz69mmr\nuMjpZG/diEdQLM1ttbS0NZIWv5j92R/QarPgEZ5IiPvJzlnuZn8cPdwlXJWzjasufQQ3sx/enmHE\nVV1CTd6ucwZ36CgZvOE6GuL2E8rJkgohuOGp05MW4M1N7jE97urUle8HjSTK6E7Jwgh2ZnmTOv6q\nbqYnXK+toQLhcJAYMxuAiJB0vI9HYCnP6/LTkGfISMb+4MUzvp5w5b2s/+x5nHYrRnc/UpY8dNYG\n2iFpcwlJmztwL+Q86bjp3fjtYyHEQ10d16/gLqW0CyHuBtbQngr5LynlYSHEnR3ff1VK+bkQYqEQ\nIhdoAn7Qn2sqw5ve7I3DYaO2vgBf7yjarBbq6gsJ82rfA3Di67cp3bsKH69IcurzGTn/ZwQmtpcT\nNvuGUZy7mtSRi9Bq9RSVH8DsE4rO6EbqjY+Ss/qvtNSV4BEYQ/rCx2iuLibzs+fx947BaPBkZ9Y7\n+CdM7dE4tXoTza11nXXmm9pqMehjevw67302hCueXcRLd75FnPTChI6PRB5zfcNYepZuWT3xbQbc\nLO9QfudzJ1F9W5M9b3RmT6zWJppbanDrKB5nsZRhcO9d6WL/uElMvedtHNZWtIaB3Q9yIepXKuRA\nUqmQF47BTIEEqMjaxLF1f8ffN5a6+gKCRs8jZub3sVTmk/nO71k881HMRi+q6/JZu+0ppvzsLTQ6\nA9LpIPvjp7GU5GA2+dDUWkPqjY/iHtD99vzyQ19RtO19nHYrAcmXEnPpLWe92/tWWcZ6Cja+QVL0\nHOqbyimrz2XsbX/uMt+7ImsTBV+/jdPWRkDiJcTM/kFn2z+n5b9k/X0ldim5wiecn4YmdVloricc\nUvJSaRaf1hYigKDxVxE95/YhW9/c1tJITd5uQNJSU0rlgbWEBaZRXpODV2w6I68YvFo9Q81QTIVU\nlAEXlDITz7AEmirzCfUK6txc8m1XJrOxfTOKv08MGq0ea3M9Jq9AhEZL0jW/wVKRh6OtGY/guHOW\n7w1OnUNwau+rQIakzcXoFUhV3h50gSNIT/9pl4G9YMcKire8Q2TIOOJHzGFfzofkb/4PsbNvB0Dj\ncTOp99/Mhuu/7nfJgrcrj3GgtpYn5VScSF44sIkSr8AzNm1Jp4OSvatorirALSCKsHGLevSG9l0O\nWxv2VgsGD99ev4G0NVSx/6378feMQgBVDQXEzvsx1qZaYnxm4ztiXJfPczrs5K3/BxVZGxFaPZFT\nlxAx8Zpej/1ioIK7MiSZfUIx+5xeC8Q9IJqjNceoayjCxyuCwtK9oNFgcD+5HiGEOG87DX2jx5x1\n01RZxnoqN7/N1c4IKksK+Kbyr0yZeBdbM9/oDO7fmr1yOmNeTe1X449djdUskNF4ifbeslfawvjo\n2B44JbhLKcn++ClEXR1RwWMpyPiawwUZJF/7u15NY5TsW03ehn+i0xrRGt0YteQh3Pwjz/3EDie+\nfpuRoVMYl7wEgP3ZH1BxdAeJV97b5fHS6eD4xn9Tuv8L/DwjuHr2E1htTXy16y8YPQMITBrabQNd\nQQV3pdcGc0rmbMy+ocTOu5PP1zyGXm/GiZOU637fOcUx1JRsepOfO1OJE+1zx232I+QVft3tp4lv\n0yX7ehfvqzNQKJr59u2mUDSh9Tj936qlppiGosNcP+cZtFo9CdGzWfnVr2ipKeo2OEspsTbVotUb\n0RndaSw/RuGW/3LVzEfxdA/myPGvyPzwCcb/qOfVMG2WGgICTq5vBPiMoLhsU7fHF25fQcvxQ3ia\n/Rk/6nu4mXxwM/mQMuJyCvP2quDehaH5W6Eo3QgeNZuA+KnU5u+n8vAmind8gDV1NgE9XAg9nxx2\nK54YOh97ObXsLN5B8rVnLwMwe+V0WDS9V6UL0hfYaah/mK1v3EeBswUnkK1rIu3Sm087zmlvQ68z\no+nYBKTR6DDozTjs1i7Pa22qI+v9h2mpK8XhsBI2dhEm/whCg1LxdG8vBZEQM5udGW/itNt6vOHM\nKyqNzMw1BAckI4TgUN4avJK6b6ZRd2wPExKu5dDRVTRYSgn0ay/dW28pQxegOj11RQV3pVcGs1hY\nT7XUFnN09Z8ZM/JqDGY39q55Bae9jaCUWa4d2HcEJM/gtUO7uMkeTSWtbNSUEb/gnh53BLr32RBY\ndBfP318GQNLT/2P/ah17LFX8p6YQe6IPAXOnY3O2T7uYfSH9Ry9TlbsDgWBMwtQz8sTdAqKQei37\nslcyImwSx0t24tBqsTXVkfm/5UiHncC0k+sQuV+8TJhHLBMmP4jV1sQX255E6PTU1h7DZm9DrzNS\nUX0Eg9kT0YtPUBGTruFYfTnvr70HpCQkbR6RU27o9nityYN6Sxljk6/ny+3PU1FzlDZbE5WNJ0hf\n+HyPr3sxUdkySq8MZrehnspd8zcC29wYnbAYgOLyA+w58Tljvu/id53vcDrsFGx4nbqcbeiM7oRf\ndnuPNkidTX3xYbJWPMKklGXo9W7synqHsGlLCE2f3+NztDVWc2zt32mpLsTsH0lQ2hyOrn6RiSnL\n0OtM7Mp6h4hLbyYkbS47X76N+VMe6LxLz8j5lEp3O05rC/V5e/HyDKO6No/ExffjFzu+16/n2+5Z\n51rQtZQfI+Pd3xMTOpGW1npKqrOInHw9YWMXDoserSpbRlEAkGhOqwuihUG8SZHSSen+L7CUHsXo\nE0zExGt6tCFIo9
[several kilobytes of base64-encoded PNG data elided -- the rendered decision-boundary plot produced by the cell below]
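
The cell below implements a small network with a tanh hidden layer and a softmax output, trained by full-batch gradient descent. As a warm-up, here is a minimal standalone sketch (not part of the original notebook; the scores and labels are made up) of just the softmax and cross-entropy steps the helper functions below rely on:

```python
# Minimal sketch of softmax + average cross-entropy on toy data.
import numpy as np

scores = np.array([[2.0, 0.5],
                   [0.1, 1.5]])   # one row of raw class scores per example
y = np.array([0, 1])              # true class index for each example

exp_scores = np.exp(scores)
probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)  # softmax: each row sums to 1
loss = -np.log(probs[range(len(y)), y]).mean()                  # average cross-entropy

print(probs)
print(loss)
```

This is exactly the computation that `calculate_loss` and `predict` below repeat over the whole training set.
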
=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "num_examples = len(X) # training set size\n",
+ "nn_input_dim = 2 # input layer dimensionality\n",
+ "nn_output_dim = 2 # output layer dimensionality\n",
+ " \n",
+ "# Gradient descent parameters (I picked these by hand)\n",
+ "epsilon = 0.01 # learning rate for gradient descent\n",
+ "reg_lambda = 0.01 # regularization strength\n",
+ "\n",
+ "# Helper function to evaluate the total loss on the dataset\n",
+ "def calculate_loss(model):\n",
+ " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n",
+ " # Forward propagation to calculate our predictions\n",
+ " z1 = X.dot(W1) + b1\n",
+ " a1 = np.tanh(z1)\n",
+ " z2 = a1.dot(W2) + b2\n",
+ " exp_scores = np.exp(z2)\n",
+ " probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n",
+ " # Calculating the loss\n",
+ " correct_logprobs = -np.log(probs[range(num_examples), y])\n",
+ " data_loss = np.sum(correct_logprobs)\n",
+ " # Add regularization term to loss (optional)\n",
+ " data_loss += reg_lambda/2 * (np.sum(np.square(W1)) + np.sum(np.square(W2)))\n",
+ " return 1./num_examples * data_loss\n",
+ "\n",
+ "# Helper function to predict an output (0 or 1)\n",
+ "def predict(model, x):\n",
+ " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n",
+ " # Forward propagation\n",
+ " z1 = x.dot(W1) + b1\n",
+ " a1 = np.tanh(z1)\n",
+ " z2 = a1.dot(W2) + b2\n",
+ " exp_scores = np.exp(z2)\n",
+ " probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n",
+ " return np.argmax(probs, axis=1)\n",
+ "\n",
+ "# This function learns parameters for the neural network and returns the model.\n",
+ "# - nn_hdim: Number of nodes in the hidden layer\n",
+ "# - num_passes: Number of passes through the training data for gradient descent\n",
+ "# - print_loss: If True, print the loss every 1000 iterations\n",
+ "def build_model(nn_hdim, num_passes=20000, print_loss=False):\n",
+ " \n",
+ " # Initialize the parameters to random values. We need to learn these.\n",
+ " np.random.seed(0)\n",
+ " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n",
+ " b1 = np.zeros((1, nn_hdim))\n",
+ " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n",
+ " b2 = np.zeros((1, nn_output_dim))\n",
+ " \n",
+ " # This is what we return at the end\n",
+ " model = {}\n",
+ " \n",
+ " # Gradient descent. 
For each batch...\n", + " for i in xrange(0, num_passes):\n", + " \n", + " # Forward propagation\n", + " z1 = X.dot(W1) + b1\n", + " a1 = np.tanh(z1)\n", + " z2 = a1.dot(W2) + b2\n", + " exp_scores = np.exp(z2)\n", + " probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", + " \n", + " # Backpropagation\n", + " delta3 = probs\n", + " delta3[range(num_examples), y] -= 1\n", + " #print type(delta3)\n", + " dW2 = (a1.T).dot(delta3)\n", + " db2 = np.sum(delta3, axis=0, keepdims=True)\n", + " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2)) # Remember: a1=np.tanh(z1)\n", + " dW1 = np.dot(X.T, delta2)\n", + " db1 = np.sum(delta2, axis=0)\n", + " \n", + " # Add regularization terms (b1 and b2 don't have regularization terms)\n", + " dW2 += reg_lambda * W2\n", + " dW1 += reg_lambda * W1\n", + " \n", + " # Gradient descent parameter update\n", + " W1 += -epsilon * dW1\n", + " b1 += -epsilon * db1\n", + " W2 += -epsilon * dW2\n", + " b2 += -epsilon * db2\n", + " \n", + " # Assign new parameters to the model\n", + " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", + " \n", + " # Optionally print the loss.\n", + " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", + " if print_loss and i % 1000 == 0:\n", + " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", + " \n", + " return model\n", + "\n", + "# Build a model with a 3-dimensional hidden layer\n", + "model = build_model(3, print_loss=True)\n", + " \n", + "# Plot the decision boundary\n", + "plot_decision_boundary(lambda x: predict(model, x))\n", + "plt.title(\"Decision Boundary for hidden layer size 3\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_1.ipynb b/.ipynb_checkpoints/python_tutorial_part_1-checkpoint.ipynb similarity index 100% rename from python_tutorial_part_1.ipynb rename to .ipynb_checkpoints/python_tutorial_part_1-checkpoint.ipynb diff --git a/.ipynb_checkpoints/python_tutorial_part_2-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_2-checkpoint.ipynb new file mode 100644 index 0000000..b28a838 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_2-checkpoint.ipynb @@ -0,0 +1,1039 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# NLTK Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "*** Introductory Examples for the NLTK Book ***\n", + "Loading text1, ..., text9 and sent1, ..., sent9\n", + "Type the name of 
the text or sentence to view it.\n",
+ "Type: 'texts()' or 'sents()' to list the materials.\n",
+ "text1: Moby Dick by Herman Melville 1851\n",
+ "text2: Sense and Sensibility by Jane Austen 1811\n",
+ "text3: The Book of Genesis\n",
+ "text4: Inaugural Address Corpus\n",
+ "text5: Chat Corpus\n",
+ "text6: Monty Python and the Holy Grail\n",
+ "text7: Wall Street Journal\n",
+ "text8: Personals Corpus\n",
+ "text9: The Man Who Was Thursday by G . K . Chesterton 1908\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Take a look at the preface here: http://www.nltk.org/book/ch00.html\n",
+ "# This tutorial is based on Python 2.7, but it shouldn't be an issue to write the same code for Python 3, as the differences\n",
+ "# are minimal as far as this tutorial is concerned\n",
+ "import nltk\n",
+ "from nltk.book import *"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 253,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Displaying 10 of 66 matches:\n",
+ "r occupations may come . The Negroes are now Americans . Their ancestors came here years ago agains\n",
+ "e it so or not . And yet we are not the less Americans on that account . We shall be the more Ameri\n",
+ "we find them now secure ; and there comes to Americans the profound assurance that our representati\n",
+ "have called me . I am certain that my fellow Americans expect that on my induction into the Preside\n",
+ " and the hurricanes of disaster . In this we Americans were discovering no wholly new truth ; we we\n",
+ " and that freedom is an ebbing tide . But we Americans know that this is not true . Eight years ago\n",
+ "eat . We are not content to stand still . As Americans , we go forward , in the service of our coun\n",
+ "uguration be simple and its words brief . We Americans of today , together with our allies , are pa\n",
+ "in the discharge of this responsibility , we Americans know and we observe the difference between w\n",
+ "cked bargain of trading honor for security . 
Americans , indeed all free men , remember that in the\n",
+ "None\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Let's look at the text4: Inaugural Address Corpus\n",
+ "# NLTK can show a word in context, called a concordance (with a given text window size)\n",
+ "# width: a parameter for the window size of surrounding character context\n",
+ "# lines: a parameter for the number of lines returned \n",
+ "print(text4.concordance(\"Americans\", width=100, lines=10))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "the free power opportunity fellow opinions colleges peace gangs\n",
+ "judgments consent noblest ideas colors fidelity unquestionable worship\n",
+ "discipline industrious just\n",
+ "None\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Other words that appear in a similar range of contexts as a given word\n",
+ "print(text4.similar(\"patriotic\"))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "every_citizen our_citizens\n",
+ "None\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Let's look at common contexts of two words:\n",
+ "print(text4.common_contexts([\"patriotic\", \"free\"])) "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.0669\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Lexical diversity shows the richness of a text's vocabulary:\n",
+ "from __future__ import division # in Python 3 you don't need to do the import\n",
+ "def lexical_diversity(text):\n",
+ " return len(set(text))/len(text)\n",
+ "\n",
+ "lex_div=lexical_diversity(text4)\n",
+ "print(round(lex_div, 4))\n",
+ "\n",
+ "# In what interesting ways can you use \"lexical_diversity\"?\n",
+ "# Can you play with some texts, say from presidential candidates and tell us what you find?"
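
Lexical diversity is just the ratio of distinct word types to total tokens, so a repetitive text scores low and a varied one scores high. A quick illustration with invented strings (not from the tutorial data):

```python
# Toy comparison: a repetitive text vs. a varied one.
def lexical_diversity(text):
    return len(set(text)) / float(len(text))   # float() keeps Python 2 division honest

repetitive = "the land of the land of the land".split()
varied = "a quick brown fox jumps over lazy dogs".split()

print(round(lexical_diversity(repetitive), 2))  # 0.38 -- only 3 distinct words out of 8
print(round(lexical_diversity(varied), 2))      # 1.0  -- every word is distinct
```
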
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "6"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "text=\"What interesting ways can you you use\".split()\n",
+ "len(text)\n",
+ "len(set(text))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 35,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[u'AS', u'Abandonment', u'Abhorring', u'About', u'Above', u'Abraham', u'Abroad', u'Accept', u'Across', u'Act', u'Acting', u'Action', u'Actual', u'Adams', u'Additional', u'Address', u'Administered', u'Administration', u'Administrations', u'Advance']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# sorted set of words\n",
+ "print(sorted(set(text4))[100:120])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "None\n",
+ "['father', 'man', 'mother', 'woman']\n",
+ "['father', 'man', 'mother', 'woman']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Pay attention to the difference between these!\n",
+ "tokens=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+ "x= tokens.sort() # Returns \"None\", but sorts the list in place\n",
+ "print x\n",
+ "print tokens\n",
+ "print sorted(tokens) # Returns the sorted list\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 81,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "('man', 102)\n",
+ "('woman', 3)\n",
+ "('father', 4)\n",
+ "('mother', 4)\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Counting word frequencies:\n",
+ "words=[\"man\", \"woman\", \"father\", \"mother\"]\n",
+ "for w in words:\n",
+ " print(w, text4.count(w))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ " # We stopped here:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 61,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "****************************************************************************************************\n",
+ "[(u'institutions', 76), (u'come', 75), (u'party', 75), (u'better', 75), (u'always', 74), (u'today', 74), (u'office', 73), (u'still', 73), (u'need', 73), (u'others', 73), (u'strength', 72), (u'Let', 72), (u'nor', 72), (u'itself', 72), (u'means', 70), (u'believe', 70), (u'themselves', 70), (u'place', 70), (u'land', 69), (u'could', 69), (u'then', 69), (u'.\"', 69), (u'home', 69), (u'equal', 69), (u'together', 68), (u'might', 68), (u'things', 67), (u'secure', 67), (u'Nation', 67), (u'whose', 66), (u'find', 66), (u'given', 66), (u'prosperity', 66), (u'Americans', 66), (u'old', 65), (u'am', 65), (u'full', 65), (u'give', 65), (u'here', 64), (u'Federal', 64), (u'action', 64), (u'order', 64), (u'yet', 64), (u'proper', 64), (u'found', 63), (u'up', 63), (u'important', 63), (u'responsibility', 63), (u'take', 62), (u'where', 62), (u'being', 62), (u'change', 62), (u'Executive', 62), (u'even', 62), (u'subject', 62), (u'administration', 61), (u'revenue', 61), (u'State', 61), (u'see', 60), (u'security', 60), (u'ought', 60), (u'trust', 60), (u'These', 60), (u'A', 59), (u'self', 59), (u'true', 59), (u'business', 59), (u'seek', 59), (u'character', 59), (u'honor', 59), (u'question', 59), 
(u'called', 59), (u'respect', 59), (u'commerce', 58), (u'cause', 58), (u'toward', 58), (u'principle', 58), (u'again', 58), (u'century', 58), (u'influence', 57), (u'become', 56), (u'protection', 56), (u'done', 56), (u'stand', 56), (u'course', 55), (u'another', 55), (u'very', 55), (u'help', 55), (u'like', 55), (u'citizen', 54), (u'authority', 54), (u'also', 53), (u'Republic', 53), (u'live', 53), (u'civil', 53), (u'past', 52), (u'sense', 52), (u'constitutional', 52), (u'meet', 52), (u'democracy', 52)]\n", + "****************************************************************************************************\n", + "14\n", + "312\n", + "****************************************************************************************************\n", + "[u'than', u'country', u'.', u'has', u'people', u'for', u'citizens', u'time', u'so', u'nation']\n" + ] + } + ], + "source": [ + "# Frequency distribution\n", + "freq_dist = FreqDist(text4) \n", + "print(\"*\"*100)\n", + "print(freq_dist.most_common(1000))[200:300]\n", + "print(\"*\"*100)\n", + "print(freq_dist[\"European\"])\n", + "print(freq_dist[\"world\"])\n", + "print(\"*\"*100)\n", + "#------------------------------------------\n", + "# Vocabulary\n", + "V=set(text4)\n", + "words=[w for w in V if freq_dist[w] > 200][:10]\n", + "print(words)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l=[\"a\", \"b\"]\n", + "l[0]\n", + "\n", + "d= {\"Hi\": 44, \"Hello\": 2}\n", + "d[\"Hello\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "United States; fellow citizens; four years; years ago; Federal\n", + "Government; General Government; American people; Vice President; Old\n", + "World; Almighty God; Fellow citizens; Chief Magistrate; Chief Justice;\n", + "God bless; every citizen; Indian tribes; public debt; one another;\n", + "foreign nations; political parties\n", + "None\n" + ] + } + ], + "source": [ + "# Collocations\n", + "print(text4.collocations())" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'zodiac', u'zodiac', u'zogranda', u'zone', u'zoned', u'zoned', u'zones', u'zoology', u'zoology', u'zoroaster']\n", + "[u'zephyr', u'zeuglodon', u'zig', u'zodiac', u'zogranda', u'zone', u'zoned', u'zones', u'zoology', u'zoroaster']\n" + ] + } + ], + "source": [ + "# Could you tell the difference?\n", + "print(sorted(w.lower() for w in set(text1))[-10:])\n", + "print(sorted(set(w.lower() for w in text1))[-10:])" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "Hamlet: Entire Play\n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "The Tragedy of Hamlet, Prince of Denmark\n", + "\n", + "Shakespeare homepage \n", + " | Hamlet \n", + " | Entire play\n", + "\n", + "ACT I\n", + "SCENE I. Elsinore. A platform before the castle.\n", + "\n", + "FRANCISCO at his post. 
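
Following up on the collocations cell above: `text4.collocations()` is a convenience wrapper, and NLTK also exposes the underlying collocation finders, which give control over frequency filtering and the association measure. A hedged sketch (it assumes `text4` from `nltk.book` is loaded, as in the earlier cells):

```python
# Sketch: scoring bigrams ourselves instead of relying on text4.collocations().
from nltk.collocations import BigramAssocMeasures, BigramCollocationFinder

bigram_measures = BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(text4)  # assumes text4 is already loaded
finder.apply_freq_filter(3)                         # ignore bigrams seen fewer than 3 times
print(finder.nbest(bigram_measures.pmi, 10))        # ten highest-PMI bigrams
```
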
Enter to him BERNARDO\n", + "\n", + "BERNARDO\n", + "\n", + "Who's there?\n", + "\n", + "FRANCISCO\n", + "\n", + "Nay, answer me: stand, and unfold you\n" + ] + } + ], + "source": [ + "# Fetching and cleaning a webpage:\n", + "from urllib import urlopen\n", + "from bs4 import BeautifulSoup\n", + "url=\"http://shakespeare.mit.edu/hamlet/full.html\"\n", + "page = urlopen(url)\n", + "soup = BeautifulSoup(page.read()) \n", + "raw = BeautifulSoup.get_text(soup) \n", + "print(raw[:300])\n", + "tokens=nltk.word_tokenize(raw)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['I', 'am', 'happy']\n" + ] + } + ], + "source": [ + "# Word tokenization with NLTK:\n", + "import nltk\n", + "raw=\"I am happy\"\n", + "tokens=nltk.word_tokenize(raw)\n", + "print tokens" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# More on files, this time with NLTK:" + ] + }, + { + "cell_type": "code", + "execution_count": 230, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at']\n", + "**************************************************\n", + "[u'The', u'Project', u'Gutenberg', u'EBook', u'of', u'Hamlet', u',', u'by', u'William', u'Shakespeare', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'at', u'no', u'cost', u'and', u'with', u'almost', u'no', u'restrictions', u'whatsoever', u'.', u'You', u'may', u'copy', u'it', u',', u'give', u'it', u'away', u'or', u're-use', u'it', u'under', u'the', u'terms', u'of', u'the', u'Project', u'Gutenberg', u'License', u'included', u'with']\n", + "Project Gutenberg-tm; _1st Clo._; Project Gutenberg; _Crosses to_;\n", + "Literary Archive; Gutenberg-tm electronic; Archive Foundation;\n", + "electronic works; Gutenberg Literary; United States; _2nd Clo._;\n", + "ROSENCRANTZ _and_; public domain; _and_ GUILDENSTERN; Dr. 
Johnson;\n",
+ "_1st Play._; electronic work; _and_ Attendants; the_ KING; set forth\n",
+ "None\n"
+ ]
+ }
+ ],
+ "source": [
+ "import codecs\n",
+ "from nltk import word_tokenize, Text\n",
+ "text_string=codecs.open(\"hamlet.txt\", \"r\", \"utf-8\").read() # Opens for reading and gets you the file content as a string\n",
+ "tokens = word_tokenize(text_string)\n",
+ "print(type(tokens))\n",
+ "print(tokens[:20])\n",
+ "text = Text(tokens)\n",
+ "print(\"*\"*50)\n",
+ "print(text[:50])\n",
+ "print(text.collocations())\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 90,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[u'[', u'The', u'Tragedie', u'of', u'Hamlet', u'by', u'William', u'Shakespeare', u'1599', u']']\n",
+ "[u'Actus', u'Primus', u'.']\n",
+ "[u'Scoena', u'Prima', u'.']\n",
+ "[u'Enter', u'Barnardo', u'and', u'Francisco', u'two', u'Centinels', u'.']\n",
+ "[u'Barnardo', u'.']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Sentence splitting\n",
+ "from nltk.corpus import gutenberg\n",
+ "# This will return each sentence as a list of words\n",
+ "hamlet_sent=gutenberg.sents('shakespeare-hamlet.txt')\n",
+ "for sent in hamlet_sent[:5]:\n",
+ " print(sent)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 93,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[u'1789-Washington.txt', u'1793-Washington.txt', u'1797-Adams.txt', u'1801-Jefferson.txt', u'1805-Jefferson.txt']\n",
+ "**************************************************\n",
+ "[u'1789', u'1793', u'1797', u'1801', u'1805', u'1809', u'1813', u'1817', u'1821', u'1825', u'1829', u'1833', u'1837', u'1841', u'1845', u'1849', u'1853', u'1857', u'1861', u'1865', u'1869', u'1873', u'1877', u'1881', u'1885', u'1889', u'1893', u'1897', u'1901', u'1905', u'1909', u'1913', u'1917', u'1921', u'1925', u'1929', u'1933', u'1937', u'1941', u'1945', u'1949', u'1953', u'1957', u'1961', u'1965', u'1969', u'1973', u'1977', u'1981', u'1985', u'1989', u'1993', u'1997', u'2001', u'2005', u'2009']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# NLTK fileids:\n",
+ "from nltk.corpus import inaugural\n",
+ "print(inaugural.fileids()[:5])\n",
+ "print(\"*\"*50)\n",
+ "#print([fileid[:4] for fileid in inaugural.fileids()])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Generate text"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 122,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "living creature that he said , and the land of the land of the land of the land of the land of the land of the land of the land None\n"
+ ]
+ }
+ ],
+ "source": [
+ "# A function from the NLTK book: http://www.nltk.org/book/ch02.html\n",
+ "def generate_model(cfdist, word, num=15):\n",
+ " for i in range(num):\n",
+ " print(word),\n",
+ " word = cfdist[word].max()\n",
+ "\n",
+ "text = nltk.corpus.genesis.words('english-kjv.txt')\n",
+ "bigrams = nltk.bigrams(text)\n",
+ "cfd = nltk.ConditionalFreqDist(bigrams)\n",
+ "print(generate_model(cfd, 'living', num=30))\n",
+ "#print(cfd[\"living\"].max())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# WordNet"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 133,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[Synset('nice.n.01'), Synset('nice.a.01'), 
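
A note on the text generation above: it loops on "of the land of the land" because `cfdist[word].max()` is deterministic, so once the chain revisits a word it repeats forever. A sampling variant avoids this; the sketch below is my own illustration, not from the NLTK book:

```python
# Sketch: sample the next word instead of always taking the most frequent one.
import random
import nltk

def generate_model_sampled(cfdist, word, num=15):
    out = []
    for i in range(num):
        out.append(word)
        successors = list(cfdist[word])   # observed next words after `word`
        if not successors:                # dead end (e.g., the corpus's last token)
            break
        word = random.choice(successors)  # uniform over successor types, so less repetitive
    return ' '.join(out)

text = nltk.corpus.genesis.words('english-kjv.txt')
cfd = nltk.ConditionalFreqDist(nltk.bigrams(text))
print(generate_model_sampled(cfd, 'living', num=30))
```
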
Synset('decent.s.01'), Synset('nice.s.03'), Synset('dainty.s.04'), Synset('courteous.s.01')]\n", + "**************************************************\n", + "done with delicacy and skill\n", + "[u'nice', u'skillful']\n", + "**************************************************\n", + "exhibiting courtesy and politeness\n", + "[u'courteous', u'gracious', u'nice']\n", + "**************************************************\n", + "excessively fastidious and easily disgusted\n", + "[u'dainty', u'nice', u'overnice', u'prissy', u'squeamish']\n" + ] + } + ], + "source": [ + "# WordNet is a very useful resource.\n", + "# You should get familiar with its structure, and with ways to navigate it.\n", + "# NLTK provides many off-the-shelf useful functions\n", + "from nltk.corpus import wordnet as wn\n", + "print(wn.synsets('nice'))\n", + "print(\"*\"*50)\n", + "print(wn.synset('nice.s.03').definition())\n", + "print(wn.synset('nice.s.03').lemma_names())\n", + "print(\"*\"*50)\n", + "print(wn.synset('courteous.s.01').definition())\n", + "print(wn.synset('courteous.s.01').lemma_names())\n", + "print(\"*\"*50)\n", + "print(wn.synset('dainty.s.04').definition())\n", + "print(wn.synset('dainty.s.04').lemma_names())\n", + "print(\"*\"*50)" + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "**************************************************\n", + "the act of drilling\n", + "[u'drilling', u'boring']\n", + "**************************************************\n", + "the act of drilling a hole in the earth in the hope of producing petroleum\n", + "[u'boring', u'drilling', u'oil_production']\n", + "**************************************************\n", + "cause to be bored\n", + "[u'bore', u'tire']\n", + "**************************************************\n", + "make a hole, especially with a pointed power or hand tool\n", + "[u'bore', u'drill']\n", + "**************************************************\n", + "so lacking in interest as to cause mental weariness\n", + "[u'boring', u'deadening', u'dull', u'ho-hum', u'irksome', u'slow', u'tedious', u'tiresome', u'wearisome']\n" + ] + } + ], + "source": [ + "# Printing the definition and lemma names/lemmas of a given word is easily done in a \"for\" loop\n", + "for synset in wn.synsets('boring'):\n", + " print(\"*\"*50)\n", + " print(synset.definition())\n", + " print(synset.lemma_names())\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 145, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Lemma('drilling.n.01.boring'), Lemma('boring.n.02.boring'), Lemma('boring.s.01.boring')]\n", + "[Lemma('fantastic.s.02.wonderful')]\n", + "[Lemma('dazzling.s.01.dazzling'), Lemma('blazing.s.01.dazzling')]\n" + ] + } + ], + "source": [ + "# You can access lemmas of a word directly, using the \"lemmas\" function:\n", + "print(wn.lemmas('boring'))\n", + "print(wn.lemmas('wonderful'))\n", + "print(wn.lemmas('dazzling'))" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Lemma('dish.n.01.dish'), Lemma('dish.n.02.dish'), Lemma('dish.n.03.dish'), Lemma('smasher.n.02.dish'), Lemma('dish.n.05.dish'), Lemma('cup_of_tea.n.01.dish'), Lemma('serve.v.06.dish'), Lemma('dish.v.02.dish')]\n", + "= = = = = = = = = = = = = = = = = = = = = = = = = = = 
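
Lemmas are also where WordNet attaches antonym links, which synsets themselves do not carry. A small sketch (the example words are my choice, and only some lemmas have antonyms at all):

```python
# Sketch: navigating from lemmas to antonyms.
from nltk.corpus import wordnet as wn

good = wn.synset('good.a.01').lemmas()[0]
print(good.antonyms())   # [Lemma('bad.a.01.bad')]

for lemma in wn.lemmas('boring'):
    if lemma.antonyms():  # prints nothing for lemmas without antonym links
        print(lemma.name(), '->', [a.name() for a in lemma.antonyms()])
```
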
= = = = = = = = = = = = = = = = = = = = = = = \n", + "**************************************************\n", + "a piece of dishware normally used as a container for holding or serving food\n", + "[u'dish']\n", + "**************************************************\n", + "a particular item of prepared food\n", + "[u'dish']\n", + "**************************************************\n", + "the quantity that a dish will hold\n", + "[u'dish', u'dishful']\n", + "**************************************************\n", + "a very attractive or seductive looking woman\n", + "[u'smasher', u'stunner', u'knockout', u'beauty', u'ravisher', u'sweetheart', u'peach', u'lulu', u'looker', u'mantrap', u'dish']\n", + "**************************************************\n", + "directional antenna consisting of a parabolic reflector for microwave or radio frequency radiation\n", + "[u'dish', u'dish_aerial', u'dish_antenna', u'saucer']\n", + "**************************************************\n", + "an activity that you like or at which you are superior\n", + "[u'cup_of_tea', u'bag', u'dish']\n", + "**************************************************\n", + "provide (usually but not necessarily food)\n", + "[u'serve', u'serve_up', u'dish_out', u'dish_up', u'dish']\n", + "**************************************************\n", + "make concave; shape like a dish\n", + "[u'dish']\n" + ] + } + ], + "source": [ + "# Play with the word \"dish\"\n", + "print(wn.lemmas('dish'))\n", + "print(\"= \"*50)\n", + "for synset in wn.synsets('dish'):\n", + " print(\"*\"*50)\n", + " print(synset.definition())\n", + " print(synset.lemma_names())" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Happy.a.01: enjoying or showing or marked by joy or pleasure\n", + "Felicitous.s.02: marked by good fortune\n", + "Glad.s.02: eagerly disposed to act or to be of service\n", + "Happy.s.04: well expressed and to the point\n", + "Gladiolus.n.01: any of numerous plants of the genus Gladiolus native chiefly to tropical and South Africa having sword-shaped leaves and one-sided spikes of brightly colored funnel-shaped flowers; widely cultivated\n", + "Glad.a.01: showing or causing joy and pleasure; especially made happy\n", + "Glad.s.02: eagerly disposed to act or to be of service\n", + "Glad.s.03: feeling happy appreciation\n", + "Beaming.s.01: cheerful and bright\n", + "Joyful.a.01: full of or producing joy\n", + "Elated.s.02: full of high-spirited delight\n", + "Joyous.a.01: full of or characterized by joy\n" + ] + } + ], + "source": [ + "# A function that prints the synsets and definitions of a given word:\n", + "def get_definitions(word):\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " print synset.name().capitalize() + ':', synset.definition() # capitalizing to give the feel of a dict entry\n", + " except:\n", + " continue\n", + " \n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " get_definitions(w)\n", + "\n", + "# You can condition by a part of speech (POS), see the book!\n", + "#for synset in wn.synsets('mint', wn.NOUN):\n", + "#... 
print(synset.name() + ':', synset.definition())\n" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'happy']\n", + "[u'felicitous', u'happy']\n", + "[u'glad', u'happy']\n", + "[u'happy', u'well-chosen']\n", + "[u'gladiolus', u'gladiola', u'glad', u'sword_lily']\n", + "[u'glad']\n", + "[u'glad', u'happy']\n", + "[u'glad']\n", + "[u'beaming', u'glad']\n", + "[u'joyful']\n", + "[u'elated', u'gleeful', u'joyful', u'jubilant']\n", + "[u'joyous']\n" + ] + } + ], + "source": [ + "# A function to print the lemma names of a passed word\n", + "def get_lemma_names(word):\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " print(synset.lemma_names())\n", + " except:\n", + " continue\n", + " \n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " get_lemma_names(w)" + ] + }, + { + "cell_type": "code", + "execution_count": 185, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "set([u'felicitous', u'well-chosen', u'glad', u'happy'])\n", + "set([u'gladiolus', u'beaming', u'sword_lily', u'gladiola', u'glad', u'happy'])\n", + "set([u'elated', u'jubilant', u'joyful', u'gleeful'])\n", + "set([u'joyous'])\n", + "set([])\n", + "**************************************************\n", + "\n", + "Here's a single unique list/set:\n", + "\n", + "set([u'elated', u'gladiolus', u'beaming', u'joyous', u'sword_lily', u'well-chosen', u'felicitous', u'jubilant', u'gleeful', u'gladiola', u'joyful', u'glad', u'happy'])\n" + ] + } + ], + "source": [ + "# As above, but we uniqify using a set.\n", + "def get_unique_lemma_names(word):\n", + " l=[]\n", + " for synset in wn.synsets(word):\n", + " try:\n", + " l.extend(synset.lemma_names())\n", + " except:\n", + " continue\n", + " l=set(l)\n", + " return l\n", + "\n", + "happy_words=[\"happy\", \"glad\", \"joyful\", \"joyous\", \"exhuberant\"]\n", + "for w in happy_words:\n", + " l=get_unique_lemma_names(w)\n", + " print(l)\n", + "\n", + "# To get a set\n", + "print(\"*\"*50)\n", + "print(\"\\nHere's a single unique list/set:\\n\")\n", + "uniq_list=[]\n", + "for w in happy_words:\n", + " l=get_unique_lemma_names(w)\n", + " uniq_list.extend(l)\n", + "print(set(uniq_list))" + ] + }, + { + "cell_type": "code", + "execution_count": 190, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synset('ambulance.n.01')\n", + "**************************************************\n", + "[u'Model_T', u'S.U.V.', u'SUV', u'Stanley_Steamer', u'ambulance', u'beach_waggon', u'beach_wagon', u'bus', u'cab', u'compact', u'compact_car', u'convertible', u'coupe', u'cruiser', u'electric', u'electric_automobile', u'electric_car', u'estate_car', u'gas_guzzler', u'hack', u'hardtop', u'hatchback', u'heap', u'horseless_carriage', u'hot-rod', u'hot_rod', u'jalopy', u'jeep', u'landrover', u'limo', u'limousine', u'loaner', u'minicar', u'minivan', u'pace_car', u'patrol_car', u'phaeton', u'police_car', u'police_cruiser', u'prowl_car', u'race_car', u'racer', u'racing_car', u'roadster', u'runabout', u'saloon', u'secondhand_car', u'sedan', u'sport_car', u'sport_utility', u'sport_utility_vehicle', u'sports_car', u'squad_car', u'station_waggon', u'station_wagon', u'stock_car', u'subcompact', u'subcompact_car', u'taxi', u'taxicab', u'tourer', u'touring_car', u'two-seater', 
u'used-car', u'waggon', u'wagon']\n",
+ "**************************************************\n",
+ "ambulance beach_wagon station_wagon wagon estate_car beach_waggon station_waggon waggon bus jalopy heap cab hack taxi taxicab compact compact_car convertible coupe cruiser police_cruiser patrol_car police_car prowl_car squad_car electric electric_automobile electric_car gas_guzzler hardtop hatchback horseless_carriage hot_rod hot-rod jeep landrover limousine limo loaner minicar minivan Model_T pace_car racer race_car racing_car roadster runabout two-seater sedan saloon sport_utility sport_utility_vehicle S.U.V. SUV sports_car sport_car Stanley_Steamer stock_car subcompact subcompact_car touring_car phaeton tourer used-car secondhand_car\n"
+ ]
+ }
+ ],
+ "source": [
+ "#Nice example from the book (http://www.nltk.org/book/ch02.html)\n",
+ "motorcar = wn.synset('car.n.01')\n",
+ "types_of_motorcar = motorcar.hyponyms()\n",
+ "print(types_of_motorcar[0]) # prints: Synset('ambulance.n.01')\n",
+ "print(\"*\"*50)\n",
+ "print(sorted(lemma.name() for synset in types_of_motorcar for lemma in synset.lemmas()))\n",
+ "print(\"*\"*50)\n",
+ "\n",
+ "# Remember, the generator expression can be broken down as follows (with no sorting):\n",
+ "for synset in types_of_motorcar:\n",
+ " for lemma in synset.lemmas():\n",
+ " print(lemma.name()),"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 193,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[Synset('motor_vehicle.n.01')]\n",
+ "2\n",
+ "\n",
+ "Path 1\n",
+ "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'container.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n",
+ "\n",
+ "Path 2\n",
+ "[u'entity.n.01', u'physical_entity.n.01', u'object.n.01', u'whole.n.02', u'artifact.n.01', u'instrumentality.n.03', u'conveyance.n.03', u'vehicle.n.01', u'wheeled_vehicle.n.01', u'self-propelled_vehicle.n.01', u'motor_vehicle.n.01', u'car.n.01']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Another useful example, this time on hypernyms:\n",
+ "motorcar = wn.synset('car.n.01')\n",
+ "print(motorcar.hypernyms()) # prints: [Synset('motor_vehicle.n.01')]\n",
+ "\n",
+ "paths = motorcar.hypernym_paths()\n",
+ "print(len(paths)) # prints 2 as there are two paths, as the book states, between car.n.01 and entity.n.01 \n",
+ " # because wheeled_vehicle.n.01 can be classified as both a vehicle and a container.\n",
+ " # Take a look at the output below\n",
+ "\n",
+ "print(\"\\nPath 1 between car.n.01 and entity.n.01\")\n",
+ "print([synset.name() for synset in paths[0]])\n",
+ "print(\"\\nPath 2 between car.n.01 and entity.n.01\")\n",
+ "print([synset.name() for synset in paths[1]])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "# Try the graphical WordNet browser from your command line:\n",
+ "nltk.app.wordnet()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 227,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[Synset('lilac.n.01'), Synset('lavender.s.01')]\n",
+ "[Synset('tulip.n.01')]\n",
+ "[Synset('flower.n.01'), Synset('flower.n.02'), Synset('flower.n.03'), Synset('bloom.v.01')]\n",
+ "[Synset('tree.n.01'), Synset('tree.n.02'), Synset('tree.n.03'), Synset('corner.v.02'), Synset('tree.v.02'), 
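
Besides locating lowest common hypernyms, as the similarity cell here does, WordNet can also score relatedness directly. A hedged sketch using `path_similarity`, which turns distance in the hypernym hierarchy into a score between 0 and 1 (the exact numbers depend on the installed WordNet version):

```python
# Sketch: direct similarity scores instead of shared hypernyms.
from nltk.corpus import wordnet as wn

tulip = wn.synset('tulip.n.01')
daffodil = wn.synset('daffodil.n.01')
tree = wn.synset('tree.n.01')

print(tulip.path_similarity(daffodil))  # two flowers: relatively high
print(tulip.path_similarity(tree))      # flower vs. tree: lower
```
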
Synset('tree.v.03'), Synset('tree.v.04')]\n", + "[Synset('daffodil.n.01')]\n", + "**************************************************\n", + "[Synset('flower.n.01')]\n", + "[Synset('orchid.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n", + "[Synset('vascular_plant.n.01')]\n" + ] + } + ], + "source": [ + "# Similarity\n", + "from nltk.corpus import wordnet as wn\n", + "print(wn.synsets('lilac'))\n", + "print(wn.synsets('tulip'))\n", + "print(wn.synsets('flower'))\n", + "print(wn.synsets('tree'))\n", + "print(wn.synsets('daffodil'))\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "african = wn.synset('african_daisy.n.01')\n", + "orchid = wn.synset('orchid.n.01')\n", + "scarlet = wn.synset('scarlet_musk_flower.n.01')\n", + "aster = wn.synset('white-topped_aster.n.01')\n", + "tree = wn.synset('tree.n.01')\n", + "daffodil = wn.synset('daffodil.n.01')\n", + "#--------------------------\n", + "print(\"*\"*50)\n", + "print(african.lowest_common_hypernyms(orchid))\n", + "print(orchid.lowest_common_hypernyms(orchid))\n", + "print(scarlet.lowest_common_hypernyms(tree))\n", + "print(aster.lowest_common_hypernyms(daffodil))\n", + "#print(wn.synset('flower.n.01').hypernyms())\n", + "#print(wn.synset('flower.n.01').hyponyms())" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/python_tutorial_part_3-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_3-checkpoint.ipynb new file mode 100644 index 0000000..a4abd5b --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_3-checkpoint.ipynb @@ -0,0 +1,170 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "# Visualization with Python" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "\n", + "import matplotlib\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from nltk.draw.dispersion import dispersion_plot\n", + "print(text4.dispersion_plot([\"citizens\", \"democracy\", \"freedom\", \"duties\", \"America\"]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", + "\n", + "x = np.linspace(0, 3*np.pi, 500)\n", + "plt.plot(x, np.sin(x**2))\n", + "plt.title('A simple chirp');" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "#!/usr/bin/python\n", + "%matplotlib inline\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "#################################\n", + "\n", + "###################\n", + "# Classification confidence per class:\n", + "#--------------------------------------\n", + "Account=[0.50597769529107606, 0.61137631750300769, 0.67732439371970943, 0.74335210285266851, 0.78045128083006687, 0.85889268848391032, 0.86004645511688793, 0.86034494338992484, 0.86110627662083916, 0.86385229563374299, 0.8652039704758846, 0.86629669936492792, 
0.86714993874468849, 0.86951104057901096, 0.87107760241792387, 0.87433969633042807, 0.87731372225510584, 0.87786812374991918, 0.87810590629205709, 0.87892633430246814, 0.87915317999299925, 0.88066216034760669, 0.88408934521227001, 0.88469154406025774, 0.88796777230531254, 0.89044129384145887, 0.90267024623065706, 0.92524363229460704]\n", + "Alert=[0.47637448264043625, 0.66454777829823419, 0.83611243185409068, 0.84343541279259093, 0.84407692188080419, 0.84691874602073758, 0.84846838467167951, 0.84885499324244018, 0.84892085388116045, 0.85662976488218612, 0.85748220725433, 0.85766843771151779, 0.86282314473652266, 0.88306412005817692, 0.90953193806965249]\n", + "EventUpdate=[0.60620179952905806, 0.81414035159064524, 0.83977384250581455, 0.84197728312163245, 0.84665030060369317, 0.86063170708443371, 0.86556282358318515, 0.86670240801689957, 0.86730292811655896, 0.87129628595308339, 0.87179895532552454, 0.87237623417151511]\n", + "purchasesAndPayments= [0.70209823840705454, 0.87763882598773824, 0.87853436781994654, 0.88111290254442187, 0.88188179857430626, 0.88427555688939874, 0.88575543579520666, 0.89021547578541182, 0.89111298147392382, 0.89325185353073655, 0.89699979308063715, 0.89711891518041997, 0.90022327360644228, 0.90038922055635651, 0.90352512667392848, 0.91167136191589293, 0.91258912510512546, 0.91292772595692206, 0.91385158396318156, 0.91514777255816948, 0.91598257367845204, 0.91620376194631981, 0.91624196881789599, 0.91629208488490144, 0.91682304964064509, 0.91685424758210343, 0.91773130126105062, 0.91859239740198162, 0.91885115720464194, 0.9189813690872739, 0.91942897543635649, 0.92005261642510894, 0.92031278031339603, 0.9206131773956312, 0.92162770964147711, 0.92333172011600717, 0.92368736632287529, 0.92445312588899153, 0.92487782965565768, 0.92585645404696637, 0.92736029109282003, 0.92745082086828889, 0.92853691341173528, 0.92974148319851113, 0.93038737495076018, 0.93268166532934882, 0.93485981648392313, 0.93560689873538494, 0.93792467408291125, 0.93990750368079101, 0.94138345097080245, 0.94171127507598984, 0.94257827570703179, 0.94278726547269032, 0.94591753032286208, 0.94684854929169837, 0.94778523908566703, 0.94938466710678737, 0.9501448612157134, 0.95149247851897123, 0.95239002506270776, 0.95375471665360612, 0.95562783008800667, 0.95780240099868053, 0.9578653435017056, 0.95787837810691678, 0.95826883828494502, 0.96152066593986663, 0.96162681902834768, 0.9642282884732325, 0.96529873521893783, 0.96552974107677436, 0.96639428810283956, 0.96678961265384455, 0.96687982068407863, 0.96873869536512358, 0.96880029214978958, 0.96947401140396405, 0.97052647680045956, 0.97057034041651968]\n", + "TrackedInfo=[0.57856804762622893, 0.84336621528774514, 0.84726630715050089, 0.84865439463077963, 0.85259413447559007, 0.85367437726360995, 0.85572529471292957, 0.85595201321671999, 0.86019342820522748, 0.86280650298103134, 0.87695710743248556, 0.88755323066381953, 0.89051154523294096, 0.89159321691228055]\n", + "Travel=[0.83341902931726042, 0.84109694886055264, 0.84915529812154522, 0.84965845310243482, 0.85351817819423192, 0.86268823869067024, 0.87021845881088733]\n", + "\n", + "def createHistogram(x, cat=\"Purchases and Payments\", color='green'):\n", + " \"\"\"\n", + " Plots an individual histogram.\n", + " \"\"\"\n", + " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", + " plt.xlabel('Confidence')\n", + " #plt.ylabel('Frequency')\n", + " plt.title(cat)\n", + " plt.grid(True)\n", + " plt.show()\n", + "\n", + "def createHist(x, cat=\"Purchases and Payments\", 
color='green'):\n", + " \"\"\"\n", + " Used as a helper function for creating individual histograms \n", + " inside the subplotter\n", + " \"\"\"\n", + " plt.hist(x, len(x)+20, normed=1, facecolor=color, alpha=0.75)\n", + " plt.xlabel('Confidence')\n", + " plt.title(cat)\n", + " plt.grid(True)\n", + " \n", + "def subplotter():\n", + " \"\"\"\n", + " Subplots several histograms...\n", + " \"\"\"\n", + " plt.figure(figsize=(15,15))\n", + " plt.subplot(3,3 , 1 )\n", + " createHist(Account, cat=\"Account\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 2 )\n", + " createHist(Alert, cat=\"Alert\", color='magenta')\n", + " plt.subplot( 3,3, 3 )\n", + " createHist(EventUpdate, cat=\"Event Update\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 4 )\n", + " createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=np.random.rand(3)) \n", + " plt.subplot( 3,3, 5 )\n", + " createHist(TrackedInfo, cat=\"Tracked Info\", color=np.random.rand(3))\n", + " plt.subplot( 3,3, 6 )\n", + " createHist(Travel, cat=\"Travel\", color='y')\n", + " plt.show()\n", + "\n", + "def main():\n", + " subplotter()\n", + " # createHist(Account, cat=\"Account\")\n", + " # createHist(Alert, cat=\"Alert\", color='magenta')\n", + " # createHist(EventUpdate, cat=\"EventUpdate\", color='blue')\n", + " # createHist(purchasesAndPayments, cat=\"Purchases and Payments\", color=\"red\")\n", + " # createHist(TrackedInfo, cat=\"TrackedInfo\", color='brown')\n", + " # createHist(Travel, cat=\"Travel\", color='y')\n", + " \n", + " \n", + "if __name__ == \"__main__\":\n", + " main()\n", + "\n", + "######################################\n", + "# This is useful: http://cs.smith.edu/dftwiki/index.php/MatPlotLib_Tutorial_1\n", + "#---------------------------------\n", + "# This is about color maps: http://matplotlib.org/examples/color/colormaps_reference.html\n", + "\n", + "# cmaps = [('Sequential', ['Blues', 'BuGn', 'BuPu',\n", + "# 'GnBu', 'Greens', 'Greys', 'Oranges', 'OrRd',\n", + "# 'PuBu', 'PuBuGn', 'PuRd', 'Purples', 'RdPu',\n", + "# 'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']),\n", + "# ('Sequential (2)', ['afmhot', 'autumn', 'bone', 'cool', 'copper',\n", + "# 'gist_heat', 'gray', 'hot', 'pink',\n", + "# 'spring', 'summer', 'winter']),\n", + "# ('Diverging', ['BrBG', 'bwr', 'coolwarm', 'PiYG', 'PRGn', 'PuOr',\n", + "# 'RdBu', 'RdGy', 'RdYlBu', 'RdYlGn', 'Spectral',\n", + "# 'seismic']),\n", + "# ('Qualitative', ['Accent', 'Dark2', 'Paired', 'Pastel1',\n", + "# 'Pastel2', 'Set1', 'Set2', 'Set3']),\n", + "# ('Miscellaneous', ['gist_earth', 'terrain', 'ocean', 'gist_stern',\n", + "# 'brg', 'CMRmap', 'cubehelix',\n", + "# 'gnuplot', 'gnuplot2', 'gist_ncar',\n", + "# 'nipy_spectral', 'jet', 'rainbow',\n", + "# 'gist_rainbow', 'hsv', 'flag', 'prism'])]" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb new file mode 100644 index 0000000..2d63135 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb @@ -0,0 +1,342 @@ +{ + 
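
A note on the histogram code above: `normed=1` has long been deprecated in matplotlib in favor of `density=True`, and `plt.subplots` can replace the manual `plt.subplot(3, 3, n)` bookkeeping. A sketch under those assumptions, with made-up data:

```python
# Sketch: the same grid-of-histograms idea with current matplotlib idioms.
import numpy as np
import matplotlib.pyplot as plt

data = {'Account': np.random.rand(50), 'Alert': np.random.rand(30)}  # made-up confidences

fig, axes = plt.subplots(1, 2, figsize=(10, 4))
for ax, (name, values) in zip(axes, data.items()):
    ax.hist(values, bins=20, density=True, alpha=0.75)  # density=True replaces normed=1
    ax.set_title(name)
    ax.set_xlabel('Confidence')
    ax.grid(True)
plt.show()
```
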
"cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Rule-Based Sentiment Classifier" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code below was written in class, to teach simple Python concepts" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally']\n" + ] + } + ], + "source": [ + "import re\n", + "def clean_lexicon():\n", + " positive_words= open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\", \"r\").readlines()\n", + " new_pos_list=[]\n", + " for i in positive_words[:5]:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_pos_list.append(i)\n", + " return new_pos_list\n", + "\n", + "my_positive_list= clean_lexicon()\n", + "print my_positive_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally', 'ethmoidal vein', 'unquestionableness', 'uselessness', 'top-quality', 'good-humoredness']\n", + "['twilight of the gods', 'rumbustious', 'screaming', 'grueling', 'inanimate', 'stern', 'changelessness', 'sugarless', 'order pseudoscorpiones', 'modest']\n" + ] + } + ], + "source": [ + "import re\n", + "\n", + "def clean_lexicon(lex_input):\n", + " lex_file_l=open(lex_input, \"r\").readlines()\n", + " \n", + " new_lex_file_l=[]\n", + " for i in lex_file_l:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_lex_file_l.append(i)\n", + " return new_lex_file_l\n", + "\n", + "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + "print my_positive_list[:10]\n", + "\n", + "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + "print my_positive_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "## hello ##\n", + "##hello##\n", + "##hello##\n" + ] + } + ], + "source": [ + "import re\n", + "s = \" hello \"\n", + "print \"##\"+ s + \"##\"\n", + "s2= re.sub(\" \", \"\", s)\n", + "print \"##\"+ s2 + \"##\"\n", + "s3=s.strip()\n", + "print \"##\"+ s3 + \"##\"" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "['couthie\\n', 'confidence_man\\n', 'definiteness\\n', 'changelessness\\n', 'morally\\n']\n", + "5440\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted 
Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n", +    "Predicted Label= POSITIVE\n" +   ] +  } + ], + "source": [ +  "lines=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", +  "print type(lines) \n", +  "print positive_words[0:5]\n", +  "print len(positive_words)\n", +  "positive_words=positive_words#+[\"good\"]\n", +  "#print lines[0:5]\n", +  "pos_counter=0\n", +  "for line in lines:\n", +  "    for entry in positive_words:\n", +  "        #print i[:-1]\n", +  "        #break\n", +  "        if entry in line and \"never\" not in line:\n", +  "            #print i\n", +  "            pos_counter+=1\n", +  "    if pos_counter > 1:\n", +  "        print(\"Predicted Label= POSITIVE\")\n", +  "    #else: #pos_counter ==0:\n", +  "    #    print(\"No positive words found\")\n", +  "    pos_counter=0\n", +  "    " + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<type 'list'>\n" + ] + } + ], + "source": [ + "x=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", + "print type(x) " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "@Vivawonderwoman Got it! :)\n", + "Riri flow on Pandora..Christmas tree all done :)\n", + "Ah love feels so great :-)\n", + "@stephhybb okay maybe then but the other stores usually have better ones & okay yeah come after you're done at game stop!:) def!\n", + "@pammpimm haha gpp kok dek :) thanks yaaaa\n", + "@katelittle_ @soph_funari @kaseycreehan @kaylaaajx3 awe Kate I love youuu <333 :)\n" + ] + } + ], + "source": [ + "for l in x[:6]:\n", + "    print l[:-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mixed tweet\t@chelvanderbaan well Idk if Thts good or bad for you ??? 
But its kinda nice to hear Haha :)\n", + "\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n" + ] + } + ], + "source": [ + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "for l in lines:\n", + " if \"good\" in l and \"bad\" in l:\n", + " print \"mixed tweet\\t\", l\n", + " elif \"bad\" in l:\n", + " print \"negative tweet\"\n", + " elif \"good\" in l:\n", + " print \"positive tweet\"\n", + " else:\n", + " pass #print \"\\t\\tobjective tweet\"" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1\n", + "2\n", + "3\n", + "4\n", + "5\n", + "6\n", + "7\n", + "8\n", + "9\n", + "10\n", + "11\n", + "12\n" + ] + } + ], + "source": [ + "x=open(\"~/Desktop/posTweets.txt\", \"r\").readlines()\n", + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "pos_lex=[\"good\", \"fantastic\", \"wonderful\", \"great\", \"fascinating\", \"pizza\"]\n", + "neg_lex=[\"bad\", \"ugly\", \"boring\", \"disguting\", \"lazy\"]\n", + "\n", + "count_pos=0\n", + "\n", + "for l in lines:\n", + " for entry in pos_lex:\n", + " if entry in l:\n", + " count_pos+=1\n", + " print count_pos #entry, lines.index(l)\n", + " count_pos=0\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/python_tutorial_part_4-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_4-checkpoint.ipynb new file mode 100644 index 0000000..8c1d8b6 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_4-checkpoint.ipynb @@ -0,0 +1,1463 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Numpy Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I 0\n", + "like 1\n", + "Pizza 2\n", + "I 0\n", + "like 1\n", + "samon! 
2\n" + ] + } + ], + "source": [ + "sentences=[\"POSITIVE I like Pizza\", \"NEGATIVE I like samon!\",\\\n", + "           \"POSITIVE I like Pizza\", \"NEGATIVE I like samon!\",\\\n", + "           \"POSITIVE I like Pizza\", \"NEGATIVE I like samon!\"]\n", + "from collections import defaultdict\n", + "\n", + "def get_dict(text):\n", + "    word_dict=defaultdict(int)\n", + "    for w in text:\n", + "        word_dict[w]=len(word_dict)\n", + "    return word_dict\n", + "\n", + "for sent in sentences[:2]:\n", + "    label=sent.split()[0]\n", + "    text=sent.split()[1:]\n", + "    index_dict=get_dict(text)\n", + "    for w in index_dict:\n", + "        print w, index_dict[w]\n", + "\n", + "\n", + "    " + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally']\n" + ] + } + ], + "source": [ + "import re\n", + "\n", + "def clean_lexicon():\n", + "    positive_words= open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\", \"r\").readlines()\n", + "    new_pos_list=[]\n", + "    for i in positive_words[:5]:\n", + "        i=i.strip()\n", + "        #i= i[:-1] # i is a word in the list\n", + "        i= re.sub(\"_\", \" \", i)\n", + "        new_pos_list.append(i)\n", + "    return new_pos_list\n", + "\n", + "my_positive_list= clean_lexicon()\n", + "print my_positive_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally', 'ethmoidal vein', 'unquestionableness', 'uselessness', 'top-quality', 'good-humoredness']\n" + ] + } + ], + "source": [ + "import re\n", + "\n", + "def clean_lexicon(lex_input):\n", + "    lex_file_l=open(lex_input, \"r\").readlines()\n", + "    \n", + "    new_lex_file_l=[]\n", + "    for i in lex_file_l:\n", + "        i=i.strip()\n", + "        #i= i[:-1] # i is a word in the list\n", + "        i= re.sub(\"_\", \" \", i)\n", + "        new_lex_file_l.append(i)\n", + "    return new_lex_file_l\n", + "\n", + "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + "print my_positive_list[:10]\n", + "\n", + "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + "print my_positive_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "## hello ##\n", + "##hello##\n", + "##hello##\n" + ] + } + ], + "source": [ + "import re\n", + "s = \" hello \"\n", + "print \"##\"+ s + \"##\"\n", + "s2= re.sub(\" \", \"\", s)\n", + "print \"##\"+ s2 + \"##\"\n", + "s3=s.strip()\n", + "print \"##\"+ s3 + \"##\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { +
"collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "['couthie\\n', 'confidence_man\\n', 'definiteness\\n', 'changelessness\\n', 'morally\\n']\n", + "5440\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n" + ] + } + ], + "source": [ + "lines=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", + "print type(lines) \n", + "print positive_words[0:5]\n", + "print len(positive_words)\n", + "positive_words=positive_words#+[\"good\"]\n", + "#print lines[0:5]\n", + "pos_counter=0\n", + "for line in lines:\n", + " for entry in positive_words:\n", + " #print i[:-1]\n", + " #break\n", + " if entry in line and \"never\" not in line:\n", + " #print i\n", + " pos_counter+=1\n", + " if pos_counter > 1:\n", + " print(\"Predicted Label= POSITIVE\")\n", + " #else: #pos_counter ==0:\n", + " # print(\"No posiotive words found\")\n", + " 
pos_counter=0\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "x=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", + "print type(x) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "@Vivawonderwoman Got it! :)\n", + "Riri flow on Pandora..Christmas tree all done :)\n", + "Ah love feels so great :-)\n", + "@stephhybb okay maybe then but the other stores usually have better ones & okay yeah come after you're done at game stop!:) def!\n", + "@pammpimm haha gpp kok dek :) thanks yaaaa\n", + "@katelittle_ @soph_funari @kaseycreehan @kaylaaajx3 awe Kate I love youuu <333 :)\n" + ] + } + ], + "source": [ + "for l in x[:6]:\n", + " print l[:-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mixed tweet\t@chelvanderbaan well Idk if Thts good or bad for you ??? But its kinda nice to hear Haha :)\n", + "\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n" + ] + } + ], + "source": [ + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "for l in lines:\n", + " if \"good\" in l and \"bad\" in l:\n", + " print \"mixed tweet\\t\", l\n", + " elif \"bad\" in l:\n", + " print \"negative tweet\"\n", + " elif \"good\" in l:\n", + " print \"positive tweet\"\n", + " else:\n", + " pass #print \"\\t\\tobjective tweet\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1\n", + "2\n", + "3\n", + "4\n", + "5\n", + "6\n", + "7\n", + "8\n", + "9\n", + "10\n", + "11\n", + "12\n" + ] + } + ], + "source": [ + "x=open(\"~/Desktop/posTweets.txt\", \"r\").readlines()\n", + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "pos_lex=[\"good\", \"fantastic\", \"wonderful\", \"great\", \"fascinating\", \"pizza\"]\n", + "neg_lex=[\"bad\", \"ugly\", \"boring\", \"disguting\", \"lazy\"]\n", + "\n", + "count_pos=0\n", + "\n", + "for l in lines:\n", + " for entry in pos_lex:\n", + " if entry in l:\n", + " count_pos+=1\n", + " print count_pos #entry, lines.index(l)\n", + " count_pos=0\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "æ" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + " if pos_lex in l and neg_lex in l:\n", + " print \"mixed tweet\\t\", l\n", + " elif neg_lex in l:\n", + " print \"negative tweet\"\n", + " elif pos_lex in l:\n", + " print \"positive tweet\"\n", + " else:\n", + " print \"\\t\\tobjective tweet\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# if you meet a positive word or more AND a negative word or more --> MIXED\n", + "# elif you meet a positive word or more, predict \"POSITIVE\"\n", + "# elif you meet a neg word or more, predict \"NEGATIVE\"\n", + "# else, predict \"OBJ\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "axz\n" + ] + } + ], + "source": [ + "l=[\"axz\\n\", \"b\"]\n", + "print l[0][:-1]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "a --> [2 3 4 5]\n", + "b --> [5 6 7 8]\n", + "a+b --> [ 7 9 11 13]\n" + ] + } + ], + "source": [ + "from numpy import *\n", + "#from numpy import array\n", + "import numpy as np\n", + "a= array([2,3,4,5])\n", + "b=array((5,6,7,8))\n", + "print type(a)\n", + "print \"a -->\", a\n", + "print \"b -->\", b\n", + "print \"a+b -->\", a+b\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This will give an error!!!\n", + "a+c -->" + ] + }, + { + "ename": "ValueError", + "evalue": "operands could not be broadcast together with shapes (4,) (6,) ", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m9\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!!!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"a+c -->\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mValueError\u001b[0m: operands could not be broadcast together with shapes (4,) (6,) " + ] + } + ], + "source": [ + "# You can only add arrays of the same shape / equal length:\n", + "c=array([5,8,8,9,5,2])\n", + "print \"This will give an error!!!\"\n", + "print \"a+c -->\", a+c" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a+1 --> [3 4 5 6]\n" + ] + } + ], + "source": [ + "# broadcasting\n", + "# If you add an array to a scalar, the scalar gets broadcast across all the array elements\n", + "print \"a+1 -->\", a+1\n", + "# Now you can broadcast arrays and so you can add arrays of different shapes..." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Printing array x: [[ 1. 2. 3. 4.]\n", + " [ 5. 6. 7. 8.]] \n", + "\n", + "\"Shape of array x is:\" (2, 4) \n", + "\n", + "\"Value at x[0][1] is:\" 2.0\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "x= np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.float32)\n", + "print \"Printing array x: \", x,\"\\n\"\n", + "print \"\\\"Shape of array x is:\\\" \", x.shape,\"\\n\"\n", + "print \"\\\"Value at x[0][1] is:\\\" \", x[0][1] # gives row0, c1 --> we start index from zero!" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 1 1 -2]\n" + ] + } + ], + "source": [ + "x=np.array([1, 3, 5, 6])\n", + "y=np.array([1,2,3,1])\n", + "d=y[1:]-y[:-1]\n", + "print d\n", + "# This runs in C, the loop happens in C, so it's fast.\n", + "# It doesn't matter what shape y is. So, it can be a very big array." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "14\n", + "[ 2 5 9 14]\n" + ] + } + ], + "source": [ + "print sum(a)\n", + "# cumsum adds every emelement to the previous element\n", + "print cumsum(a)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-------------------\n", + "[0 1 2]\n", + "[ 0. 1. 2.]\n", + "-------------------\n", + "[2 3 4 5 6]\n", + "-------------------\n", + "[2 4 6]\n", + "-------------------\n", + "[ 100. 215.443469 464.15888336 1000. ]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#numpy.arange: http://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html\n", + "\"\"\"\n", + "numpy.arange([start, ]stop, [step, ]dtype=None)\n", + " Return evenly spaced values within a given interval.\n", + " Values are generated within the half-open interval [start, stop) (in other words, the interval including\n", + " start but excluding stop). For integer arguments the function is equivalent to the Python built-in range\n", + " function, but returns an ndarray rather than a list.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.arange(3)\n", + "print np.arange(3.0)\n", + "print \"-------------------\"\n", + "print np.arange(2,7)\n", + "print \"-------------------\"\n", + "print np.arange(2,7, 2)\n", + "print \"-------------------\"" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " numpy.zeros\n", + "-------------------\n", + "[ 0. 0. 0. 0. 0.]\n", + "-------------------\n", + "[0 0 0 0 0 0 0 0 0 0]\n", + "-------------------\n", + "[[ 0.]\n", + " [ 0.]\n", + " [ 0.]]\n", + "-------------------\n", + "numpy.ones\n", + "-------------------\n", + "[ 1. 1. 1. 1. 1.]\n", + "-------------------\n", + "[ 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0]\n", + "-------------------\n", + "[[ 1.]\n", + " [ 1.]\n", + " [ 1.]]\n", + "-------------------\n", + "numpy.identity\n", + "-------------------\n", + "[[ 1. 0. 0. 0. 0.]\n", + " [ 0. 1. 0. 0. 0.]\n", + " [ 0. 0. 1. 0. 0.]\n", + " [ 0. 0. 0. 1. 0.]\n", + " [ 0. 0. 0. 0. 
1.]]\n", + "-------------------\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#------------------\n", + "print \"numpy.zeros\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.zeros(shape, dtype=float, order='C')¶\n", + " Return a new array of given shape and type, filled with zeros.\n", + " \n", + "shape : int or sequence of ints\n", + " Shape of the new array, e.g., (2, 3) or 2.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.zeros(5)\n", + "print \"-------------------\"\n", + "print np.zeros((10,), dtype=np.int)\n", + "print \"-------------------\"\n", + "print np.zeros((3, 1))\n", + "print \"-------------------\"\n", + "#------------------\n", + "print \"numpy.ones\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.ones(shape, dtype=None, order='C')\n", + " Return a new array of given shape and type, filled with ones.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.ones(5)\n", + "print \"-------------------\"\n", + "print np.ones((10,), dtype=np.float128)\n", + "print \"-------------------\"\n", + "print np.ones((3, 1))\n", + "print \"-------------------\"\n", + "\n", + "#------------------\n", + "print \"numpy.identity\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.identity(n, dtype=None)\n", + " Return the identity array.\n", + " The identity array is a square array with ones on the main diagonal.\n", + "n : int\n", + " Number of rows (and columns) in n x n output.\n", + "\"\"\"\n", + "print \"-------------------\"\n", + "print np.identity(5)\n", + "print \"-------------------\"\n" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " numpy.linspace\n", + "[ 2. 2.25 2.5 2.75 3. ]\n", + "-------------------\n", + "[ 2. 2.2 2.4 2.6 2.8]\n", + "-------------------\n", + "(array([ 2. , 2.25, 2.5 , 2.75, 3. ]), 0.25)\n", + "-------------------\n", + "\n", + "\n", + " numpy.logspace\n", + "---------------------------------------------------------\n", + "[ 100. 215.443469 464.15888336 1000. ]\n", + "---------------------------------------------------------\n", + "[ 4. 5.0396842 6.34960421 8. ]\n", + "---------------------------------------------------------\n", + "[ 4. 
4.75682846 5.65685425 6.72717132]\n", + "---------------------------------------------------------\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "#------------------\n", + "print \"\\n numpy.linspace\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)[source]¶\n", + " Return evenly spaced numbers over a specified interval.\n", + " Returns num evenly spaced samples, calculated over the interval [start, stop].\n", + " The endpoint of the interval can optionally be excluded.\n", + " \n", + "retstep : bool, optional\n", + " If True, return (samples, step), where step is the spacing between samples.\n", + "\n", + "http://docs.scipy.org/doc/numpy-1.10.1/reference/generated/numpy.linspace.html#numpy.linspace\n", + "\"\"\"\n", + "print np.linspace(2.0, 3.0, num=5)\n", + "print \"-------------------\"\n", + "print np.linspace(2.0, 3.0, num=5, endpoint=False)\n", + "print \"-------------------\"\n", + "print np.linspace(2.0, 3.0, num=5, retstep=True)\n", + "print \"-------------------\\n\"\n", + "#------------------\n", + "print \"\\n numpy.logspace\"\n", + "#------------------\n", + "\"\"\"\n", + " numpy.logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None)\n", + " Return numbers spaced evenly on a log scale.\n", + " In linear space, the sequence starts at base ** start (base to the power of start) \n", + " and ends with base ** stop (see endpoint below).\n", + "\"\"\"\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, num=4)\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, base=2.0, num=4)\n", + "print \"-------------------\"*3\n", + "print np.logspace(2.0, 3.0, base=2.0, num=4, endpoint=False)\n", + "print \"-------------------\"*3" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_5.ipynb b/.ipynb_checkpoints/python_tutorial_part_5-checkpoint.ipynb similarity index 84% rename from python_tutorial_part_5.ipynb rename to .ipynb_checkpoints/python_tutorial_part_5-checkpoint.ipynb index 42b118b..378b0c7 100644 --- a/python_tutorial_part_5.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_5-checkpoint.ipynb @@ -364,6 +364,165 @@ "source": [ "# To be continued" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Memory friendly iteration over a corpus using Python's \"yield\".\n", + "# Function from tutorial at: https://radimrehurek.com/gensim/tut1.html\n", + "class MyCorpus(object):\n", + " def __iter__(self):\n", + " for line in open('mycorpus.txt'):\n", + " # assume there's one document per line, tokens separated by whitespace\n", + " yield dictionary.doc2bow(line.lower().split())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Corpus Streaming & Formats" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Take a look at the parts under the same names from the gensim tutorial at:\n", + "# https://radimrehurek.com/gensim/tut1.html\n", + "# 
You're now pretty much up and running with gensim. Congrats!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Transformations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Let's get, e.g., a tf*idf (https://en.wikipedia.org/wiki/Tf%E2%80%93idf) transformation of a document" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TfidfModel(num_docs=10, num_nnz=145)\n" + ] + } + ], + "source": [ + "tfidf = models.TfidfModel(corpus) # step 1 -- initialize a model\n", + "print tfidf" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 0.31348023222449883), (1, 0.23857601299713876), (2, 0.31348023222449883), (3, 0.13300444544014492), (4, 0.31348023222449883), (5, 0.31348023222449883), (6, 0.31348023222449883), (7, 0.13300444544014492), (8, 0.5995275865658466), (9, 0.23857601299713876)]\n" + ] + } + ], + "source": [ + "# Now we can apply the transformation to the whole corpus:\n", + "corpus_tfidf = tfidf[corpus]\n", + "for doc in corpus_tfidf[:1]: # Only printing first document transformation\n", + " print doc" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[u'0.262*\"family\" + 0.262*\"broader\" + 0.262*\"methods\" + 0.262*\"part\" + 0.261*\"machine\" + 0.256*\"data\" + 0.248*\"based\" + 0.187*\"attempts\" + 0.173*\"processing\" + 0.170*\"representations\"']" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2) # initialize an LSI transformation\n", + "corpus_lsi = lsi[corpus_tfidf] # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi\n", + "lsi.print_topics(1)" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 0.48641308269852679), (1, 0.11579771159401867)]\n", + "[(0, 0.71628142228181579), (1, 0.59038188262621183)]\n", + "[(0, 0.56480415960439878), (1, -0.78758599492475101)]\n", + "[(0, 0.71628142228181579), (1, 0.59038188262621183)]\n", + "[(0, 0.33106538645556027), (1, -0.56392050838027818)]\n", + "[(0, 0.35299506797325275), (1, -0.42198478055514865)]\n", + "[(0, 0.33026858639927253), (1, -0.40718497786487473)]\n", + "[(0, 0.37419828239791864), (1, -0.025902785063798527)]\n", + "[(0, 0.39929537547357097), (1, 0.086715418602822544)]\n", + "[(0, 0.32548779377635029), (1, -0.037026269019385064)]\n" + ] + } + ], + "source": [ + "for doc in corpus_lsi:\n", + " print doc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/python_tutorial_part_6.ipynb b/.ipynb_checkpoints/python_tutorial_part_6-checkpoint.ipynb similarity index 70% rename from python_tutorial_part_6.ipynb rename to .ipynb_checkpoints/python_tutorial_part_6-checkpoint.ipynb index 88592d0..867f59a 100644 --- a/python_tutorial_part_6.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_6-checkpoint.ipynb @@ -9,6 +9,240 @@ "# A Vector Space Model, 
with scikit-learn" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is code to build a vector space model, with SVMs on Andrew Mass' \n", + "# distribution of movie review sentiment data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We should usually get tags automatically based on input data 
file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0; then the next 12500 are positive and the fourth chunk is negative.\n", + "# So basically train_data has 25K instances (the first half positive and the second half negative)\n", + "# and test_data has the same setup for the class label. \n", + "# The rest of the data in the file is unlabeled and we don't use that part.\n", + "# We could write code to extract the label automatically, and we will do this based on a standardized format we will work with\n", + "# later; for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + 
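"# A small held-out check (a sketch: it assumes test_vecs/test_tags from the cells\n", + "# above were vectorized with the same word_space as train):\n", + "test_vecs=np.array(test_vecs)\n", + "test_tags=np.array(test_tags)\n", + "print \"Held-out accuracy:\", clf.score(test_vecs, test_tags)\n", + "\n", + 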
"#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Below was old code we wrote for emotion detection.\n", + "# Now deprecated!!" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -19,7 +253,7 @@ "The example is based on emotion classification, with the 6 early Paul Ekman types of emotions: Anger, Fear, Happiness, Sadness, Disgust, and Surprise. There are other types of emotions, according to other theories. But the purpose here is to show how to build a vector space model, rather than get deeper into what types of emotions there are.\n", "\n", "There are a number of things I will change in the code, including the names of some functions.\n", - "For example, the function with the string \"OneHotVectors\" is a misnomer. A lot of the code was written and run in a couple of class sessions, to teach" + "For example, the function with the string \"OneHotVectors\" is a misnomer. A lot of the code was written and run in a couple of class sessions, to teach text classification." ] }, { diff --git a/python_tutorial_part_7.ipynb b/.ipynb_checkpoints/python_tutorial_part_7-checkpoint.ipynb similarity index 100% rename from python_tutorial_part_7.ipynb rename to .ipynb_checkpoints/python_tutorial_part_7-checkpoint.ipynb diff --git a/.ipynb_checkpoints/python_tutorial_part_8-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_8-checkpoint.ipynb new file mode 100644 index 0000000..3d8b234 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_8-checkpoint.ipynb @@ -0,0 +1,388 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Python's collections module" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Documentation: https://docs.python.org/2/library/collections.html\n", + "# Per documentation, \"this module implements specialized container datatypes\\\n", + "# providing alternatives to Python’s general purpose built-in containers, dict, list, set, and tuple\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'like', 'apple'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking'])\n", + "DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling'])\n", + "DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code'])\n", + "DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late'])\n", + "DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired'])\n", + "DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])\n" + ] + } + ], + "source": [ + "# namedtuple(): factory function for creating tuple subclasses with named fields\n", + "# Named tuples assign a name to each position in a tuple, thus enabling accessing\n", + "# fields by name instead of position index.\n", + "#-----------------------------------------------\n", + "# namedtuple(typename, field_names[, verbose=False][, rename=False])\n", + "# Returns a new tuple subclass named typename. \n", + "# The new subclass is used to create tuple-like objects that have fields accessible \n", + "# by attribute lookup as well as being indexable and iterable. \n", + "\n", + "from collections import namedtuple\n", + "# We create a named tuple with two fields, tags and words.\n", + "# tags will be a string\n", + "# words will be a list of words\n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "# we create a list and each item in the list will be a namedtuple element with the two fields \"tags\" and \"words\"\n", + "my_data=[]\n", + "# We have a list of document. Each document has a single sentence. \n", + "# The first word in each sentence/document is a tag from the set {POSITIVE, NEGATIVE}, so a sentiment analysis task. \n", + "documents = [\"POSITIVE I love pizza\", \"POSITIVE I like Apple\", \"POSITIVE I enjoy hiking\",\\\n", + " \"POSITIVE I am passionate about traveling\", \"POSITIVE We had fun writing this code\",\\\n", + " \"NEGATIVE I don't like to stay up late\", \"NEGATIVE I am tired\", \"NEGATIVE He feels sick\"]\n", + "\n", + "# Now we loop over the documents and populate the list of allsent, which is basically our container for the \n", + "# instances and their labels. 
From each document/sentence, we get the tag and the list of words\n", + "for line_no, doc in enumerate(documents):\n", + " label=doc.split()[0]\n", + " word_list=doc.lower().split()[1:]\n", + " my_data.append(DataDoc(label, word_list))\n", + " print my_data[line_no]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza']), DataDoc(tag='POSITIVE', words=['i', 'like', 'apple']), DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking']), DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling']), DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code']), DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late']), DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired']), DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])]\n" + ] + } + ], + "source": [ + "print my_data" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "POSITIVE\n" + ] + } + ], + "source": [ + "# Now you can access the tag of each instance\n", + "print my_data[0].tag" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['i', 'love', 'pizza']\n" + ] + } + ], + "source": [ + "# You can also access the instance word list itself\n", + "print my_data[0].words" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We don't need this, but provided as an example\n", + "def get_text_list(data):\n", + " \"\"\"\n", + " input is a list of namedtuples (either train, dev, or text)\n", + " returns a list of lists, each inner list is just the list of words belonging to a given data point\n", + " Used to get train_text, dev_text, or test_text\n", + " \"\"\"\n", + " \n", + " text_list=[]\n", + " for i in range(len(data)):\n", + " text_list.append(data[i].words)\n", + " return text_list\n", + "\n", + "train_text= get_text_list(train_data)\n", + "print train_text[0][:6]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "from gensim import corpora\n", + "# Now let's use Gensim to get a dictionary of the words in the train data:\n", + "# We only need that dict from the training data (Can you think why?, use since we only learn using feature from train)\n", + "dictionary = corpora.Dictionary(train_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dictionary(113562 unique tokens: [u'fawn', u'tsukino', u'nunnery', u'gah', u\"zuniga's\"]...)\n" + ] + } + ], + "source": [ + "print dictionary" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We can do the below to get the id of each word in the dict\n", + 
"#print(dictionary.token2id)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 1), (1, 1), (2, 1), (3, 1), (4, 5), (5, 1), (6, 6), (7, 1), (8, 1), (9, 1), (10, 1), (11, 2), (12, 1), (13, 1), (14, 1), (15, 4), (16, 1), (17, 1), (18, 1), (19, 1), (20, 1), (21, 1), (22, 3), (23, 1), (24, 1), (25, 1), (26, 1), (27, 1), (28, 1), (29, 4), (30, 1), (31, 1), (32, 1), (33, 1), (34, 1), (35, 1), (36, 1), (37, 1), (38, 1), (39, 1), (40, 4), (41, 1), (42, 27), (43, 2), (44, 1), (45, 1), (46, 3), (47, 1), (48, 1), (49, 1), (50, 4), (51, 1), (52, 1), (53, 1), (54, 1), (55, 1), (56, 1), (57, 2), (58, 1), (59, 1), (60, 1), (61, 1), (62, 2), (63, 1), (64, 1), (65, 1), (66, 1), (67, 1), (68, 4), (69, 2), (70, 4), (71, 2), (72, 2), (73, 2), (74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 1), (82, 1), (83, 1), (84, 1), (85, 2), (86, 1), (87, 2), (88, 1), (89, 1), (90, 4), (91, 1), (92, 1), (93, 4), (94, 1), (95, 1), (96, 9)]\n" + ] + } + ], + "source": [ + "# Now let's vectorize the training data\n", + "train_vecs= [dictionary.doc2bow(doc) for doc in train_text]\n", + "print train_vecs[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['david', \"bryce's\", 'comments', 'nearby', 'are', 'exceptionally', 'well', 'written', 'and', 'informative']\n" + ] + }, + { + "data": { + "text/plain": [ + "[(0, 1),\n", + " (1, 1),\n", + " (4, 16),\n", + " (6, 4),\n", + " (16, 2),\n", + " (28, 1),\n", + " (29, 5),\n", + " (37, 2),\n", + " (39, 1),\n", + " (40, 2)]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Now let's get vectors for the test data:\n", + "# First we need the text of each data point in test, let's use the function we developed above\n", + "test_text= get_text_list(test_data)\n", + "print test_text[-1][:10]\n", + "# We can now use the test_text to get test_vecs\n", + "test_vecs= [dictionary.doc2bow(doc) for doc in test_text]\n", + "test_vecs[-1][:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0 [(0, 1), (4, 9), (6, 2), (8, 1), (16, 1), (29, 5), (32, 1), (33, 1), (37, 1), (39, 2)]\n" + ] + } + ], + "source": [ + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. 
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TAG: 1.0, Vector: [(0, 1), (1, 1), (2, 1), (3, 1), (4, 5), (5, 1), (6, 6), (7, 1), (8, 1), (9, 1)]\n", + "TAG: 1.0, Vector: [(0, 1), (1, 1), (4, 21), (6, 20), (11, 1), (13, 1), (17, 2), (20, 5), (28, 1), (29, 5)]\n", + "TAG: 1.0, Vector: [(4, 8), (6, 2), (17, 1), (26, 1), (29, 2), (31, 1), (37, 1), (40, 2), (42, 11), (56, 1)]\n", + "TAG: 1.0, Vector: [(4, 9), (6, 1), (16, 1), (21, 2), (28, 1), (29, 6), (42, 7), (44, 1), (50, 2), (60, 2)]\n", + "TAG: 1.0, Vector: [(0, 2), (4, 8), (6, 5), (16, 2), (17, 1), (29, 1), (40, 4), (42, 6), (43, 1), (44, 1)]\n" + ] + } + ], + "source": [ + "# You can loop over the data to get the tags and vectors easily now:\n", + "for i in range(5): # len(train_tage)\n", + " print(\"TAG: %s, Vector: %s\" % (train_tags[i], train_vecs[i][:10]))" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "DataDoc(tag='_*0', words=['bromwell', 'high', 'is', 'a', 'cartoon', 'comedy', '.', 'it', 'ran', 'at', 'the', 'same', 'time', 'as', 'some', 'other', 'programs', 'about', 'school', 'life', ',', 'such', 'as', '\"', 'teachers', '\"', '.', 'my', '35', 'years', 'in', 'the', 'teaching', 'profession', 'lead', 'me', 'to', 'believe', 'that', 'bromwell', \"high's\", 'satire', 'is', 'much', 'closer', 'to', 'reality', 'than', 'is', '\"', 'teachers', '\"', '.', 'the', 'scramble', 'to', 'survive', 'financially', ',', 'the', 'insightful', 'students', 'who', 'can', 'see', 'right', 'through', 'their', 'pathetic', \"teachers'\", 'pomp', ',', 'the', 'pettiness', 'of', 'the', 'whole', 'situation', ',', 'all', 'remind', 'me', 'of', 'the', 'schools', 'i', 'knew', 'and', 'their', 'students', '.', 'when', 'i', 'saw', 'the', 'episode', 'in', 'which', 'a', 'student', 'repeatedly', 'tried', 'to', 'burn', 'down', 'the', 'school', ',', 'i', 'immediately', 'recalled', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'at', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'high', '.', 'a', 'classic', 'line', ':', 'inspector', ':', \"i'm\", 'here', 'to', 'sack', 'one', 'of', 'your', 'teachers', '.', 'student', ':', 'welcome', 'to', 'bromwell', 'high', '.', 'i', 'expect', 'that', 'many', 'adults', 'of', 'my', 'age', 'think', 'that', 'bromwell', 'high', 'is', 'far', 'fetched', '.', 'what', 'a', 'pity', 'that', 'it', \"isn't\", '!'])\n" + ] + } + ], + "source": [ + "print train_data[0]" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + 
"nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/python_tutorial_part_9-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_9-checkpoint.ipynb new file mode 100644 index 0000000..2d1c3f6 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_9-checkpoint.ipynb @@ -0,0 +1,278 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "#Scikit-learn " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# From here: http://www.wildml.com/2015/09/implementing-a-neural-network-from-scratch/" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAEACAYAAAC08h1NAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnWd4HdXRgN/Z3dtlSZYsuUnuHdu4YMAUF3qopkMICSUE\nSCGUhIQPCBBIQgu9JwFCr4FA6M1ADNgYjHvBvUu2Javcvrvz/dCVI8myLVuy5LLv8yhEe8+emV3r\nzp6dM0VUFQ8PDw+PvQejrRXw8PDw8GhdPMPv4eHhsZfhGX4PDw+PvQzP8Ht4eHjsZXiG38PDw2Mv\nwzP8Hh4eHnsZzTb8IvK4iJSIyMwtfD5ORCpEZFrm57rmyvTw8PDw2HGsFpjjCeB+4KmtjPlUVU9s\nAVkeHh4eHs2k2St+Vf0cKN/GMGmuHA8PDw+PlqE1fPwKHCQi00XkbREZ1AoyPTw8PDy2QEu4erbF\nt0CxqsZE5AfA60C/VpDr4eHh4dEIO93wq2pVnf//jog8JCJ5qlpWd5yIeEWDPDw8PHYAVd0ud/pO\nN/wi0hEoVVUVkf0BaWj0a9le5Xc2InKjqt7Y1nrUxdOp6eyKenk6NQ1Pp6azI4vmZht+EXkeGAt0\nEJEVwA2AD0BVHwVOAy4VERuIAWc1V6aHh4eHx47TbMOvqmdv4/MHgQebK8fDw8PDo2XwMne3zsS2\nVqARJra1Ao0wsa0V2AIT21qBRpjY1go0wsS2VqARJra1Ao0wsa0VaClkV2nEIiK6q/n4PTw8PHZ1\ndsR2eit+Dw8Pj72M1ojj9/DYIiISAA4AbGCKqtptrJKHxx6PZ/g92gxD5FQfxuMdCOKgbCSZFpGz\nVPXDttbNw2NPxvPxe7QJIjIkiPnVbxke7inZAMzRMu5jRjSFO0BVV7aiLiZwNDAKWAO8qKoVrSXf\nw6M5eD5+j92GIOZlR9MtUGv0AQZJHgfRybKQn7aWHiKSE8T8tjPhF4+nxw3D6HCXD2OFiBzUWjp4\neLQ2nqvHo02wMPoUkWU2PN6NdgEfpX1bS48A5u0jKOh/IQMDIgIQmaHreYhZ/xaRzt6eg8eeiLfi\n92gTEjiTZrIhWfdYXG3eZXnawZ3gF7M6LNarItJrZ+rhoOdMoGet0QdgqHQgn6AfOHRnyvbwaCs8\nw+/RJti4D3zF2vgHukKT6hDVNDcyhe60893AqPCtjI4cTbcJfoypItJVRAosMa7PEt+HQbEeFZEh\nLaGHixuM1FQYqUfmWFZLyPDw2NXwNnc92gwRGRjCeiiJfagCnQlzMweYdVffz+j81OeseVrg5P0o\nDO9Lh+Bqos57LE8mcc53VV9qjg5h8X1yKr3GHiZFm4SWaYJr+CqRxu2iqttqMuTh0absiO30DL9H\nmyMiPhO59SR6Xnm89Kj32Vwt4yFmVR1Dt8hx0mPTG+oyreLPfFOdxi1U1XgzZA/1Y0w6im7Bfcm3\n1hLjVRZFo9g3p9S5bYcvysOjldgR2+lt7nq0OaqaFpFVa4klgGDdz9aRwMbNOoyien/Y3aUdnTTs\nrqD6YGCH4/5VdYaIDP+AFdd8zMoxwMoY9u2q+s6OzunhsavjGX6PXYVnv6b05nHalT6SA0C5JnmN\nxdEUbtBFN4sActGa/zQTVV0IXNjceTw8dhc8V4/HLoOI/MCH8VI3stwQlsxno1/gZhMZdThFx58i\nvTcZ/4VawR1Mq8i4elJtqbeHR1vi+fg9dntEJAQcBYSBj1S1VESK/BhfD6B9uxEURFZSnfqM1XYK\n9wxVfauNVfbwaFM8w++xxyIi7YBzw1hj07iL0rh/U9UlbaBHF2AAsKQt5Ht4NMQz/HswmXoyfYBq\nVV3VBvLbAecEMfdP4S5w0SdUtaS19WgrRCQQwnzSQSd0JpIsIeYXmBTHOW1n1vWRmtjWdkBcVdM7\nS47H7otn+PdQDJHT/RgPBTCDCRzLxJgRxz5TVZe2hnwR6eHH+Ko/7bOGkh9ZQmXia0rtNO4xqjqp\nNXRoa0JiPdCHnAsuZXAoJBZpdXiK+clvWfdBTO0TdoZMQ+SEIOa9KdxiAcfCeD6B8ytVrd4Z8jx2\nTzzDvwciIoeGsd69nH3DfSQHW13eZ4XzJkvXJnF6tsYqMCy+D46h2/gTpMemzdVpuo7HmLM6iVOs\nqs2OrNmVEZGghVF2G6ND7SWw6XhSHX7N54kUbi9VXdPCMg8PY71xCfuE9yGPSlK8wMLEDNZPjant\nlZLw2IRXnXMPJIx1/en0DtWGOFpicKx0NzsTzgZO3NnyRSSUwhl3FMX1wimH0YEwVjaw787WYRcg\n10Koa/QBAmKSSyAFdG1pgWGsP/2IfuHBko+IkCMBLmJQ0IcxXERGtrQ8j70Lz/Dv+vTvRc5mT/MB\ntA8B/VpBvlnzP/VVEBEsxIVGCt3seaxz0cSKBh6WCk1STtIPfN/SAtO4+wykfb1jhggDao4NbWl5\nHnsXnuHf9Zm9kIrN/HFzK
IsD83a2cFWtDmBO+4K19XRYpBVUkHKAaS0hR0RGB8X6R0R8/xKRG/1i\n3hcQ834RGSd1i/c0T4YpIhdkiW9Klvhm+8T4o4jkbes8VXVcuPF+ZkQXayUAq7Sau5geVfQd4EgR\n6VBHTueQWC9YYiQsMZJhsV4TkR7bo6uFsWoF9R80qsoyqlxg2fbM5eHREM/Hv4sjIqNDmB/+kqHh\nAeSSxuVtltnvsWJ1EqdPa/j4RWSYH+OzMXQJ7EOefxlVzrssTyZwzlHV15s7f0DM6/2Yvz+absGF\nbDQWU8k4umBh6ERWx+LYbyVwzm7OXoKISBDz1UJCR51Ez0gIi09ZnfiO9aVJnGHbKsYmImIiF1sY\nN9m4eQIJQXxdiCTDWCykIiBwUwr3/gDmvPF07Xg03SwT4SNWOu+wrDzTWWxDU/Q1RM7tQPDhqxkR\nyZcgrirvstx5k6XLkjh99/R9FY+m423u7qEYIif5MR82kZw0rmlhfBXH/lErtyfs5sf4tR/zQBt3\nfgLnHlWd0QLz9gpgzrqVA0PrSfAIs7mBUWRJjQcpqQ43MqW6hPiPVfW1Zsg5OAf/e7czOuKT/21X\nPKqzE1Mpvc1W98YmziNAkQ9j7pXsG+kvNe6Yck3yR76OVpB6fDB5F1wpwyJ1z3tEZ8W/Yd3Ntrp/\naaocP8Z1CtcUEEpVkPS56II4zgRV9Vb8HpvwNnf3UFzVfydximLY+6Rxi2OaHteaRh9AVZcn1bmq\nSlMHx9W+oCWMfoaT9qfQyJEAX1PKGDpvMvpQs4F6LN2zwljNqqUjcPQhdA7XNfoAY+gcDGKe2tR5\ntGaldPJwOhi1Rh+gvQQ4hV6RMNYZ+1EYaXjeCApCIazDtkdOUp2b07gdVxM9Loo9Iqb2cM/oe7QE\nnuHfTVBVV1WXquq6ttZlZ2Hj4mOzWmz4MBAINHJKk1GIVpPezC0WwwZoNC5eRPqHxXo1IGZ5WKzl\nlhjXiIjfgE5diIQaji8kBGCuJrqZnDXEHBt3u422qlap6iRVnb+953p4bAnP8O/FSA3dRaRzG6rx\nxhRK3QpN0oN2fMJK0nXc164qH7IyFsV+tplyXvyStW6JxjYdSKrDv1kSjWI/0nCwiPT1Y3x9LN0n\n3MIBuVcyrLgPOdcFMV9z4csplFY3dJN+y7q0jfvORFall2Q2gQFWajXv1eyJ3N/Ma/DwaBGa7eMX\nkceB44BSVW20HZ6I3Af8AIgB56nqZpEgno+/dRGR8UHMJwQpdHDFxJgTxz5HVXd6pFBDAmL+wY/5\nuxBmOIIPQTiCIiwMPmIFy6muSOB0VNXktmfbMpYYPzORew+koxHBZ01ibTyF83YC56yGm6UhsZ45\nmm5nnSQ9N72C2OpyFZNiVaQPC2I+ti8d+k+gZyCExees5k2Wqo17pcJSC+OpLkTUQFhJteXgXuyo\nPtMc/T08GqNNNndF5FBqXpWfaszwi8ixwC9V9VgROQC4V1UPbGScZ/hbCREZ6Mf4+hIGR/YlHwfl\nU1bryywsy2ShVm57lhbX6dQQ5ov3cIj5DeuYyjocXAaTxwt8n3KgsCVq4ohId+AMoAswE3ilsesN\nibX6WkZ27ir12+4+pfNTE1n1e+BxP8YSC6O9izKUfEZSyDPMj1WRPj+AebyDe6aDWkHMz+I4v1TV\n2c3V38OjIW3SgUtVP99GjPKJwD8zYyeLSK6IdNybCnztagQwrzqa4sCwTOi5hXA4RTJLNwSns+GH\nwGauj60hIv1DmLfZ6OEGEnfRf6Rx/7idLRG/D2LFfGK2O5BOHEgnoCZ2/RUWuw5OGGiJYmiJEOZJ\nCiNyCaQ2kHggKNZdSZzrtc4qyEA2bCDZuWuDfuvriKWADcCIHPy+WxlN3TQDW93w08z/5ygKjZPo\n6Q9h8V/WjH2FRV+KyNDWqq/k4bE1WqMDV1dgRZ3fVwJFgGf42wgfxr59yd3s334A7SNzKW/UXbcl\nRKSnH2PKcfTIGk0nI0o6618svnwe5YeKyKHa9FfKuVHSzjKtoru023RwNmVIzd/K2u3Rawu6ShDz\n3bF03ecUevksMUJlmuCvfHdFCfFVwMO1Y2PY97zMwnv7ak4kJDW3aq6WsaDm2fMacE4/co2GuWUp\nHDoQDJzHgE15Z0dSLGWaCH7Cqt8Cv9iGjkFgPDUZ0RPb4u3LY8+ntTZ3G76G7BrJA3spNu7MRVQ4\nDY/PZ2M0hbtd7ogA5rVHUBw+Vrob7SVAkWTxS4YEI/j2BcY1dR5VTdu4l93JtNjnupoVWs3HulIf\nYlYsgfPz7XiAbI0RAcy+p9HbZ0nNn36eBDmfgWE/xv81GPvEBhLPX8Wk+MM6K/oX/abyHmZUpXFP\nUNUqYP4CKtyGas1gA6PouFmy8TA6+HwYY7amnIgc68Mo6U7WC33IedrCWGuJcXGzr9rDowGtseJf\nBRTX+b0oc2wzROTGOr9OVNWJO0+tvZcEzl/fYfmZPTU7PJg8XJTPWaNzKEsB2xU9YyLjR1JQ7+/I\nEGF/LQy/w/KDgU+aOpej+rSIrHyRhdcq9BOYkcC5WVUnb49OW6FnMVmO0cAoF5NFCqdT3WOZzd6L\nROTWrykdR42b6a067qtPq0itfJlFvU/Snr6AmCzTKuZQljIRpUH46RpiaP0333qISLEf4+XfMDxc\nW5CvRGPcwtSHTZFRLlymWickyWOvRUTGsR2LqsZoDcP/BvBL4AURORDYuCX/vqre2Ar67PWo6mwR\nmfAwsx43kTwHNQS+T+GevQMbqKXriPfqSXa9g2uIxYG4iARVNbEdun3CdjwstpOZi6j02epSu+IH\nmE85QayFW9BnEbCokeOuiIz7lNXPfsTKQ4Jq2kmclI173XQ23DZLNwQGSz4A6zTOayyOxbD/uiXF\nTOS80XQya40+QEcJc7z2kI9ZeV4V6SEicpCqbvam5rF3kVkQT6z9XURu2N45WiKq53lgLNCBGl/s\nDWQqNqrqo5kxDwDHAFHgfFX9tpF5vKieVibjj+gNJFV1i6vRbcxxegHBJ65jv0g78QMwV8u5h+kq\naMoFtTCeTeDsEivWsFjvDCRv3I/oF8zBz/dU8CAzY1Wkf9RYSQgR6S1wliBZbk1Bts8bup1EpADI\npaYdoy0iY/0Yr3UgZIYwWU61T+GatDr3bkmvoFiPnUyvi46S4nrHp+t6PmIlG0hUryF2jqq+0TJ3\nwmNPwavV49HqiIgEMG9V9LKBtE9XkrJWEQ2dTh8OlyIqNMlTzE/Mo/yjmNrH7wL6Hh3E/LuN21UQ\nNZANaZwrHNXNXFw+MS41kbsOorORhc/3X9bE4tgfJXBO2dbKW0R8wKHUNI3/fFtvUiJybl9yHvo9\nI7Lq7g88rfNphw8fBm+w9P6UOpft2JV77Kl4tXo8dhoism9YrJci4lsUEd/7IjIeamrKJNT+XQq3\nz3Q2XLqS6opLGczhUgRAjgSYQM+ggx4hIhNCYv29nfgnBcS8e3tLFTcXS4yfRrD+dS
Z9ul7FMDmS\nYnVxwy4sbuR6exrIXTdxQPBc6e8/WXrJrYyOdCR8OHD+tmSpalpVP1bV/zTRffbycqpLn2a+lmmC\nak3zli5lGusYT1c2kEw56PoduW4Pj4Z4K/49ABHJNuDCENbxLloax3lYVT/LfGYB/ua4WTKui7dP\nomdwAO2NFVTzMgtjCZxf2uo+UXesIeI8yjjDEoNSjfMYsyknSQCTDSQYQK47niJjHuXpiaxKpnAP\nU9Wvm3kLmnINIR9G6R8YldVV/ldD7Utdy7Ms+C6q6eENxl89hi43nycD/HWPf6Ol/J05C5K4twJT\nVXVmC+pY4Md4TuEIAxhGASfTkzQuNzM1nsId5OUBeDSkTRK4PNoWEekQwJw6kPYFo+kU3khS/8PS\nE/1i/tVEik3kbAUrLNaieE2j7ve3V0YI65HzGRDeTwoB6Ek2vTQ7fDNT7xWR5+qWUghgliyjqnMP\nbcfdfMd4ijiCIgwRSjTG3Uw3HJSzpK+vm2b5nuX7v9M67RtHdiDo1jX6AKMo5B/MHSIi4QYPx1AE\nq973Y63GeJ7vySPYp4is++ZQZoTE+iyBc3LDDWwRCQA/jmD9CHCi2E8Az6uqvSUFVXWdiBzlx/gz\nyOWA+zzfu3MoNx3cizyj79FSeK6e3Rw/xvX7U9j5MhkaHiWFHCnFchP7hwWuG0j7s+/koMBjjDN/\nyqB+QczXROTgxuYRkaBPzJtDYq0JiLkxLNbLItJHRHLSOL1HUFBvfJFkkUdAgXor5TTuX55gbvRL\n1pKFj6OkmNrwyY4S5nR681EmqvEAOpLGHSCSCX/ZuUTj2EbDN9wEDgIu0NAgvzOJNfFkxpWvqjzE\nTI6lO3+SA41LZXDW3RwS7kvuWD/GzXVPFBF/EPOT3uTc/WMGjDmHfuOLyXo4iPmmiGz1O5cpx3xN\nEmfgZEqums6GX6Vxi2r3IETkiIj4vgiKWRYR37ciMqG5N8Zj78Mz/Ls5BnL6ERTXc0fkSoAh5Mtg\n8gI5EsAQYbgUcBZ9wyGsWxrOkclofX8g7a/6HSM63cIBOUfT7RQ/xlSgo1JjIOviqhLDNoGquscd\n9IENJO59kvnpzmxWlp5i2rGOmsWxi5LJ5Ts8Ir5vA2JWRsQ3XURObtZNaZzvEjgbJlOyyfKrKm+y\nNO3H+I+qphqM/zqF+8YtTI1O0RI+YiVxHMbX6atuicEP6RtSaJhkdVZnIkOvYURklBRyoHTievaL\n5OA/hJpihdskU4L7EVV9UlXLAAyRU8NY/z6HfqNv4cD25zNgeA7+Z31i/HzHbonH3opn+HdzBByH\nzbvwKYrZ4J93EO1x0cYadY+L4Bt+GUNC3aUdHSTEidLTOIyicADzCj/mO2+wJF13tTyJNZrGXQPM\nqSdXVRPqXOuiJ8yhLN5whT2Pcooz9W8+YpVrYSxrh++J8xgw/DZGt7uQgUNz8D/jE+PSHbwljaKq\nmsA5+UnmVdyl31W/rou5ia+rPmP18jjOJVsY/6NVRC9+mvn/fYWFS3Lxpxtm5OYTJI2bVTdVN4J1\n9uF0jdRNFLPEYDxFWUHM03dEfxExApj3/Yoh4dHSiXwJMlIK+Q3Dw4Lcmin14OHRJDzDv5tjo0+/\nzbJEXQNbojFmUcYwOtQbu4JqTKSxzl2H7E9h2GzghRhJgc9Ejoxj/+xTVi+/ka+rXtVFeodOq36W\nBWWJmjaAW4oOeD+GPe9J5qWqNIWrygxdz0sspIAQ9+mM6OssLnPQ/F+zb3g/KSRHAgyXAq5iWNhA\n/iIi/i3MvUOo6rQUbrdZlF3+Bkv/uJzq85I4A1W1dAvjXVV9Nop9Koi1gmpfVYMXg2msJ4w1o+59\nUEikGnkYp3BcpenJbLVkeh4/p9CpH7n1PusqEbLxKzBge+f12HvxNnd3c9K4f5lF2fG3MLXnQdo5\nq5yk/TEr04omp1CSfZgWGYYI6zXOc3wfjWH/uZFp1pcST1ATc76JDSSQmj4LJSIyYAXVx6+gegiw\nFHh1a5FCqqoicthUSh/+grWnCIiFsTqJ89HnrJYkzjcKn0awJveS+lm/RZJFWC0jRaoPDd4omkum\nzs4/tuecIObtB9O5kx+Du5nOGdqHzoSZSRnPsSCewLmi7vgY9hPvsPyI0dopK5Bp9RjVNB+yIp7E\nfXp7ZBsipwcxnzyC4sAHrDDiOITrfG1tdYmS9tUMleHAwsw1enhsES+ccxdCRMLAWQHM0WncxS76\nhKpusyplZmV8WgjzGBtdl8b9B5AKYr7pwyjOwW+vJe434JakOpsZfhHpaSKzf8Hg0DCp2cSt1jQ3\nMzW6jvj5qvpyM68rAISAirorYxHJtTDW3sshgdoKmAApdbiMz5Mp3B5Nuf6dgYh0CmD+WdFTFHL+\nzIHkEeBTVvMJq9hIEgNxKkhdp6q3NjhXgphPBTFPHk/XsI3qJ6xKpHH/llD78u3QYbiBfDmeLoEj\nKOZ1lpCFjx/SFxFBVXmDpfb7LK+y0XAu/mQ5KZ+J3J/EuUYbNJfx2DPxMnd3Y0SkawBzci+yc4bR\nIWs5VYkplDpp3ONU9dMdnFOAwUAeMK1hiV8R8QUxH3TQH+cScDaSDLfDl+5Gu+Qcyizg4RTu5DDW\n2QqpOPY/gbdbqFImAGGx3hxNp6POoZ+/1pi9zKL0p6z+PKbpw1tKzvYgItl+jLlj6VJ4BMXWTXzN\nzRxAGIuFVGAi9CGHO5lWuYCK87ZQ6kGAQ30Ypyjq2OiLqjqlifIliHmPgVw8mo4BECZTwmF0ZTZl\nxLDpTjuWUlVVTtIaTJ5xIQMDQbEo1yT3MD26ltjNKXVua/Gb47HL4Rn+3ZiwWK8fRtFxp0rvTUvf\nWbqBB5i5PoXbaWcU5wqKdU8P2l30cwaH24mfmNr8nTmpuZTNTuK+FMA8t4Bg9yMojti4vMvy6mrS\nrydwftxSxl9E8oOYH4aw+vQjVxZRodWklydwDttSMb+djSly+b50+NOvZGgY4B86hyhpFlJJZ8LY\nuGwgSTXppIt23IHCdltFRI5uT+DVP7J/JCI+AMo1yc18zWUMpZIU9zIDBSeAIfcxxvDV2Z9ZqdXc\nwtTyFG5+Sz6kPXZNvASu3RQRsQzkuB/Qrd6/x2DJJ0cDgXXEDwC+2MK5fsDU7et2VZvJetFFDArX\nFlcLi8WFOtB/JZOGF5M1yEAC1zGS2k3fg7Vz1jV8dXICZxwtVEFTVTeIyIgEzujJlAwEvqeRQmit\nSQjrqFEUbtrvGEUhjzKbaxhJUaYV43wt5y6mi4u2o2U6g9WV/7Pj6b7J6AO0lwBjtAuTKcGPyQgK\nOIhO5tssw9dgU75IsrDVzQaCwHb9XXjsHXhRPbsGoqhYjfxz+DAU2Cy6RUS6hsV600SqDaQqIr7J\nmc29ptLBh0FegyjAiPgoJEQEX+Aoiqkb6RMQk8PpGvZjnLEdcraJ1vCFqv5DVT9r61Wqg65eR3zT\nG9Y01nMs3TcZfYD+0p6D6KQmckFLyzcgJ
4Jvs+MRfHzLOr5iLWfTl+60Yy0xkg1eBpdoJT7MDcB2\nRxB57B14hn8XQFXTIawvPmdNPYO3XKtYR1yAr+oeF5FQAGPy4RQdcx+H+h5hrHkGffb3Y3wmIj2b\nKLYkjeuWNAjMqdAUZSQJYqCNNErTmoyrPbomfALnoXdZnqy9N+Uk6dJIMloRWQEfRq+Wlh/FfuVT\nVkfrPv9cVT5llRYS4ib2J0+C5EmQfenAI8xiY6Zqxkqt5hFmRW3cG9v6Aeqx6+IZ/l2EGPbPX2Zh\n1bO6IDlD1/O2LnNv49uYg3tJwzowwBk9yM45RXpbIbGwxGCMdOEwigJ+jCubIk9VUwJ3PsDM2NqM\ngVuvcR5hFmPozP505BNWYdcJDImrzUesjKdwX5QazBa7AW2AiARFpFsm6mgTqvptCveKPzAlfodO\nq1pDNPUdmxfG/JZ11Qmcz3eCak8toXLZA8yMz9UyZukG7mRarIxkSQ+y7WCdCKif0J9yks7VfGH/\nQj+L38LUjRtJXe+gj+wEvTz2ELzN3V0IESn2Yfw6gHmwjbsogXO3qn7TcFxAzAcn0Ovnx0i3esdn\naxmPMvubKk3t10R5hh/j/xSuthDLRq0uRIzrGGkCPMJs1hPnELqQxtEPWBmLYb/mmJbluPYpqq7l\n94W+SKVjv1TV6S1yE1oBEbECmLe66KV+DFK4roHcm8T5Q90QSBFpDxwFZPsx7phAr3bj6GLYKO+w\nzP6YlSVJ3H7NqXy6FR3bmcivApjnCNgx7CcUPglgfvFbhodrcx/majn3Mj2awh0OxICSrRWC21UQ\nkYBhWNcYhnWxuk7EMKxP0nb896o6t611293wonr2EgyRK0ZR+KdLZHCo7vF3dbn7Jkteiql99vbM\nl2kaUgCYfoxvDqOo/aF0tmLYPMOCVAmxSuCtBM6zpuF/qE/3Md337T/BZ1lBFi3/XKfOej7quKl9\nVXWzuva7IgExH8/Cd24En5WDn6F04HNWRUuJ35tQ59rGzhGR/mGs++LYhwuiAYzX4zVdxda0pu6G\nyAQfxpP5BMVB2UgynWmZ+UFr6tEcRER8Vuj9Du17Hzxi0BmhUCCbxSu/1OnzX6t2nNRIVf2+rXXc\nnfAM/16CiHTwYSy+hH3aDaMDIsJyreI2vo3FccY1p769iBQFMG8CThRI2LiP2+itqhoXkdPzc3v+\n49gxN7arW7Pm2zkvpect/uAfaTvRovV1dgYiMtSPMX08XRlJIWUkeJOl9COH/7I2msbt0Ihrre75\nBpn96FZUu6EOfuAAavZapuwOK/y6iMjoUDD3g1OPvCtiGP9zW30371/OnEXvPp9Ox89tQ/W2iohE\nDDEvt6zATwCx7eQzrjp3tWW2tBfOuZegqutF5OjHmPNKBCvbr6ZbRgIb99LmGP3M3CuBCxv7TMQc\nVdxpZD2jD9C1cKhvwdKJhzRHbmsRxLztSIo5WWr3ZHMYpHlcw5cI+CNYX4XEmpFxs01reH5LZsNm\n9hYmAH2BecAbjVQJ3YzMmJ2xt9BaHNy98yh/XaMP0L3zfubcRe+NaxuVto2IBCwrOKlT/sB++/T5\nQQgR5i569/er180+XURGbW9IdVviGf7dFFX9UkSKkzgjgAA13aCS2zpPRI4MY/0pjTvQh7Eyhn2z\nqj7XNJk8uQTnAAAgAElEQVTO8rKKZTEa1PTZWLVKVd2lO3IdOxMRKQSOBYSajOMSFx19CJ3rjcsS\nH/00lwSO7xi67bucqsFvsexUQ+THruqrO0m3Xn6M/xaTldWf9pG5lEVXE6sQkYNVdflOkGcCR1PT\n9GYZ8FobGqr1ldGSJNSPWa2OrUfEaNP2kpmM61HACGAF8F6dN6ozctt17TP+gMtDtYufwrx+wfcm\n/blH6Yb5P2Q7a0C1JV5Uz26AiJimGL8Ki29hUKwNYbH+LSKDM9Ujp6rqpKYYfUPk5DDW6z+m/6jb\nGZ11MfsMyCf4WEDM3zdRledWlXznrFz7v4VwZfUavpv3ajxtx+7c0evbGfjEuNSHsWwYHe4fTof7\nfBhLfWJeZiDRKOnNxidwGEsXhkg+x0kP8zcMD/sw/pbZ/2hxQpgvTqBXx2tlv3anSW/jehnV7gd0\n6xTC2q4ibk0hkx09szPhF46h2839yH3Ej7FCRPZpaVlN5NWS9XN1demsTQeSqSjfzHkxmkpH72oj\nnRCRiM8KfRoM5Hzcq/jgv7bPLn7eMv3LRaQ/gN8XPrFPtzGRum+8IkLfbmMjfl9kt2qI4634dwOC\nmE90InzqmfQJ5xHkG9ad8DqLDxORg7SJPV8z9V/u/TmDw4MkD4AhBLhah0euY/L1InK/qka3Noeq\nlonIMZ9Nfej1UDAn6LNCbkXVap+qe9WO1hPaGYjIsBDmnTewf7BQava/12ucG/n6L0mc515j8TmX\n6dBQbXLaQq1gOVXsW6eMdS/JJlcDZinxEcDkFtavKIA5+AiK6i28jqGb9RbLDhCRDqot11g9hPnI\ngXTq/aNMPSSg3ee6Wp/n+3+LSN/W3q9Q1SoROf6TyXe/2T6nG6FAjrFm3SwL5AngmdbUpS6WGbyj\nc+HgUWP2+0XQyPxtzF/ycdY3c174j4j0s8xAeSJZ6dJgwZxIVaqqU9YWOu8o3op/F0dE+gKnXc3w\ncH9pT4GEOEa6ycn0ioQwGyuxvCXyHLTjQNrXO1ggIfIJpqkp5rZNVPUL20l2roqWHltWsex0x013\ndFx7l4oZD2BefBTdArVGH6CDhDiabgETMb+nYuo1fFX9ii7Uh3Rm8g6mcR4DCNRJS1BV0rgGsE2f\n+w4QCWDYVoNSCz4MLAwXGskW20FExJ/CPelkevnrrlQPobP4MTsCQ1pK1vagqp85brrj+vJFP16x\n9ttf2E5qQNpO/KqtNs1FRFy1zxs1+IebjD5Avx7jxWcFOwHDbSf59zmL3k3E4uWbPo8nKpj9/Vvx\ntJ34WxuovcN4K/5dn0OHkO/WTdoBGEWh/ItFh4pIO6C6CV+YqKJaRZrsOhUgbHWpIOWHRjKUtkCm\nYNx/m34JO5eMO6YdsFFVXRPpmEdgs+SyfAKmD6Mgij02gTP+bZYfahjWWVlWdr9V6Zgxss7Yaawn\nhl0B7Iz8hIVJ3PhCrcjqIzmbDs6lHEXLgJb08fsACVH/dogIWWo5laSyGz9t55OJnvp3W8lvgOm6\nTjAUqN/oRkQIB9s78cTGfFX9wLICf3r9o6uv7951fwRDlq76ygXuVNVd5vvQFLwV/y5GJpv0NBG5\nVET2BcrWk6hXIsFWl9dYDEi2iWwIYK62xPjJ1uZV1YSF8cqLLEy6mWeEqvI2y2zQOaq6aKdd1E5C\nRAI+K3SfYfg2moZvjWUG1piG78IY9jtfUhJt+Cz8kpJoFPvtTG2gj4GnTcPqPv6Qa4yP/eXcZczh\nA13BYzKfR5mtSZzTWzKKpxZVdVI4l97N9NgnukqXaRUf6gp9gJmxBM6lLbnqVdWoH3PBtw2e62s0\n
yjoSFvBtS8nanVFV22eFZq9YWz9fMpbYyMbKlQHgGwDbTv7ZdpKDFi3//JqFyz/9P9tJDknbiRva\nQufm4MXx70KIyCgfxnvdyLI6Erams14d9BMb96CfM6T9MKnxQT+hcykhxgUMpIAQC6ngYWbFKkld\n6qg+tZX5s0OY7/owhw4glyVUuRUkS1O411Cz4v+iKZvEmbn8wCl+jENtdJWL/lNVV7XEfWgqfl/4\nhYL2vU8cPfzCUCSUz7qyRXz69X2xeGLjr/0YvxtJQfExdAsAfMCK1BRKVyVxhtTuZYjIEfm5PV85\nbuxNOWk7ydKVX1CxcRmhSAHfzHnJBY1sLaa/uYjIwWGsa4EBCrPj2Leo6g7tJ4iI30KutjAucdEs\nE/kkXtOMZZ6IjA1gvj2BnsGBtDdWUs1LLIzGsK9Oq/tQC1/WbouIHGZZwTdHDf5huHPBYCqqVjFl\n5jPRWKL8bttOXt/W+m0JL4FrN0ZE/H6MNRezT97wTBestLrcxXfxhVQ8ZSBndiPLyCXgm8GG0N0c\nQriO+2eBbuQepq9K4BRva8UoIvsBQ4FOfoyrOxASAdYRJ437E1f19W2cnxfA/KIL4a6j6Ji1hmhy\nMiVOCvd0VX272TejCYhIsWX6F5x+zANBn/W/CqMl6+fx8eS7VqbtxL5+jOsN5GxAXPSFFO4fVXXD\n5nM8GPRZ/yvXs758Ee9Puq3EdhKdd6VCZ5lQw0FAFjC99qGU2bh/pyfZY06jdygbP5Mpcd9gSTSF\nO1JVv89seP9BkJHA0hj2n1X1vba8nl0RETnI7wvf4rrOMMMw16bSsb8Az+xKfwcN8RK4dm+O6ETY\nqjX6AD4xOFP7hG5n2okJnM6LqDwOOKQH7S4Mi9Wu7sl9ySGF04mamP6trlJVdaqIVPgxvvstw8O9\nM37mxVrJHUx7VkRGqOr8LZ0fwLxtfwp7nseA2g3DwKHahTuY9qKIFLZSfPig3OzipM8K1qsrXZjf\nn7Sd6ApUJdW5Arii8dNBVVf4feG3Pp/64LGjh10QCgVzKa9cyWdTH4q5bnqXqm4pIoOCmK9ZGF0j\nWE4ZSTHFuNxR93Fgfz/mIVewb6h2w/hYuhspdcLvs+IPwLmq+h1wShPkGICvqW9+exqq+gVwWFvr\nsbPxDH8LkUmM+oWBFMSx/+Ogj6hq+bbP3EReHsHN9lzaE8RBszOru1dFZGYJ8YttdakbFbKWGCZG\n1MVt0hfWj3HJeLr6etfZXOwl2RyuRf4PWfEL4LItneuiZ51Aj3pRIn0kh64acZdSdRjwVu3xTKGz\nsUAS+KQFXSdLK6vX+F3Xpm4G6MbKFZimf6PjpJpUxiBtx39UsmHe/a9+cOWPTMOnrjpJVfcmV51H\nW0jPZiMiIT/GZ2fSN+9QOoshwkqt5nam3S8iS4Hh+1HgaxgltB+F5gesGN9EGZEA5p0W8hMHDYTF\n930c+9feW0HjZB6Qx1im/wRX3WrXtZ9W1RltrVdTafbmrogcIyLzROR7EfldI5+PE5EKEZmW+bmu\nuTJ3NQJi/jEH/2un0vuk8xlw0AgKrvdjzMpkjjaVSXMpt+INyq5MpVQtjEm1v6vqAtBvXmRhqrZk\ncrWm+QdzY8A9TV2lWhi9i8jylWmCL3Ut3+l6bHXpSsTaVo15RS0/m1dkDtQc2+Qz8Yn5ax/G6n7k\n/rM7WS/4MEpE5OitzS0ioczm9k8zoayN66A6X1W/+XrWcynHqYm4jCcrmTTt71FVvaOp90FVE6l0\n/CLXtfPTdryv46QKHCfd5Pu4levoExTrb1nimxMR39si0iQDvAVO7Um2f6x0ESPzsC2SLE6jdziM\ndQ2wvoT4ZmGn60kgyIaGxxsjhPnmEPLOu5XRob8x3vgpA/sHMf8lImObofceiYj4fFbw/exIpxeH\n9j/5kkG9f3CFzwp9ZZn+a9pat6bSLB9/Jg18PnAEsAr4Gjhb65RWFZFxwJWqeuI25totffwi0s2P\nMf92Dgpmy//CJJ/W+elJrHk0qc6vmjpXSKy/FRA6+0z6RDoQYhrr9DUWx1K4h2Re1Wtl5oew/gU6\nqgOh1FpiARN5KoHzc21ib15D5MpOhG+rJGUNJI9KUpQQowvh5HwqbnDU3WKj7rBYrx9D9xNOkB6b\nFg7rNM51TE6kcbuoarmIjM3C9/b17BcuyMTTL9CN/JXvYmnc3qq6tuG8IjLGNHxv5uf2lHCovbGq\nZLoJPJ+2Ez9tLLpGRPJ9VuglRUdHQvmp6mhpQAzzUdtOXLkzonGaiogM92N8diTFwSHkW6uJ6r9Y\nHEtg/zat7sM7MN91x9L9ptOkd72F2jKt4g6mLY5hD/NhrP41Q7Nqk/NiavNnpkZXE/ulqj65jflH\ntMP32V0cHKnbcW2SruF5vp8U1fRuUYdpZyAiRZYZuFbEOFZEKlPp2P2A2aF97zuPOeTacO3bZixe\nzusfXZ2wneS+NYuzVtWx1X38+wMLVXVpRoEXgJOAhjW1dzuDvh38YDgd3LpGH2A8XX1fsvY0oMmG\nP4Fz8Uqqv32IWVc4aL6B/DeFe13D7NzMBuVYEem9guoiYE5Kdd32KK0QU9S6ldHU9nadqRu4nxl+\nF10pIhcB01R1asNz4zhXvcXSMWWaCI+kIFBCXP/NkrjC/9W6t8JYV0ygZ6igThJVP8lllBYaX7L2\nx8DtdecUkSzT8P1n/AFXtOtSWJNLlrYTvPv5LWeUVy7/Cnhss2uouQ+Hi0j3iqpVXYG56qS2x722\nUwhj3X8avbPGSVcA+pErA7R95Aam3CkiT20rQ7oRZs5gQ/RU7VWvQN48yl1Fp9Vmwt7LjDe6aRa5\nBMxZlJkCTwP/bML8wweRV6/NJsAg8rBxh4hINtAZWLkDuu+2iEiRafq/69tjfHavooN8iWQl0+a+\ncld1rDQ1pN+J4bouxnCoPb27HWrOX/LhacD2JFa2Cc01/F2pKWRUy0pqysXWRYGDRGQ6NW8Fv1HV\nOc2UuyuRTuFutrpM4yI0UhRmK2RWqQ9nfpoyfhGwQ/H3YaxLzqAvdRt6dyREAJNC/H/rRjudQ7mG\nxPo2gXOsqlbXlSsi+3zB2sumUnqECyvi2Peo6me1YwSKOxLe7IHflUjQwihqRKWTCvL6Umv0AXxW\nkJH7nBn5bOqDl9OI4a+jzzJqCo+1OSJiCow+mE71jneSMJ01nF5O9UHA9tbOf2sd8ZIXWBg6SXtY\nQSymsY7XWewmce8FUNVPRaTTIip/AOQAE1V1SRPnX7Gcqs3+hldRjYmhFpREsNIxbCso1gPJmjDR\nPbr9JoBlBq7t12N89n6Df7jpS9Ixv3/klfcvD5nG5iWcLNNvQP1ubrsqzTX8TfETfQsUq2pMRH4A\nvA70a2ygiNxY59eJqjqxmfq1Bm/MouzBNRqls9Rk2ruq/IeliTTuk22r2pZxcQuipPlG19GLbHLx\n8wizOYGecpQUh6DmOh5j9v4z2HA3cFHd87WmAck1mZ/NS
OF+Mo11g/chz1/nHL6mtCqN21iWY4d2\nkcLNvk3hUB6qmteca21lXAOx4zh+X4N9kDiOAWz3illVbRG58HNWfzyRVRgIHQnRl1xdSMWDIjJM\nawr2xYF/7YDOH5WR3PiBrogcTpFhiFCuSZ5hgZ1HIPRbhvuzxR8s0wQPMPMXa4imgUYb1uxJiBjH\n9So+uN7fpGUFyM7qpPMWv5/uXLCPr/YNLJ2Os3D5Z0nQnZ6JnHGfj2vWHM308R8I3Kiqx2R+vwZw\nVXWL/mERWQKMVNWyBsd3Sx8/gCXGTyyMh8fQxcoj4PuCtdXriC9I4IzZWa/GItLeh/EbH8ZZQDqB\n87iL3teUqBkR6R/A/DYXf7gTYRZSwVDyWcBGbuMgjDruhHJN8ju+SNhoVmOrPBE5NYx1cxKnlx9z\nRQL7Jlf1GRHp6seYdQI9s8fQ2Uji8CZLU1MoXZLEGaoN6s6LyPCAP+u/px11b9g0//ddmz7/dWfO\nwrdfTqXj29VVrC0JifXcQXQ69Zz/FUVjmq7jMWaXJHG77shqOSzWi8fS/bQjKTZsXMLiQ1W5lslV\na4md3tzoGxHpHcT8jx+zuD0BezXRAKj1Vw62suq4MUs1zvVMrk7j5jf8N9zTCPgjs8fs94tBXQrr\nlzP66Ms7oyUb5lcW5PXN6ddjfDiVjjFzwRvRRKrqhXQ6/tPW1rMtfPxTgb4i0gNYDZwJ1PuCikhH\noFRVVUT2p+Zhs1tVstsWtrr/FJFJn7DyPBOjIInzPvBvbaHOSCLSBQgBS1TVFZF2Acypw+nQ9TCK\nAjYub7H0xkVUniwih25NroiYAcyPzqR3aCxdERFimuZOvsNF6xl9gBz8uKgP8AP14vN9YlyQg//+\n8xgQ7ksuS6js8yTzHvWLWaiqd4nI/m+z9K+vs/goA0kbyAtJnN81ZjBUdZrfF/7wgy9uO2LkPmeG\nQ8Fclqyc7M76/j8xx0ntVinxCZzLJrF2v4VUdBqhBe2WUx2byQYnjXvyjrpIBDlgMPmGX0xqI6pE\nhGHaIfwuy0cCO2T4RaTAh/HbMNZJoNWVpO6oJPUFkCok9O8s8efUHV8oISw1jDRuB2q+83ss6XT8\nwWlzX7mjML9/2DJrHn7ryxezdv1cw3HT+69dN3tC2calZyhuVSodewx4o201bjrNMvyZV9BfUvNH\nZwL/UNW5InJx5vNHgdOAS0XEpqYZ9FnN1HmXRFUXAk0OVRWRXAMuCGIdksZdlMZ9RBvUyxGRPiHM\n5/0Yg30Yro1WiMhFBvTrT26nixgUqF1R9tPc0B+YMng10eOpcadtifF5BLLHSdEmCx8WH2drX+5m\nOmWaIE/+lxM1nfUEsBbFNF3P6Nc8QIzbf83QcI9M4+99yOMqHRa+gSk3ichDwChBeplIwsKYGcN+\nemsP/bQdP21d+cIrPvzyzl+ouu1EjE8cJ3Vta0dJNBet6ZC2z3KqT1xJ9Ui3Zh/seVXd2IxpV6wm\n2r079fL2WEZVnJq9te1GRAoCGNP3p2P+IXT2R7F5ncX9S4m/k8C5uJxksFrTZNXZByrVOHbNnlab\nNkzZGYhIgWn4f2ea1gSQmKKPVFStfvfV9684pkfXA6xYojy9umSG4bj2WVrTqe6BzM9uh1eyoQ0Q\nkZ5+jMlDyI/sS4fwSqrTE1mVTuGeoapvZcaE/BhLTqZXweEUGSbCPMq5n5kxE5l/HgOGj2yQJvC+\nLuc1lvw9ofZFjQqumffcERQ89EsZklX3eKWm+A2T0u0Jpn5Ev0hXIsylnGdZEEvgnNLQlSAixWGs\neQ/ImDAN+I1Oqqok9WQugQt+RL9IEVl15zpNVd9p1g3cCxGR43Lwv/R/jAwXSAhVZQqlPM7cjeka\n91Fse+f0i3n7gXT89fkycJMvJ6UOV/NFtJL0+BDmz3uRc8ZPGRTOET8ZH39sNdG7kurssrVrdgQR\n6WCZgek9iw7s0KfbWH8qHWP6/NdiFVWrP0nb8RuoyeatAF6tW/ZjV8Ar2bCbEMJ66BiK806QnrW7\nf76RWuC7k++eyZQ8SAOn9SQ7crR02xRjN5A8TtKewTdY0rGqkYChClK2wzazhafOpdxMq4OvTv35\nGWzAhzllHfFHH2X2NTZuFwtjRgLnurrROnXFpXDMqKbrRQYl1aGatB/46e8YEcrPvD0cTGci6gv/\njdn3UdNj1mM7UNW3/GJefx2Tb+mikVQ1aaOadEUa98QdMfoAPowJh9C5XhyyX0xGa6fge6w4Ko5z\nySIq0lfzxbkhtew4tmEgD6Rwb2yRi9qFMA3fld277p8/etiFm+5Hpw4Dw699+JtxaTseUtU72lK/\nlsYry9zKiIiZxD7yCIrrhXz0lVw61JRsuCQsvld8GH8aSPushuf3JsdwUedNlkar9X/Gf73G+YRV\naRt3q3HbqjpX0XfvYUZ8lVaTVIevdC3PsiAex/69qj4d1fSgpDq5UU2P2YLRR1UrfRhv1i3z7Kry\nLxanDWRWR8KpfKlXRoeh5JPC7SEiLdZoZG8ipc5dadyOy6g6fQOJI5M43bWRhvCNISIHhcR6Okt8\n7xkivxKRLIGqKJtvB1WSTlPT4yEZV/tnadzCSlIj0rgFCbV/tyeGcpqm/6TexYcG6h/z0av4kDDI\nkW2l187CW/G3AcoWM9oCIczbT6ZnYB1xmUc5J9Kz3oBFVLgCk2PYy67mi58foB2NNK5OpRSF36nq\n7G3JT+CctZCKG25m6qVp3OwQ1ndJnKt0O5tJxHF++g2lb09n/bDemuMsodJI4nyfwLm8gtS7rtbf\nLK6qaWblUFO3x2MHUNUq6uQBZGrGHG0gY110HfBcJtR2E34xf5+F7/pj6R7MxW98Sckh8ym/Ior9\nzpPMHXKc9vCNphNZ4mOVRplKKcBLDWRWtc4VthmVydTml5hIVqRA97hr93z8bUBYfO8dS7cjjqtT\n8mCRVnA707iFAyiQECl1uI7JHEZXjqAYE2EO5TxY06zjUFX9VkT6ASdQkyj2r8yGU6sjIsOBgcD3\n1ER6EcScOYGeA4/KuKpcVf7OnOR3rH8urvYFbaHnnoaIhIOYn+QSGHQgHbNKiSe+ptRN456lqm9m\nxhT5Mb7/C6OD7TO5RUl1uJav1MKw96fQt4oocymnJ9mphVS4LnqRrW6b9b5tC0Tk3Jx2XR8+dsyN\nkdoS3RurVvHWxBsSjpvqp6ortjFFm7EjttMz/G2AiPT2Y0wZQUFoKPmhlUTtj1jpdibs/kFGbfKP\nlGqMvzGHFVTjw4g56MYEzkXaSjXvm4OI9ApgTswnkNudbHMOZZrEmZHAOTqzgtxtkZomNKf6rNAJ\nrjpljpN6XFVbvZOVX8w/Dybvil8wJFj7ZrVEK7mVb6Np3E6qWi0iPxtF4V2XyuBN7rWXdSHrSXAx\n+2x6I5ur5dzD9EQad2BtCZa9CRExLCv4T1OsU3oWj/YnU9Xp5auniqvuxa5rb7G50a6At7m7m5Ap\nedD/a0ovmsmG
Q9O4C1O4yyL4bgA2Gf5CCfMTHcAtTF0fxT4AWNrSxccyroIR1OQJTNUWqqWvqotF\npOdqYkeuJtaNmt61U3QnrzQyhQMLgIqWupYG80csK/h5Tlbnvn26jcmKJyuceYvfP98y/X+wndRf\nW1re1jCR80+kZ7CuO62nZNNLs935bDwOeBFwnToJ9o66TGItv2RIPTfcQGlPV42kllI1GFjaahex\ni5D5Xp0rInfNW/zBUdSEnr/cWDHBPQHP8LcRqroe+EvmBxHJnc/Gm1dpNV0lq3YMb7EsqfCUqi5u\naR1EZL8A5mthrNwQprOOhGmJcZmt7hONjB0EDAGWAF83xYBnNgHfbWm9t4Rp+i74f/bOO7yqKmvj\n7zrttvROCAkJIaH3jggiFkCwO7axO2MvM6OObUYddfSzjtjbWMaxKyoiDL2X0DshhCQkIb3eetr6\n/rg3mAoJhOr9PU+eh3vObudw7zr77L3WuyTR8jxAISbrUGTb55ruvetIvV5aQyDxvoToXr3OGnmf\nzf/MhNgzZYJ95oIHnyair5m5MxOlHxIGW2yt/IQdkAX4H+QA8NNWVM4oYw92oxrfIxcmGK9gE/py\nFK5Dr4N++iGQCf7sXr9ZApvl7dowP5UJLvUcBUQUJgA32SCdb4CLvTDe5FbULNuLSHS1DOH9cUiU\nYmGTV6HEWQJ3vhfGGGau6+Sxh8sQCm5Bn7BhiAURoYideA4b3C7o5zVs9BKR3QZxJoHGpiPcKEC9\n4IWR64Vx7omeDRFRDPxvK2UAetksER+cNfI+e0xkGjzeWqzZ8rH3QPn2xarmntxZfVoUR/ZZI+/v\nGR+d2eT48g3veHP3r3iImV/rrL4Oh42k/5yFxCu7IETMRR0ioGAAovEs1qsyxBwv9F4yhDoGrwYw\nMQSKcg8GIIVC4WUd3yEX+ajHXzEEdVDxIFZ5NZjdmbn0eF3D8YKIBgB0sf8Tf8fNFG9PZYJLPccR\nIoq3QFzXG5FRIxBnr4TX/AUFv5NJeOBIE1gbzP8lojWLUXSjBCHBC2M+/Ju2x0IT5cq+iBKHNwoC\n60ohuJjTbN8h90EAywHACvG1voga9wf0tUokNLhs9l6Eou8AjDkG4zosRESSaHlBFOQ7I8K6+Vye\nSkk3fMrgPlfIMZH+HDI2azjGDb3d+tWcu8YTUUYnRv+KArX0ghZIJBxn92gvjJcWouiqVIRhCGJR\nAjeexXpIEMTrkdlnMGJRAU/EZ8gen4s67VpkKCnkj/y1koQruScexmr8iH1YhgMuAl46HY2+LFlf\nUGT7nenJZ8oAkFOw9CFZsr6h6d4HDlUvsAyaAsDFzGXHY6zHi6DhP0IsEJ85A13ir6GMhuglYTjH\n2x/DmpeI6Msjje4LyDYc8yxlAig1FaEt/OmTEEIE9AAAIrJIoGuuQYa1Ia2fQISLOFVahMJBRJTK\n7Zf+PSLIr0kxEcAoAKUAvibQ1Q57zO3njn3YarOEWZlN7MiZg+17ZqFHtzEILMFAFGVER6SqJRU7\n+gDoFMOvG+p/d+yd+5czI9OtDXIZHl8d8orWMICf2nlNcRYIfyPQZQQYOvhTDeYzHd30tkK8fwwS\nzGsp8+ADJ4/rMRFdxREUDwDoAgfu5QG2+7AcoWgqfioQoRs7zNko2KPBfIj52CtLHm+IaLzNEn77\ntInP2qyK/6HXL2Oa9NPCR24nop8a4lQCS5mD4ZfXWAbgAkmyvi2QEGGYuqjIjg2a7r4afnmMUPj3\nkE5Ysp+jJWj4jxAGXzIJSU1+SXFkQy+O0Lah6nwAn52gobULE7xhEyrrpyG1ifjLDlTpBnhN4KOD\nQEIYmiaZkUhAOFtUD9zx8K/5HxOIyCFLtvkWJaRfSuJwe219sedA+fZXBEFyjhxwvd1mCQuUE9An\nfTL27l+O0opdSIjt479GNlFdt18G0Gn7I6apv1hUuuXyeSuf75aecqbd66vjbXtmeRj8cnOtpTau\nKdwCcf0YJMSfha6yAcbPyL9vGyqnENGwjrzd6TAvm4KUJr/hErgxADEwmZGPemgwkYpQZCDCXIdy\nIR0Rv14LM/ah3q3BvJGZV3XoRpwiyJL1pj7pU+wNRh8ArEoo+qRPtm/ePfMmIlorS7ZvFdl+Vnx0\nL726bj+8vro6gKPOGnGfLSGmN0xTx7acn0ds2/PzVmZDAkgRSDAlUVljmNodfArl2m0gaPiPEAKx\n2cC5wMsAACAASURBVEo6gsCxU2EmMLMYrue+5D3WC9BdtkDEGpRiDgq8KsznAmWqRVDFHtQmZjQy\nGFXsRSW8CoDDBosdDaKgPJkQ22fQ+OF3W32qE/XOEkf3rqOwctP7IRGhXZuUJSKEOuJQVVeAhNg+\n0A0VG3Z8qZqmvq0zf5jMXEtEQ0oqdlxXVZt/EbNZpemed7mduSME0B/6Iir695R5cNJwO/e1/APr\nUvNQfymAz9s9FkCQmq0uhUPBOpRhLgoggGCBiEp4IQB6DmrNsdzF2o1C4GMD32Kv5oG+F8Dq9vZ5\nqkEkhCpyy4RAimwngYQwSbQ8HxedMXHCiHutoiCBmbFtz88hOfmLKSGmN4gIoijD5a4UYyPTQsYM\nvgUOWwwqqnOFpeteH+v11mYR0eXMfMoocwJBw3/EMPjLOSi4+QbudVBz/QC7sAe1EoCTXoSMmVUi\nGr0UxW/PQ+FUgAUrpE0qzDsa1sOZmUWiv7yOre/fxL3tvRCBAjjxEXa5CHj5WPvjE9GNAzMvsq7d\n8jHyitYgLKQL6pwlkEQLF5dvpbSkX7cYDFNHaeUuvbBks741+yfWNLdEgrjJMNSpnT2ugJfQ24G/\nDmGDNHU04m2NjxERxnBCSAnc56IDhl+BMGcxiqZNR+pB698f0fgCe3Ar+mIIYkBEKOB6PI+Nshf6\ni89g3a1WliQPdFmCsNgL49pj7WJ7IlE197fZeQvPSU8eF9KwBMhsIjtvkVPV3N8IgvTeyAHXWcVA\nGkUiQr+eUyg7bwFq6vYjMjwZbk8VCg6sw6XnvAxZ9v/XxUb1wBlDb8OKDe8oHm/dh0QUfypJWQQN\n/xGiwnx8LcrOK4cnfhQnOMrg0ReiUDXBdx6l/O5xI7CRdzERyQAkVzPpZQAwmD8nIvf72PGMD0ZP\nBcIBL4x/8iFSIXYWJhu27LxFcHmqcMk5L0GRHdB1HxaseZnWbP7YlESLkJQwGC53JbK2fuox2dwq\nCEKfyLAkIdQRbxYcWNdPIPEDIrosIHx3wjFhllXB10K1oxJeTUfHNhA9MO6fjfwzKtjrGIwYawnc\n5iIU6v0RIw2l2IMPg2QKxXTuzj9iX5oHRpwKNQ1AlcpGh/I0n6J8Xes8cM+8lc/375M+xQYA23Nm\nu+ucB7YD+ME0jU8dtugmFYgE2K2R8PjqEAmgpr4YUeHJB41+A3FRGXB5qmG1hCkerzoAp5AbaNDw\nHyHMXElE/Xeh5qoCOM/TYRapMN/jUzCfcMAotmkYA5t+x3Tjj4gGAhgLv
877T8zsEUhakrt/xfkX\nnv08FNm/Dy1JFkwccR++/OUOY8WGd/dquiddECQPkfBvNs3fTxz1p5CGvL0j+v8e81b936SKqpw/\nAWgzK1yjMZwvS/YXdMPbWxSUCpP1l01Tf7EzN/E8MN6YhbypwznOHhGQUChhNxajWNdhftiRtph5\nHxH1WY2SOzehYqIOs4ABoQfCWuS8SEaIIEDowaxrAHZ3ztWc/ATebMeXVOz6Y1VtwXUAoGruTwB+\nh5m9imzfW1S6uWdSwuCDdTy+OlTV5sMwVDAzbNYIVNfth2nqaJxgvba+CFYlFKahCwBOqWxkQT/+\nICcUIpJlyfaNIIiTkhIGU72zVK+syTMMU50MwCJLtsVXTX2nRb3v5/+ltt5VNhHAVgA6gEtio3r+\ne/K4x5tsVpdX7cH8VS8WqJo75TDjmKrI9q9GD7rZ3jVuAGqdxVi9+SN3nfPAf1TN88dOvGRYSHwU\nwGMDEGNoMHgHqiUTfEdrgXMdhYiu7YnwNx+moU3uwzecoy1A0Qde1m9vVDYN/jiIQgBrmJkD2d76\nAsjvRBfYkxYimiJLtq9HDbrRnhjbDzX1RViz+SNXvbt8PoFGiaISYhiqSCRoqUmjQ4b3u5pEUYHX\nV4fFWTPgsEZif+mmPF33pp2oJbOgVk+QUw5RlB+NjUx/ZNLoBw7m2i0s2Ygl696oNgy1qygqpVPP\nfDI0IuzXzVyPtwbfzfuz1zC1BGauBQAiuqV715GvnjnsziYuqk53OX5c+EiNpnsjDzUORXbsOmPo\nHzO7NZr5+VQXvpl7j88wte6dHaxGRF0BTIb/oTUrEMndGe1aLRCzz0ZSlylIlhSIWIUSfIZspwpz\nUEAuRLZC/IyBaekIV4vhEtzQigm03QRP7ooQXwncCsAbPDAuPFLX5FMFIpqkyPbndEPtIwpymW74\nnmc234Z/Oa4HABcAryzZZjF4dIgtBk53ORz2GL3eWeY2WZvEzFkncPzBAK4gpxaiIN0xtO+VTRKs\nJyUMRnhIolhVmzcJzE8tWvvqExOG3+3w+Oqwbc8sVNcVsCBIxYap9URADRTAksLSzYKmeZqsxe4r\nXGMSCYsONYZAoE5m1/iBTY5bFAeiIrp7y6v2DAHQqcJ4zFwE4P3ObDPQrpeIRi1C0VtzUTCZAcEK\ncYMK8/YGd1MFwlPdEXrBvRhotZBoZWa8gI09Geh5DwaQjSSrzia+RM7wlSj5FsCEzh7nyQQzzwcw\nrLVT8CvONjCGiPrU1BdeLwpKfJ3zwBZm85POemgfT4KJWIKcUEzTDLNbW07GHfZoAUCkYWovuTyV\nj/+89AnXsvVvoke3sTh3zF9pYK9LUiXRsoSIzgYAZt4D4Is5y592F5dtRU19ETbvnmlsyZ7p0nTP\nI4caAzOboiDX1TtLmx03Ue8qkwAc1LcnIiISrrcojm2ybCtTZPsPfjmAkwdmLnazdqEBDjHBoS7W\nRjDz+oPngTt+j0ybJZCBzQSjEC66Cb3JRv65oEQCrkC6wuARgSWhEw4R2YmEe6xK6CqLErKEiH4f\nEOU7bjDzDmZ+SDd8N5im8fKpaPSB4Iw/yAlGEKRl+cVrz+/d47yDr6qq5sGBsm0SgGWBdee3REF+\n8ryxjyAiLAkAEBmeTKH2WPvyDe+8RUSZzMy67r2lum7/6qXr3riPmaMALDYM9W/tWqsmenPNlo/u\nnTjyTzZJsjT4cxuG4csHsKmhmCRaXrbbom4d3u9qR6gjHoUlm6Zt3PXN2UQ04Wh0mo4FrQWDBd5u\nwuLxa6pkFSY0mIhB04xpMgmIZZu6H84kdGIQXLPxjJQl+7Mm68MFksp13fMSg99uvqFORDZJsq6K\njUxP75U6ya6bKrZlzxrqdJddRkQXnc4uqceCoOE/BhBRMoCz4Zd2nX0of3ciOscG6U4RFO2GPsv0\nexucEu6gnYGmu/+6YefX4wxTtyd3GSY43eVYv/0LFxF90UgOYoDdFsUNRr+BpIRBMEw9BUAEgOqA\nsXgXR+Bqahjq3ypr8tK+mnPX9Nionmp17X5J1V1O09Q/BRADoJyIEkVBvv38cY9ZGiJB+6SfT5Jk\ncazf8eVLAMYf+Z04PjCzaSd59xZUZg5CDADAChFhkLEXdUhH+MGyTtZQArcFwDHxVCOiMZJomTe0\n7+/sSfGDUOcqDc3a+ukLda7SCy2KQwdIVDXXZwC+INBNMRFp6ZNGP2BviJtJThjqmLngoYma7p0I\nYMGxGGMHroUAXKrIjnsAxBiG+othai9ys2xoJwvBzd1OhIjIAvF5Bt/dH9G6Exrnok4IZESa1by8\nhcSnbZDum47ujghYsAolnq2oqvTBGMLMvwUfawAAEfWVJdvTzHyGIAhVmuZ5hcHvNsz6iKiXRQlZ\nd8X5rzuokUCaqrnw1S93qSYbEe3V3iciKwA7/A+KFl9+IkoTReVNgYQJKYkjWNXcRlHpZtFk42Zm\nU+0S2/eDc8Y8FNa4jqq58OUvd6mmqVuat3cyQkRT7JC+vhG97P0QjWK48Ba2qRpM4Tb0lTIQgWK4\n8W/sdBfD9bGH9TuOxTgUxbFqRL9rRvVIHnfwmKq58PXcezEo81JYrWHYuXeuq95Vso5IFEcPuumM\nlMThTdrYmv0Tb8n+4U1d9911LMbYXmTJ+orVEn7r4N6XOey2SOQVrlZz9i+rMwx1CB/j7F3Bzd0T\nz0UhkO/4G4ZZQ8mvb5PLdXgeG74MCJodDNAhou4KhD8/iRHWsEDZwYi1fcy74lah5FEA952QKzgB\nsD9P8MWHKLLbNI392XmLMjNTz6ZAHWzeNVMTReUXU/eEioL0oCzZxhus5+q6dwYzb27cABGFy5Lt\nbYHES4gECIJUQiTczWw2D7UfYrdGnjF1/JMWRfYvh9TUFeLnpU+8bxjqjW5PdYvBuT01EAXRdVQ3\n4TjCzLOJ6NJ/Y9fzXhh9FAgVGsyXTXDpv7DlWR+MLjKEegZe1mA+c6zGoeveYcldmu6pKrIDXWL7\nIsQRg5TE4UjtOsoxe+mTw+pdpbm63jJVs6Z7TWaz0xPudAQiSpNEy21Txz9ptSh+p7L46ExFkqwR\nu/MWPAngpEs1GjT8nYgD0r2XIM3RYPQBII3CMJRjaTVKrwLwr0bFpwxBLIdRUwG0s9BVWYvSy/Eb\nMvyHI7DOf/G67Z8vyS1caYuNSrcVlW7xuNyVB3TD+6woKju7J46wJyUMttbUF525PWf21YIg3mya\nxueA/01MlqzzkxOH9R/a50rFooTgQPn25CVZM74goguYeWFDX4rsuHtQr0scDUYfACLCkpDadZSQ\nU7C0u9NT4c0vXhuWkjgCAGCaOtZv/8LDzMc8krkzYeY5aCVJDhF9CkBRYarHet1cFOQ6p6cyKlL+\ndQmPmeFyV8ISWEoTBBG90851rN78kb5tz0+ulMThDimQE9fjrUV23gLVNPUTLYh4TreEIWaD0W8g\nPXmctDtvwQUnaEyHJGj4O5e4
6GYbZAAQB5uN/OkAG6OpMFtEhGowAdBJIS9wMsHMu4goubxqz0Xl\nVXtSAWwD8Iss2Wf16zk1on/GNAEAUjBc7JYwxPbL0iffJaKZgSWg0bJk6z1m0M2WhqWixLh+GDHg\nOlvW1k//AWBho64ibZbw5t3DbouyEAnhhqFOXr7h3fk79s6RwkO7yoUlGw3D0FYbpvbEsb4Hx4OA\nsW85tT4mfZlvZW39z5/OHvVnW4M77979y6EbPsRHZxwsZ7IBQRD3u73V2d8veOCCjJSz7Lqhmtl5\ni1Rm4wVm3tRWH8cJt09ztvgtq5obBDqhbyNtETT8nYgGc24WynpkIOLgNN5kxmqUOhlY2qz4D1tR\n+VoJu5FA9oNlf0a+V4PxUVt9BDaRUgB4jmXSDCJyADgr8HERM5/wpQxm9sGfRxaA/17ohveczNRJ\nTdySo8KTERbaxaiuLTgDwDwA/RNi+xA1S6Dil9w1+jQ+ZhjqrNzClRkJsX0Ortebpo7c/SuczOYC\nZt5ARInlVTnTyqtyEuCPeF17DC73tMcwtacqa/YN+Hru3ZMSY/ubNc4icrrKbZPGPHjw/0o3VGzP\nme3UdM/HAL7XdO/YrXt+uhjMXpONL/jkyKT1Y2nFrrcra/IQHdEdAGCaBjbv+t5jGFqnx2p0BkHD\n34moMF9chuLrbCyFn4Euogc6ZiLXWwd1K4D5jcsyc5lEwl1PImvGeE6UomCRV6DEWQ7Pbh38Qmvt\nE9FkK8T3CBRlwBTsJG/0QL+GOzkfr0h0lQzh/W4I0QFgP5ySSHSzwfxFZ/bTSbQqisimQQAaTuyr\nqM5tUaiqNh+CIDbJkWuY2it5RWtuEgU5Kj1lvKzpHmze9b3Hp9ZnIfBmwMxeAF93+pX8xgi4m04n\non55xWtGACgRBeXCJWtfu6ZX2jl2QZCwK3e+26c55wKYGXgbWR74O24QUSr8AV4lAFY0dzX1S3UL\n18xZ9o//piQOpxB7rDW3cKXTp9ZvNFl/oVlb3YjEG0RBStYN3xL4E7oflzesJuMIevV0LkTU3Qrx\naQZPIZBXB3+o+7MrtfrKR0QZEugGEUKMD8ZcAD8ws95KucEWiMvvRD97X0RBB2M+9hs/YF+5CjOt\nvV4t7Rh/XwvErEcw1NYtkPS9gOvxT2zw+GAMO9lE6BTZ/l1m6qRpQ/pcfnASU1aZjXmr/q/WMNS4\ngEiXKImWnP4Z07v1TZ8iCoKIelcp5q74p9vtqbqBmZsYcSJKkETlUSLxIiLyqJr334C5C4AM/9vP\nSe1xRUQjbBDvFyGk+WAs02C+ysyFndBuLxF0jQAK02DOBjCvMwXsAn0QgDMkUbkaIFk3fF8F+jnu\nhoqIJFmyfsTMl8ZFZ6h1zhLy+uoqdcN3biBgsHn5BABXAhQF8FIACxqPm4guEEXlyx7dzhDDQ7pY\n8orXOqtrC0p1wzfqaALBTohWDxGdD+BVACKA95m5hQoiEb0Gvy6JG8AN7M9k37zMaWH4jxV2kr6a\nhtRLz6fkJusVz/GG+mzU3MHM/+mMfiwkvn4uuv3xEurR5G3wG96rz8f+t3xs3NMZ/XQWRNRVEi1Z\ncdEZYcldhjlq6vb79hQsNQxDvZyZZzcqlyJLtu+IqJdVCdNdnkoRwN91Q33pMO2fKwryNxFhSazI\nDpRV7raA6Cld9z17zC/uCJBIuF6B+OY0dLcmwiFsQYW6HAe8KszRR/PQlkm8RwI9dyYSpVDI0lIc\ncNVDXe2FMYVPEsnrzkYU5UejI1IfmTT6AbssWcHM2L1vvrlhx1cFuuHrQSTeJYryX3XdFy9L1lxN\n9z7MbH7TWltEZBMFuezcsQ+HxEalA/BvZK/Z/JGWW7jqU0333Hyk4zzu7pyBcOnXAUwCUAQgi4h+\nZOadjcpMAZDOzD2JaCSAt+DPnxqkAwig/ukIbyGx0QeRoXtQ07uz+pEhpCTA3uJ70QV2SYZwSIXL\nEwEzFxFRZnHZ1mvLq/eOMww11zT195k5r1m5fABDiShd1dzRALayP6FKq/hnnvSgKMj/nDTmQYqP\nzgQAuD1V+HnJE48Q0YaAZ8xJAxHZZQhvPIIh9q6Bt7UBiFbi2S7/gH0z4A8qPJJ2Uy0Qnn8KI6wx\n5NdBOo+TQ/4PG8fkoPYW+H/Tpx0CifeOHHCdXZb8DhtEhMzUScKOvXOi3Z7qT8JC4i8ePegWe1R4\nMkoqd6avWP/ux4IgWk3TaG0SNjEiLMlsMPoN7fXLmCbv3b/8dwCO2PAfCUer1TMCQA4z5wWe+l8A\nuLBZmekAPgYAZl4DIIIokAk6SLsxge17UdvitXoXqp3cifrqHugL16O8hUHcgHK3G/rC1uqcSIjI\nAmAogO2a5r7eMLTHmhv9xjBzDjOvaTD6fu0dGktEvyOing3lJNHyrNUS+lRK1xEHjT4A2G1RGNz7\nMocs2d62KCG7LUrISiK6ihrCSTs+/qN+yyWiOJmEpxyQ1tkhKfXNUiuciUTyQJ/QYne7/Vw2EglC\ng9EH/Fo+FyDF7oD0h6MY+gmBiEKJKJMo8HRsA8PUIkMdTU0VESHUkQAGXz5pzEP22KgeEEUZXeMG\nYPyIu+2iqPxfG/+niigqLQ6KogJm87jvtR6t4e8Kf1b6BgoDxw5XJglBOoQH+nM/YJ93F1eDmaGz\niXm838xFnQeduNFoAh9uR1Xd15yj17IPtezD15yjb0dVHQMfdVY/nQGRcJkoyGURoV1/CAvp8pMo\nKqVEdF7761OKJFl3O2zRc7rGDXhXlmybFdk+k4jimM37UhJHKmGOlnOUEEccLEpI8sSR92eMHnTj\n6LCQhPck0fJmB/q1SpLlBUlUagEYFsWxgYiOSO6BiJIVCNtGIv6BW9Cn91SkyB9gB/7Hv+5ZazBB\nIBNoJUl0+1CsEFuIoVkggoFTIloZAIhIkWXbm6Igl9msEVmiIJfJsu1VImrV8MqSdXNhSVNPUU33\norwq2xJii/barRFNzsVFZYBNIxpAaxLgiyqrc+V6V1NHvD15C01RVDpV+bU9HO2Tpr1fpOZPwFbr\nEdETjT4u5nYmsP4twMzriOjKf2HLuwqEUBWmKAA7VL8cRKf5Cvs9FGjYIhS9PA/7pwOABOEHFeaf\nOaB9fzJARP0k0fLJuWMftsVE+sUjSyt2Yf7qF78joj6BpZ1D1SdZss7unzE9rW/6VJGIYBgqFq35\n17kHKna8EuKI8yXG9bNu3f0D+mdciMaTuIID65CcOIziAr7mXWL7Ob79333XE9HLrW36NUeWbN/F\nRvWcMKL/tTaHPQYFB9YNXrXxg1+IaBIzr+zIfbBCfO5sJEVdSj0OGuYhHIu/YS1GcwJCScHPyNMU\nCDM9rB+p4f95BQ48ciGn2hvUO5kZC1Hk9cE4GT29WkWWrDOiI9KuHTf0dqvNGm51e6qxZN3rt1bX\n7TcA/Ll5eVVz/2XV5g9/BrhBSwhrt3ziBtFcr1p/vmHqEBtl5PJ4q8FgE4CzeVvMXCOK8
l9+XvLE\nCwMyL7SFORKo4MA6776i1W7DUP/Skesgogk4Sqnso9rcJaJRAJ5g5vMDnx8GYDbe4CWit+E34l8E\nPu8CML65D3pwc7d9BF7X0wG4O8NT41RFlqzv9EmffNOgXpc0mbys2fKJuid/8QuGoT12qPpENMhq\nCVt++XmvNdH/qXOW4seFD7tEURYuP3+Gbd6K5xDqiEf/jOlQZBv25C/Brtx5mDrhSTTO1bp8w7ue\n3P3LH2DmNw7Tb39Fdqy+/PwZ9sZGY0/+Eqzb/vlSVXV1aOavkFj3LEaFRlPTwMFXeBNCoKASXmc+\n6qt9MEbwUSSTsZH0fhiUK6ehu8MBGUtR7NmJ6mIfjKEn04SgLYgoTBTk0kvPfcVqtfwqteTyVGHm\n/AfchqnFtDaBIqKzZNn+f7ruHSgKcrXJxr9MU39elmyLe3Y/a+SQPlfIAgkwDA1L1r3uLanY+ZGm\neW5v3k6j9sbIku1eQRC7abp3oWnqM442HudEaPWsA9CTiLoDKAbwOwBXNSvzI4C7AHwReFDUHMvA\no9OdgPvcaZ8S73CIgpweGZbU4vsbGdZNkUSlBwAQ0RmybH/GNPWBoiAVq5r7nwD+E3Cxiw+xx+rN\nl71DHLEwWbeLkNdu2T1z6MRRf5a2Zv+I/638J1TNDYFEfeTAG6TmCbp9ar0Bf6amwzE8MbYfNzb6\nANA1fiDWbvlkcBt12kQANBUt4xi8MHw7ULrUAH8G4KujfSv0wrjVC88vn2PPHwkI90D/2gTeZea6\no2n3ONLNYglVrZawJk9Ihy0KkmRlQ9USAOxrXomZFwEY3vw4EV22J2/R7NyC5ZkRYUl6ZU2uBaD5\nuu7906EGEXij69Bb3bHgqAw/M+tEdBeAufC7c37AzDuJ6I+B8+8EBKGmEFEO/D+MG4961EF+82iG\nd1lhyeYxKYkjmvyQC0s2ujTNs4KIzpEk68xhfa+yJ8b1R03d/vA1Wz59y+ur7QHgCQAbq2sLrF61\nHg0SywBQXLoZsmTbo+mei3bvmz93b8GyHpHhyYau+2SBxEWa7l2Zk7/40e6Jw20Nibcra/JQUr5D\nADCzHUMvqqkvbLFJX1dfDEGQylqrAPhzEwMYBH9S7y0N/uEM/HcW8m+9hXtbGpaj8rkeeag3DPAl\nzNxi2aEjkF/N9GwAVgCLXKx9ezTtnUD2e331isdXB1uTGX8ldN1L8AdntZvA5HUoEQ0uqdjRHcC2\n9izznSwEA7iCnJIQUbwoKjsHZV4cnpF6tsBsYsfeOcb2nNkVhqFmyJJ99dght/ZO7jL0YB23pwrf\nz3/Aa5haF2aukWXba6H2uJtHDbzBHhHaFcXlW7Fq04duVXNfwcw/B7wzhsIvkbGVmbOJSJEl62xF\ndoxMTRrtcHkqvQXF62CY+tXM5mENPxFJkmjJH9bv6i49UyYQEcGr1uN/y59119YX/9n053ptVke4\nWBTlD2yWMMkwNNJ0X41ueC9j5jVEFGGFuCIB9uSRiA8phdu3EiWmBvMak/n7o7zH58kQvu4KB1sh\nYi/qFACPqmy8fDTtHk+IqItA0q2SpAzQDS0pKqJ7/wnD77bbrRFwe6qwOGuGu6au8E1N9z5wiDYi\nACjcSF33ZOKEBHB1FkHDH6SjEFGGLNne0HXvWSBiSbTM0XTPnQBKCeS+Zvq/BaHZUs6sxY/XVtXm\nX8LMC4lIIBLvkUT5L7qhxcqSZYequf/KzHMP0y8BOBP+DbZqAF92ZPmSiHpJovUXi+KIcdiijcqa\nfRYSxHd03Xt/8whVIpoikTSzS2RPOS6+P9KSz0RZ1W4s3/BOvWGoPZi5nIgUABdbIJylg4sN8MeH\n29xuxxi7yBBy/oxB9gzye69UsAf/wDp3PbRp3EjRNFDeBuAaRXZcaLJRqevedzu6Ud2sPQIQDcAZ\nkMg4kjaGi4K8IDVptBwXlWEtqdzpyS/KEplNU1HsuqZ5RBLEt3Td+1Ab0fLdZcn+sWGqo4gEFkjM\n03TPrcy87Eiv61gQNPxBfpMEAgkPivYQkSAIkuuSSS9a7baog+WYTXw99x6X11c3lpvp9R9vAoZt\nJPzZvbJae3AQ0TkKhNnjKUlKYQe2i/XYKtTi7HGPYUv2TE9e0ZrHTdM4ZOTxkSIQPTgaCU/eQn2a\nLKUt5EL+DrmzXKxNbzTOMEmyro4O756cnnymw+OrNbfn/OzVDfVZXfd1WM9fEMQrRFF5hdmMZjZZ\nFORvNN1zBx8ik11z/F5btuxRA29MT036NV40p2ApZ239bIemey4GUNRaEB8RJYqC8jQI1/fvOU3o\nkz4ZoiCh4MA6rNjwnks3fMOYeVdHr+tYcSI2d4MEOeFwM5U2ZjZlyfZJ1rb/Xjdu2B3Whln/rn0L\nTMNQCwFsORHjbExgZr+6rfNEJCoQ/nsvBki9EQUQMMbsgv+Z+7Fi88dI6DLYtv/Axsy26h8tIoTE\nRDhaaIzHw07ULA5HEKQHu8YNSD1z2J3WwD6DkNZtrH3m/L88RkSfHSqgrjlENNUih/x7woh77HHR\nmfCp9cja9tnlhSWbugMYd7j6jUglosTuXUc0OZiWNJbWbvk0HYCrDaMfK4nKxtiojGgiQRiQ+Ws8\nakriCFTXFVp27J3zIE7C5Cod4WgDuIIEOSnRDe+fisu2Zn0z9x7X8vXvuH9c+HD9xh1fVWi69+oT\nIfh1BAwJhWLpTVFNDk7gRByo2o39pZucuuHNOlad6zBXZKGsvvmt2ohyVYXZZJlHFJSr+/Q4/xim\nOwAAIABJREFUv8HoAwDs1ggk+9MkTkcb+APoLC9aLaGLZcn6JhFlKrL96VGDbrTHx/QCEcFqCcPY\nwX+wCII0hIg64vVE/vChZhNhIgDELU/4EQT57pTEEeGR4d3EhJheLc4nRPeSRJKGdGAcJyVBwx/k\ntISZXZruGe/11Z2TV7R6XZ2zxKLIDqsoyCsU2f5BQOrh1IQZZZXZLgCfd0ZzRCQRUUQzqYGZpXAX\nfoidahl7UMcqZnGeuRwH3BrMV5o2ALQak+l/ajAR2QLeQY37HCaKyraeKRPuHj3o5vG90s69VRKV\nDbqh9o5vZnAFQUR8dKYJoF8HLiuX2SjbX7KhycH8orUgojxmLmqtkixapnRPGmUJtcejsiavxfmK\nmn2mycbOljVPLYKGP8hpCzOzJFmvjo3qOeyy8/6lXHbeq2GXnveqNTYq/SpZsh0y0OokYEM9VN9O\nbprjdxGKIEGsMU1tVCe4alpkyfaaIMi1giCVSaK1WBCk3wMAM2teGGPWofzDx7Gm7i9Y4f0F+T+p\nMIc3N5qGoX66PWe2t/HbgctThbzitYIs2W4gEuqIhHpFts8jv7Y9ZMn24aiBN4aM6H+tktxlKIb0\nuVyaMOI+uyjIUlVN031pZkZlzT6gFT/7tmBm1nTv75etf8u5dut/1LyiNViz5RN15ab3nZruub7N\nejDL3J5qpCaNQlllNnIKlqFBebqsMhtbs3/w
arqn1XwZpxLBzd0gpy1E5BAEufySSS/a7LZf5VO8\nvjp8+7/7vYapJZzMUadEdK4C4fsJ6CqnIFTehkrvepQ3SCwf9eaiItu/jo1Mnzpq0I02hy0a5VV7\nsDhrhtvrq7uZ2Wy3FAMRhUqSdWVkWLfU9ORxDo+31tieM1s1TUMY1u8qJT15HJlsYlfu/4wt2T9W\nGoY6UhTk3VdNfVcRhF8lgJgZX8y+zWtRQvjcsX+1hdhjYZg6tu7+Qd+ROzdH1719OrpMR0TdRUG5\nUxSVAYbh22CY2pvMvP8Q5SfbrZFfXzDhHw6PtwYrNr4Hj88fo6Zp7mrdUG9qj9vu8STo1RMkSCOI\nKM2ihG753eQ3HM3PfTP33nq3t3oUH+PEMgE3x0sApMGfJ/in1lwHD1G/hwLhdhlCphfGKgP8HndC\nIhgi6i6Jlp1XnP+6tSF5OQAcKN+OxWtf26dq7rQOtmcFcIUi26ebplFpsmFJTz7zylEDb2iypLZw\n9cuuwtLNTxIJTw/MvFg5UL4Noiije9dRSE0ag6/n3O0yDN97AP7gsEXrHl+tDGCzpnsuZebio73u\n9iBJlqcB/DkpfpChah4uqdgpMet/B/ByR/7vjhdBr54gQZpyQNe95HRXIMQec/Cg21sDn+qU0FQ1\nttMhot6iqCyJiUizxUb1dBSXbXHWOUsqiGgsMx9oTxvMvBdAh0S82knfqPAUnyRZmqy9J8T0hqZ7\nUolI4A5k1wr42n8S+IPVEro4Ma5/i32UrvGDHCUVu4YAMCtr96FfxgXQdR925PyCnPylMEyt3DC1\nPwF4rM5V0hdAOTO3e4mnM9B132NE9E5+8drz4E88/xMz1xzPMRxrgmv8QU5bmNlDJMxYkjXD7XT7\nJ8lOdwWWZM1wkyC+1xG/8I4S8CP/bljfq2POO+ORkCF9LqcLJvwjNDN1UjdZsn14rPrtAHm1zmLZ\nbGbbq+v2QxItFR0x+q1hGHp2VW1+i9lxVe0+r2FqUQkxvTBh+D3oGjcAKYnDce7Yv8LpLoNh+J5j\nPy5mXnu8jX4DzLyfmd9n5k9PN6MPBA1/kNMc3fA9Wltf/K8fFvzV9cXs290/LHjIXV1b8Kaue/9M\nRBYiiqV2JijxR/pSSDPvl7bIIBKSe3af0KRs/4zpkmGoZ9NhkoC0czxWIhpBRL3aOaaDMPN20zS2\nrt/+hWaYfvvs8dVh5cb3XSYbR715qRve13bk/KKWV/0qX1NUugW5hasMWbLGZ3Sf2MT9UxQVpKdM\nYEGQ0ltr70ghf8KVR4mEJ4jolHfD7CyCSz1BTmsCwV2PENFThqnFASgDAEmyvmaa+o1EAhGoThSk\nRwxTf7+1NohIFEXl76Ig38ts2gVBqhIF6XHD1N89RNchimzTm0tGyJIFAT/ycLSi295eREH6gyjI\nL9ptUayqLslkPZ+ILmbmdmdj03TPtJz8Jd/m5C8Z5rBFq/WuUguR8J5p6i8e6bgaYOZtRHTN/1Y+\n/2+7NUIwTYN8qtNjGOoVoiA/qektVRg0zW0wm22mw+wokmR5VJKsj6Ynj5NFQRZyCpY+IEu2z3TD\n+8dTJJbjmBHc3A3ym0OR7d/FRWecP3rgjTa7LQoV1XuxaO2/3F5f3W2maXzasrztjfDQrjeMHXyr\nPTw0ERXVe7Ek63W3x1f7F8PQWs03S0SKKCrlk8c9HhYV7k9V7HRXYNn6t1BRvRcADEm0rtF0923M\nvLUj4yeicy1K6PfnjX3YHhGWBGYT2XkLzXXbv6gwDDWZmX0dbC8NQCKAHcxc1ZG67WhbBjAMgA5g\nAzMbRHR1RGjXd6ec+YSjYWPZ5anEDwsf9ui6d0hneCwR0SBFtq+YPvE5e0OmLFXzYNbix1xOd/lV\nzPzT0fZxshD06gkS5DC05c1SUrETi9a8WqBq7pRm5aNEQS669NxXrVbLr/LNlTV5mLv8mQrd8MW3\ntR4uCNJ1smR9a2if39nCQhNoydrXkZE6EX17TIYgiMgpWMbrtn9ebxhqX+5AUh2L4lgyvP/vz+zR\nbWyT478se6q+vCrnFmb+qtk1pMMvd1AJYG5HHwydDREJsmT9XBItU9NTxts1zaPn7F+ms2k8rhvq\nIbWHAstyE4mEs5jNagCftxaMJYnKK33SJ989uPdlTVJG7slfjPXbv/jJp7rajCg+1Qh69QQJcnha\neLN4vDWoqSuEpnuSyZ+2sbGLZ2aIPdZntYQ28X6JjugOBofCn1+1srWOTFP/hIjy1+/44mHD0IbG\nRfeKaJwxLDP1bKqpK7TkFCy9G8BDjesGZsqpAKqYuaLxOWakRYUnt+gvIrSbraJ63+1WJfRhhnlA\n1dwzZMl6pSRZr+ga199wuiuNmvpCg4gmM/Oadt+xToaZTSK6UtO9o7btmTWNmT0Af8GH0bP3B5xZ\n51qU0KFp3caGuDyVvryiNU8RCTcwm80edkKIRXa0yBMsS3YAQljz4781goY/yG8CIiKCcJMkWh+v\nqS8KN02jYdaNdds+Q1L8IKSnjEd+0dp1smT7SDe8dwbWgQtdnkqLbqiQROVge053BeDfPzhkBipm\nXgJgiSLbP+qWMLhFxGhiXH9LXvGaMY2PiaJ8mygq/5Qlq6RpHlmR7Ys13XPtrw8A3lRSsbNrZFi3\ng7O8elc59hWuFNOTx41NSRwhuzyVg9Zt/e/ZoY44Ou+MR2VZ8j+39pdsxNKsN+YQUZcjlTvuDAL3\ndlXgr10IJN4fHZE2YtLoB2yBwC9Ln7Tz8PPSJz8iovmNl6l0w/dDdv6iK3ulnRMiCBLcnmrkH8jC\nrtx5qqo5F3f6BZ1iBL16gvwmkETlyRBH7L/OGnlvSmRYN6zb9hlq6oqwfvvnmHLm33HG0NswZtDN\nuPTcV2xWS9h1AC4G/G59RMLSrK2fqoahAgBUzY2VG993EwlvM7PWnv4NQ91TVZvXwtBW1+03DFM/\nONMlokstcshLU8b9LeKK818PuWLyG5Ye3c44S5as8xo8dzTd8+TGHV97Cg6sB7MJVXNh0dpXzV5p\n59LoQTfJiXH90DNlPBz2aGVYv2sOGn0A6JYwGFHhySKAKUdzP08EomT5w6Bel9gaR/tGhicjMbaf\nAeCiZsV/8Xhr1s5d/qx79eaPMHPBQyirzEFCTG9ZlmwPypJ1Rkc9oU4ngoY/yGkPEUUw8wPnjX3Y\n0SW2L8aPuBtOTyV+XvJ3pCSOQFhIl4NlFdmOgb0udiiy/Y6GY5ru+V1e0dplX865yzNr8eO1X8+5\n21dZs+9b3fD9tb1jMNn4cF/haqOo9FdF6IrqXGzL+dmn695XG/X/5OhBN9ojA0s5smTF8P7XKLJk\nS4dfvx/MvE43fBeu2PDuns9m3ap+9ctdar2r1MzoPqFJn6rmRvPcwAAQ4ogT4c8DcGrBbGUABQfW\no7hsKxrcUBXFIQKwNy3KhqZ7p1bW5mXtLVgGQRBQVrkTYSFd6JJzXrJZLaE3Aph2/C/i5CC41BPk\nt8D
AsJAEn90WZQUAqxKKiSPvx6pNH8IW8PhojNUSCoJwUNwnEMAziYhSq2rzUwDs0g21ozlaDxDR\n1CVZM76yKKE2UZDY5a1iw9BuZuaDTwPD0FJjIpu6shMJiI1KR35xVgYCGv7MPJ+IMgFEAPBKkPM1\n3RfbuF5cdAYKDmShb/qvk3vDUFFUuokArOjI+I81RDRQlmxPATyKSChTNfeLAEoB9IdfnO1HUVD2\nLVj1Qpe46AxomgdO97sYNehGFBxYBwBzmrcpSdbnwkO6jBk39A6EhcSjuq4QKza8C2YTAzIucqzb\n/t87APx4XC/0JCFo+IP8Fih3e6tlk0009quPj87Epl3foX/PaWi8fLC3YLlX0z0tDEIgivSII0mZ\neQkRJeoe31AAMvyZt9SG80TUR5ZslvLqHHRLGNyonomyqhwAyG7WHsOf+hGSaPlky+6Zd5057A5L\nQzxat4TBWLnpAxAJSEkcAbenEuu3f+lm5l+YefuRXkdnQ0TDRVFZPDDzYltSwkCqc5bGZW377781\nza2lJo2m8uocb1VtviZLVvuU8X9HiN3/fDtQvgML17wMMD5j5pxmbYaJgvzHiSPvlxse7pFhSRg3\n9HbMXfEsRg24AUS/Ptx/awQNf5DTHmbeocj2vTtyZvfpmz5VJCKYpoH8A+u9XrW+at7K5yMGZF5k\nlyULsvMWq4UlGytMNmYco7EYANa2dk6WbN907zpKyNr6GRy2aESFJ0PTvdiw/UvWdc8eAG164him\n+kRR2ZZJPy56ND0taYzD6S5HfnEWBve+DOVVe7B590yA2W0Y6pNmG8nSAyksHxJIvJzBLmbz+ePh\n767I9peG9r3S3jNlAgAgLKQLYiJ70MwFDyj9ek6FzRohz1r8N2SmTjxo9AGgS2wfJMUP0vKL17YW\nB5Fms0aoNmtEE2+s8NAuIBCy8xd5Vc19VMnoT2WChv8kgogutEN6UoPZQ4awzw39CWb+7kSP63RA\n0z3Ttuz+YeGe/CWxUeHJVFKxUzBNY42uey8tq8q+cUnWa7cyw2aa+jeGqT7f2YFMh4OIeiqyI2XU\nwOtpT/5SzFv5fxAFAarmBcCsG76LDxVtysxOIhpWW1/08t6CZbelp4yXp5/1DOy2KKDH+ahzlmLW\n4se8hqn/Xxv9OyTJWmCzhEeldRsLl7sC+4pW/ygK8izD1I7pWrime0enJI5scsxqCUVcVAbKqvYg\nJXE4VM2NUEdCi7qRYcny/gPrW/q2AoUeb42iam4o8q/L/25PFTTdg9KKXeXM5tudfS2nCkHDf5Ig\nkXBDKOQ3rkMvezrCkYva/p9g96cyCXHab/gL2lkwcz4R9ax3lY6vd5UmA9jMzJsCp18O/J1IHLJk\nNcoqs7Fz7xxIogxmht0WCae7XAfgOVwDzKwT0SqHPfqGfj2nyo3PaboHRNRm4BaR+FFcVEbUxFF/\nOrgc1ivtHPyy9KkLiOgCZp51tBfYFoIgOT3emghFtjU57vHWQJH9ito+1YnCko1onA6RmZFfvAYm\nGy2kL5i5QpHtP63a9MG0MYNuscqyDT7VhWUb3mEAGwxTPe90FF9rL0HDfxJARJIC4aX7MdDenfyx\nJYMRi1i22Z/Gun8S0QftdRsM0jaBCNtFx7tfIhoAvx7/9kMEKW33+OrMRWv/hbGDb0VSYI1/f8kG\nLFv/lgxAaaNec34urcwWK2v2IToiFYB/j2DL7ple0zT+3VYlSVSmDcy8uMkeSFR4ChJi+6C4bOtr\nAI6Z4Qfw/vrtn981YcS91oa9lryitfBpTjSkYTRMDbmFK2C3RqBH8pnQdA+2Zv8It7fGBNDqfoWm\ne24sLtv26Vdz754cYovxOd3lFkGQvtQN3x8a7638Fgka/pODZAWipcHoN5BEIbCxJKlQewA4av2S\nIMcXIoqRJdtsixLaJzKsm15Zs09RZPtCTfdc7o9W/RVm1kRRXpLaddT0bl1+FZFM7jIUPbqN1XIK\nlt0G4LF2dOtmNr6fs+yZa1K6DkeYIx65hSs9bm/NNsPUnmmrEoOl5jNuoCHSlbq2+6KPAMNQ/1Za\nuXvot/+7b0RSwmCxpq6Qq2oLbL3SJnFdfTFV1uZDEmUjPXmCUFaVQxt3fQtRkNEtYSg0zaMCWNLq\nNTG7AFxCRF1qncXdAeTohnrUSWxOB4KG/+Sg2gdD8rAOG/36X+JjAx7oEoDjut4cpHOQJdtX6cnj\nBg3td7UskADDULF03Vtnl1Rsfw3Arc3LS6JFio3u2aKd2MieSl7R2nYlGpcl28eRYd0u6p8xHZU1\n+1BRnQunu1IwTe1BZm5T+ZJNI3dP/uKew/pdffCYV61HYckGAG0vEXUG/rwJdLame4bvyV88EkB/\nSbRO2ZU7LyE7b7HObOzSDd/bO3PnvNCv5wW2AZnTxTpnKdZv/8JFgvAK61x2mPYPAGhX4pv2QETh\nALoBKGDmQ0Zun6wEDf9JADNX20le8C32TrqaMxSBCMyM75GriRCWMxuH/GIHOfkgomRJtI4e0ud3\ncsPyiSgqGDnweut38/50LRHd3VwyQTfU9aUVu85OSxrTJHNVScVOr6771rejzzRJtF5y9ugHrLJk\nQdf4AQCA3P0rLGu3fvocgFFt1TVM7fe78xauVjUPUpNGw+2txqad30KWbWDV/VVb9TqLwMb1Wlmy\nXhVij7l6RP/fO8JDE1FUukVcu+0/PQHsNgx12Pac2Y/vyJkzjogOqJrrRQDfHuuxNUBEiixZ3xQF\n+RqLJVT1+eoVWbZ9oOve+07GlIyHImj4TxI80K9biZL5G1GR3pPDKQe17Iae54VxzYkeW5AjIsFu\njVBFUW7iTmizhEMgESb0MABNDL9pam/l7l95X2xUupKWNJYAxt79KziveI1q8iG1/xsYHh+TqcnN\n0ikmJQzCyo3vDz1URWZeI5D03L6iVQ8Ulm4SBRIhSRbd46qpMFn/W3svujFENEaWbHcKJCaqunsu\ns/kOM1cfony8KMi3nTv2kYNKqOkpZ0KUFPvqTR+9omquQQCuPZKxdAayZH0zJjL96nFDb7daLaFW\nj7cWS7Jm3FhVm68BuP9EjetIOGLDT0RRAL4EkAIgD8AVre2SE1Ee/EJWBgCNmUccaZ+nM8xcSURD\nvDBGr0VZL/iDdVb81hNGnMLsdHoqZbenyu9SGaCiOhfw/x4qmlcIRPeOz9r62UdrNn/cCwAEQd5h\nGOoNzFzajj5L65wlYGY0lqGpc5ZCEGXxcN45JusPE9FcgcS7WBATvK7aX0w23jqUa2tAJrkHAC8z\nH8xhLIryfRY55Jl+GdNsIfYYyitaM7KodPO9RDTkEPmGh0dHpLZQQk3uMgzL1r05gAL6w+24D50O\nEUUIgnxNwOgDAGzWcIwbdod95vwH/khEjx5qKe1k42i0ev4KYB4zZwBYEPjcGgxgAjMPDhr9QxPI\nNbqSmT9k5uVBo3/qwsz1AgmvzFv1gqu8ai9MU0dx2TYsXvuq
2zD1h9rS8GfmjarmHmiYWjfD1JJU\nzTWYmTe3s9ulHm+Nc3feQjR8dTTdi/U7vkRyl2GkyPbn2jHuxarmvszrqz/DMPVnDmP0J0uSdb8i\nOzaKgrJHEi3ZRDSAiGIJ9M+pE56y902fTCmJwzF++F22nt0nxEiStc0NZgDlTk+F2Pxr73JXQBRk\n5wn+PSRZLaFq45wMAOCwRUGSLCaA+BMzrCPjaJZ6pgMYH/j3xwAWo23j/5tVwQvy20U3fI/VO0tK\n5q16/mFd98XLkjVP072PMJtftlY+EDmbDsDVkcQsDQR07p/btPObV/fkLaJQRyxKK3cjuctwDOl7\nJfYVrsw82mtqNNaBkmj5ZmifK+3ZeQug6h5IgtLT6a7YZJjqB/HRvXRFdqCxTEav1EnSnrxFFwO4\nqY1m16qquzw7b6EjM/VsAgDD1JG19TMPiN7prLEfIQVeX53i8dbCZg0/eNDproCu+whAh7SbTjRH\nnIGLiKqZOTLwb4I/YUQL7QsiygVQC/9SzzvM/F4b7QUzcAX5zUIkXCyJytuSaHHohk8kErZruud3\nzLy3Y+3QCJslYsG4YbeHeH31iIlMRYg9FuVVezFv1fMHNM2T2BnjVWT7f3r3OO/KPflLxCG9L0da\nt7EgIlTW5OGXZf8wRUESmE1IkgW9UiehX8Z01DtL8POSv1dqurdNZVAiypBEyyKHPTo0MrSbUFy+\njZjNpZruuehEZw5TZNsbkWHJN5w57A673RYFp7sCS7JmuGvri2dourfdSq2dTadn4CKieQBaxkkD\njzb+wMxMRG09QcYG1i5jAcwjol3MvKyN/p5o9HExMy8+1PiCBDkdIKJRsmT7z8RRf7LHR2fCNHXs\nzJ03ePOub5cTUfcOGrws3fAWlFTszBiQcaEkCCI8vjqs3vyh2zC0VuUajgSBxH6GoYoxEWnokXzG\nwePlVTlw2KKEcUNvR0xkGmrrD2D15g+hah74VKcK8OeHapeZs4kouba++Oza+uKuANZxB3MSHys0\n3XtvVV2B9v38B/4gSRZT131EJMzQDd+jh6/deRDRBAATjqqNo5jx74J/7b6EiP6/vXsPj6su8wD+\n/Z5zZiaZpGkampKkSW+00ILcKi21okSwbMELFEQXlVXZFXS98CgiKO7DrnvzUXRV2Oq6XhfL6upi\nKZRbq4V2RUoKLb1f0muatGlL2zRpkplzefePmUraTCYzycyZSc/7eR4ezsz59fzeeZO+PfM75/x+\ntQBWiMj0Qf7MgwC6RKTfupp6xq+CKhIuW3LZ9FveO33KvNN+/59Z+XXv8LHme0Ukq+kkSI4PWaVP\nGjQvKIueY3d0tpWQxg8cN3bPqWsLyZszGgDsHsq96OFQ6X9VjZ700ZqxM3jp9AUAAE88PP78F3DN\nnHvQd2nIntgJPL7si4Cw2fVis9Pd2TMSkIwiMaZ/oJCrmPWJJ+vaOZyLu0sAnFpK7mMAFqcIKEpy\nVHK7DMB1AIriX2+ligcvHDvmvH5/cWuqLzRI4x9JZrVGrIi0xu3umTG7a9bRjr03u55dbzu9X0he\nAygNh6KLTCPUWlY6dqVphNpDodKHSWZ1vc92er95+Ghz/MDhN2dLiMW74Ho2zlwPuDRSgWhJVY/r\nxT4x0os+AIhIt4jsLoaiP1TDKfzfADCP5HYA1yRfg2QdyaXJNjUAVpFch8SUsk+JyPPDCVipTDAh\nNHjLwhORjYeObu/31fvw0WZUVjSYAD6c4o9lctzNIvIH6bNYe8gq/cm5Y6cv+MBffL/kluu+U3Hz\nvO+UjBlVf4dlRtLdbZMyZk+cm4527HXXbPoV4nYPLKsEnucm1iPuw3Zi6Ok9SgC7hvI5VO4Neagn\n13SoR+UCSRqGdY9B837HjVdZVuSg69p/53nOTwod20BIzrKskpcbZ33eqK2+CJ5nY/PO59C8byWm\nTWzE+m2/+6HtxD6dg36qTSO079b5D5f0naq4q/swnvj9/Sddzz4n2wuoJGtDVski17WvAgmD1omx\nYyaXN86+OxIOReG4cax+/eexfQdeXR63u9873M+g+sv5xV2lRhrLjPxLedm4z101886yMRUTcORY\nc+3KNQu/b5nhqOPG87K4ynCJSJNphn7/x9f+Yx5AOG4c1VXnYd7c+9C0YVG369q5Wi1rQrR0TCwc\nip72gFR5tBqGYRmuZ58DoC3L2A8AuIZkFIKwB6f7jeN7fvybZz/3wYry2t7Ok+0R0njRdnqG9K1F\n5Yee8auzBslRphFqv+nd3yot6/O07NGOfXhm1dePuW58XLHOqULyItMIvzLn0k9E68a9BZHIKOzc\nt0pe2fBoh+vGJ+di7niSVaYRar3luu+W9H0Q6UTXATy54mudyTP+nEz/TbIGwPkA9orI3lwcc4B+\nLgBwLYAuAE+ISEe++ipWesavgm5aaUmlXVZaddr8wlWjJ8CgWeIC45DlGa1fRGQTyQWr1//iZ6YZ\nGu15jgFgl+vGb8vVgiEicjQcKv3li00Pf/iqt34qWlZahc6T7Xih6ZFukA8NpeiTLAXwVgDdANae\nerpWRA4ijw81kaRllfwoZJV+pKH2reiNnXDbj2z5Acm/FB+WixzptPCrs0lbT6wjbDsxhKw3J7js\n6T0O17OJ5MLkxUpEnifZ4Li9UwHERWRPrvuwnd6/faNjT/fi5fd+0jTD4rpxD+RDrhv/p2yPZRjW\nHaYR+l55tNqLO92GbfceI3mTiLyW67hT+GhZSdVt17/zwdJT6wgcObYLz/3fP/+a5IS+F7RVfzrU\no84q4VB06cS6WddeeenHI6ZhwXFiWLlmYU/7G1sWxe2efnPgB1XyTH0sgHYZwmpUJK8Oh8qenn/V\nA9HKinqICPa0rsZLa3983PXiE0SkM/dRvykSLm+ae/nfXDGh9vRJR19serh7b1vTvSKyMJ/9FxMd\n6lGBZzs9H9nb1vT43ramOaNH1cWPn9gfIY1nbaf384WOrZhIYgWwlkEbDiAcin555oW3llZW1AMA\nSGJy/Rzs2v/HUGv76x8C8OMchToAGRMtqfzzK9eN4+CRrRDxSjDCJkwrBC386qySHA+/huS0I8d2\nTgawLZ8XF3OF5BjDsL5gmZFbAfTE7ZM/AlDEay3zvDEVDf3OMsdWTilrbV8/Md+9u57z3O79f/rk\n2DHnhXa1/BFNGx/D6PJaeJ5jGEboPsOw9nqe89N8xzFSaeFXZyVJLGo+0MLmBUVytEHrbsuKfBBA\nb9w++ahphO9uqJ1Ze8Gka0pspxfrtz/xUEdn640kb8jldMQkzwtZpd9x3Ph8kq5hWL9PGjq3AAAM\nFUlEQVRxnN4viUhWa9GKuKvbDm+aWl01zXzzPUHLwbWdgKzLVbwDcd34v27b84eP7G1bMzpmd+G6\nufejumoqAKCjsy3y9MqvP0xyg4g05TuWkUjH+JXyEckKy4y8WjfukvoLJl9b4rgxrN+22O6NdZo3\nz/u2cWoBFddzsHj
5vV0ne964UUT+kKO+a0wzvPmS898/etqkdxmea2PDjqfsnftWtThu7KJspiAg\nOcM0w01zLv1E2eTxV8J2Yli//Ql7x54VLY4bm57vbyok600zvLWu+uKystIqzL7k9tP2b9zxlLdh\n+5OPxe3u2wc4xFlDx/iVKnKk8ana6ovGXz3rsyWninzduItDT654AAePbEZt9UUAANOwMHXi1WUb\nti+5AUBOCr9phD43pX5u9OLz3//nqVpmX3x76FjHvnGHjm6/FcCjmR5LRLaQfPcr6x9d+NLa/7wE\noGeZ4accN/YpP4anTCP0makN77AEHirKa/vtryivNQzDzPuQ00g1nLl6lFJZClvRD0yb2Fjad2lE\n07Awuf5taDt0+vyFvbETjnhu1jNnDsSyIu9qqJl52kLuJDGxbla5ZUbeme3xROTluH1ypohXIeKW\nx+3um0XkUK7iTceyIlfUVM+InFM5Ba2H+i9Q1nLgtV7b6c3JP5hnIy38SvlIIJ1xu//SrLF4F8g3\n/zp2njyEnftWuQJ5LFd9e56770TXwX5ju8c7W2Ou5+wb6nGTs1VmfUvocDhufOPhY7vsyePnoOvk\nITRtXITu3uPojXdi/bbFsqdtdbfnOYG5pTNbOtSjlI/i9skfrt+++MqG2pllISsxZU5X9xE0713p\n0jDinueE43a3s7v1ZRHxvigizbnq23Z6vrd++xPvq6+5LFpRnlhf6fDRHdi1/yVXxP1Zrvrxg+vG\nH9m2e/mdNWNnhObN/QrWbf0tFi+/F67nwDLDy1w3/mm/vn2MRHpxVykfJaca+LlphD4wdcI7InG7\nx929/yXPE+9+z7NXAbgBQA+A3+bjNlTTsO4CjX87p3KS7bpxdHS2wfXs20Tk6Vz3lW8kr7HMkkct\nKzxaROB59hHb6b1NRP5U6Nj8NJTaqYVfqQIgeQUSRT4G4Dci4ttc9cmFXRoBCIBDAA6JyG6/+s8l\nJsbHpgPwkHhmozgKmo+08CulMmIa1p2k+VBpSYXE4idDALbYTs8t+ZgfSOWXFn6l1KBIvqckUvE/\n1829P1pZUQ/Pc7F557Pu+m2LDzhubHKxTl2tUvN7zV2l1AgUDpV9bdZbPho9Nc+OYZh4y7T3mKPK\nzq0AcH1ho1N+0MKvVMCIuFPOqZzU7/3qqqkRAFN8D0j5Tgu/UgFDGpva39h22nsiggOHN8UB5GqZ\nR1XEtPArFTBxu/vBNRsf6247tBEigrjdjaaNi+yeWEcLcjQ9hCpuenFXqQAi+b6QVfLvIjLOE5em\nEXrWdnr+WleuGnn0rh6lVMaYmDBoHICTItJV6HjU0GjhV0qNCCTHAhAReaPQsYx0ejunUqqokbws\nHIquNY1Qq2mE2sKh6KskLy50XEGjZ/xKKV+QrDXN8NbZF98+6ryGtxMAmvetkqaNi064bvx8nVRt\naPSMXylVtAzDumvy+LeFp028moZhwTAsnD/pXZxYNztsGNadhY4vSLTwK6V8YZmRWTXVM0rOfL92\n7IWllhm5ohAxBdWQCz/JW0luIumSnJmm3XySW0nuIHnfUPtTSo1srmdvOnJsV78FW44c2xlz3bg+\nOOaj4ZzxbwCwAMDKgRqQNAE8AmA+gAsB3EZyxjD6VEqNUK4bX7hj7wv2/oNrISIQEbQceA3NLats\n17N/WOj4gmTIK3CJyFYgsWZnGrMBNJ+a6pXkrwDcCGDLUPtVSo1MIrKH5HtWrvnBIssKV0IAx4sf\ndd34h0WkpdDxBUm+l14cD6DvD3Q/gCvz3KdSqkiJyIskGxy399Q3/y1BXDyl0NIWfpLLANSk2PVV\nEXkyg+PrD1QpdZpkod9c6DiCLG3hF5F5wzx+K4CGPq8bkDjrT4nk3/d5+YKIvDDM/pVS6qxCshGJ\npTOHfozhfssiuQLAl0Tk1RT7LADbAFwLoA3AKwBuE5F+Y/z6AJdSSmXP1we4SC4g2QJgDoClJJ9J\nvl9HcikAJJdw+yyA55D4avfrVEVfKaWUf3TKBqWUGsF0ygallFKD0sKvlFIBo4VfKaUCRgu/UkoF\njBZ+pZQKGC38SikVMFr4lVIqYLTwK6VUwGjhV0qpgNHCr5RSAaOFXymlAkYLv1JKBYwWfqWUChgt\n/EopFTBa+JVSKmC08CulVMBo4VdKqYDRwq+UUgGjhV8ppQJGC79SSgWMFn6llAoYLfxKKRUwWviV\nUipgtPArpVTAaOFXSqmA0cKvlFIBo4VfKaUCRgu/UkoFzJALP8lbSW4i6ZKcmabdHpLrSa4l+cpQ\n+1NKKZUbwznj3wBgAYCVg7QTAI0icrmIzB5Gf74j2VjoGM6kMWWuGOPSmDKjMeXXkAu/iGwVke0Z\nNudQ+ymwxkIHkEJjoQNIobHQAQygsdABpNBY6ABSaCx0ACk0FjqAFBoLHUCu+DHGLwCWk1xD8pM+\n9KeUUioNK91OkssA1KTY9VUReTLDPt4uIgdIVgNYRnKriKzKNlCllFK5QREZ3gHIFQDuEZHXMmj7\nIIAuEfl2in3DC0QppQJKRLIaTk97xp+FlJ2SjAIwRaSTZBmA6wD8Q6q22QaulFJqaIZzO+cCki0A\n5gBYSvKZ5Pt1JJcmm9UAWEVyHYDVAJ4SkeeHG7RSSqmhG/ZQj1JKqZGlIE/ukvwWyS0kXyf5OMnR\nA7SbT3IryR0k7/MhrqJ7KC2LmHzLFckqkstIbif5PMnKAdrlPU+ZfG6S30/uf53k5fmII5uYSDaS\n7EjmZS3Jr/kQ009JtpPckKaN33lKG1OB8tRAckXy79xGkp8foJ1vucokpqxzJSK+/wdgHgAjuf0N\nAN9I0cYE0AxgEoAQgHUAZuQ5rukAzgewAsDMNO12A6jyKVeDxuR3rgB8E8CXk9v3pfr5+ZGnTD43\ngBsAPJ3cvhLAy3n+eWUSUyOAJX78/vTp8x0ALgewYYD9vuYpw5gKkacaAJclt8sBbCuC36lMYsoq\nVwU54xeRZSLiJV+uBlCfotlsAM0iskdEbAC/AnBjnuMquofSMozJ71y9H8Avktu/AHBTmrb5zFMm\nn/vPsYrIagCVJM8tcEyAzw81SuIW6mNpmvidp0xiAvzP00ERWZfc7gKwBUDdGc18zVWGMQFZ5KoY\nJmm7A8DTKd4fD6Clz+v9yfeKQbE9lOZ3rs4VkfbkdjuAgX7p852nTD53qjapTjT8jEkAzE0OEzxN\n8sI8xpMpv/OUiYLmieQkJL6RrD5jV8FylSamrHKVq9s5+8nk4S+SDwCIi8hjKdrl5apzMT6UloOY\ncp6rNDE9cFrHIpLmGYx8P7yX6ec+80won3c0ZHLs1wA0iEg3yesBLEZiOK/Q/MxTJgqWJ5LlAH4L\n4O7kWXa/Jme8znuuBokpq1zlrfCLyLx0+0l+HImxsmsHaNIKoKHP6wYk/mXNa1wZHuNA8v+HSf4O\nia/3Qy5oOYgp57lKF1PyglyNiBwkWQvg0ADHyGmeUsjkc5/Zpj75Xr4MGpOI
dPbZfobkQpJVInI0\nj3ENxu88DapQeSIZAvC/AH4pIotTNPE9V4PFlG2uCnVXz3wA9wK4UUR6B2i2BsA0kpNIhgF8CMAS\nv2JEmofSSI5Kbp96KG3AOyX8iAn+52oJgI8ltz+GxNnFaXzKUyafewmAv0rGMQfA8T7DVPkwaEwk\nzyXJ5PZsJG6rLmTRB/zP06AKkadkfz8BsFlEvjtAM19zlUlMWecqn1ej01yl3gFgL4C1yf8WJt+v\nA7C0T7vrkbiC3QzgKz7EtQCJsbseAAcBPHNmXACmIHGnxjoAG/MdVyYx+Z0rAFUAlgPYDuB5AJWF\nylOqzw3gLgB39WnzSHL/60hzt5ZfMQH4TDIn6wC8BGCODzH9N4A2APHk79MdRZCntDEVKE9XAfCS\nfZ6qT9cXMleZxJRtrvQBLqWUCphiuKtHKaWUj7TwK6VUwGjhV0qpgNHCr5RSAaOFXymlAkYLv1JK\nBYwWfqWUChgt/EopFTD/D18g0y5/DAqxAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import numpy as np\n", + "import sklearn\n", + "from sklearn.datasets import make_moons, make_circles, make_classification\n", + "from sklearn import linear_model\n", + "\n", + "import matplotlib.pyplot as plt\n", + "# See here: http://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html#example-classification-plot-classifier-comparison-py\n", + "%matplotlib inline\n", + "np.random.seed(0)\n", + "X, y = make_moons(200, noise=0.20) # X: numpy.ndarray ; X.shape -> (200,2) ; y.shape -> (200,)\n", + "plt.scatter(X[:,0], X[:,1], s=40, c=y, cmap=plt.cm.Spectral)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(200,)" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X, y = make_moons(200, noise=0.20)\n", + "y.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'plot_decision_boundary' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;31m# Plot the decision boundary\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mplot_decision_boundary\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mlambda\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mclf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtitle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Logistic Regression\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'plot_decision_boundary' is not defined" + ] + } + ], + "source": [ + "# Train the logistic rgeression classifier\n", + "clf = sklearn.linear_model.LogisticRegressionCV()\n", + "clf.fit(X, y)\n", + " \n", + "# Plot the decision boundary\n", + "plot_decision_boundary(lambda x: clf.predict(x))\n", + "plt.title(\"Logistic Regression\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + 
"outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.69836452, 0.65822006])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X[-1]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/hamlet.txt b/hamlet.txt new file mode 100644 index 0000000..c7fa434 --- /dev/null +++ b/hamlet.txt @@ -0,0 +1,6496 @@ +The Project Gutenberg EBook of Hamlet, by William Shakespeare + +This eBook is for the use of anyone anywhere at no cost and with +almost no restrictions whatsoever. You may copy it, give it away or +re-use it under the terms of the Project Gutenberg License included +with this eBook or online at www.gutenberg.net + + +Title: Hamlet + +Author: William Shakespeare + +Editor: Charles Kean + +Release Date: January 10, 2009 [EBook #27761] + +Language: English + +Character set encoding: UTF-8 + +*** START OF THIS PROJECT GUTENBERG EBOOK HAMLET *** + + + + +Produced by David Starner, Curtis Weyant and the Online +Distributed Proofreading Team at http://www.pgdp.net + + + + + + + + + + Transcriber's Note: + This is a heavily edited version of _Hamlet_. It was used + for Charles Kean's 1859 stage production. Phrases printed + in italics in the book are indicated in this electronic + version by _ (underscore). Footnotes originally appeared + at the bottom of each page. For this electronic version + the footnotes are collected at the end of each act. In + Act I, Scene 5, the word Uumix'd has been changed to + Unmix'd. A closing bracket ] was added to Act IV footnote + 37 after _Naked on your kingdom_,. A closing bracket ] + was added to Act IV footnote 50 after _Venom'd stuck_,. 
+ The word o'er-crows appears in Act V, Scene 3; in + footnote V.81, o'ercrows appears without a hyphen. Both + are as they appear in the book. + + + + + SHAKESPEARE'S TRAGEDY + + OF + + HAMLET, + PRINCE OF DENMARK. + + ARRANGED FOR REPRESENTATION AT THE + Royal Princess's Theatre + + WITH + EXPLANATORY NOTES, + + BY + CHARLES KEAN, F.S.A. + + AS PERFORMED ON + MONDAY, JANUARY 10, 1859. + + + + + LONDON: + + BRADBURY AND EVANS, 11, BOUVERIE STREET. + 1859. + + + + + LONDON: + BRADBURY AND EVANS, PRINTERS, WHITEFRIARS. + + + + +Dramatis Personæ + + + CLAUDIUS (_King of Denmark_) Mr. RYDER. + + HAMLET (_son to the former and_ + _nephew to the present King_). Mr. CHARLES KEAN. + + POLONIUS (_Lord Chamberlain_) Mr. MEADOWS. + + HORATIO (_friend To Hamlet_) Mr. GRAHAM. + + LAERTES (_son To Polonius_) Mr. J. F. CATHCART. + + ROSENCRANTZ } { Mr. BRAZIER. + GUILDENSTERN } (_Courtiers_) { Mr. G. EVERETT. + OSRICK } { Mr. DAVID FISHER. + + PRIEST Mr. TERRY. + + MARCELLUS Mr. PAULO. + + BERNARDO Mr. DALY. + + FRANCISCO Mr. COLLETT. + + GHOST OF HAMLET'S FATHER Mr. WALTER LACY. + + FIRST GRAVEDIGGER Mr. FRANK MATTHEWS. + + SECOND GRAVEDIGGER Mr. H. SAKER. + + FIRST PLAYER Mr. F. COOKE. + + SECOND PLAYER Mr. ROLLESTON. + + GERTRUDE (_Queen of Denmark, and_ + _mother of Hamlet_) Mrs. CHARLES KEAN. + + OPHELIA (_daughter of Polonius_) Miss HEATH. + + ACTRESS Miss DALY. + + + + +STAGE DIRECTIONS. + + +R. H. means Right Hand; L. H. Left Hand; U. E. Upper Entrance; R. H. +C. Enters through the Centre from the Right Hand; L. H. C. Enters +through the Centre from the Left Hand. + + +RELATIVE POSITIONS OF THE PERFORMERS WHEN ON THE STAGE. + +R. means on the Right side of the Stage; L. on the Left side of the +Stage; C. Centre of the Stage; R. C. Right Centre of the Stage; L. C. +Left Centre of the Stage. + +The reader is supposed _to be on the Stage_, facing the audience. + + + + +PREFACE. + + +The play of _Hamlet_ is above all others the most stupendous monument +of Shakespeare's genius, standing as a beacon to command the wonder +and admiration of the world, and as a memorial to future generations, +that the mind of its author was moved by little less than inspiration. +_Lear_, with its sublime picture of human misery;--_Othello_, with its +harrowing overthrow of a nature great and amiable;--_Macbeth_, with +its fearful murder of a monarch, whose "virtues plead like angels +trumpet-tongued against the deep damnation of his taking +off,"--severally exhibit, in the most pre-eminent degree, all those +mighty elements which constitute the perfection of tragic art--the +grand, the pitiful, and the terrible. _Hamlet_ is a history of mind--a +tragedy of thought. It contains the deepest philosophy, and most +profound wisdom; yet speaks the language of the heart, touching the +secret spring of every sense and feeling. Here we have no ideal +exaltation of character, but life with its blended faults and +virtues,--a gentle nature unstrung by passing events, and thus +rendered "out of tune and harsh." + +The original story of Hamlet is to be found in the Latin pages of the +Danish historian, Saxo Grammaticus, who died in the year 1208. +Towards the end of the sixteenth century, the French author, Francis +de Belleforest, introduced the fable into a collection of novels, +which were translated into English, and printed in a small quarto +black letter volume, under the title of the "Historie of Hamblett," +from which source Shakespeare constructed the present tragedy. 
+ +Saxo has placed his history about 200 years before Christianity, when +barbarians, clothed in skins, peopled the shores of the Baltic. The +poet, however, has so far modernised the subject as to make Hamlet a +Christian, and England tributary to the "sovereign majesty of +Denmark." A date can therefore be easily fixed, and the costume of +the tenth and eleventh centuries may be selected for the purpose. +There are but few authentic records in existence, but these few +afford reason to believe that very slight difference existed between +the dress of the Dane and that of the Anglo-Saxon of the same period. + +Since its first representation, upwards of two centuries and a half +ago, no play has been acted so frequently, or commanded such +universal admiration. It draws within the sphere of its attraction +both the scholastic and the unlearned. It finds a response in every +breast, however high or however humble. By its colossal aid it exalts +the drama of England above that of every nation, past or present. It +is, indeed, the most marvellous creation of human intellect. + +CHARLES KEAN. + + + + +HAMLET, + +PRINCE OF DENMARK. + + + + +ACT I. + +SCENE I.--ELSINORE. A PLATFORM BEFORE THE CASTLE. NIGHT. + + FRANCISCO _on his post. Enter to him_ BERNARDO (L.H.) + + _Ber._ Who's there? + + _Fran._ (R.) Nay, answer me:[1] stand, and unfold[2] yourself. + + _Ber._ Long live the king![3] + + _Fran._ Bernardo? + + _Ber._ He. + + _Fran._ You come most carefully upon your hour. + + _Ber._ 'Tis now struck twelve; get thee to bed, Francisco. + + _Fran._ For this relief much thanks: + + [_Crosses to_ L.] + + 'tis bitter cold, + And I am sick at heart. + + _Ber._ Have you had quiet guard? + + _Fran._ Not a mouse stirring. + + _Ber._ Well, good night. + If you do meet Horatio and Marcellus, + The rivals of my watch,[4] bid them make haste. + + _Fran._ I think I hear them.--Stand, ho! Who's there? + + _Hor._ Friends to this ground. + + _Mar._ And liegemen to the Dane.[5] + + _Enter_ HORATIO _and_ MARCELLUS (L.H.) + + _Fran._ Give you good night. + + _Mar._ O, farewell, honest soldier: + Who hath reliev'd you? + + _Fran._ Bernardo hath my place. + Give you good night. + + [_Exit_ FRANCISCO, L.H.] + + _Mar._ Holloa! Bernardo! + + _Ber._ Say, + What, is Horatio there? + + _Hor._ (_Crosses to_ C.) A piece of him.[6] + + _Ber._ (R.) Welcome, Horatio: welcome, good Marcellus. + + _Hor._ What, has this thing appear'd again to-night? + + _Ber._ I have seen nothing. + + _Mar._ (L.) Horatio says, 'tis but our fantasy, + And will not let belief take hold of him, + Touching this dreaded sight, twice seen of us: + Therefore I have entreated him, along + With us, to watch the minutes of this night;[7] + That, if again this apparition come, + He may approve our eyes,[8] and speak to it. + + _Hor._ Tush! tush! 'twill not appear. + + _Ber._ Come, let us once again assail your ears, + That are so fortified against our story, + What we two nights have seen.[9] + + _Hor._ Well, let us hear Bernardo speak of this. + + _Ber._ Last night of all, + When yon same star that's westward from the pole + Had made his course to illume that part of heaven + Where now it burns, Marcellus, and myself, + The bell then beating one-- + + _Mar._ Peace, break thee off; look, where it comes + again! + + _Enter_ Ghost (L.H.) + + _Ber._ In the same figure, like the king that's dead. + + _Hor._ Most like:--it harrows me with fear and wonder.[10] + + _Ber._ It would be spoke to. + + _Mar._ Speak to it, Horatio. 
+ + _Hor._ What art thou, that usurp'st this time of night,[11] + Together with that fair and warlike form + In which the majesty of buried Denmark + Did sometimes march? By heaven I charge thee, speak! + + _Mar._ It is offended. + + [Ghost _crosses to_ R.] + + _Ber._ See! it stalks away! + + _Hor._ Stay!--speak!--speak, I charge thee, speak! + + [_Exit_ Ghost, R.H.] + + _Mar._ 'Tis gone, and will not answer. + + _Ber._ How now, Horatio! You tremble, and look pale: + Is not this something more than fantasy? + What think you of it? + + _Hor._ Before heaven, I might not this believe, + Without the sensible and true avouch[12] + Of mine own eyes. + + _Mar._ Is it not like the king? + + _Hor._ As thou art to thyself: + Such was the very armour he had on, + When he the ambitious Norway combated. + + _Mar._ Thus, twice before, and jump at this dead hour,[13] + With martial stalk hath he gone by our watch. + + _Hor._ In what particular thought to work,[14] I know not; + But in the gross and scope[15] of mine opinion, + This bodes some strange eruption to our state.[16] + In the most high and palmy[17] state of Rome, + A little ere the mightiest Julius fell, + The graves stood tenantless, and the sheeted dead + Did squeak and gibber in the Roman streets. + + _Re-enter_ Ghost (R.H.) + + But, (L.C.) soft, behold! lo, where it comes again! + I'll cross it, though it blast me. + + [HORATIO _crosses in front of the_ Ghost _to_ R. + Ghost _crosses to_ L.] + + Stay, illusion! + If thou hast any sound, or use of voice,[18] + Speak to me: + If there be any good thing to be done, + That may to thee do ease, and grace to me, + Speak to me: + If thou art privy to thy country's fate, + Which, happily, foreknowing may avoid, + O, speak! + O, if thou hast uphoarded in thy life + Extorted treasure in the womb of earth,[19] + For which, they say, you spirits oft walk in death, + Speak of it:--stay, and speak! + + [_Exit_ Ghost, L.H.] + + _Mar._ 'Tis gone! + We do it wrong, being so majestical, + To offer it the show of violence. + + _Ber._ It was about to speak, when the cock crew. + + _Hor._ And then it started like a guilty thing + Upon a fearful summons.[20] I have heard, + The cock, that is the trumpet of the morn, + Doth with his lofty[21] and shrill-sounding throat + Awake the god of day; and, at his warning, + Whether in sea or fire, in earth or air, + The extravagant and erring spirit[22] hies + To his confine. + But, look, the morn, in russet mantle clad, + Walks o'er the dew of yon high eastern hill: + Break we our watch up; and, by my advice, + Let us impart what we have seen to-night + Unto young Hamlet; for, upon my life, + This spirit, dumb to us, will speak to him. + + [_Exeunt_, L.H.] + + +SCENE II.--A ROOM OF STATE IN THE PALACE. + + _Trumpet March._ + + _Enter the_ KING _and_ QUEEN, _preceded by_ POLONIUS, HAMLET, + LAERTES,[23] Lords, Ladies, _and_ Attendants. + + _King._ (R.C.) Though yet of Hamlet our dear brother's death + The memory be green;[24] and that it us befitted + To bear our hearts in grief, and our whole kingdom + To be contracted in one brow of woe; + Yet so far hath discretion fought with nature, + That we with wisest sorrow[25] think on him, + Together with remembrance of ourselves. + Therefore our sometime sister, now our queen, + The imperial jointress of this warlike state, + Have we, as 'twere with a defeated joy,[26] + Taken to wife: nor have we herein barr'd[27] + Your better wisdoms, which have freely gone + With this affair along:--For all, our thanks. 
+ And now, Laertes, what's the news with you? + You told us of some suit; What is't, Laertes? + + _Laer._ (R.) My dread lord, + Your leave and favour[28] to return to France; + From whence though willingly I came to Denmark, + To show my duty in your coronation, + Yet now, I must confess, that duty done, + My thoughts and wishes bend again toward France, + And bow them to your gracious leave and pardon. + + _King._ Have you your father's leave? What says Polonious? + + _Pol._ (R.) He hath, my lord, (wrung from me my slow leave + By laboursome petition; and, at last, + Upon his will I sealed my hard consent):[29] + I do beseech you, give him leave to go. + + _King._ Take thy fair hour, Laertes; time be thine, + And thy best graces spend it at thy will![30] + But now, my cousin Hamlet, and my son,---- + + _Ham._ (L.) A little more than kin, and less than kind.[31] + + [Aside.] + + _King._ How is it that the clouds still hang on you? + + _Ham._ Not so, my lord; I am too much i'the sun.[32] + + _Queen._(L.C.) Good Hamlet, cast thy nighted colour[33] off, + And let thine eye look like a friend on Denmark. + Do not for ever with thy vailed lids[34] + Seek for thy noble father in the dust: + Thou know'st 'tis common, all that live must die, + Passing through nature to eternity. + + _Ham._ Ay, madam, it is common. + + _Queen._ If it be, + Why seems it so particular with thee? + + _Ham._ Seems, madam! nay, it is; I know not seems. + 'Tis not alone my inky cloak, good mother, + Nor the dejected haviour of the visage, + No, nor the fruitful river in the eye, + Together with all forms, modes, shows of grief, + That can denote me truly: These, indeed, seem, + For they are actions that a man might play. + But I have that within which passeth show;[35] + These but the trappings[36] and the suits of woe. + + _King._ 'Tis sweet and commendable in your nature, Hamlet, + To give these mourning duties to your father: + But, you must know, your father lost a father; + That father lost, lost his;[37] and the survivor bound, + In filial obligation, for some term + To do obsequious sorrow:[38] But to perséver[39] + In obstinate condolement,[40] is a course + Of impious stubbornness; 'tis unmanly grief: + It shows a will most incorrect to Heaven.[41] + We pray you, throw to earth + This unprevailing[42] woe; and think of us + As of a father: for let the world take note, + You are the most immediate to our throne; + Our chiefest courtier, cousin, and our son. + + _Queen._ Let not thy mother lose her prayers, Hamlet: + I pray thee, stay with us; go not to Wittenberg. + + _Ham._ I shall in all my best obey you, madam. + + _King._ Why, 'tis a loving and a fair reply; + Be as ourself in Denmark.--Madam, come; + This gentle and unforc'd accord of Hamlet + Sits smiling to my heart:[43] in grace whereof,[44] + No jocund health that Denmark drinks to-day,[45] + But the great cannon to the clouds shall tell; + Re-speaking earthly thunder. + + [_Trumpet March repeated. Exeunt_ KING _and_ QUEEN, + _preceded by_ POLONIUS, Lords, Ladies, LAERTES, _and_ + Attendants, R.H.] + + _Ham._ O, that this too, too solid flesh would melt, + Thaw, and resolve itself[46] into a dew! + Or that the Everlasting had not fix'd + His canon[47] 'gainst self-slaughter! O God! O God! + How weary, stale, flat, and unprofitable + Seem to me all the uses of this world![48] + Fye on't! O fye! 'tis an unweeded garden, + That grows to seed; things rank and gross in nature + Possess it merely.[49] That it should come to this! 
+ But two months dead!--nay, not so much, not two: + So excellent a king; that was, to this, + Hyperion to a satyr:[50] so loving to my mother, + That he might not beteem[51] the winds of heaven + Visit her face too roughly. Heaven and earth! + Must I remember? why, she would hang on him, + As if increase of appetite had grown + By what it fed on: And yet, within a month,-- + Let me not think on't,--Frailty, thy name is Woman!-- + A little month; or ere those shoes were old + With which she follow'd my poor father's body, + Like Niobe, all tears;--she married with my uncle, + My father's brother; but no more like my father + Than I to Hercules. + It is not, nor it cannot come to, good: + But break, my heart, for I must hold my tongue! + + _Enter_ HORATIO, BERNARDO, _and_ MARCELLUS (R.H.) + + _Hor._ Hail to your lordship! + + _Ham._ I am glad to see you well: + Horatio,--or I do forget myself. + + _Hor._ The same, my lord, and your poor servant ever. + + _Ham._ Sir, my good friend; I'll change that name with you:[52] + And what make you from Wittenberg, Horatio?-- + Marcellus? + + [_Crosses to_ C.] + + _Mar._ (R.) My good lord, + + _Ham._ (C.) I am very glad to see you; good even, sir. + + [_To_ BERNARDO, R.] + + But what, in faith,[53] make you[54] from Wittenberg?[55] + + _Hor._ (L.) A truant disposition, good my lord. + + _Ham._ I would not hear your enemy say so; + Nor shall you do mine ear that violence, + To make it truster of your own report + Against yourself: I know you are no truant. + But what is your affair in Elsinore? + We'll teach you to drink deep, ere you depart. + + _Hor._ My lord, I came to see your father's funeral. + + _Ham._ I pray thee, do not mock me, fellow-student; + I think it was to see my mother's wedding. + + _Hor._ Indeed, my lord, it followed hard upon. + + _Ham._ Thrift, thrift, Horatio! the funeral bak'd meats + Did coldly furnish forth the marriage tables. + Would I had met my dearest foe[56] in Heaven + Ere ever I had seen that day, Horatio! + My father,--Methinks, I see my father. + + _Hor._ Where, + My lord? + + _Ham._ In my mind's eye, Horatio. + + _Hor._ I saw him once; he was a goodly king.[57] + + _Ham._ He was a man, take him for all in all, + I shall not look upon his like again. + + [_Crosses to_ L.] + + _Hor._ (C.) My lord, I think I saw him yesternight. + + _Ham._ Saw who? + + _Hor._ My lord, the king your father. + + _Ham._ The king my father! + + _Hor._ Season your admiration for a while[58] + With an attent ear; till I may deliver, + Upon the witness of these gentlemen, + This marvel to you. + + _Ham._ For Heaven's love, let me hear. + + _Hor._ Two nights together had these gentlemen, + Marcellus and Bernardo, on their watch, + In the dead waste and middle of the night,[59] + Been thus encounter'd. A figure like your father, + Arm'd at all points exactly, cap-à-pé, + Appears before them, and, with solemn march + Goes slow and stately by them: thrice he walk'd + By their oppress'd and fear-surprisèd eyes, + Within his truncheon's length; whilst they, distill'd + Almost to jelly with the act of fear,[60] + Stand dumb, and speak not to him. This to me + In dreadful secrecy impart they did; + And I with them the third night kept the watch: + Where, as they had deliver'd, both in time, + Form of the thing, each word made true and good, + The apparition comes. + + _Ham._ But where was this? + + [_Crosses to_ MARCELLUS.] + + _Mar._ (R.) My lord, upon the platform where we + watch'd. + + _Ham._ (C.) Did you not speak to it? + + _Hor._ (L.) 
My lord, I did; + But answer made it none: yet once methought + It lifted up its head, and did address[61] + Itself to motion, like as it would speak: + But, even then, the morning cock crew loud, + And at the sound it shrunk in haste away; + And vanish'd from our sight. + + _Ham._ 'Tis very strange. + + _Hor._ As I do live, my honour'd lord, 'tis true; + And we did think it writ down[62] in our duty + To let you know of it. + + _Ham._ Indeed, indeed, sirs, but this troubles me. + Hold you the watch to-night? + + _Mar._ We do, my lord. + + _Ham._ Arm'd, say you? + + _Mar._ Arm'd, my lord. + + _Ham._ From top to toe? + + _Mar._ My lord, from head to foot. + + _Ham._ Then saw you not + His face? + + _Hor._ O, yes, my lord; he wore his beaver up.[63] + + _Ham._ What, looked he frowningly? + + _Hor._ A countenance more + In sorrow than in anger. + + _Ham._ Pale or red? + + _Hor._ Nay, very pale. + + _Ham._ And fix'd his eyes upon you? + + _Hor._ Most constantly. + + _Ham._ I would I had been there. + + _Hor._ It would have much amaz'd you. + + _Ham._ Very like, + Very like. Stay'd it long? + + _Hor._ While one with moderate haste might tell a hundred. + + _Mar._} + } Longer, Longer. + _Ber._} + + _Hor._ Not when I saw it. + + _Ham._ His beard was grizzl'd, No? + + _Hor._ It was, as I have seen it in his life, + A sable silver'd. + + _Ham._ I will watch to-night; + Perchance, 'twill walk again. + + _Hor._ (C.) I warrant it will. + + _Ham._ If it assume my noble father's person, + I'll speak to it, though hell itself should gape, + And bid me hold my peace. + + [_Crosses to_ L.] + I pray you all, + If you have hitherto conceal'd this sight, + Let it be tenable[64] in your silence still; + And whatsoever else shall hap to-night, + Give it an understanding, but no tongue; + I will requite your loves. So, fare you well: + Upon the platform, 'twixt eleven and twelve, + I'll visit you. + + _Hor._ (R.) Our duty to your honour. + + _Ham._ Your loves, as mine to you: Farewell. + + [_Exeunt_ HORATIO, MARCELLUS, _and_ BERNARDO, R.H.] + + My father's spirit in arms! all is not well; + I doubt some foul play: 'would the night were come; + Till then sit still, my soul: Foul deeds will rise, + Though all the earth o'erwhelm them, to men's eyes. + [_Exit_, L.H.] + + +SCENE III.--A ROOM IN POLONIUS'S HOUSE. + + _Enter_ LAERTES _and_ OPHELIA (R.H.) + + _Laer._ (L.C.) My necessaries are embarked: farewell: + And, sister, as the winds give benefit,[65] + Let me hear from you. + + _Oph._ (R.C.) Do you doubt that? + + _Laer._ For Hamlet, and the trifling of his favour,[66] + Hold it a fashion, and a toy in blood; + A violet in the youth of primy nature, + Forward, not permanent, sweet, not lasting, + The pérfume and suppliance of a minute.[67] + + _Oph._ No more but so? + + _Laer._ He may not, as unvalued persons do, + Carve for himself; for on his choice depends + The safety and the health of the whole state. + Then weigh what loss your honour may sustain, + If with too credent ear you list his songs. + Fear it, Ophelia, fear it, my dear sister; + And keep within the rear of your affection,[68] + Out of the shot and danger of desire. + The chariest maid[69] is prodigal enough, + If she unmask her beauty to the moon: + Virtue itself scapes not calumnious strokes: + Be wary, then; best safety lies in fear: + Youth to itself rebels, though none else near. + + _Oph._ I shall the effect of this good lesson keep, + As watchman to my heart. 
But, good my brother, + Do not, as some ungracious pastors do, + Show me the steep and thorny way to heaven + Whilst, like a puff'd and reckless libertine,[70] + Himself the primrose path of dalliance treads, + And recks not his own read.[71] + + _Laer._ O, fear me not. + I stay too long;--but here my father comes. + + _Enter_ POLONIUS (L.H.) + + _Pol._ Yet here, Laertes! aboard, aboard, for shame! + The wind sits in the shoulder of your sail,[72] + And you are staid for. There,--my blessing with you! + + [_Laying his hand on_ LAERTES' _head_.] + + And these few precepts in thy memory-- + Look thou charácter.[73] Give thy thoughts no tongue, + Nor any unproportion'd thought[74] his act. + Be thou familiar, but by no means vulgar. + The friends thou hast, and their adoption tried, + Grapple them to thy soul with hooks of steel; + But do not dull thy palm with entertainment + Of each new-hatch'd, unfledg'd comrade. Beware + Of entrance to a quarrel; but being in, + Bear it, that the opposer may beware of thee. + Give every man thine ear, but few thy voice: + Take each man's censure,[75] but reserve thy judgment. + Costly thy habit as thy purse can buy, + But not express'd in fancy; rich, not gaudy: + For the apparel oft proclaims the man; + And they in France of the best rank and station + Are most select and generous, chief in that.[76] + Neither a borrower nor a lender be: + For loan oft loses both itself and friend; + And borrowing dulls the edge of husbandry.[77] + This above all,--To thine ownself be true; + And it must follow, as the night the day, + Thou canst not then be false to any man. + Farewell; my blessing season this in thee![78] + + _Laer._ Most humbly do I take my leave, my lord. + + [_Crosses to_ L.] + + Farewell, Ophelia; and remember well + What I have said to you. + + _Oph._ (_Crosses to_ LAERTES.) 'Tis in my memory lock'd, + And you yourself shall keep the key of it.[79] + + _Laer._ Farewell. + + [_Exit_ LAERTES, L.H.] + + _Pol._ What is it, Ophelia, he hath said to you? + + _Oph._ So please you, something touching the lord Hamlet. + + _Pol._ Marry, well bethought: + 'Tis told me, he hath very oft of late + Given private time to you;[80] and you yourself + Have of your audience been most free and bounteous: + If it be so (as so 'tis put on me,[81] + And that in way of caution), I must tell you, + You do not understand yourself so clearly + As it behoves my daughter, and your honour. + What is between[82] you? give me up the truth. + + _Oph._ He hath, my lord, of late, made many tenders + Of his affection to me. + + _Pol._ Affection! pooh! you speak like a green girl, + Unsifted[83] in such perilous circumstance. + Do you believe his tenders, as you call them? + + _Oph._ I do not know, my lord, what I should think. + + _Pol._ Marry, I'll teach you: think yourself a baby; + That you have ta'en these tenders for true pay, + Which are not sterling. Tender yourself more dearly; + Or, you'll tender me a fool. + + _Oph._ My lord, he hath impórtun'd me with love + In honourable fashion. + + _Pol._ Ay, fashion you may call it; go to, go to. + + _Oph._ And hath given countenance to his speech, my lord, + With almost all the holy vows of heaven. + + _Pol._ Ay, springes to catch woodcocks.[84] I do know, + When the blood burns, how prodigal the soul + Lends the tongue vows: This is for all,-- + I would not, in plain terms, from this time forth, + Have you so slander any leisure moment,[85] + As to give words or talk with the lord Hamlet. + Look to't, I charge you: come your ways. 
+ + _Oph._ I shall obey, my lord. + + [_Exeunt_, R.H.] + + +SCENE IV.--THE PLATFORM. NIGHT. + + _Enter_ HAMLET, HORATIO, _and_ MARCELLUS (L.H.U.E.) + + _Ham._ The air bites shrewdly; it is very cold. + + _Hor._ It is a nipping and an eager air.[86] + + _Ham._ What hour now? + + _Hor._ I think it lacks of twelve. + + _Mar._ No, it is struck. + + _Hor._ (R.C.) Indeed? I heard it not: then it draws near the season, + Wherein the spirit held his wont to walk. + + [_A Flourish of Trumpets, and Ordnance shot off without._] + + What does this mean, my lord? + + _Ham._ (L.C.) The king doth wake to-night,[87] and takes his + rouse,[88] + And, as he drains his draughts of Rhenish down, + The kettle-drum and trumpet thus bray out + The triumph of his pledge. + + _Hor._ Is it a custom? + + _Ham._ Ay, marry, is't: + + [_Crosses to_ HORATIO.] + + But to my mind,--though I am native here, + And to the manner born,--it is a custom + More honour'd in the breach than the observance. + + _Enter_ Ghost (L.H.) + + _Hor._ (R.H.) Look, my lord, it comes! + + _Ham._ (C.) Angels and ministers of grace defend us!-- + Be thou a spirit of health or goblin damn'd, + Bring with thee airs from heaven or blasts from hell, + Be thy intents wicked or charitable, + Thou com'st in such a questionable shape,[89] + That I will speak to thee: I'll call thee--Hamlet, + King, father: Royal Dane: O, answer me! + Let me not burst in ignorance; but tell + Why thy canoniz'd bones, hearsed in death,[90] + Have burst their cerements;[91] why the sepulchre, + Wherein we saw thee quietly in-urn'd, + Hath op'd his ponderous and marble jaws, + To cast thee up again! What may this mean, + That thou, dead corse, again, in cómplete steel, + Revisits thus the glimpses of the moon, + Making night hideous; and we fools of nature[92] + So horridly to shake our disposition[93] + With thoughts beyond the reaches of our souls? + Say, why is this? wherefore? what should we do? + + [Ghost _beckons._] + + _Hor._ It beckons you to go away with it, + As if it some impartment did desire + To you alone. + + [Ghost _beckons again._] + + _Mar._ Look, with what courteous action + It waves you to a more removèd ground:[94] + But do not go with it. + + _Hor._ No, by no means. + + _Ham._ It will not speak; then I will follow it. + + _Hor._ Do not, my lord. + + _Ham._ Why, what should be the fear? + I do not set my life at a pin's fee;[95] + And for my soul, what can it do to that, + Being a thing immortal as itself? + + [Ghost _beckons._] + + It waves me forth again;--I'll follow it. + + _Hor._ What if it tempt you toward the flood, my lord,[96] + Or to the dreadful summit of the cliff + That beetles o'er his base into the sea,[97] + And there assume some other horrible form, + And draw you into madness? + + [Ghost _beckons._] + + _Ham._ It waves me still.-- + Go on; I'll follow thee. + + _Mar._ You shall not go, my lord. + + _Ham._ Hold off your hands. + + _Hor._ Be rul'd; you shall not go. + + _Ham._ My fate cries out, + And makes each petty artery in this body + As hardy as the Némean lion's nerve.[98] + + [Ghost _beckons_] + + Still am I call'd:--unhand me, gentlemen; + + [_Breaking from them._] + + By heaven, I'll make a ghost of him that lets me:--[99] + I say, away!--Go on; I'll follow thee. + + [_Exeunt_ Ghost _and_ HAMLET, L.H., _followed at a distance by_ + HORATIO _and_ MARCELLUS.] + + +SCENE V.--A MORE REMOTE PART OF THE PLATFORM. NIGHT. + + _Re-enter_ Ghost _and_ HAMLET (L.H.U.E.) + + _Ham._ (R.) Whither wilt thou lead me? Speak; I'll go no further. 
+ + _Ghost._ (L.) Mark me. + + _Ham._ I will. + + _Ghost._ My hour is almost come, + When I to sulphurous and tormenting flames + Must render up myself. + + _Ham._ Alas, poor ghost! + + _Ghost._ Pity me not, but lend thy serious hearing + To what I shall unfold. + + _Ham._ Speak; I am bound to hear. + + _Ghost._ So art thou to revenge, when thou shalt hear. + + _Ham._ What? + + _Ghost._ I am thy father's spirit; + Doom'd for a certain term to walk the night, + And for the day confin'd to fast in fires,[100] + Till the foul crimes done in my days of nature + Are burnt and purg'd away. But that I am forbid + To tell the secrets of my prison-house, + I could a tale unfold, whose lightest word + Would harrow up thy soul;[101] freeze thy young blood; + Make thy two eyes, like stars, start from their spheres; + Thy knotted and combinèd locks to part, + And each particular hair to stand on end,[102] + Like quills upon the fretful porcupine:[103] + But this eternal blazon[104] must not be + To ears of flesh and blood.--List, list, O, list!-- + If thou didst ever thy dear father love,---- + + _Ham._ O Heaven! + + _Ghost._ Revenge his foul and most unnatural murder. + + _Ham._ Murder! + + _Ghost._ Murder most foul, as in the best it is; + But this most foul, strange, and unnatural. + + _Ham._ Haste me to know it, that I, with wings as swift + As meditation or the thoughts of love, + May sweep to my revenge. + + _Ghost._ I find thee apt; + And duller shouldst thou be than the fat weed + That rots itself in ease on Lethe wharf,[105] + Wouldst thou not stir in this. Now, Hamlet, hear: + 'Tis given out that, sleeping in mine orchard,[106] + A serpent stung me; so the whole ear of Denmark + Is by a forged process[107] of my death + Rankly abus'd: but know, thou noble youth, + The serpent that did sting thy father's life + Now wears his crown. + + _Ham._ O, my prophetic soul! my uncle! + + _Ghost._ Ay, that incestuous, that adulterate beast, + With witchcraft of his wit, with traitorous gifts, + Won to his shameful lust + The will of my most seeming virtuous queen: + O, Hamlet, what a falling-off was there! + From me, whose love was of that dignity, + That it went hand in hand even with the vow + I made to her in marriage; and to decline + Upon a wretch,[108] whose natural gifts were poor + To those of mine! + But, soft! methinks I scent the morning air; + Brief let me be.--Sleeping within mine orchard, + My custom always in the afternoon, + Upon my secure[109] hour thy uncle stole, + With juice of cursed hebenon[110] in a vial, + And in the porches of mine ears did pour + The leperous distilment; whose effect + Holds such an enmity with blood of man, + That, swift as quicksilver, it courses through + The natural gates and alleys of the body; + So did it mine; + Thus was I, sleeping, by a brother's hand + Of life, of crown, of queen, at once despatch'd:[111] + Cut off even in the blossoms of my sin, + Unhousel'd, disappointed, unanel'd;[112] + No reckoning made, but sent to my account + With all my imperfections on my head. + + _Ham._ O, horrible! O, horrible! most horrible! + + _Ghost._ If thou hast nature in thee, bear it not; + Let not the royal bed of Denmark be + A couch for luxury[113] and damnèd incest. + But, howsoever thou pursu'st this act, + Taint not thy mind, nor let thy soul contrive + Against thy mother aught: leave her to Heaven, + And to those thorns that in her bosom lodge, + To prick and sting her. Fare thee well at once! 
+ The glow-worm shows the matin to be near, + And 'gins to pale his ineffectual fire:[114] + Adieu, adieu, adieu! remember me. + + [_Exit_, L.H.] + + _Ham._ Hold, hold, my heart; + And you, my sinews, grow not instant old, + But bear me stiffly up.--Remember thee! + Ay, thou poor ghost, while memory holds a seat + In this distracted globe.[115] Remember thee! + Yea, from the table of my memory + I'll wipe away all forms, all pressures past,[116] + And thy commandment all alone shall live + Within the book and volume of my brain, + Unmix'd with baser matter: yes, by heaven, + I have sworn't. + + _Hor._ (_Without._) My lord, my lord,---- + + _Mar._ (_Without._) Lord Hamlet,---- + + _Hor._ (_Without._) Heaven secure him! + + _Ham._ So be it! + + _Mar._ (_Without._) Illo, ho, ho, my lord! + + _Ham._ Hillo, ho, ho, boy! come, bird, come.[117] + + _Enter_ HORATIO _and_ MARCELLUS (L.H.U.E.) + + _Mar._ (R.) How is't, my noble lord? + + _Hor._ (L.) What news, my lord? + + _Ham._ (C.) O, wonderful! + + _Hor._ Good my lord, tell it. + + _Ham._ No; + You will reveal it. + + _Hor._ Not I, my lord, by heaven. + + _Mar._ Nor I, my lord. + + _Ham._ How say you, then; would heart of man once think it? + But you'll be secret?-- + + _Hor._} + } Ay, by heaven, my lord. + _Mar._} + + _Ham._ There's ne'er a villain, dwelling in all Denmark-- + But he's an arrant knave.[118] + + _Hor._ There needs no ghost, my lord, come from the grave + To tell us this. + + _Ham._ Why, right; you are in the right; + And so, without more circumstance at all, + I hold it fit that we shake hands, and part: + You as your business and desire shall point you, + For every man hath business and desire, + Such as it is;--and, for my own poor part, + Look you, I will go pray. + + _Hor._ These are but wild and whirling words,[119] my lord. + + _Ham._ I am sorry they offend you, heartily. + + _Hor._ There's no offence, my lord. + + _Ham._ Yes, by Saint Patrick,[120] but there is, Horatio, + And much offence, too. Touching this vision here, + It is an honest ghost, that let me tell you: + For your desire to know what is between us, + O'er-master it[121] as you may. And now, good friends, + As you are friends, scholars, and soldiers, + Give me one poor request. + + _Hor._ What is't, my lord? + We will. + + _Ham._ Never make known what you have seen to-night. + + _Hor._} + } My lord, we will not. + _Mar._} + + _Ham._ Nay, but swear't. + + _Hor._ Propose the oath, my lord. + + _Ham._ Never to speak of this that you have seen. + Swear by my sword. + + [HORATIO _and_ MARCELLUS _place each their right + hand on_ HAMLET'S _sword._] + + _Ghost._ (_Beneath._) Swear. + + _Hor._ O day and night, but this is wondrous strange! + + _Ham._ And therefore as a stranger give it welcome.[122] + There are more things in heaven and earth, Horatio, + Than are dreamt of in your philosophy. + But come;-- + Here, as before, never, so help you mercy, + How strange or odd soe'er I bear myself, + As I, perchance, hereafter shall think meet + To put an antick disposition[123] on,-- + That you, at such times seeing me, never shall, + With arms encumber'd thus,[124] or this head-shake, + Or by pronouncing of some doubtful phrase, + As, _Well, we know_; or, _We could, an if we would_; or, _If + we list to speak_;--or, _There be, an if they might_;-- + Or such ambiguous giving out, to note + That you know aught of me:--This do you swear, + So grace and mercy at your most need help you! 
+ + [HORATIO _and_ MARCELLUS _again place their hands on_ HAMLET'S + _sword._] + + _Ghost._ (_Beneath._) Swear. + + _Ham._ Rest, rest, perturbed spirit! So gentlemen, + With all my love I do commend me to you: + And what so poor a man as Hamlet is + May do, to express his love and friending to you, + Heaven willing, shall not lack.[125] Let us go in together; + + [_Crosses to_ L.] + + And still your fingers on your lips, I pray. + The time is out of joint;--O cursèd spite, + That ever I was born to set it right! + Nay, come, let's go together. + + [_Exeunt_ L.H.] + +END OF ACT FIRST. + + + + +Notes + +Act I + + + [Footnote I.1: _Me:_] _i.e., me_ who am already on the watch, and + have a right to demand the watch-word.] + + [Footnote I.2: _Unfold_] Announce, make known.] + + [Footnote I.3: _Long live the King._] The watch-word.] + + [Footnote I.4: _The rivals of my watch_,] _Rivals_, for partners + or associates.] + + [Footnote I.5: _And liegemen to the Dane._] _i.e._, owing + allegiance to Denmark.] + + [Footnote I.6: _A piece of him._] Probably a cant expression.] + + [Footnote I.7: _To watch the minutes of this night_; This seems + to have been an expression common in Shakespeare's time.] + + [Footnote I.8: _Approve our eyes_,] To _approve_, in + Shakespeare's age, signified to make good or establish.] + + [Footnote I.9: _What we have seen._] We must here supply "with," + or "by relating" before "what we have seen."] + + [Footnote I.10: _It harrows me with fear and wonder._] _i.e._, it + confounds and overwhelms me.] + + [Footnote I.11: _Usurp'st this time of night_,] _i.e._, abuses, + uses against right, and the order of things.] + + [Footnote I.12: _I might not this believe, &c._] I _could_ not: + it had not been permitted me, &c., without the full and perfect + evidence, &c.] + + [Footnote I.13: _Jump at this dead hour_,] _Jump_ and _just_ were + synonymous in Shakespeare's time.] + + [Footnote I.14: _In what particular thought to work_,] In what + particular course to set my thoughts at work: in what particular + train to direct the mind and exercise it in conjecture.] + + [Footnote I.15: _Gross and scope_] Upon the whole, and in a + general view.] + + [Footnote I.16: _Bodes some strange eruption to our state_,] + _i.e._, some political distemper, which will break out in + dangerous consequences.] + + [Footnote I.17: _Palmy state_] Outspread, flourishing. Palm + branches were the emblem of victory.] + + [Footnote I.18: _Sound, or use of voice_,] Articulation.] + + [Footnote I.19: + + _Uphoarded in thy life + Extorted treasure in the womb of earth_,] + + So in Decker's Knight's Conjuring, &c. "If any of them had bound + the spirit of gold by any charmes _in cares_, or in iron fetters, + _under the ground_, they should, _for their own soule's quiet + (which, questionless, else would whine up and down_,) not for the + good of their children, release it."] + + [Footnote I.20: + + _And then it started like a guilty thing + Upon a fearful summons._] + + Apparitions were supposed to fly from the crowing of the cock, + because it indicated the approach of day.] + + [Footnote I.21: _Lofty_] High and loud.] + + [Footnote I.22: _The extravagant and erring spirit_] + _Extravagant_ is, got out of his bounds. _Erring_ is here used in + the sense of wandering.] + + [Footnote I.23: Laertes is unknown in the original story, being + an introduction of Shakespeare's.] + + [Footnote I.24: _Green_;] Fresh.] + + [Footnote I.25: _Wisest sorrow_] Sober grief, passion discreetly + reined.] 
+ + [Footnote I.26: _With a defeated joy_,] _i.e._, with joy baffled; + with joy interrupted by grief.] + + [Footnote I.27: _Barr'd_] Excluded--acted without the concurrence + of.] + + [Footnote I.28: _Your leave and favour_] The favour of your leave + granted, the kind permission. Two substantives with a copulative + being here, as is the frequent practice of our author, used for + an adjective and substantive: an adjective sense is given to a + substantive.] + + [Footnote I.29: _Upon his will I sealed my hard consent:_] At or + upon his earnest and importunate suit, I gave my full and final, + though hardly obtained and reluctant, consent.] + + [Footnote I.30: + + _Take thy fair hour! time be thine; + And thy best graces spend it at thy will!_] + + Catch the auspicious moment! be time thine own! and may the + exercise of thy fairest virtue fill up those hours, that are + wholly at your command!] + + [Footnote I.31: _A little more than kin, and less than kind._] + Dr. Johnson says that _kind_ is the Teutonic word for _child_. + Hamlet, therefore, answers to the titles of _cousin_ and _son_, + which the king had given him, that he was somewhat more than + _cousin_, and less than _son_. Steevens remarks, that it seems to + have been another proverbial phrase: "The nearer we are in blood, + the further we must be from love; the greater the _kindred_ is, + the less the _kindness_ must be." _Kin_ is still used in the + Midland Counties for _cousin_, and _kind_ signifies _nature_. + Hamlet may, therefore, mean that the relationship between them + had become _unnatural_.] + + [Footnote I.32: _I am too much i'the sun._] Meaning, probably, + his being sent for from his studies to be exposed at his uncle's + marriage as his _chiefest courtier_, and being thereby placed too + much in the radiance of the king's presence; or, perhaps, an + allusion to the proverb, "_Out of Heaven's blessing, into the + warm sun:_" but it is not unlikely that a quibble is meant + between _son_ and _sun_.] + + [Footnote I.33: _Nighted colour_] Black--night-like.] + + [Footnote I.34: _Vailed lids_] Cast down.] + + [Footnote I.35: _Which passeth show_;] _i.e._, "external manners + of lament."] + + [Footnote I.36: _Trappings_] _Trappings_ are "furnishings."] + + [Footnote I.37: _That father lost, lost his_;] "That lost father + (of your father, _i.e._, your grandfather), or father so lost, + lost his.]" + + [Footnote I.38: _Do obsequious sorrow:_] Follow with becoming and + ceremonious observance the memory of the deceased.] + + [Footnote I.39: _But to perséver_] This word was anciently + accented on the second syllable.] + + [Footnote I.40: _Obstinate condolement_,] Ceaseless and + unremitted expression of grief.] + + [Footnote I.41: _Incorrect to Heaven._] Contumacious towards + Heaven.] + + [Footnote I.42: _Unprevailing_] Fruitless, unprofitable.] + + [Footnote I.43: _Sits smiling to my heart:_] _To_ is _at_: + gladdens my heart.] + + [Footnote I.44: _In grace whereof_,] _i.e._, respectful regard or + honour of which.] + + [Footnote I.45: _No jocund health, that Denmark drinks to-day_,] + Dr. Johnson remarks, that the king's intemperance is very + strongly impressed; everything that happens to him gives him + occasion to drink. The Danes were supposed to be hard drinkers.] + + [Footnote I.46: _Resolve itself_] _To resolve_ is an old word + signifying _to dissolve_.] + + [Footnote I.47: _His canon_] _i.e._, his rule or law]. + + [Footnote I.48: _The uses of this world!_] _i.e._, the habitudes + and usages of life.] 
+ + [Footnote I.49: _Merely._] Wholly--entirely.] + + [Footnote I.50: _Hyperion to a satyr:_] An allusion to the + exquisite beauty of Apollo, compared with the deformity of a + satyr; that satyr, perhaps, being Pan, the brother of Apollo. Our + great poet is here guilty of a false quantity, by calling + Hypĕrīon, Hypērĭon, a mistake not unusual among our English + poets.] + + [Footnote I.51: _Might not beteem_] _i.e._, might not allow, + permit.] + + [Footnote I.52: _I'll change that name with you._] _i.e._, do not + call yourself my _servant_, you are my _friend_; so I shall call + you, and so I would have you call me.] + + [Footnote I.53: _In faith._] Faithfully, in pure and simple + verity.] + + [Footnote I.54: _But what make you_] What is your object? What + are you doing?] + + [Footnote I.55: _What, in faith, make you from Wittenberg?_] In + Shakespeare's time there was a university at Wittenberg; but as + it was not founded till 1502, it consequently did not exist in + the time to which this play refers.] + + [Footnote I.56: _My dearest foe_] _i.e._, my direst or most + important foe. This epithet was commonly used to denote the + strongest and liveliest interest in any thing or person, for or + against.] + + [Footnote I.57: _Goodly king._] _i.e._, a good king.] + + [Footnote I.58: + + _Season your admiration for a while + with an attent ear_;] + + _i.e._, suppress your astonishment for a short time, that you may + be the better able to give your attention to what we will + relate.] + + [Footnote I.59: _In the dead waste and middle of the night_,] + _i.e._, in the dark and desolate vast, or vacant space and middle + of the night. It was supposed that spirits had permission to + range the earth by night alone.] + + [Footnote I.60: _With the act of fear_,] _i.e._, by the influence + or power of fear.] + + [Footnote I.61: _Address_] _i.e._, make ready.] + + [Footnote I.62: _Writ down_] Prescribed by our own duty.] + + [Footnote I.63: _He wore his beaver up._] That part of the helmet + which may be lifted up, to take breath the more freely.] + + [Footnote I.64: _Tenable_] _i.e._, strictly maintained.] + + [Footnote I.65: _Benefit_,] Favourable means.] + + [Footnote I.66: _Trifling of his favour_,] Gay and thoughtless + intimation.] + + [Footnote I.67: _Pérfume and suppliance of a minute._] _i.e._, an + amusement to fill up a vacant moment, and render it agreeable.] + + [Footnote I.68: _Keep within the rear of your affection_,] Front + not the peril; withdraw or check every warm emotion: advance not + so far as your affection would lead you.] + + [Footnote I.69: _The chariest maid_] Chary is cautious.] + + [Footnote I.70: _Puff'd and reckless libertine._] Bloated and + swollen, the effect of excess; and heedless and indifferent to + consequences.] + + [Footnote I.71: _Recks not his own read._] _i.e._, heeds not his + own lessons or counsel.] + + [Footnote I.72: _Shoulder of your sail_,] A common sea phrase.] + + [Footnote I.73: _Look thou charácter._] _i.e._, a word often used + by Shakespeare to signify to _write, strongly infix_; the accent + is on the second syllable.] + + [Footnote I.74: _Unproportion'd thought_] Irregular, disorderly + thought.] + + [Footnote I.75: _Each man's censure_,] Sentiment, opinion.] + + [Footnote I.76: _Chief in that._] _i.e._, chiefly in that.] + + [Footnote I.77: _Husbandry_] _i.e._, thrift, economical + prudence.] + + [Footnote I.78: _Season this in thee!_] _i.e._, infix it in such + a manner as that it may never wear out.] 
+ + [Footnote I.79: _Yourself shall keep the key of it._] Thence it + shall not be dismissed, till you think it needless to retain it.] + + [Footnote I.80: _Given private time to you_;] Spent his time in + private visits to you.] + + [Footnote I.81: _As so 'tis put on me_,] Suggested to, impressed + on me.] + + [Footnote I.82: _Is between_] _i.e._, what has passed--what + intercourse had.] + + [Footnote I.83: _Green girl, Unsifted_] _i.e._, inexperienced + girl. Unsifted means one who has not nicely _canvassed_ and + examined the peril of her situation.] + + [Footnote I.84: _Woodcocks._] Witless things.] + + [Footnote I.85: _Slander any leisure moment_,] _i.e._, I would + not have you so disgrace your most idle moments, as not to find + better employment for them than lord Hamlet's conversation.] + + [Footnote I.86: _An eager air._] _Eager_ here means _sharp_, from + _aigre_, French.] + + [Footnote I.87: _Doth wake to-night_,] _i.e._, holds a late + revel.] + + [Footnote I.88: _Takes his rouse_,] _Rouse_ means drinking bout, + carousal.] + + [Footnote I.89: _Questionable shape_,] To _question_, in our + author's time, signified to _converse_. Questionable, therefore, + means _capable of being conversed with._] + + [Footnote I.90: _Hearsed in death_,] Deposited with the + accustomed funeral rites.] + + [Footnote I.91: _Cerements_;] Those precautions usually adopted + in preparing dead bodies for sepulture.] + + [Footnote I.92: _Fools of nature_] _i.e._, making sport for + nature.] + + [Footnote I.93: _Disposition_] Frame of mind and body.] + + [Footnote I.94: _Removèd ground:_] _Removed_ for _remote_.] + + [Footnote I.95: _At a pin's fee_;] _i.e._, the value of a pin.] + + [Footnote I.96: _What if it tempt you toward the flood, &c._] + Malignant spirits were supposed to entice their victims into + places of gloom and peril, and exciting in them the deepest + terror.] + + [Footnote I.97: _Beetles o'er his base into the sea_,] _i.e._, + projects darkly over the sea.] + + [Footnote I.98: _Némean lion's nerve._] Shakespeare, and nearly + all the poets of his time, disregarded the quantity of Latin + names. The poet has here placed the accent on the first syllable, + instead of the second.] + + [Footnote I.99: _That lets me:_] To let, in the sense in which it + is here used, means to hinder--to obstruct--to oppose. The word + is derived from the Saxon.] + + [Footnote I.100: _To fast in fires_,] Chaucer has a similar + passage with regard to eternal punishment--_"And moreover the + misery of Hell shall be in default of meat and drink."_] + + [Footnote I.101: _Harrow up thy soul_;] Agitate and convulse.] + + [Footnote I.102: _Hair to stand on end_,] A common image of that + day. + + "_Standing_ as frighted with _erected haire_."] + + [Footnote I.103: _The fretful porcupine:_] This animal being + considered irascible and timid.] + + [Footnote I.104: _Eternal blazon_] _i.e._, publication or + divulgation of things eternal.] + + [Footnote I.105: _Rots itself in ease on Lethe wharf_,] _i.e._, + in indolence and sluggishness, by its torpid habits contributes + to that morbid state of its juices which may figuratively be + denominated rottenness.] + + [Footnote I.106: _Orchard_,] Garden.] + + [Footnote I.107: _Forged process_] _i.e._, false report of + proceedings.] + + [Footnote I.108: _Decline upon a wretch._] Stoop with degradation + to.] + + [Footnote I.109: _Secure_] Unguarded.] 
+ + [Footnote I.110: _Hebenon_] Hebenon is described by Nares in his + Glossary, as the juice of ebony, supposed to be a deadly poison.] + + [Footnote I.111: _Despatch'd:_] Despoiled--bereft.] + + [Footnote I.112: _Unhousel'd, disappointed, unanel'd_;] To + _housel_ is to minister the sacrament to one lying on his death + bed. _Disappointed_ is the same as unappointed, which here means + unprepared. _Unanel'd_ is without extreme unction.] + + [Footnote I.113: _Luxury_] Lasciviousness.] + + [Footnote I.114: _Pale his uneffectual fire:_] _i.e._, not seen + by the light of day; or it may mean, shining without heat.] + + [Footnote I.115: _In this distracted globe._] _i.e._, his head + distracted with thought.] + + [Footnote I.116: _Pressures past_,] Impressions heretofore made.] + + [Footnote I.117: _Come, bird, come._] This is the call which + falconers used to their hawk in the air when they would have him + come down to them.] + + [Footnote I.118: + + _There's ne'er a villain dwelling in all Denmark-- + But he's an arrant knave._] + + Hamlet probably begins these words in the ardour of confidence + and sincerity; but suddenly alarmed at the magnitude of the + disclosure he was going to make, and considering that, not his + friend Horatio only, but another person was present, he breaks + off suddenly:--"There's ne'er a villain in all Denmark that can + match (perhaps he would have said) my uncle in villainy; but + recollecting the danger of such a declaration, he pauses for a + moment, and then abruptly concludes:--"but he's an arrant + knave."] + + [Footnote I.119: _Whirling words_,] Random words thrown out with + no specific aim.] + + [Footnote I.120: _By Saint Patrick_,] At this time all the whole + northern world had their learning from Ireland; to which place it + had retired, and there flourished under the auspices of this + Saint.] + + [Footnote I.121: _O'er-master it_] Get the better of it.] + + [Footnote I.122: _Give it welcome._] Receive it courteously, as + you would a stranger when introduced.] + + [Footnote I.123: _Antick disposition_] _i.e._, strange, foreign + to my nature, a disposition which Hamlet assumes as a protection + against the danger which he apprehends from his uncle, and as a + cloak for the concealment of his own meditated designs.] + + [Footnote I.124: _Arms encumber'd thus_,] _i.e._, folded.] + + [Footnote I.125: _Friending to you--shall not lack_] Disposition + to serve you shall not be wanting.] + + + + +ACT II. + +SCENE I.--A ROOM IN POLONIUS'S HOUSE. + + + _Enter_ POLONIUS[1] (L.H.), _meeting Ophelia._ (R.H.) + + _Pol._ How now, Ophelia! What's the matter? + + _Oph._ O, my lord, my lord, I have been so affrighted! + + _Pol._ With what, in the name of Heaven? + + _Oph._ My lord, as I was sewing in my closet, + Lord Hamlet, with his doublet all unbrac'd; + Pale as his shirt; his knees knocking each other, + And with a look so piteous in purport, + He comes before me. + + _Pol._ Mad for thy love? + + _Oph._ My lord, I do not know; + But, truly, I do fear it. + + _Pol._ What said he? + + _Oph._ He took me by the wrist, and held me hard; + Then goes he to the length of all his arm; + And, with his other hand thus o'er his brow, + He falls to such perusal of my face + As he would draw it. 
Long staid he so; + At last,--a little shaking of mine arm, + And thrice his head thus waving up and down, + He rais'd a sigh so piteous and profound, + As it did seem to shatter all his bulk,[2] + And end his being: That done, he lets me go: + And, with his head over his shoulder turn'd, + He seem'd to find his way without his eyes; + For out o'doors he went without their helps, + And, to the last, bended their light on me. + + _Pol._ Come, go with me; I will go seek the king. + This is the very ecstacy of love;[3] + What, have you given him any hard words of late? + + _Oph._ No, my good lord; but, as you did command, + I did repel his letters, and denied + His access to me. + + _Pol._ That hath made him mad. + Come, go we to the king: + This must be known; which, being kept close, might move + More grief to hide than hate to utter love.[4] + Come. + [_Exeunt_ L.H.] + + +SCENE II.--A ROOM IN THE CASTLE. + + _Enter_ KING, QUEEN, ROSENCRANTZ, GUILDENSTERN, _and_ + Attendants (R.H.) + + _King._ (C.) Welcome, dear Rosencrantz and Guildenstern! + Moreover that we much did long to see you, + The need we have to use you did provoke + Our hasty sending. Something have you heard + Of Hamlet's transformation. What it should be, + More than his father's death, that thus hath put him + So much from the understanding of himself,[5] + I cannot dream of: I entreat you both, + That you vouchsafe your rest[6] here in our court + Some little time: so by your companies + To draw him on to pleasures, and to gather, + Whether aught, to us unknown, afflicts him thus, + That, open'd, lies within our remedy. + + _Queen._ (R.C.) Good gentlemen, he hath much talk'd of you; + And sure I am two men there are not living + To whom he more adheres. If it will please you + So to expend your time with us a while, + Your visitation shall receive such thanks + As fits a king's remembrance. + + _Ros._ (R.) Both your majesties + Might, by the sovereign power you have of us,[7] + Put your dread pleasures more into command + Than to entreaty. + + _Guil._ (R.) But we both obey, + And here give up ourselves, in the full bent,[8] + To lay our service freely at your feet. + + _King._ Thanks, Rosencrantz and gentle Guildenstern. + + _Queen._ I do beseech you instantly to visit + My too much changèd son. Go, some of you, + And bring these gentlemen where Hamlet is. + [_Exeunt_ ROSENCRANTZ, GUILDENSTERN, _and_ + Attendants, R.H.] + + _Enter_ POLONIUS (L.H.) + + _Pol._ Now do I think (or else this brain of mine + Hunts not the trail of policy[9] so sure + As it hath us'd to do), that I have found + The very cause of Hamlet's lunacy. + + _King._ (C.) O, speak of that; that do I long to hear. + + _Pol._ (L.C.) My liege, and madam, to expostulate[10] + What majesty should be, what duty is, + Why day is day, night night, and time is time, + Were nothing but to waste night, day, and time; + Therefore, since brevity is the soul of wit, + And tediousness the limbs and outward flourishes,-- + I will be brief:--Your noble son is mad: + Mad call I it; for, to define true madness, + What is't, but to be nothing else but mad? + But let that go. + + _Queen._ (R.C.) More matter, with less art. + + _Pol._ Madam, I swear I use no art at all. + That he is mad, 'tis true: 'tis true 'tis pity; + And pity 'tis, 'tis true: a foolish figure; + But farewell it, for I will use no art. 
+ Mad let us grant him, then: and now remains + That we find out the cause of this effect, + Or, rather say, the cause of this defect, + For this effect defective comes by cause: + Thus it remains, and the remainder thus, + Perpend.[11] + I have a daughter, have, while she is mine, + Who, in her duty and obedience, mark, + Hath given me this: Now gather, and surmise. + + [Reads] _To the celestial, and my soul's idol, the most beautified + Ophelia,--_[12] + + That's an ill phrase, a vile phrase, _beautified_ is a vile phrase: + but you shall hear. Thus: + + _In her excellent white bosom,[13] these_, &c.[14] + + _Queen._ Came this from Hamlet to her? + + _Pol._ Good madam, stay awhile; I will be faithful.-- + [_Reads._] + + _Doubt thou the stars are fire;_ + _Doubt thou the sun doth move;_ + _Doubt truth to be a liar;_ + _But never doubt, I love._ + + _O dear Ophelia, I am ill at these numbers;[15] I have not art to + reckon my groans: but that I love thee best, O most best,[16] + believe it. Adieu._ + + _Thine evermore, most dear lady, whilst this machine is to him_,[17] + Hamlet. + + This, in obedience, hath my daughter shown me: + And more above,[18] hath his solicitings,[19] + As they fell out by time, by means, and place, + All given to my ear. + + _King._ But how hath she + Receiv'd his love? + + _Pol._ What do you think of me? + + _King._ As of a man faithful and honourable. + + _Pol._ I would fain prove so. But what might you think, + When I had seen this hot love on the wing + (As I perceived it, I must tell you that, + Before my daughter told me), what might you, + Or my dear majesty your queen here, think, + If I had play'd the desk or table book;[20] + Or given my heart a winking, mute and dumb;[21] + Or look'd upon this love with idle sight;[22] + What might you think? No, I went round to work,[23] + And my young mistress thus did I bespeak: + _Lord Hamlet is a prince, out of thy sphere; + This must not be:_ and then I precepts gave her, + That she should lock herself from his resort, + Admit no messengers, receive no tokens. + Which done, she took the fruits of my advice;[24] + And he, repuls'd (a short tale to make), + Fell into sadness; thence into a weakness; + Thence to a lightness; and, by this declension, + Into the madness wherein now he raves, + And all we mourn for. + + _King._ Do you think 'tis this? + + _Queen._ It may be, very likely. + + _Pol._ Hath there been such a time (I'd fain know that,) + That I have positively said, _'tis so_, + When it proved otherwise? + + _King._ Not that I know. + + _Pol._ Take this from this, if it be otherwise: + + [_Pointing to his head and shoulder._] + + If circumstances lead me, I will find + Where truth is hid, though it were hid indeed + Within the centre. + + _King._ How may we try it further? + + _Pol._ You know, sometimes he walks for hours together + Here in the lobby. + + _Queen._ So he does, indeed. + + _Pol._ At such a time I'll loose my daughter to him: + Mark the encounter: if he love her not, + And be not from his reason fallen thereon, + Let me be no assistant for a state, + But keep a farm, and carters. + + _King._ We will try it. + + _Queen._ But, look, where sadly the poor wretch comes reading. + + _Pol._ Away, I do beseech you both, away: + I'll board him presently.[25] + + [_Exeunt_ KING _and_ QUEEN, R.H.] + + _Enter_ HAMLET, _reading_ (L.C.) + +_Pol._ How does my good lord Hamlet? + +_Ham._ (C.) Excellent well. + +_Pol._ (R.) Do you know me, my lord? 
+ +_Ham._ Excellent well; you are a fishmonger.[26] + +_Pol._ Not I, my lord. + +_Ham._ Then I would you were so honest a man. + +_Pol._ Honest, my lord! + +_Ham._ Ay, sir; to be honest, as this world goes, is to be one man +picked out of ten thousand. + +_Pol._ That's very true, my lord. + +_Ham._ For if the sun breed maggots in a dead dog, being a god, +kissing carrion,----Have you a daughter?[27] + +_Pol._ I have, my lord. + +_Ham._ Let her not walk i'the sun: conception is a blessing; but as +your daughter may conceive,--friend, look to't, look to't, look to't. + + [_Goes up stage._] + +_Pol._ (_Aside._) Still harping on my daughter:--yet he knew me not +at first; he said I was a fishmonger. + +[_Crosses to_ L.] + +I'll speak to him again.--What do you read, my lord? + +_Ham._ (C.) Words, words, words. + +_Pol._ (L.) What is the matter, my lord? + +_Ham._ Between who? + +_Pol._ I mean, the matter that you read, my lord. + +_Ham._ Slanders, sir: for the satirical rogue[28] says here that old +men have grey beards; that their faces are wrinkled; their eyes +purging thick amber and plum-tree gum; and that they have a plentiful +lack of wit, together with most weak hams: All of which, sir, though +I most powerfully and potently believe, yet I hold it not honesty to +have it thus set down; for yourself, sir, shall be as old as I am, +if, like a crab, you could go backward. + + [_Crosses_, L.] + +_Pol._ (_Aside._) Though this be madness, yet there's method in it. +Will you walk out of the air, my lord? + +_Ham._ Into my grave? + + [_Crosses_ R.] + +_Pol._ (L.) Indeed, that is out o' the air.--How pregnant sometimes +his replies[29] are! a happiness that often madness hits on, which +reason and sanity could not so prosperously be delivered of. I will +leave him, and suddenly contrive the means of meeting between him and +my daughter.--My honourable lord, I will most humbly take my leave of +you. + +_Ham._ (C.) You cannot, sir, take from me any thing that I will more +willingly part withall, except my life, except my life, except my +life. + +_Pol._ Fare you well, my lord. + + [_Exit_ POLONIUS, L.H.] + +_Ham._ These tedious old fools! + +_Pol._ (_Without._) You go to seek the lord Hamlet; there he is. + +_Ros._ Heaven save you, sir! + +_Enter_ ROSENCRANTZ _and_ GUILDENSTERN (L.H.) + +_Guil._ My honor'd lord!-- + +_Ros._ My most dear lord!-- + +_Ham._ My excellent good friends! How dost thou, Guildenstern? + + [_Crosses to_ ROSENCRANTZ.] + +Ah, Rosencrantz! Good lads, how do ye both? What news? + +_Ros._ (L.) None, my lord, but that the world's grown honest. + +_Ham._ (C.) Then is dooms-day near: but your news is not true. In the +beaten way of friendship,[30] what make you at Elsinore? + +_Ros._ To visit you, my lord; no other occasion. + +_Ham._ Beggar that I am, I am even poor in thanks; but I thank you. +Were you not sent for? Is it your own inclining? Is it a free +visitation? Come, come, deal justly with me: come, come; nay, speak. + +_Guil._ (R.) What should we say, my lord? + +_Ham._ Any thing--but to the purpose. You were sent for; and there is +a kind of confession in your looks, which your modesties have not +craft enough to colour: I know the good king and queen have sent for +you. + +_Ros._ To what end, my lord? + +_Ham._ That you must teach me. 
But let me conjure you, by the rights +of our fellowship, [_taking their hands_,] by the consonancy of our +youth,[31] by the obligation of our ever-preserved love, and by what +more dear a better proposer[32] could charge you withal, be even[33] +and direct with me, whether you were sent for, or no? + +_Ros._ What say you? + + [_To_ GUILDENSTERN.] + +_Ham._ Nay, then, I have an eye of you.[34] + + [_Crosses_ R.] + + [_Aside._] + +--if you love me, hold not off. + +_Guil._ My lord, we were sent for. + +_Ham._ (_Returning_ C.) I will tell you why; so shall my anticipation +prevent your discovery, and your secrecy to the king and queen moult +no feather.[35] I have of late (but wherefore I know not) lost all my +mirth, forgone all custom of exercises; and, indeed, it goes so +heavily with my disposition, that this goodly frame, the earth, seems +to me a steril promontory; this most excellent canopy, the air, look +you, this brave o'erhanging firmament, this majestical roof fretted +with golden fire, why, it appears no other thing to me than a foul +and pestilent congregation of vapours. What a piece of work is man! +How noble in reason! how infinite in faculties! in form and moving +how express[36] and admirable! in action how like an angel! in +apprehension how like a god! the beauty of the world! the paragon[37] +of animals! And yet, to me, what is this quintessence of dust? man +delights not me,--nor woman neither, though by your smiling you seem +to say so. + +_Ros._ My lord, there was no such stuff in my thoughts. + +_Ham._ Why did you laugh, then, when I said, _Man delights not me?_ + +_Ros._ To think, my lord, if you delight not in man, what lenten +entertainment[38] the players shall receive from you: we coted them +on the way;[39] and hither are they coming, to offer you service. + +_Ham._ He that plays the king shall be welcome, his majesty shall +have tribute of me; the adventurous knight shall use his foil and +target; the lover shall not sigh gratis; the humorous man shall end +his part in peace;[40] and the lady shall say her mind freely, or the +blank verse shall halt for't.[41]--What players are they? + +_Ros._ Even those you were wont to take such delight in, the +tragedians of the city. + +_Ham._ How chances it, they travel?[42] their residence, both in +reputation and profit, was better both ways. Do they hold the same +estimation they did when I was in the city? Are they so followed? + +_Ros._ No, indeed, they are not. + +_Ham._ It is not very strange; for my uncle is king of Denmark,[43] +and those that would make mouths at him[44] while my father lived, +give twenty, forty, fifty, an hundred ducats a-piece for his picture +in little.[45] There is something in this more than natural, if +philosophy could find it out. + + [_Flourish of trumpets without._] + +_Guil._ There are the players. + +_Ham._ Gentlemen, you are welcome to Elsinore. Your hands. You are +welcome: but my uncle-father and aunt-mother are deceived. + +_Guil._ In what, my dear lord? + +_Ham._ I am but mad north-north west: when the wind is southerly I +know a hawk from a hern-shaw.[46] + + [_Crosses_ R.] + +_Pol._ (_Without_, L.H.) Well be with you, gentlemen! + +_Ham._ (_Crosses_ C.) Hark you, Guildenstern;--and Rosencrantz: that +great baby you see there is not yet out of his swaddling-clouts. + +_Ros._ (R.) Haply he's the second time come to them; for they say an +old man is twice a child. 
+
+_Ham._ I will prophesy he comes to tell me of the players; mark
+it.--You say right, sir: o'Monday morning; 'twas then, indeed.
+
+  _Enter_ POLONIUS (L.H.)
+
+_Pol._ My lord, I have news to tell you.
+
+_Ham._ My lord, I have news to tell you. When Roscius was an actor in
+Rome,----
+
+_Pol._ The actors are come hither, my lord.
+
+_Ham._ Buz, buz![47]
+
+_Pol._ Upon my honour,----
+
+_Ham._ Then came each actor on his ass.[48]
+
+_Pol._ The best actors in the world, either for tragedy, comedy,
+history, pastoral, pastoral-comical, historical-pastoral, scene
+indivisible, or poem unlimited: Seneca cannot be too heavy, nor
+Plautus too light.[49] For the law of writ and the liberty, these are
+the only men.[50]
+
+_Ham._ _O, Jephthah, judge of Israel_,--what a treasure hadst thou!
+
+_Pol._ What a treasure had he, my lord?
+
+  _Ham._ Why,--_One fair daughter, and no more,
+  The which he loved passing well._
+
+_Pol._ Still harping on my daughter.
+
+  [_Aside._]
+
+_Ham._ Am I not i'the right, old Jephthah?
+
+_Pol._ If you call me Jephthah, my lord, I have a daughter that I
+love passing well.
+
+_Ham._ Nay, that follows not.
+
+_Pol._ What follows, then, my lord?
+
+_Ham._ Why, _As by lot, God wot_,[51] and then, you know, _It came to
+pass, As most like it was_,--The first row of the pious Chanson[52]
+will show you more; for look, my abridgment comes.[53]
+
+  _Enter Four or Five_ Players (L.H.)--POLONIUS _crosses behind_
+  HAMLET _to_ R.H.
+
+You are welcome, masters; welcome, all: O, old friend! Why, thy face
+is valanced[54] since I saw thee last; Com'st thou to beard me[55] in
+Denmark?--What, my young lady and mistress. By'r-lady, your ladyship
+is nearer to heaven than when I saw you last, by the altitude of a
+chopine.[56] You are welcome. We'll e'en to't like French
+falconers,[57] fly at anything we see: We'll have a speech straight:
+Come, give us a taste of your quality;[58] come, a passionate speech.
+
+_1st Play._ (L.H.) What speech, my lord?
+
+_Ham._ I heard thee speak me a speech once,--but it was never acted;
+or, if it was, not above once; for the play, I remember, pleased not
+the million; 'twas caviare to the general:[59] but it was an
+excellent play, well digested in the scenes, set down with as much
+modesty as cunning.[60] One speech in it I chiefly loved; 'twas
+Æneas' tale to Dido; and thereabout of it especially, where he speaks
+of Priam's slaughter: If it live in your memory, begin at this line;
+let me see, let me see;--
+
+_The rugged Pyrrhus, like the Hyrcanian beast_,--'tis not so: it
+begins with Pyrrhus:
+
+  _The rugged Pyrrhus,--he, whose sable arms_,
+  _Black as his purpose, did the night resemble_,
+  _Old grandsire Priam seeks._
+
+_Pol._ (R.) 'Fore Heaven, my lord, well spoken, with good accent and
+good discretion.
+
+_Ham._ (C.) So proceed you.
+
+  _1st Play._ (L.) _Anon he finds him
+  Striking too short at Greeks; his antique sword,
+  Rebellious to his arm, lies where it falls,
+  Repugnant to command: Unequal match'd,
+  Pyrrhus at Priam drives; in rage strikes wide;
+  But with the whiff and wind of his fell sword[61]
+  The unnerved father falls.
+ But, as we often see, against some storm, + A silence in the heavens, the rack[62] stand still, + The bold wind speechless, and the orb below + As hush as death; anon the dreadful thunder + Doth rend the region; So, after Pyrrhus' pause, + A roused vengeance sets him new a work; + And never did the Cyclops' hammers fall + On Mars's armour, forg'd for proof eterne, + With less remorse than Pyrrhus' bleeding sword + Now falls on Priam.-- + Out, out, thou fickle Fortune!_ + +_Pol._ (R.) This is too long. + +_Ham._ It shall to the barber's, with your beard.--Say on;--come to +Hecuba. + + _1st Play._ _But who, ah woe, had seen the mobled queen_-- + +_Ham._ The mobled queen?[63] + +_Pol._ That's good; mobled queen is good. + + _1st Play._ _Run barefoot up and down, threatening the flames; + A clout upon that head + Where late the diadem stood; and, for a robe, + A blanket, in the alarm of fear caught up; + Who this had seen, with tongue in venom steep'd, + 'Gainst fortune's state would treason have pronounced._ + +_Pol._ Look, whether he has not turned his colour, and has tears in's +eyes.--Prithee, no more. + +_Ham._ (C.) 'Tis well; I'll have thee speak out the rest of this +soon.--Good, my lord, will you see the players well bestowed? Do you +hear, let them be well used; for they are the abstract and brief +chronicles of the time: After your death you were better have a bad +epitaph than their ill report while you live. + +_Pol._ (R.) My lord, I will use them according to their desert. + +_Ham._ Much better: Use every man after his desert, and who shall +'scape whipping? Use them after your own honour and dignity: The less +they deserve, the more merit is in your bounty. Take them in. + + [_Crosses to_ R.H.] + +_Pol._ Come, sirs. + +_Ham._ Follow him, friends: we'll hear a play to-morrow. + + [_Exit_ POLONIUS _with some of the_ Players, L.H.] + +Old friend + + [_Crosses to_ C.] + +--My good friends + + [_To_ ROSENCRANTZ _and_ GUILDENSTERN.] + +I'll leave you till night: you are welcome to Elsinore--can you play +the murder of Gonzago? + + [_Exeunt_ ROSENCRANTZ _and_ GUILDENSTERN, R.H.] + +_1st Play._ Ay, my lord. + +_Ham._ We'll have it to-morrow night. You could, for a need, study a +speech of some dozen or sixteen lines, which I would insert +in't--could you not? + +_1st Play._ Ay, my lord. + +_Ham._ Very well.--Follow that lord; and look you mock him not. + +[_Exit_ Player, L.H.] + + Now I am alone. + O, what a rogue and peasant slave am I! + Is it not monstrous, that this player here, + But in a fiction, in a dream of passion, + Could force his soul so to his own conceit, + That, from her working, all his visage wann'd;[64] + Tears in his eyes, distraction in's aspect, + A broken voice, and his whole function suiting + With forms to his conceit?[65] And all for nothing! + For Hecuba? + What's Hecuba to him, or he to Hecuba, + That he should weep for her? What would he do, + Had he the motive and the cue[66] for passion + That I have? He would drown the stage with tears, + And cleave the general ear with horrid speech; + Make mad the guilty, and appal the free; + Confound the ignorant, and amaze, indeed, + The very faculties of eyes and ears. + Yet I, + A dull and muddy-mettled rascal, peak, + Like John a-dreams,[67] unpregnant of my cause,[68] + And can say nothing; no, not for a king, + Upon whose property and most dear life + A damn'd defeat was made.[69] Am I a coward? + Who calls me villain? breaks my pate across? + Plucks off my beard, and blows it in my face? 
+ Tweaks me by the nose? gives me the lie i'the throat, + As deep as to the lungs? Who does me this, + Ha? + Why, I should take it: for it cannot be + But I am pigeon-liver'd, and lack gall + To make oppression bitter;[70] or, ere this, + I should have fatted all the region kites + With this slave's offal: Bloody, bawdy villain! + Remorseless, treacherous, lecherous, kindless[71] villain! + O, vengeance! + Why, what an ass am I! This is most brave, + That I, the son of a dear father murder'd, + Prompted to my revenge by heaven and hell, + Must, like a scold, unpack my heart with words, + And fall a cursing, like a very drab, + A scullion! + Fye upon't! fye! About, my brains![72] I have heard + That guilty creatures, sitting at a play, + Have by the very cunning of the scene + Been struck so to the soul, that presently + They have proclaim'd their malefactions; + For murder, though it have no tongue, will speak + With most miraculous organ. I'll have these players + Play something like the murder of my father + Before mine uncle: I'll observe his looks; + I'll tent him to the quick:[73] if he do blench,[74] + I know my course. The spirit that I have seen + May be the devil: and the devil hath power + To assume a pleasing shape; yea, and, perhaps + Out of my weakness and my melancholy + (As he is very potent with such spirits), + Abuses me to damn me: I'll have good grounds + More relative than this:[75] The play's the thing + Wherein I'll catch the conscience of the king. + + [_Exit_, R.H.] + +END OF ACT SECOND. + + + + +Notes + +Act II + + + [Footnote II.1: _Polonius_,] Doctor Johnson describes Polonius as + "a man bred in courts, exercised in business, stored with + observation, confident in his knowledge, proud of his eloquence, + and declining into dotage. A man positive and confident, because + he knows his mind was once strong, and knows not that it is + become weak." The idea of dotage encroaching upon wisdom will + solve all the phenomena of the character of Polonius.] + + [Footnote II.2: _His bulk_,] Frame.] + + [Footnote II.3: _Ecstacy of love_;] _i.e._, madness of love. In + this sense the word is now obsolete.] + + [Footnote II.4: + + _This must be known; which being kept close, might move + More grief to hide than hate to utter love._] + + _i.e._, this must be made known to the king, for (being kept + secret) the hiding Hamlet's love might occasion more mischief to + us from him and the queen, than the uttering or revealing of it + will occasion hate and resentment from Hamlet. + + It was the custom of Shakespeare's age, to conclude acts and + scenes with a couplet, a custom which was continued for nearly a + century afterwards.] + + [Footnote II.5: _The understanding of himself_,] _i.e._, the just + estimate of himself.] + + [Footnote II.6: _Vouchsafe your rest_] Please to reside.] + + [Footnote II.7: _Of us_,] _i.e._, over us.] + + [Footnote II.8: _In the full bent_,] To the full stretch and + range--a term derived from archery.] + + [Footnote II.9: _The trail of policy_] The _trail_ is the + _course_ of an animal pursued by the scent.] + + [Footnote II.10: _Expostulate_] To _expostulate_ is to discuss, + to put the pros and cons, to answer demands upon the question. + _Expose_ is an old term of similar import.] + + [Footnote II.11: _Perpend._] _i.e._, reflect, consider + attentively.] 
+
+  [Footnote II.12: _Most beautified Ophelia_,] Heywood, in his
+  History of Edward VI., says "Katharine Parre, Queen Dowager to
+  King Henry VIII., was a woman _beautified_ with many excellent
+  virtues." The same expression is frequently used by other old
+  authors.]
+
+  [Footnote II.13: _In her excellent white bosom_,] The ladies, in
+  Shakespeare's time, wore pockets in the front of their stays.]
+
+  [Footnote II.14: _These, &c._] In our poet's time, the word
+  _these_ was usually added at the end of the superscription of
+  letters.]
+
+  [Footnote II.15: _I am ill at these numbers_;] No talent for
+  these rhymes.]
+
+  [Footnote II.16: _O most best_,] An ancient mode of expression.]
+
+  [Footnote II.17: _Whilst this machine is to him_,] Belongs to,
+  obey his impulse; so long as he is "a sensible warm motion," the
+  similar expression to "While my wits are my own."]
+
+  [Footnote II.18: _And more above_,] _i.e._, moreover, besides.]
+
+  [Footnote II.19: _His solicitings_,] _i.e._, his love-making, his
+  tender expressions.]
+
+  [Footnote II.20: _If I had played the desk, or table book_;] This
+  line may either mean _if I had conveyed intelligence between
+  them_, or, _known of their love, if I had locked up his secret in
+  my own breast, as closely as it were confined in a desk or table
+  book._]
+
+  [Footnote II.21: _Or given my heart a winking, mute and dumb_;]
+  _i.e._, connived at it.]
+
+  [Footnote II.22: _With idle sight_;] _i.e._, with indifference.]
+
+  [Footnote II.23: _Round to work_,] _i.e._, roundly, without
+  reserve.]
+
+  [Footnote II.24: _Which done, she took the fruits of my advice_;]
+  She took the _fruits_ of advice when she obeyed advice, the
+  advice was then made _fruitful._--JOHNSON.]
+
+  [Footnote II.25: _I'll board him presently._] Accost, address
+  him.]
+
+  [Footnote II.26: _You are a fishmonger._] This was an expression
+  better understood in Shakespeare's time than at present, and no
+  doubt was relished by the audience of the Globe Theatre as
+  applicable to the Papists, who in Queen Elizabeth's time were
+  esteemed enemies to the Government. Hence the proverbial phrase
+  of _He's an honest man and eats no fish_; to signify he's a
+  friend to the Government and a Protestant.]
+
+  [Footnote II.27: _For if the sun breed maggots in a dead dog,
+  being a god, kissing carrion,----Have you a daughter?_] _i.e._,
+  Hamlet having just remarked that honesty is very rare in the
+  world, adds, that since there is so little virtue, since
+  corruption abounds everywhere, and maggots are _bred_ by the sun,
+  which is a god, even in a dead dog, Polonius ought to take care
+  to prevent his daughter from walking in the sun, lest she should
+  prove _"a breeder of sinners;"_ for though _conception_
+  (understanding) in general be a blessing, yet as Ophelia might
+  chance to _conceive_ (to be pregnant), it might be a calamity.
+  Hamlet's abrupt question, _"Have you a daughter?"_ is evidently
+  intended to impress Polonius with the belief of the Prince's
+  madness.--MALONE.]
+
+  [Footnote II.28: _The satirical rogue_] Hamlet alludes to
+  Juvenal, who in his 10th Satire, describes the evils of long
+  life.]
+
+  [Footnote II.29: _How pregnant his replies_] Big with meaning.]
+
+  [Footnote II.30: _Beaten way of friendship_,] Plain track, open
+  and unceremonious course.]
+
+  [Footnote II.31: _Rights of our fellowship and consonancy of our
+  youth_,] Habits of familiar intercourse and correspondent years.]
+ + [Footnote II.32: _A better proposer_] An advocate of more address + in shaping his aims, who could make a stronger appeal.] + + [Footnote II.33: _Even_] Without inclination any way.] + + [Footnote II.34: _Nay, then, I have an eye of you._] _i.e._, I + have a glimpse of your meaning. Hamlet's penetration having shown + him that his two friends are set over him as spies.] + + [Footnote II.35: _So shall my anticipation prevent your + discovery, and your secrecy to the king and queen moult no + feather._] Be beforehand with your discovery, and the plume and + gloss of your secret pledge be in no feather shed or tarnished.] + + [Footnote II.36: _Express_] According to pattern, justly and + perfectly modelled.] + + [Footnote II.37: _Paragon_] Model of perfection.] + + [Footnote II.38: _Lenten entertainment_] _i.e._, sparing, like + the entertainments given in Lent.] + + [Footnote II.39: _We coted them on the way_;] To cote, is to pass + by, to pass the side of another. It appears to be a word of + French origin, and was a common sporting term in Shakespeare's + time.] + + [Footnote II.40: _The humorous man shall end his part in peace_;] + The fretful or capricious man shall vent the whole of his spleen + undisturbed.] + + [Footnote II.41: _The lady shall say her mind freely, or the + blank verse shall halt for't._] _i.e._, the lady shall mar the + measure of the verse, rather than not express herself freely and + fully.] + + [Footnote II.42: _Travel?_] Become strollers.] + + [Footnote II.43: _It is not very strange; for my uncle is king of + Denmark_;] This is a reflection on the mutability of fortune, and + the variableness of man's mind.] + + [Footnote II.44: _Make mouths at him_] _i.e._, deride him by + antic gestures and mockery.] + + [Footnote II.45: _In little._] In miniature.] + + [Footnote II.46: _I know a hawk from a hern-shaw._] A hernshaw is + a heron or hern. _To know a hawk from a hernshaw_ is an ancient + proverb, sometimes corrupted into _handsaw_. Spencer quotes the + proverb, as meaning, _wise enough to know the hawk from its + game._] + + [Footnote II.47: _Buz, buz!_] Sir William Blackstone states that + _buz_ used to be an interjection at Oxford when any one began a + story that was generally known before.] + + [Footnote II.48: _Then came each actor on his ass._] This seems + to be a line of a ballad.] + + [Footnote II.49: _Seneca cannot be too heavy, nor Plautus too + light._] An English translation of the tragedies of Seneca was + published in 1581, and one comedy of Plautus, viz., the + Menœchme, in 1595.] + + [Footnote II.50: _For the law of writ and the liberty, these are + the only men._] The probable meaning of this passage is,--_For + the observance of the rules of the Drama, while they take such + liberties, as are allowable, they are the only men_--_writ_ is an + old word for _writing_.] + + [Footnote II.51: _As by lot, God wot_,] There was an old ballad + entitled the song of Jephthah, from which these lines are + probably quotations. The story of Jephthah was also one of the + favourite subjects of ancient tapestry.] + + [Footnote II.52: _The first row of the pious Chanson_] This + expression does not appear to be very well understood. Steevens + tells us that the _pious chansons_ were a kind of _Christmas + carols_, containing some scriptural history thrown into loose + rhymes, and sung about the streets. The _first row_ appears to + mean the _first division_ of one of these.] 
+ + [Footnote II.53: _My abridgment comes._] Hamlet alludes to the + players, whose approach will shorten his talk.] + + [Footnote II.54: _Thy face is valanced_] _i.e._, fringed with a + beard. The valance is the fringes or drapery hanging round the + tester of a bed.] + + [Footnote II.55: _Com'st thou to beard me_] To _beard_ anciently + meant to set _at defiance_. Hamlet having just told the player + that his face is valanced, is playing upon the word _beard_.] + + [Footnote II.56: _By the altitude of a chopine._] A chioppine is + a high shoe, or rather clog, worn by the Italians. Venice was + more famous for them than any other place. They are described as + having been made of wood covered with coloured leather, and + sometimes _even half a yard high_, their altitude being + proportioned to the rank of the lady, so that they could not walk + without being supported.] + + [Footnote II.57: _Like French falconers_,] The French seem to + have been the first and noblest falconers in the western part of + Europe. The French king sent over his falconers to show that + sport to King James the First.--_See Weldon's Court of King + James._] + + [Footnote II.58: _Quality_;] Qualifications, faculty.] + + [Footnote II.59: _Caviare to the general_;] Caviare is the spawn + of fish pickled, salted, and dried. It is imported from Russia, + and was considered in the time of Shakespeare a new and + fashionable luxury, not obtained or relished by the vulgar, and + therefore used by him to signify anything above their + comprehension--general is here used for the people.] + + [Footnote II.60: _As much modesty as cunning._] As much propriety + and decorum as skill.] + + [Footnote II.61: _Falls with the whiff and wind of his fell + sword_] Our author employs the same image in almost the same + phrase: + + "The Grecians _fall + Even in the fan and wind of your fair sword._" + + _Tr. & Cress. V. 3. Tr._] + + [Footnote II.62: _The rack_] The clouds or congregated vapour.] + + [Footnote II.63: _The mobled queen?_] Mobled is veiled, muffled, + disguised.] + + [Footnote II.64: _All his visage wann'd_;] _i.e._, turned pale or + wan.] + + [Footnote II.65: _His whole functions suiting with forms to his + conceit?_] _i.e._, his powers and faculties--the whole energies + of his soul and body giving material forms to his passion, such + as tone of voice, expression of face, requisite action, in + accordance with the ideas that floated in his conceit or + imagination.] + + [Footnote II.66: _The cue_] The point--the direction.] + + [Footnote II.67: _Like John a-dreams_,] Or dreaming John, a name + apparently coined to suit a dreaming, stupid person; he seems to + have been a well-known character.] + + [Footnote II.68: _Unpregnant of my cause_,] _i.e._, not quickened + with a new desire of vengeance; not teeming with revenge.] + + [Footnote II.69: _Defeat was made._] Overthrow.] + + [Footnote II.70: _Lack gall to make oppression bitter_;] _i.e._, + lack gall to make me feel the bitterness of oppression.] + + [Footnote II.71: _Kindless_] Unnatural.] + + [Footnote II.72: _About, my brains!_] Wits to work.] + + [Footnote II.73: _I'll tent him to the quick:_] _i.e._, probe + him--search his wounds.] + + [Footnote II.74: _Blench_,] Shrink, start aside.] + + [Footnote II.75: _More relative than this:_] Directly + applicable.] + + + + +ACT III. + +SCENE I.--A ROOM IN THE CASTLE. + + + _Three chairs on_ L.H., _one on_ R. + + _Enter_ KING _and_ QUEEN, _preceded by_ POLONIUS. 
OPHELIA,
+  ROSENCRANTZ, _and_ GUILDENSTERN, _following_ (R.H.)
+
+  _King._ (C.) And can you, by no drift of conference,
+  Get from him why he puts on this confusion?
+
+  _Ros._ (R.) He does confess he feels himself distracted;
+  But from what cause he will by no means speak.
+
+  _Guil._ (R.) Nor do we find him forward[1] to be sounded,
+  But, with a crafty madness, keeps aloof,
+  When we would bring him on to some confession
+  Of his true state.
+
+  _Queen._ (R.C.) Did you assay him[2]
+  To any pastime?
+
+  _Ros._ Madam, it so fell out, that certain players
+  We o'er-raught on the way:[3] of these we told him;
+  And there did seem in him a kind of joy
+  To hear of it: They are about the court;
+  And, as I think, they have already order
+  This night to play before him.
+
+  _Pol._ 'Tis most true:
+  And he beseech'd me to entreat your majesties
+  To hear and see the matter.
+
+  _King._ With all my heart; and it doth much content me
+  To hear him so inclin'd.
+  Good gentlemen, give him a further edge,
+  And drive his purpose on to these delights.
+
+  _Ros._ We shall, my lord.
+
+  [_Exeunt_ ROSENCRANTZ _and_ GUILDENSTERN, R.H.]
+
+  _King._ Sweet Gertrude, leave us too;
+  For we have closely sent[4] for Hamlet hither,
+  That he, as 'twere by accident, may here
+  Affront Ophelia:[5]
+  Her father and myself (lawful espials[6]),
+  Will so bestow ourselves, that, seeing, unseen,
+  We may of their encounter frankly judge;
+  And gather by him, as he is behaved,
+  If't be the affliction of his love or no
+  That thus he suffers for.
+
+  _Queen._ (R.) I shall obey you:
+  And for your part, Ophelia,
+
+  [OPHELIA _comes down_ L.H.]
+
+  I do wish
+  That your good beauties be the happy cause
+  Of Hamlet's wildness: so shall I hope your virtues
+  Will bring him to his wonted way again,
+  To both your honours.
+
+  _Oph._ Madam, I wish it may.
+
+  [_Exit_ QUEEN, R.H.]
+
+  _Pol._ Ophelia, walk you here. Gracious, so please you,
+  We will bestow ourselves. Read on this book;
+
+  [_To_ OPHELIA.]
+
+  That show of such an exercise may colour
+  Your loneliness. We are oft to blame in this,--
+  'Tis too much prov'd,[7] that, with devotion's visage
+  And pious action, we do sugar o'er
+  The devil himself.
+
+  _King._ O, 'tis too true! how smart
+  A lash that speech doth give my conscience!
+
+  [_Aside._]
+
+  _Pol._ I hear him coming: let's withdraw, my lord.
+
+  [_Exeunt_ KING _and_ POLONIUS, R.H.2 E., _and_
+  OPHELIA, R.H.U.E.]
+
+  _Enter_ HAMLET (L.H.)
+
+  _Ham._ To be, or not to be, that is the question:[8]
+  Whether 'tis nobler in the mind to suffer
+  The slings and arrows of outrageous fortune,
+  Or to take arms against a sea of troubles,[9]
+  And, by opposing end them?--To die,--to sleep,
+  No more;--and by a sleep, to say we end
+  The heart-ache, and the thousand natural shocks
+  That flesh is heir to: 'tis a consummation
+  Devoutly to be wished. To die,--to sleep,--
+  To sleep!
perchance to dream: ay, there's the rub; + For in that sleep of death what dreams may come, + When we have shuffled off this mortal coil,[10] + Must give us pause:[11] There's the respect[12] + That makes calamity of so long life; + For who would bear the whips and scorns of time,[13] + The oppressor's wrong, the proud man's contumely,[14] + The pangs of despised love, the law's delay, + The insolence of office, and the spurns + That patient merit of the unworthy takes, + When he himself might his quietus make[15] + With a bare bodkin?[16] Who would fardels bear,[17] + To groan and sweat under a weary life, + But that the dread of something after death, + The undiscovered country, from whose bourn[18] + No traveller returns,[19] puzzles the will, + And makes us rather bear those ills we have + Than fly to others that we know not of? + Thus, conscience does make cowards of us all;[20] + And thus the native hue of resolution + Is sicklied o'er with the pale cast of thought; + And enterprises of great pith and moment,[21] + With this regard, their currents turn away, + And lose the name of action.[22]-- + + [OPHELIA _returns._] + + --Soft you now![23] + The fair Ophelia:--Nymph, in thy orisons[24] + Be all my sins remember'd. + + _Oph._ (R.C.) Good my lord, + How does your honour for this many a day? + + _Ham._ (L.C.) I humbly thank you; well. + + _Oph._ My lord, I have remembrances of yours, + That I have longèd long to re-deliver; + I pray you, now receive them. + + _Ham._ No, not I; + I never gave you aught. + + _Oph._ My honour'd lord, you know right well you did; + And, with them, words of so sweet breath compos'd + As made the things more rich: their perfume lost, + Take these again; for to the noble mind + Rich gifts wax poor when givers prove unkind. + There, my lord. + +_Ham._ Ha, ha! are you honest? + +_Oph._ My lord? + +_Ham._ Are you fair? + +_Oph._ What means your lordship? + +_Ham._ That if you be honest and fair, your honesty should admit no +discourse to your beauty.[25] + +_Oph._ Could beauty, my lord, have better commerce than with honesty? + +_Ham._ Ay, truly; for the power of beauty will sooner transform +honesty from what it is to a bawd, than the force of honesty can +translate beauty into his likeness:[26] this was some time a paradox, +but now the time gives it proof. I did love you once. + +_Oph._ Indeed, my lord, you made me believe so. + +_Ham._ You should not have believed me; for virtue cannot so +inoculate our old stock, but we shall relish of it:[27] I loved you +not. + +_Oph._ I was the more deceived. + +_Ham._ Get thee to a nunnery: Why wouldst thou be a breeder of +sinners? I am myself indifferent honest; but yet I could accuse me of +such things, that it were better my mother had not borne me: I am +very proud, revengeful, ambitious; with more offences at my beck[28] +than I have thoughts to put them in,[29] imagination to give them +shape, or time to act them in. What should such fellows as I do, +crawling between earth and heaven? We are arrant knaves, all; +believe none of us. Go thy ways to a nunnery. Where's your father? + +_Oph._ At home, my lord. + +_Ham._ Let the doors be shut upon him, that he may play the fool +nowhere but in's own house. Farewell. + +_Oph._ O, help him, you sweet heavens! + +_Ham._ If thou dost marry, I'll give thee this plague for thy dowry. +Be thou as chaste as ice, as pure as snow, thou shalt not escape +calumny. Get thee to a nunnery; farewell. 
Or, if thou wilt needs +marry, marry a fool; for wise men know well enough what monsters you +make of them. To a nunnery, go; go; go. + +_Oph._ Heavenly powers, restore him! + +_Ham._ I have heard of your paintings[30] too, well enough; Heaven +hath given you one face, and you make yourselves another:[31] you +jig, you amble, and you lisp,[32] and nickname Heaven's creatures, +and make your wantonness your ignorance.[33] Go to, I'll no more +of't; it hath made me mad. + + [HAMLET _crosses to_ R.H.] + +I say, we will have no more marriages: those that are married +already, all but one,[34] shall live; the rest shall keep as they +are. To a nunnery, go. + + [_Exit_ HAMLET, R.H.[35]] + + _Oph._ (L.) O, what a noble mind is here o'erthrown! + The expectancy and rose of the fair state,[36] + The glass of fashion[37] and the mould of form,[38] + The observ'd of all observers, quite, quite down! + And I, of ladies most deject and wretched, + That suck'd the honey of his musick vows,[39] + Now see that noble and most sovereign reason, + Like sweet bells jangled, out of tune and harsh: + O, woe is me, + To have seen what I have seen, see what I see! + + [_Exit_ OPHELIA, L.H.] + + _Re-enter_ KING _and_ POLONIUS. + + _King._ Love! his affections do not that way tend; + Nor what he spake, though it lack'd form a little, + Was not like madness. There's something in his soul, + O'er which his melancholy sits on brood; + He shall with speed to England, + For the demand of our neglected tribute: + Haply, the seas, and countries different, + With variable objects, shall expel + This something-settled matter in his heart; + Whereon his brains still beating puts him thus + From fashion of himself. What think you on't? + + _Pol._ It shall do well: But yet I do believe + The origin and commencement of his grief + Sprung from neglected love. My lord, do as you please; + But, if you hold it fit, after the play, + Let his queen mother all alone entreat him + To show his grief: let her be round with him;[40] + And I'll be placed, so please you, in the ear + Of all their conference. If she find him not,[41] + To England send him; or confine him where + Your wisdom best shall think. + + _King._ It shall be so: + Madness in great ones must not unwatch'd go. + + [_Exeunt_, L.H.] + + _Enter_ HAMLET _and a_ Player (R.H.) + +_Ham._ (C.) Speak the speech, I pray you, as I pronounced it to you, +trippingly on the tongue: but if you mouth it, as many of our players +do, I had as lief[42] the town-crier spoke my lines. Nor do not saw +the air too much with your hands thus;[43] but use all gently: for in +the very torrent, tempest, and (as I may say) whirlwind of your +passion, you must acquire and beget a temperance that may give it +smoothness. O, it offends me to the soul, to hear a robustious +perrywig-pated fellow[44] tear a passion to tatters, to very rags, to +split the ears of the groundlings,[45] who, for the most part, are +capable of nothing but inexplicable dumb shows and noise: I would +have such a fellow whipped for o'erdoing Termagant;[46] it +out-herods Herod:[47] Pray you, avoid it. + +_1st Play._ (R.) I warrant your honour. 
+ +_Ham._ Be not too tame neither, but let your own discretion be your +tutor; suit the action to the word, the word to the action; with this +special observance, that you o'erstep not the modesty of nature: for +any thing so overdone is from the purpose of playing, whose end, both +at the first and now, was and is, to hold, as 'twere, the mirror up +to nature; to show virtue her own feature, scorn her own image, and +the very age and body of the time its form and pressure.[48] Now, +this overdone, or come tardy off,[49] though it make the unskilful +laugh, cannot but make the judicious grieve; the censure of which +one[50] must, in your allowance,[51] o'erweigh a whole theatre of +others. O, there be players that I have seen play, and heard others +praise, and that highly, not to speak it profanely,[52] that, neither +having the accent of christians, nor the gait of christian, pagan, +nor man, have so strutted and bellowed, that I have thought some of +nature's journeymen had made men, and not made them well, they +imitated humanity so abominably. + + [_Crosses to_ R.] + +_1st Play._ (L.) I hope we have reformed that indifferently[53] with +us. + +_Ham._ O, reform it altogether. And let those that play your clowns +speak no more than is set down for them:[54] for there be of them +that will themselves laugh, to set on some quantity of barren +spectators[55] to laugh too; though, in the mean time, some necessary +question[56] of the play be then to be considered: that's villainous, +and shows a most pitiful ambition in the fool that uses it. Go, make +you ready. + + [_Exit_ Player, L.H.] + +_Ham._ What, ho, Horatio! + + _Enter_ HORATIO (R.H.) + +_Hor._ Here, sweet lord, at your service. + + _Ham._ Horatio, thou art e'en as just a man + As e'er my conversation cop'd withal.[57] + + _Hor._ O, my dear lord. + + _Ham._ Nay, do not think I flatter; + For what advancement may I hope from thee, + That no revenue hast, but thy good spirits, + To feed and clothe thee? Why should the poor be flatter'd? + No, let the candied tongue lick absurd pomp; + And crook the pregnant hinges of the knee,[58] + Where thrift may follow fawning. Dost thou hear? + Since my dear soul[59] was mistress of her choice, + And could of men distinguish, her election + Hath seal'd thee for herself: for thou hast been + As one, in suffering all, that suffers nothing; + A man that fortune's buffets and rewards + Has ta'en with equal thanks: and bless'd are those + Whose blood and judgment[60] are so well co-mingled, + That they are not a pipe for fortune's finger + To sound what stop she please. Give me that man + That is not passion's slave, and I will wear him + In my heart's core, ay, in my heart of heart, + As I do thee.--Something too much of this.-- + There is a play to-night before the king; + One scene of it comes near the circumstance + Which I have told thee of my father's death: + I pr'ythee when thou seest that act a-foot, + Even with the very comment of thy soul[61] + Observe my uncle: if his occulted guilt[62] + Do not itself unkennel in one speech, + It is a damned ghost that we have seen; + And my imaginations are as foul + As Vulcan's stithy.[63] Give him heedful note: + For I mine eyes will rivet to his face; + And, after, we will both our judgments join + In censure of his seeming.[64] + + [HORATIO _goes off_, U.E.L.H.] + + _March. Enter_ KING _and_ QUEEN, _preceded by_ POLONIUS, OPHELIA, + HORATIO, ROSENCRANTZ, GUILDENSTERN, Lords, Ladies, _and_ Attendants. + KING _and_ QUEEN _sit_ (L.H.); OPHELIA (R.H.) 
+ +_King._ (L.) How fares our cousin Hamlet? + +_Ham._ (C.) Excellent, i'faith; of the cameleon's dish: I eat the +air, promise-crammed: you cannot feed capons so. + +_King._ I have nothing with this answer, Hamlet; these words are not +mine.[65] + +_Ham._ No, nor mine, now.[66] My lord,--you played once in the +university, you say?[67] + + [_To_ POLONIUS, L.] + +_Pol._ (L.C.) That did I, my lord; and was accounted a good actor. + +_Ham._ (C.) And what did you enact? + +_Pol._ I did enact Julius Cæsar:[68] I was killed i'the Capitol; +Brutus killed me. + +_Ham._ It was a brute part of him to kill so capital a calf +there.--Be the players ready? + +_Ros._ Ay, my lord; they stay upon your patience.[69] + +_Queen._ Come hither, my dear Hamlet, sit by me. + + [_Pointing to a chair by her side._] + +_Ham._ No, good mother, here's metal more attractive. + +_Pol._ O, ho! do you mark that? + + [_Aside to the_ KING.] + +_Ham._ Lady, shall I lie in your lap? + + [_Lying down at_ OPHELIA'S _feet._][70] + +_Oph._ (R.) You are merry, my lord. + +_Ham._ O, your only jig-maker.[71] What should a man do but be merry? +for, look you, how cheerfully my mother looks, and my father died +within these two hours. + +_Oph._ Nay, 'tis twice two months, my lord. + +_Ham._ So long? Nay, then, let the devil wear black, for I'll have a +suit of sables.[72] O heavens! die two months ago, and not forgotten +yet? Then there's hope a great man's memory may outlive his life half +a year: But, by'r-lady, he must build churches, then.[73] + +_Oph._ What means the play, my lord? + +_Ham._ Miching mallecho;[74] it means mischief. + +_Oph._ But what is the argument of the play? + + _Enter a_ Player _as_ Prologue (L.H.) _on a raised stage._ + +_Ham._ We shall know by this fellow. + + _Pro._ _For us, and for our tragedy, + Here stooping to your clemency, + We beg your hearing patiently._ + + [_Exit_, L.H.] + +_Ham._ Is this a prologue, or the posy of a ring?[75] + +_Oph._ 'Tis brief, my lord. + +_Ham._ As woman's love. + +_Enter a_ KING _and a_ QUEEN (L.H.) _on raised stage._ + + _P. King._ (R.) Full thirty times hath Phoebus' cart[76] gone round + Neptune's salt wash and Tellus' orbèd ground,[77] + Since love our hearts, and Hymen did our hands, + Unite commutual in most sacred bands. + + _P. Queen._ (L.) So many journeys may the sun and moon + Make us again count o'er ere love be done! + But, woe is me, you are so sick of late, + So far from cheer and from your former state, + That I distrust you. Yet, though I distrust, + Discomfort you, my lord, it nothing must. + + _P. King._ 'Faith, I must leave thee, love, and shortly too; + My operant powers their functions leave to do:[78] + And thou shalt live in this fair world behind, + Honour'd, belov'd; and, haply one as kind + For husband shalt thou---- + + _P. Queen._ O, confound the rest! + Such love must needs be treason in my breast: + In second husband let me be accurst! + None wed the second but who kill'd the first. + +_Ham._ That's wormwood. + + [_Aside to_ HORATIO, R.] + + _P. King._ I do believe you think what now you speak; + But what we do determine oft we break.[79] + So think you thou wilt no second husband wed; + But die thy thoughts when thy first lord is dead. + + _P. Queen._ Nor earth to me give food, nor heaven light! + Sport and repose lock from me day and night! + Both here, and hence, pursue me lasting strife, + If, once a widow, ever I be wife! + + _P. King._ 'Tis deeply sworn. + +_Ham._ If she should break it now!-- + + [_To_ OPHELIA.] + + _P. 
King._ Sweet, leave me here awhile; + My spirits grow dull, and fain I would beguile + The tedious day with sleep. + + [_Reposes on a bank_, R., _and sleeps._] + + _P. Queen._ Sleep rock thy brain; + And never come mischance between us twain! + + [_Exit_, L.H.] + +_Ham._ Madam, how like you this play? + +_Queen._ The lady doth protest too much, methinks. + +_Ham._ O, but she'll keep her word. + +_King._ Have you heard the argument?[80] Is there no offence in't? + +_Ham._ No, no, they do but jest, poison in jest; no offence i'the +world. + +_King._ What do you call the play? + +_Ham._ The mouse-trap.[81] Marry, how? Tropically.[82] This play is +the image of a murder[83] done in Vienna: Gonzago is the Duke's name; +his wife, Baptista: you shall see anon;--'tis a knavish piece of +work: but what of that? your majesty, and we that have free souls, it +touches us not: Let the galled jade wince,[84] our withers[85] are +unwrung. + + _Enter_ LUCIANUS (L.H.) + +This is one Lucianus, nephew to the king. + +_Oph._ You are as good as a chorus,[86] my lord. + +_Ham._ I could interpret between you and your love, if I could see +the puppets dallying.[87] Begin, murderer; leave thy damnable faces, +and begin. Come:-- + + ----The croaking raven + Doth bellow for revenge.[88] + + _Luc._ Thoughts black, hands apt, drugs fit, and time agreeing; + Confederate season, else no creature seeing; + Thou mixture rank, of midnight weeds[89] collected, + With Hecat's ban thrice blasted, thrice infected, + Thy natural magick and dire property, + On wholesome life usurp[90] immediately. + [_Pours the poison into the Sleeper's Ears._] + +_Ham._ He poisons him i' the garden for his estate. His name's +Gonzago: the story is extant, and written in very choice Italian: You +shall see anon how the murderer gets the love of Gonzago's wife. + +_King._ Give me some light: away! + +_All._ Lights, lights, lights! + + [_Exeunt all_, R. _and_ L., _but_ HAMLET _and_ HORATIO.] + +_Ham._ Why, let the strucken deer go weep,[91] + The hart ungallèd play; + For some must watch, while some must sleep: + So runs the world away.-- + +O, good Horatio, I'll take the ghost's word for a thousand pounds. +Didst perceive? + +_Hor._ (R.) Very well, my lord. + +_Ham._ (C.) Upon the talk of the poisoning.-- + +_Hor._ I did very well note him. + +_Ham._ Ah, ah! come, some musick! come, the recorders! + + [_Exit_ HORATIO, R.H.] + + _Enter_ ROSENCRANTZ _and_ GUILDENSTERN (L.H.) HAMLET _seats + himself in the chair_ (R.) + +_Guil._ (L.C.) Good my lord, vouchsafe me a word with you. + +_Ham._ Sir, a whole history. + +_Guil._ The king, sir,---- + +_Ham._ Ay, sir, what of him? + +_Guil._ Is, in his retirement, marvellous distempered.[92] + +_Ham._ With drink, sir? + +_Guil._ No, my lord, with choler. + +_Ham._ Your wisdom should show itself more rich to signify this to +the doctor; for, for me to put him to his purgation would perhaps +plunge him into more choler. + +_Guil._ Good my lord, put your discourse into some frame, and start +not so wildly from my affair. + +_Ham._ I am tame, sir:--pronounce. + +_Guil._ The queen, your mother, in most great affliction of spirit, +hath sent me to you. + +_Ham._ You are welcome. + +_Guil._ Nay, good my lord, this courtesy is not of the right breed. +If it shall please you to make me a wholesome answer, I will do your +mother's commandment: if not, your pardon and my return shall be the +end of my business. + +_Ham._ Sir, I cannot. + +_Guil._ What, my lord? + +_Ham._ Make you a wholesome answer; my wit's diseased! 
But, sir, such +answer as I can make, you shall command: or rather as you say, my +mother: therefore no more, but to the matter: My mother, you say,-- + +_Ros._ (_Crosses to_ C.) Then thus she says: Your behaviour hath +struck her into amazement and admiration.[93] + +_Ham._ O wonderful son, that can so astonish a mother! But is there +no sequel at the heels of this mother's admiration?--impart. + +_Ros._ She desires to speak with you in her closet, ere you go to +bed. + +_Ham._ We shall obey, were she ten times our mother. Have you any +further trade with us?[94] + +_Ros._ My lord, you once did love me. + +_Ham._ And do still, by these pickers and stealers.[95] + + [_Rises and comes forward_, C.] + +_Ros._ (R.) Good my lord, what is your cause of distemper? you do, +surely, bar the door of your own liberty, if you deny your griefs to +your friend.[96] + +_Ham._ Sir, I lack advancement. + +_Ros._ How can that be, when you have the voice of the king himself +for your succession in Denmark?[97] + +_Ham._ Ay, sir, but _While the grass grows_,--the proverb is +something musty.[98] + + _Enter_ HORATIO _and_ Musicians (R.H.) + +O, the recorders:[99]--let me see one.--So; withdraw with you:-- + + [_Exeunt_ HORATIO _and_ Musicians R.H. GUILDENSTERN, + _after speaking privately to_ ROSENCRANTZ, _crosses + behind_ HAMLET _to_ R.H.] + +Why do you go about to recover the wind of me,[100] as if you would +drive me into a toil?[101] + +_Guil._ (R.) O, my lord, if my duty be too bold, my love is too +unmannerly.[102] + +_Ham._ (C.) I do not well understand that. Will you play upon this +pipe? + +_Guil._ My lord, I cannot. + +_Ham._ I pray you. + +_Guil._ Believe me, I cannot. + +_Ham._ I do beseech you. + +_Ros._ (L.) I know no touch of it, my lord. + +_Ham._ 'Tis as easy as lying: govern these ventages with your fingers +and thumb, give it breath with your mouth, and it will discourse most +eloquent music.[103] Look you, these are the stops. + +_Guil._ But these cannot I command to any utterance of harmony; I +have not the skill. + +_Ham._ Why, look you now, how unworthy a thing you make of me! You +would play upon me; you would seem to know my stops; you would pluck +out the heart of my mystery; you would sound me from my lowest note +to the top of my compass: and there is much music, excellent voice, +in this little organ; yet cannot you make it speak. 'Sdeath, do you +think I am easier to be played on than a pipe? Call me what +instrument you will, though you can fret me, you cannot play upon +me.[104] + + [_Crosses to_ L.H.] + + _Enter_ POLONIUS (R.H.) + +_Pol._ (R.) My lord, the queen would speak with you, and presently. + +_Ham._ (C.) Do you see yonder cloud that's almost in shape of a +camel? + +_Pol._ By the mass, and 'tis like a camel, indeed. + +_Ham._ Methinks it is like a weasel. + +_Pol._ It is backed like a weasel. + +_Ham._ Or like a whale? + +_Pol._ Very like a whale. + +_Ham._ Then will I come to my mother by and by. They fool me to the +top of my bent.[105] I will come by and by. + +_Pol._ I will say so. + +_Ham._ By and by is easily said. + + [_Exit_ POLONIUS, R.H. + +Leave me, friends. + + [_Exeunt_ ROSENCRANTZ _and_ GUILDENSTERN, R.H.] + + 'Tis now the very witching time of night, + When churchyards yawn, and hell itself breathes out + Contagion to this world: Now could I drink hot blood, + And do such bitter business[106] as the day + Would quake to look on. Soft! now to my mother. 
+ O, heart, lose not thy nature; let not ever + The soul of Nero enter this firm bosom: + Let me be cruel, not unnatural; + I will speak daggers to her, but use none. + + [_Exit._] + + +SCENE II.--A ROOM IN THE SAME. + +_Enter_ KING, ROSENCRANTZ _and_ GUILDENSTERN (R.H.) + + _King._ I like him not; nor stands it safe with us[107] + To let his madness range. Therefore prepare you; + I your commission will forthwith despatch, + And he to England shall along with you: + Arm you, I pray you, to this speedy voyage; + For we will fetters put upon this fear,[108] + Which now goes too free-booted. + + _Ros._ } + } We will haste us. + _Guil._} + + [_Cross behind the_ KING, _and exeunt_ ROSENCRANTZ _and_ + GUILDENSTERN, L.H.] + + _Enter_ POLONIUS (R.H.) + + _Pol._ My lord, he's going to his mother's closet: + Behind the arras I'll convey myself,[109] + To hear the process;[110] I'll warrant, she'll tax him home: + And, as you said, and wisely was it said, + 'Tis meet that some more audience than a mother, + Since nature makes them partial, should o'erhear + The speech of vantage.[111] Fare you well, my liege: + + [POLONIUS _crosses to_ L.H.] + + I'll call upon you ere you go to bed, + And tell you what I know. + + _King._ Thanks, dear my lord. + + [_Exeunt_ POLONIUS, L.H., _and_ KING, R.H.] + + +SCENE III.--THE QUEEN'S CHAMBER. + + _Enter_ QUEEN _and_ POLONIUS (L.H.) + + _Pol._ He will come straight. Look, you lay home to him:[112] + Tell him his pranks have been too broad[113] to bear with, + And that your grace hath screen'd and stood between + Much heat and him. I'll sconce me even here.[114] + Pray you, be round with him. + + _Queen._ I'll warrant you; + Fear me not:--withdraw, I hear him coming. + + [POLONIUS _hides himself_, L.H.U.E. + + _Enter_ HAMLET (R.) + + _Ham._ (R.C.) Now, mother, what's the matter? + + _Queen._ (L.C.) Hamlet, thou hast thy father much offended. + + _Ham._ Mother, you have my father much offended. + + _Queen._ Come, come, you answer with an idle tongue. + + _Ham._ Go, go, you question with a wicked tongue. + + _Queen._ Why, how now, Hamlet! + + _Ham._ What's the matter now? + + _Queen._ Have you forgot me? + + _Ham._ No, by the rood,[115] not so: + You are the queen, your husband's brother's wife; + And--would it were not so!--you are my mother. + + _Queen._ Nay, then, I'll set those to you that can speak. + + _Ham._ Come, come, and sit you down; you shall not budge; + You go not till I set you up a glass + Where you may see the inmost part of you. + + _Queen._ What wilt thou do? thou wilt not murder me? + Help, help, ho! + + _Pol._ + + (_Behind._) + + What, ho! help! + + _Ham._ How now! a rat?[116] + + [_Draws._] + + Dead, for a ducat, dead! + + [HAMLET _rushes off behind the arras._] + + _Pol._ (_Behind._) O, I am slain! + + [_Falls and dies._] + + _Queen._ O me, what hast thou done? + + _Ham._ + + (_Returning._) + + Nay, I know not: + Is it the king? + + _Queen._ O, what a rash and bloody deed is this! + + _Ham._ A bloody deed!--almost as bad, good mother, + As kill a king, and marry with his brother. + + _Queen._ As kill a king! + + _Ham._ Ay, lady, 'twas my word. + + [_Goes off behind the arras, and returns._] + + Thou wretched, rash, intruding fool, farewell! + + [_To the dead body of_ POLONIUS, _behind the arras_.] + + I took thee for thy better. + Leave wringing of your hands: Peace; sit you down, + + [_To the_ QUEEN.] 
+
+ And let me wring your heart: for so I shall,
+ If it be made of penetrable stuff;
+ If damnèd custom have not brazed it so,[117]
+ That it be proof and bulwark against sense.[118]
+
+ _Queen._
+
+ (_Sits_ R.C.)
+
+ What have I done, that thou dar'st wag thy tongue
+ In noise so rude against me?
+
+ _Ham._
+
+ (_Seated_ L.C.)
+
+ Such an act,
+ That blurs the grace and blush of modesty;
+ Calls virtue, hypocrite; takes off the rose
+ From the fair forehead of an innocent love,
+ And sets a blister there;[119] makes marriage vows
+ As false as dicer's oaths: O, such a deed
+ As from the body of contraction plucks
+ The very soul;[120] and sweet religion makes
+ A rhapsody of words.--
+ Ah, me, that act!
+
+ _Queen._ Ah me, what act?
+
+ _Ham._ Look here, upon this picture, and on this,
+ The counterfeit presentment[121] of two brothers.
+ See, what a grace was seated on this brow;
+ Hypérion's curls;[122] the front of Jove himself;
+ An eye like Mars, to threaten and command;
+ A station like the herald Mercury[123]
+ New-lighted on a heaven-kissing hill;
+ A combination, and a form, indeed,
+ Where every god did seem to set his seal,
+ To give the world assurance of a man;
+ This was your husband.--Look you now, what follows:
+ Here is your husband; like a mildew'd ear,
+ Blasting his wholesome brother.[124] Have you eyes?
+ Could you on this fair mountain leave to feed,
+ And batten on this moor?[125] Ha! have you eyes?
+ You cannot call it love; for, at your age
+ The hey-day in the blood[126] is tame, it's humble,
+ And waits upon the judgment: And what judgment
+ Would step from this to this?
+ O shame! where is thy blush? Rebellious hell,
+ If thou canst mutine,[127] in a matron's bones,
+ To flaming youth let virtue be as wax,
+ And melt in her own fire.
+
+ _Queen._ O, Hamlet, speak no more:
+ Thou turn'st mine eyes into my very soul;
+ And there I see such black and grainèd spots
+ As will not leave their tinct.[128]
+
+ _Ham._ Nay, but to live
+ In the rank sweat of an enseamed bed,----[129]
+
+ _Queen._ O, speak to me no more;
+ No more, sweet Hamlet!
+
+ _Ham._ A murderer and a villain:
+ A slave that is not twentieth part the tythe
+ Of your precedent lord;--a vice of kings;[130]
+ A cutpurse of the empire and the rule;
+ That from a shelf the precious diadem stole,
+ And put it in his pocket![131]
+
+ _Queen._ No more!
+
+ _Ham._ A king
+ Of shreds and patches.[132]
+
+ [_Enter_ Ghost, R.]
+
+ Save me
+
+ [_Starts from his chair_],
+
+ and hover o'er me with your wings,
+ You heavenly guards! What would your gracious figure?
+
+ _Queen._ Alas, he's mad!
+
+ [_Rising._]
+
+ _Ham._ (L.) Do you not come your tardy son to chide,
+ That, laps'd in time and passion,[133] lets go by
+ The important acting of your dread command?
+ O, say!
+
+ _Ghost._ (R.) Do not forget: This visitation
+ Is but to whet thy almost blunted purpose.
+ But, look, amazement on thy mother sits:
+ O, step between her and her fighting soul.
+ Speak to her, Hamlet.
+
+ _Ham._ How is it with you, lady?
+
+ _Queen._ Alas, how is't with you,
+ That you do bend your eye on vacancy,
+ And with the incorporal air do hold discourse?
+ Forth at your eyes your spirits wildly peep.
+ O gentle son,
+
+ [_Crosses to_ HAMLET.]
+
+ Upon the heat and flame of thy distemper
+ Sprinkle cool patience.[134] Whereon do you look?
+
+ _Ham._ On him, on him!--Look you, how pale he glares!
+ His form and cause conjoin'd, preaching to stones,
+ Would make them capable.[135] Do not look upon me;
+ Lest with this piteous action, you convert
+ My stern effects:[136] then what I have to do
+ Will want true colour; tears perchance, for blood.
+
+ _Queen._ To whom do you speak this?
+
+ _Ham._ Do you see nothing there?
+
+ _Queen._ Nothing at all; yet all that is, I see.[137]
+
+ _Ham._ Nor did you nothing hear?
+
+ _Queen._ No, nothing but ourselves.
+
+ _Ham._ Why, look you there! look, how it steals away!
+
+ [_Ghost crosses to_ L.]
+
+ My father in his habit as he lived![138]
+ Look, where he goes, even now, out at the portal!
+
+ [_Exit_ Ghost, L.H. HAMLET _sinks into chair_ C.
+ _The_ QUEEN _falls on her knees by his side._]
+
+ _Queen._ This is the very coinage of your brain:
+ This bodiless creation ecstasy
+ Is very cunning in.[139]
+
+ _Ham._ Ecstasy!
+ My pulse, as yours, doth temperately keep time,
+ And makes as healthful music: It is not madness
+ That I have uttered: bring me to the test,
+ And I the matter will re-word; which madness
+ Would gambol from.[140] Mother, for love of grace,
+
+ [_Rising._]
+
+ Lay not that flattering unction to your soul,
+ That not your trespass, but my madness speaks:
+ It will but skin and film[141] the ulcerous place,
+ Whiles rank corruption, mining all within,
+ Infects unseen. Confess yourself to heaven;
+ Repent what's past; avoid what is to come.
+
+ _Queen._ O, Hamlet! thou hast cleft my heart in twain.
+
+ _Ham._ O, throw away the worser part of it,
+ And live the purer with the other half.
+ Good night: but go not to my uncle's bed;
+
+ [_Raising the_ QUEEN.]
+
+ Assume a virtue, if you have it not.
+ Once more, good night!
+ And when you are desirous to be bless'd,
+ I'll blessing beg of you.[142] For this same lord,
+
+ [_Pointing to_ POLONIUS.]
+
+ I do repent:
+ I will bestow him, and will answer well
+ The death I gave him. So, again, good night.
+
+ [_Exit_ QUEEN, R.H.]
+
+ I must be cruel, only to be kind:
+ Thus bad begins, and worse remains behind.
+
+ [_Exit_ HAMLET _behind the arras_, L.H.U.E.
+
+END OF ACT THIRD.
+
+
+
+
+Notes
+
+Act III
+
+
+ [Footnote III.1: _Forward_] Disposed, inclinable.]
+
+ [Footnote III.2: _Assay him to_] Try his disposition towards.]
+
+ [Footnote III.3: _O'er-raught on the way:_] Reached or overtook.]
+
+ [Footnote III.4: _Have closely sent_] _i.e._, privately sent.]
+
+ [Footnote III.5: _May here affront Ophelia:_] To affront is to
+ come face to face--to confront.]
+
+ [Footnote III.6: _Lawful espials_,] Spies justifiably inquisitive.
+ From the French, _espier_.]
+
+ [Footnote III.7: _Too much prov'd_,] Found by too frequent
+ experience.]
+
+ [Footnote III.8: _To be, or not to be, that is the question:_]
+ Hamlet is deliberating whether he should continue to live, or put
+ an end to his existence.]
+
+ [Footnote III.9: _Or to take arms against a sea of troubles_,] _A
+ sea of troubles_ among the Greeks grew into a proverbial usage;
+ so that the expression figuratively means, the troubles of human
+ life, which flow in upon us, and encompass us round like a sea.]
+
+ [Footnote III.10: _This mortal coil_,] Coil is here used in each
+ of its senses, that of turmoil or bustle, and that which entwines
+ or wraps round.]
+
+ [Footnote III.11: _Must give us pause:_] _i.e._, occasion for
+ reflection.]
+
+ [Footnote III.12: _There's the respect That makes calamity of so
+ long life_;] The _consideration_ that makes the evils of life so
+ long submitted to, lived under.]
+ + [Footnote III.13: _The whips and scorns of time_,] Those + sufferings of body and mind, those stripes and mortifications to + which, in its _course_, the life of man is subjected.] + + [Footnote III.14: _Contumely_,] Contemptuousness, rudeness.] + + [Footnote III.15: _His quietus make_] Quietus means the official + discharge of an account: from the Latin. Particularly in the + Exchequer accounts, where it is still current. Chiefly used by + authors in metaphorical senses.] + + [Footnote III.16: _A bare bodkin?_] Bodkin was an ancient term + for a small dagger. In the margin of Stowe's Chronicle it is said + that Cæsar was slain with _bodkins_.] + + [Footnote III.17: _Who would fardels bear_,] Fardel is a burden. + Fardellus, low Latin.] + + [Footnote III.18: _From whose bourn_] _i.e._, boundary.] + + [Footnote III.19: _No traveller returns_,] The traveller whom + Hamlet had seen, though he appeared in the same habit which he + had worn in his life-time, was nothing but a shadow, + "invulnerable as the air," and, consequently, _incorporeal_. The + Ghost has given us no account of the region from whence he came, + being, as he himself informed us, "forbid to tell the secrets of + his prison-house."--MALONE.] + + [Footnote III.20: _Thus conscience does make cowards of us all_;] + A state of doubt and uncertainty, a conscious feeling or + apprehension, a misgiving "How our audit stands."] + + [Footnote III.21: _Of great pith and moment_,] _i.e._, of great + vigour and importance.] + + [Footnote III.22: + + _With this regard, their currents turn away_, + _And lose the name of action._] + + From this sole consideration have their drifts diverted, and lose + the character and name of enterprise.] + + [Footnote III.23: _Soft you now!_] A gentler pace! have done with + lofty march!] + + [Footnote III.24: _Nymph, in thy orisons_] _i.e._, in thy + prayers. Orison is from _oraison_--French.] + + [Footnote III.25: _If you be honest and fair, your honesty should + admit no discourse to your beauty._] _i.e._, if you really + possess these qualities, chastity and beauty, and mean to support + the character of both, your honesty should be so chary of your + beauty, as not to suffer a thing so fragile to entertain + discourse, or to be parleyed with. + + The lady interprets the words otherwise, giving them the turn + best suited to her purpose.] + + [Footnote III.26: _His likeness:_] Shakespeare and his + contemporaries frequently use the personal for the neutral + pronoun.] + + [Footnote III.27: _Inoculate our old stock, but we shall relish + of it:_] So change the original constitution and properties, as + that no smack of them shall remain. "Inoculate our stock" are + terms in gardening.] + + [Footnote III.28: _With more offences at my beck_] That is, + always ready to come about me--at my beck and call.] + + [Footnote III.29: _Than I have thoughts to put them in, &c._] "To + put a thing into thought," Johnson says, is "to think on it."] + + [Footnote III.30: _I have heard of your paintings_,] These + destructive aids of beauty seem, in the time of Shakespeare, to + have been general objects of satire.] + + [Footnote III.31: _Heaven hath given you one face, and you make + yourselves another:_] _i.e._, Heaven hath given you one face, and + you disfigure his image by making yourself another.] + + [Footnote III.32: _You jig, you amble, and you lisp_,] This is an + allusion to the manners of the age, which Shakespeare, in the + spirit of his contemporaries, means here to satirise.] 
+
+ [Footnote III.33: _Make your wantonness your ignorance._] You
+ mistake by _wanton_ affectation, and pretend to mistake by
+ _ignorance_.]
+
+ [Footnote III.34: _All but one shall live_;] _One_ is the king.]
+
+ [Footnote III.35: _To a nunnery, go. Exit Hamlet._] There is no
+ doubt that Hamlet's attachment to Ophelia is ardent and sincere,
+ but he treats her with apparent severity because he is aware that
+ Ophelia has been purposely thrown in his way; that spies are
+ about them; and that it is necessary for the preservation of his
+ life, to assume a conduct which he thought would be attributed to
+ madness only.]
+
+ [Footnote III.36: _The expectancy and rose of the fair state_,]
+ The first hope and fairest flower. "The gracious mark o' the
+ land."]
+
+ [Footnote III.37: _Glass of fashion_] Speculum
+ consuetudinis.--CICERO.]
+
+ [Footnote III.38: _The mould of form_,] The cast, in which is
+ shaped the only perfect form.]
+
+ [Footnote III.39: _Musick vows_,] Musical, mellifluous.]
+
+ [Footnote III.40: _Be round with him_;] _i.e._, plain with
+ him--without reserve.]
+
+ [Footnote III.41: _If she find him not_,] Make him not out.]
+
+ [Footnote III.42: _As lief_] As willingly.]
+
+ [Footnote III.43: _Thus_;] _i.e._, thrown out thus.]
+
+ [Footnote III.44: _Robustious perrywig-pated fellow_] This is a
+ ridicule on the quantity of false hair worn in Shakespeare's
+ time, for wigs were not in common use till the reign of Charles
+ the Second. _Robustious_ means making an extravagant show of
+ passion.]
+
+ [Footnote III.45: _The ears of the groundlings_,] The meaner
+ people appear to have occupied the pit of the theatre (which had
+ neither floor nor benches in Shakespeare's time), as they now sit
+ in the upper gallery.]
+
+ [Footnote III.46: _O'er-doing Termagant_;] The Crusaders, and
+ those who celebrated them, confounded Mahometans with Pagans, and
+ supposed Mahomet, or Mahound, to be one of their deities, and
+ Tervagant or Termagant, another. This imaginary personage was
+ introduced into our old plays and moralities, and represented as
+ of a most violent character, so that a ranting actor might always
+ appear to advantage in it. The word is now used for a scolding
+ woman.]
+
+ [Footnote III.47: _It out-herods Herod:_] In all the old
+ moralities and mysteries this personage was always represented as
+ a tyrant of a very violent temper, using the most exaggerated
+ language. Hence the expression.]
+
+ [Footnote III.48: _The very age and body of the time its form and
+ pressure._] _i.e._, to delineate exactly the manners of the age,
+ and the particular humours of the day--_pressure_ signifying
+ resemblance, as in a print.]
+
+ [Footnote III.49: _Come tardy off_,] Without spirit or animation;
+ heavily, sleepily done.]
+
+ [Footnote III.50: _The censure of which one_] _i.e._, the censure
+ of one of which.]
+
+ [Footnote III.51: _Your allowance_,] In your approbation.]
+
+ [Footnote III.52: _Not to speak it profanely_,] _i.e._,
+ _irreverently_, in allusion to Hamlet's supposition that God had
+ not made such men, but that they were only the handy work of
+ God's assistants.]
+
+ [Footnote III.53: _Indifferently_] In a reasonable degree.]
+
+ [Footnote III.54: _Speak no more than is set down for them:_]
+ Shakespeare alludes to a custom of his time, when the clown, or
+ low comedian, as he would now be called, addressing the audience
+ during the play, entered into a contest of raillery and sarcasm
+ with such spectators as chose to engage with him.]
+
+ [Footnote III.55: _Barren spectators_] _i.e._, dull,
+ unapprehensive spectators.]
+
+ [Footnote III.56: _Question_] Point, topic.]
+
+ [Footnote III.57: _Cop'd withal._] Encountered with.]
+
+ [Footnote III.58: _Pregnant hinges of the knee_,] _i.e._, bowed
+ or bent: ready to kneel where _thrift_, that is, thriving, or
+ emolument may follow sycophancy.]
+
+ [Footnote III.59: _Since my dear soul_] _Dear_ is out of which
+ arises the liveliest interest.]
+
+ [Footnote III.60: _Whose blood and judgment_] Dr. Johnson says
+ that according to the doctrine of the four humours, _desire_ and
+ _confidence_ were seated in the blood, and judgment in the
+ phlegm, and the due mixture of the humours made a perfect
+ character.]
+
+ [Footnote III.61: _The very comment of thy soul_] The most
+ intense direction of every faculty.]
+
+ [Footnote III.62: _Occulted guilt do not itself unkennel_]
+ Stifled, secret guilt, do not develope itself.]
+
+ [Footnote III.63: _As Vulcan's stithy._] A stithy is the smith's
+ shop, as stith is the anvil.]
+
+ [Footnote III.64: _In censure of his seeming._] In making our
+ estimate of the appearance he shall put on.]
+
+ [Footnote III.65: _I have nothing with this answer; these words
+ are not mine._] _i.e._, they grow not out of mine: have no
+ relation to anything said by me.]
+
+ [Footnote III.66: _No, nor mine, now._] They are now anybody's.
+ Dr. Johnson observes, "a man's words, says the proverb, are his
+ own no longer than while he keeps them unspoken."]
+
+ [Footnote III.67: _You played once in the university, you say?_]
+ The practice of acting Latin plays in the universities of Oxford
+ and Cambridge is very ancient, and continued to near the middle
+ of the last century.]
+
+ [Footnote III.68: _I did enact Julius Cæsar:_] A Latin play on
+ the subject of Cæsar's death, was performed at Christ-church,
+ Oxford, in 1582.]
+
+ [Footnote III.69: _They stay upon your patience._] _Patience_ is
+ here used for _leisure_.]
+
+ [Footnote III.70: _Lying down at Ophelia's feet._] To lie at the
+ feet of a mistress during any dramatic representation, seems to
+ have been a common act of gallantry.]
+
+ [Footnote III.71: _Jig-maker_,] Writer of ludicrous interludes.
+ _A jig_ was not in Shakespeare's time only a dance, but a
+ ludicrous dialogue in metre; many historical ballads were also
+ called _jigs_.]
+
+ [Footnote III.72: _For I'll have a suit of sables._] Wherever his
+ scene might be, the customs of his country were ever in
+ Shakespeare's thoughts. A suit trimmed with sables was in our
+ author's own time the richest dress worn by men in England. By
+ the Statute of Apparel, 24 Henry VIII., c. 13, (_article
+ furres_), it is ordained, that none under the degree of an _Earl_
+ may use _sables_.]
+
+ [Footnote III.73: _He must build churches, then._] Such
+ benefactors to society were sure to be recorded by means of the
+ feast day on which the patron saints and founders of churches
+ were commemorated in every parish. This custom has long since
+ ceased.]
+
+ [Footnote III.74: _Miching mallecho_;] To _mich_ is a provincial
+ word, signifying _to lie hid_, or _to skulk_, or _act by
+ stealth_. It was probably once generally used. Mallecho is
+ supposed to be corrupted from the Spanish _Malechor_, which means
+ a poisoner.]
+
+ [Footnote III.75: _The posy of a ring?_] Such poetry as you may
+ find engraven on a ring.]
+
+ [Footnote III.76: _Phoebus' cart_] A chariot was anciently called
+ a cart.]
+
+ [Footnote III.77: _Tellus' orbèd ground_,] _i.e._, the globe of
+ the earth. Tellus is the personification of the earth, being
+ described as the first being that sprung from Chaos.]
+
+ [Footnote III.78: _My operant powers their functions leave to
+ do:_] _i.e._, my active energies cease to perform their offices.]
+
+ [Footnote III.79: _What we do determine, oft we break._] Unsettle
+ our most fixed resolves.]
+
+ [Footnote III.80: _The argument?_] The subject matter.]
+
+ [Footnote III.81: _The mouse-trap._]
+
+ He calls it the mouse-trap, because it is the thing,
+ In which he'll catch the conscience of the king.]
+
+ [Footnote III.82: _Tropically._] _i.e._, figuratively.]
+
+ [Footnote III.83: _The image of a murder_,] _i.e._, the lively
+ portraiture, the correct and faithful representation of a murder,
+ &c.]
+
+ [Footnote III.84: _Let the galled jade wince_,] A proverbial
+ saying.]
+
+ [Footnote III.85: _Our withers are unwrung._] Withers is the
+ joining of the shoulder bones at the bottom of the neck and mane
+ of a horse. _Unwrung_ is _not pinched_.]
+
+ [Footnote III.86: _You are as good as a chorus_,] The persons who
+ are supposed to behold what passes in the acts of a tragedy, and
+ sing their sentiments between the acts.
+
+ The use to which Shakespeare converted the chorus, may be seen in
+ King Henry V.]
+
+ [Footnote III.87: _I could interpret between you and your love,
+ if I could see the puppets dallying._] This refers to the
+ interpreter, who formerly sat on the stage at all _puppet shows_,
+ and explained to the audience. _The puppets dallying_ are here
+ made to signify the agitations of Ophelia's bosom.]
+
+ [Footnote III.88:
+
+ _The croaking raven_
+ _Doth bellow for revenge._]
+
+ _i.e._, begin without more delay; for the raven, foreknowing the
+ deed, is already croaking, and, as it were, calling out for the
+ revenge which will ensue.]
+
+ [Footnote III.89: _Midnight weeds_] The force of the epithet
+ _midnight_, will be best displayed by a corresponding passage in
+ Macbeth:
+
+ "Root of hemlock, _digg'd i' the dark_."]
+
+ [Footnote III.90: _Usurp_] Encroach upon.]
+
+ [Footnote III.91: _Let the strucken deer go weep_,] Shakespeare,
+ in _As you like it_, in allusion to the wounded stag, speaks of
+ the _big round tears_ which _cours'd one another down his
+ innocent nose in piteous chase_. In the 13th song of Drayton's
+ Polyolbion, is a similar passage--"_The harte weepeth at his
+ dying; his tears are held to be precious in medicine._"]
+
+ [Footnote III.92: _Marvellous distempered._] _i.e._,
+ discomposed.]
+
+ [Footnote III.93: _Admiration._] _i.e._, wonder.]
+
+ [Footnote III.94: _Trade with us?_] _i.e._ Occasion of
+ intercourse.]
+
+ [Footnote III.95: _By these pickers and stealers._] _i.e._, by
+ these hands. The phrase is taken from the Church catechism,
+ where, in our duty to our neighbour, we are taught to keep our
+ hands from _picking and stealing_.]
+
+ [Footnote III.96: _You do freely bar the door of your own
+ liberty, if you deny your griefs to your friend._] By your own
+ act you close the way against your own ease, and the free
+ discharge of your griefs, if you open not the source of them to
+ your friends.]
+
+ [Footnote III.97: _You have the voice of the king himself for
+ your succession in Denmark?_] Though the crown was elective, yet
+ regard was paid to the recommendation of the preceding prince,
+ and preference given to royal blood, which, by degrees, produced
+ hereditary succession.]
+
+ [Footnote III.98: _"While the grass grows,"--the proverb is
+ something musty._] The proverb is, "_While the grass grows, the
+ steed starves._" Hamlet alludes to his own position, while
+ waiting for his succession to the throne of Denmark. A similar
+ adage is, "_A slip between the cup and the lip._"]
+
+ [Footnote III.99: _Recorder._] _i.e._ A kind of flute, or pipe.]
+
+ [Footnote III.100: _Why do you go about to recover the wind of
+ me_,] Equivalent to our more modern saying of _Get on the blind
+ side._]
+
+ [Footnote III.101: _Into a toil?_] _i.e._, net or snare.]
+
+ [Footnote III.102: _If my duty be too bold, my love is too
+ unmannerly._] If my sense of duty have led me too far, it is
+ affection and regard for you that makes the carriage of that duty
+ border on disrespect.]
+
+ [Footnote III.103: _Govern these ventages--and it will discourse
+ most eloquent music._] Justly order these vents, or air-holes,
+ and it will breathe or utter, &c.]
+
+ [Footnote III.104: _Though you can fret me, you cannot play upon
+ me._] A _fret_ is a stop or key of a musical instrument. Here is,
+ therefore, a play upon the words. Though you can fret, stop, or
+ vex, you cannot play or impose upon me.]
+
+ [Footnote III.105: _They fool me to the top of my bent._] To the
+ height; as far as they see me _incline_ to go: an allusion to the
+ utmost flexure of a bow.]
+
+ [Footnote III.106: _Bitter business_] _i.e._, shocking, horrid
+ business.]
+
+ [Footnote III.107: _Stands it safe with us_] Is it _consistent_
+ with our security.]
+
+ [Footnote III.108: _This fear_,] Bugbear.]
+
+ [Footnote III.109: _Behind the arras I'll convey myself_,] The
+ arras-hangings, in Shakespeare's time, were hung at such a
+ distance from the walls, that a person might easily stand behind
+ them unperceived.]
+
+ [Footnote III.110: _To hear the process_;] The course of the
+ conversation.]
+
+ [Footnote III.111: _The speech of vantage._] _i.e._, opportunity
+ or advantage of secret observations.]
+
+ [Footnote III.112: _Lay home to him:_] Pointedly and closely
+ charge him.]
+
+ [Footnote III.113: _Pranks too broad_] Open and bold.]
+
+ [Footnote III.114: _I'll 'sconce me even here._] 'Sconce and
+ ensconce are constantly used figuratively for _hide._ In "The
+ Merry Wives of Windsor," Falstaff says, "I will _ensconce_ me
+ behind the arras."]
+
+ [Footnote III.115: _By the rood_,] _i.e._, the cross or
+ crucifix.]
+
+ [Footnote III.116: _How now! a rat?_] This is an expression
+ borrowed from the History of Hamblet.]
+
+ [Footnote III.117: _Have not braz'd it so_,] _i.e._, soldered
+ with brass.]
+
+ [Footnote III.118: _Proof and bulwark against sense._] Against
+ all feeling.]
+
+ [Footnote III.119: _Takes off the rose From the fair forehead of
+ an innocent love, And sets a blister there_;] _i.e._, takes the
+ clear tint from the brow of unspotted, untainted innocence. "True
+ or honest as the skin between one's brows" was a proverbial
+ expression, and is frequently used by Shakespeare.]
+
+ [Footnote III.120: _As from the body of contraction plucks The
+ very soul_;] Annihilates the very principle of contracts.
+ Contraction for marriage contract.]
+
+ [Footnote III.121: _The counterfeit presentment_] _i.e._, picture
+ or mimic representation.]
+
+ [Footnote III.122: _Hypérion's curls_;] Hyperion is used by
+ Spenser with the same error in quantity.]
+
+ [Footnote III.123: _A station like the herald Mercury_] Station
+ is attitude--act of standing.]
+
+ [Footnote III.124:
+
+ _Like a mildew'd ear_,
+ _Blasting his wholesome brother._]
+
+ This alludes to Pharaoh's dream, in the 41st chapter of Genesis.]
+
+ [Footnote III.125: _Batten on this moor?_] Batten is to feed
+ rankly.]
+
+ [Footnote III.126: _Hey-day in the blood_] This expression is
+ occasionally used by old authors.]
+
+ [Footnote III.127: _Thou canst mutine_] _i.e._, rebel.]
+
+ [Footnote III.128: _As will not leave their tinct._] So dyed _in
+ grain_, that they will not relinquish or lose their tinct--are
+ not to be discharged. In a sense not very dissimilar he presently
+ says,
+
+ "Then what I have to do
+ Will _want true colour_."]
+
+ [Footnote III.129: _An enseamed bed._] _i.e._, greasy bed of
+ grossly fed indulgence.]
+
+ [Footnote III.130: _A vice of kings_;] _i.e._, a low mimick of
+ kings. The vice was the fool of the old moralities or dramas, who
+ was generally engaged in contests with the devil, by whom he was
+ finally carried away. Dr. Johnson says the modern Punch is
+ descended from the vice.]
+
+ [Footnote III.131:
+
+ _From a shelf the precious diadem stole_,
+ _And put it in his pocket!_]
+
+ In allusion to the usurper procuring the crown as a common
+ pilferer or thief, and not by open villainy that carried danger
+ with it.]
+
+ [Footnote III.132: _A king of shreds and patches._] This is said,
+ pursuing the idea of the _vice of kings_. The vice being dressed
+ as a fool, in a coat of party-coloured patches.]
+
+ [Footnote III.133: _Laps'd in time and passion_,] That having
+ suffered time to slip, and passion to cool, &c. It was supposed
+ that nothing was more offensive to apparitions than the neglect
+ to attach importance to their appearance, or to be inattentive to
+ their admonitions.]
+
+ [Footnote III.134: _Cool patience._] _i.e._, moderation.]
+
+ [Footnote III.135: _Make them capable._] Make them
+ intelligent--capable of conceiving.]
+
+ [Footnote III.136: _My stern effects:_] _i.e._, change the nature
+ of my purposes, or what I mean to effect.]
+
+ [Footnote III.137: _Nothing at all; yet all that is, I see._] It
+ is in perfect consistency with the belief that all spirits were
+ not only naturally invisible, but that they possessed the power
+ of making themselves visible to such persons only as they
+ pleased.]
+
+ [Footnote III.138: _My father, in his habit as he lived!_] In the
+ habit he was accustomed to wear when living.]
+
+ [Footnote III.139:
+
+ _This bodiless creation ecstasy_
+ _Is very cunning in._]
+
+ _i.e._, "Such shadows are the weak brain's forgeries." Ecstasy in
+ this place, as in many others, means a temporary alienation of
+ mind--a fit.]
+
+ [Footnote III.140: _Gambol from._] Start away from.]
+
+ [Footnote III.141: _Skin and film_,] Cover with a thin skin.]
+
+ [Footnote III.142:
+
+ _And when you are desirous to be bless'd_,
+ _I'll blessing beg of you_]
+
+ When you are desirous to receive a blessing from heaven (which
+ you cannot, seriously, till you reform), I will beg to receive a
+ blessing from you.]
+
+
+
+
+ACT IV.
+
+SCENE I.--A ROOM IN THE CASTLE.
+
+
+ _Enter_ KING _and_ QUEEN, _from_ (R.H.) _centre._
+
+ _King._ There's matter in these sighs, these profound heaves:
+ You must translate:[1] 'tis fit we understand them.
+ How does Hamlet?
+
+ _Queen._ Mad as the sea and wind, when both contend
+ Which is the mightier: In his lawless fit,
+ Behind the arras hearing something stir,
+ Whips out his rapier, cries _A rat, a rat!_
+ And, in this brainish apprehension,[2] kills
+ The unseen good old man.
+ + _King._ O heavy deed! + It had been so with us, had we been there: + Where is he gone? + + _Queen._ To draw apart the body he hath kill'd. + + _King._ The sun no sooner shall the mountains touch, + But we will ship him hence: and this vile deed + We must, with all our majesty and skill, + Both countenance and excuse.--Ho, Guildenstern! + + _Enter_ ROSENCRANTZ _and_ GUILDENSTERN (L.H.) + + Friends both, go join you with some further aid: + Hamlet in madness hath Polonius slain, + And from his mother's closet hath he dragg'd him: + Go seek him out; speak fair, and bring the body + Into the chapel. + + [ROSENCRANTZ _and_ GUILDENSTERN _cross to_ R.] + + I pray you, haste in this. + + [_Exeunt_ ROSENCRANTZ _and_ GUILDENSTERN, R.H.] + + Go, Gertrude, we'll call up our wisest friends; + And let them know, both what we mean to do, + And what's untimely done. + + [_Exit_ QUEEN, R.C.] + + How dangerous is it that this man goes loose! + Yet must not we put the strong law on him: + He's lov'd of the distracted multitude, + Who like not in their judgment, but their eyes; + And where 'tis so, the offender's scourge is weigh'd, + But never the offence.[3] + + _Enter_ ROSENCRANTZ (R.) + + How now! what hath befallen? + + _Ros._ Where the dead body is bestowed, my lord, + We cannot get from him. + + _King._ But where is he? + + _Ros._ Without, my lord; guarded, to know your pleasure. + + _King._ Bring him before us. + + _Ros._ Ho, Guildenstern! bring in my lord. + + _Enter_ HAMLET, GUILDENSTERN, _and_ Attendants (R.H.) + + _King._ (C.) Now, Hamlet, where's Polonius? + + _Ham._ (R.) At supper. + + _King._ At supper? Where? + + _Ham._ Not where he eats, but where he is eaten: a + certain convocation of politick worms[4] are e'en at him. + + _King._ Where's Polonius? + + _Ham._ In Heaven; send thither to see: if your messenger + find him not there, seek him i'the other place + yourself. But, indeed, if you find him not within this + month, you shall nose him as you go up the stairs into + the lobby. + + _King._ Go seek him there. + + [_To_ GUILDENSTERN.] + + _Ham._ He will stay till you come. + + [_Exit_ GUILDENSTERN, R.H.] + + _King._ Hamlet, this deed, for thine especial safety, + Must send thee hence: + Therefore prepare thyself; + The bark is ready, and the wind at help,[5] + For England. + + _Ham._ For England! + + _King._ Ay, Hamlet. + + _Ham._ Good. + + _King._ So is it, if thou knew'st our purposes. + + _Ham._ I see a cherub that sees them. But, come; for + England!--Farewell, dear mother. + + _King._ Thy loving father, Hamlet. + +_Ham._ My mother: Father and mother is man and wife; man and wife is +one flesh; and so, my mother. Come, for England. + + [_Exit_, R.H.] + + _King._ Follow him at foot; tempt him with speed aboard; + Away! for everything is seal'd and done. + + [_Exeunt_ ROSENCRANTZ _and_ Attendants, R.H.] + + And, England, if my love thou hold'st at aught, + Thou may'st not coldly set[6] + Our sovereign process;[7] which imports at full, + By letters conjuring to that effect,[8] + The present death of Hamlet. Do it, England; + For thou must cure me: 'Till I know 'tis done, + Howe'er my haps,[9] my joys will ne'er begin. + + [_Exit_ KING, L.H.] + + _Enter_ QUEEN _and_ HORATIO (R. _centre._) + + _Queen._ ----I will not speak with her. + + _Hor._ She is importunate; indeed, distract: + 'Twere good she were spoken with; for she may strew + Dangerous conjectures in ill-breeding minds. + + _Queen._ Let her come in. + + [_Exit_ HORATIO, R.C.] + + _Re-enter_ HORATIO, _with_ OPHELIA (R. 
_centre._) + + _Oph._ Where is the beauteous majesty of Denmark? + + _Queen._ How now, Ophelia! + + _Oph._ (C.) + + [_Singing._] + + _How should I your true love know_ + _From another one?_ + _By his cockle hat and staff_, + _And his sandal shoon._[10] + + _Queen._ (L.C.) Alas, sweet lady, what imports this song? + + _Oph._ Say you? nay, pray you, mark. + + [_Sings._] + + _He is dead and gone, lady_, + _He is dead and gone_; + _At his head a grass-green turf_, + _At his heels a stone._ + + _Enter the_ KING (L.H.) + + _Queen._ Nay, but, Ophelia,---- + + _Oph._ Pray you, mark. + + [_Sings._] + + _White his shroud as the mountain-snow_, + _Larded all with sweet flowers_;[11] + _Which bewept to the grave did go_ + _With true-love showers._ + + _King._ How do you, pretty lady? + + _Oph._ Well, Heaven 'ield you![12] + + (_Crosses to the_ KING.) + + They say the owl was a baker's daughter.[13] We know + what we are, but know not what we may be. + + _King._ Conceit upon her father.[14] + + _Oph._ Pray, you, let us have no words of this; but when + they ask you what it means, say you this: + + _To-morrow is Saint Valentine's day_, + _All in the morning betime_, + _And I, a maid at your window_, + _To be your Valentine:_ + + _King._ Pretty Ophelia! + + _Oph._ Indeed, without an oath, I'll make an end on't: + + _Then up he rose, and don'd his clothes_, + _And dupp'd[15] the chamber door_; + _Let in the maid, that out a maid_ + _Never departed more._ + + [_Crosses to_ R.H.] + + _King._ (L.) How long hath she been thus? + +_Oph._ (R.) I hope all will be well. We must be patient: but I cannot +choose but weep, to think they should lay him i'the cold ground. My +brother shall know of it; and so I thank you for your good counsel. +Come, my coach! Good night, ladies; good night, sweet ladies; good +night, good night. + + [_Exit_, R.C.] + + _King._ Follow her close; give her good watch, I pray you. + + [_Exit_ HORATIO, _through centre_ R.] + + O! this is the poison of deep grief; it springs + All from her father's death. + O, Gertrude, Gertrude, + When sorrows come, they come not single spies, + But in battalions! + + _Enter_ MARCELLUS (R. _centre._) + + _King._ What is the matter? + + _Mar._ Save yourself, my lord: + The young Laertes, in a riotous head,[16] + O'erbears your officers. The rabble call him lord; + They cry, _Choose we: Laertes shall be king!_ + Caps, hands, and tongues, applaud it to the clouds, + _Laertes shall be king, Laertes king!_ + + [_Noise within_, R.C.] + + _Enter_ LAERTES, _armed_; Danes _following_ (R. _centre._) + + _Laer._ Where is this king?--Sirs, stand you all without. + + _Dan._ No, let's come in. + + _Laer._ I pray you, give me leave. + + _Dan._ We will, we will. + + [_They retire without_, R.H.] + + _Laer._ O, thou vile king, + Give me my father. + + _Queen_ + + (_Interposing._) + + Calmly, good Laertes. + + _Laer._ (R.) That drop of blood that's calm proclaims me bastard; + Cries cuckold to my father; brands the harlot + Even here, between the chaste unsmirched brow + Of my true mother.[17] + + _King._ (L.) What is the cause, Laertes, + That thy rebellion looks so giant-like? + Let him go, Gertrude; do not fear our person: + There's such divinity doth hedge a king,[18] + That treason can but peep to what it would, + Acts little of his will. + Let him go, Gertrude. + + [QUEEN _obeys._] + + _Laer._ Where is my father? + + _King._ Dead. + + _Queen._ But not by him. + + _King._ Let him demand his fill. + + _Laer._ How came he dead? 
I'll not be juggled with: + To hell, allegiance! To this point I stand, + That both the worlds I give to negligence,[19] + Let come what comes; only I'll be reveng'd + Most throughly for my father. + + _King._ Who shall stay you! + + _Laer._ My will, not all the world's:[20] + And, for my means, I'll husband them so well, + They shall go far with little. + + _King._ Good Laertes, + That I am guiltless of your father's death, + And am most sensible in grief[21] for it, + It shall as level to your judgment 'pear + As day does to your eye. + + _Hor._ + + (_Without._) + + Oh, poor Ophelia! + + _King._ Let her come in. + + _Enter_ OPHELIA (R.C.), _fantastically dressed with Straws + and Flowers._ + + _Laer._ + + (_Goes up_ L.C.) + + O rose of May! + Dear maid, kind sister, sweet Ophelia! + O heavens! is't possible, a young maid's wits + Should be as mortal as an old man's life? + + _Oph._ (R.C.) + + _They bore him barefac'd on the bier_; + _And on his grave rain many a tear,--_ + + Fare you well, my dove! + + _Laer._ + + (_Coming down_ R.) + + Hadst thou thy wits, and didst persuade revenge, + It could not move thus. + +_Oph._ You must sing, _Down-a-down,[22] an you call him a-down-a._ O, +how well the wheel becomes it![23] It is the false steward, that +stole his master's daughter. + +_Laer._ This nothing's more than matter. + +_Oph._ There's rosemary, that's for remembrance;[24] pray you, love, +remember: and there is pansies,[25] that's for thoughts. + +_Laer._ A document in madness; thoughts and remembrance fitted. + +_Oph._ There's fennel for you, + + (_crosses to the_ KING _on_ L.H.) + +and columbines:[26] there's rue for you; + + (_turns to the_ QUEEN, _who is_ R.C.) + +and here's some for me:--we may call it herb of grace +o'Sundays:[27]--you may wear your rue with a difference.[28]--There's +a daisy:[29]--I would give you some violets,[30] but they withered +all when my father died:--They say he made a good end,---- + + _For bonny sweet Robin is all my joy--_[31] + + _Laer._ (R.) Thought and affliction,[32] passion, hell itself, + She turns to favour and to prettiness. + + _Oph._ + + _And will he not come again?_ + _And will he not come again?_ + _No, no, he is dead_, + _Gone to his death-bed_, + _He never will come again._ + + _His beard was white as snow_, + _All flaxen was his poll:_ + _He is gone, he is gone_, + _And we cast away moan:_ + _Heaven 'a mercy on his soul!_ + + And of all christian souls, I pray Heaven. Heaven be wi' you. + + [_Exit_ OPHELIA, R.C., QUEEN _following._] + + _Laer._ Do you see this, O Heaven? + + _King._ (L.C.) Laertes, I must commune with your grief,[33] + Or you deny me right. + Be you content to lend your patience to us, + And we shall jointly labour with your soul + To give it due content. + + _Laer._ (R.C.) Let this be so; + His means of death, his obscure funeral,-- + No trophy, sword, nor hatchment o'er his bones,[34] + No noble rite nor formal ostentation,-- + Cry to be heard,[35] as 'twere from heaven to earth, + That I must call't in question. + + _King._ So you shall; + And where the offence is let the great axe fall.[36] + How now! what news? + + _Enter_ BERNARDO (R.H.C.) + + _Ber._ (C.) Letters, my lord, from Hamlet: + This to your majesty; this to the Queen. + + _King._ From Hamlet! who brought them? + + _Ber._ Sailors, my lord, they say; I saw them not. + + _King._ Laertes, you shall hear them.-- + Leave us. + + [_Exit_, L.H.C.] + [Reads.] 
+
+_High and mighty, You shall know I am set naked on your kingdom.[37]
+To-morrow shall I beg leave to see your kingly eyes: when I shall,
+first asking your pardon thereunto, recount the occasion of my sudden
+and more strange return._ HAMLET.
+
+ What should this mean? Are all the rest come back?
+ Or is it some abuse, and no such thing?
+
+ _Laer._ (R.) Know you the hand?
+
+ _King._ (L.) 'Tis Hamlet's character:[38] _Naked,--_
+
+ And in a postscript here, he says, _alone_.
+ Can you advise me?
+
+ _Laer._ I am lost in it, my lord. But let him come;
+ It warms the very sickness in my heart,
+ That I shall live and tell him to his teeth,
+ _Thus diddest thou_.
+
+ _King._ If it be so, Laertes,
+ Will you be rul'd by me?
+
+ _Laer._ Ay, my lord;
+ So you will not o'er-rule me to a peace.
+
+ _King._ To thine own peace.
+ Some two months since,
+ Here was a gentleman of Normandy,
+ He made confession of[39] you;
+ And gave you such a masterly report,
+ For art and exercise in your defence,[40]
+ And for your rapier most especially,
+ That he cried out, 'twould be a sight indeed,
+ If one could match you: this report of his
+ Did Hamlet so envenom with his envy,
+ That he could nothing do but wish and beg
+ Your sudden coming o'er, to play with you.
+ Now, out of this,----
+
+ _Laer._ What out of this, my lord?
+
+ _King._ Laertes, was your father dear to you?
+ Or are you like the painting of a sorrow,
+ A face without a heart?
+
+ _Laer._ Why ask you this?
+
+ _King._ Hamlet return'd shall know you are come home:
+ We'll put on those shall praise your excellence,
+ And set a double varnish on the fame
+ The Frenchman gave you; bring you, in fine, together,
+ And wager o'er your heads; he, being remiss,[41]
+ Most generous, and free from all contriving,
+ Will not peruse the foils:[42] so that, with ease,
+ Or with a little shuffling, you may choose
+ A sword unbated,[43] and, in a pass of practice,[44]
+ Requite him for your father.
+
+ _Laer._ I will do't:
+ And, for the purpose, I'll anoint my sword.
+ I bought an unction of a mountebank,
+ So mortal, that but dip a knife in it,
+ Where it draws blood no cataplasm[45] so rare,
+ Collected from all simples[46] that have virtue
+ Under the moon, can save the thing from death
+ That is but scratch'd withal: I'll touch my point
+ With this contagion, that, if I gall him slightly,
+ It may be death.
+
+ _King._ (L.) Let's further think of this;
+ We'll make a solemn wager on your cunnings,[47]
+ When in your motion[48] you are hot and dry,
+ (As make your bouts more violent to that end,)
+ And that he calls for drink, I'll have prepared him
+ A chalice for the nonce;[49] whereon but sipping,
+ If he by chance escape your venom'd stuck,[50]
+ Our purpose may hold there. But stay, what noise?
+
+ _Enter_ QUEEN (R.C.)
+
+ _Queen._ (C.) One woe doth tread upon another's heel,
+ So fast they follow: Your sister's drown'd, Laertes.
+
+ _Laer._ (R.) Drown'd! O, where?
+
+ _Queen._ There is a willow grows aslant a brook,
+ That shows his hoar leaves in the glassy stream;
+ Therewith fantastick garlands did she make
+ Of crow-flowers, nettles, daisies, and long purples;[51]
+ There, on the pendent boughs her coronet weeds
+ Clambering to hang, an envious sliver broke;
+ When down her weedy trophies, and herself,
+ Fell in the weeping brook.
+
+ _Laer._ I forbid my tears: But yet
+ It is our trick:[52] nature her custom holds,
+ Let shame say what it will: when these are gone,
+ The woman will be out.[53]
+ Adieu, my lord:
+ I have a speech of fire, that fain would blaze,
+ But that this folly drowns it.[54]
+
+ [_Exeunt._ C.]
+
+END OF ACT FOURTH.
+
+
+
+
+Notes
+
+Act IV
+
+
+ [Footnote IV.1: _Translate:_] Interpret.]
+
+ [Footnote IV.2: _In this brainish apprehension_,] Distempered,
+ brainsick mood.]
+
+ [Footnote IV.3: _Where the offender's scourge is weigh'd, But
+ never the offence._] When an offender is popular, the people
+ never consider what his crime was, but they scrutinise his
+ punishment.]
+
+ [Footnote IV.4: _Politick worms_] _i.e._, artful, cunning worms.]
+
+ [Footnote IV.5: _The wind at help_,] _i.e._, ready.]
+
+ [Footnote IV.6: _May'st not coldly set_] Set is to value or
+ estimate. "Thou may'st not _set little by it_, or _estimate it
+ lightly_."]
+
+ [Footnote IV.7: _Our sovereign process:_] _i.e._, our royal
+ design.]
+
+ [Footnote IV.8: _By letters conjuring to that effect_,] The verb
+ to conjure, in the sense of to supplicate, was formerly accented
+ on the first syllable.]
+
+ [Footnote IV.9: _Howe'er my haps_,] Chances of fortune.]
+
+ [Footnote IV.10: _His sandal shoon._] Shoon is the old plural of
+ shoe. The verse is descriptive of a pilgrim. While this kind of
+ devotion was in favour, love intrigues were carried on under that
+ mask.]
+
+ [Footnote IV.11: _Larded with sweet flowers_;] _i.e._, Garnished
+ with sweet flowers.]
+
+ [Footnote IV.12: _Heaven 'ield you._] Requite; yield you
+ recompence.]
+
+ [Footnote IV.13: _The owl was a baker's daughter._] This is in
+ reference to a story that was once prevalent among the common
+ people of Gloucestershire.]
+
+ [Footnote IV.14: _Conceit upon her father._] Fancies respecting
+ her father.]
+
+ [Footnote IV.15: _Don'd and dupp'd_] _To don_, is to _do on_, or
+ _put on_, as _doff_ is to _do off_, or _put off_. To _dupp_ is to
+ _do up_, or _lift up_ the latch.]
+
+ [Footnote IV.16: _In a riotous head_,] The tide, strongly
+ flowing, is said to pour in with a great _head_.]
+
+ [Footnote IV.17: _The chaste unsmirched brow of my true mother._]
+ _Unsmirched_ is unstained, not defiled.]
+
+ [Footnote IV.18: _Doth hedge a king_,] The word _hedge_ is used
+ by the gravest writers upon the highest subjects.]
+
+ [Footnote IV.19: _Both the worlds I give to negligence_,] I am
+ careless of my present and future prospects, my views in this
+ life, as well as that which is to come.]
+
+ [Footnote IV.20: _My will, not all the world's:_] _i.e._, by my
+ will as far as my will is concerned, not all the world shall stop
+ me; and, as for my means, I'll husband them so well, they shall
+ go far, though really little.]
+
+ [Footnote IV.21: _Sensible in grief_] Poignantly affected with.]
+
+ [Footnote IV.22: _You must sing Down-a-down_,] This was the
+ burthen of an old song, well known in Shakespeare's time.]
+
+ [Footnote IV.23: _How well the wheel becomes it!_] This probably
+ means that the song or charm is well adapted to those who are
+ occupied at spinning at the wheel.]
+
+ [Footnote IV.24: _There's rosemary, that's for remembrance_;]
+ Rosemary was anciently supposed to strengthen the memory, and was
+ carried at funerals and worn at weddings. It was also considered
+ the emblem of fidelity in lovers; and at weddings it was usual to
+ dip the rosemary in the cup, and drink to the health of the new
+ married couple.]
+ + [Footnote IV.25: _There is pansies_,] _i.e._, a little flower + called _heart's-ease_. Pansies in French signifies _thoughts_.] + + [Footnote IV.26: _There's fennel for you, and columbines:_] + Fennel was considered an emblem of flattery, and columbine was + anciently supposed to be a _thankless flower_; signifying + probably that the courtiers flattered to get favours, and were + thankless after receiving them. Columbine was emblematical of + forsaken lovers.] + + [Footnote IV.27: _There's rue for you; and here's some for + me:--we may call it herb of grace o' Sundays:_] Probably a + quibble is meant here, as _rue_ anciently signified the same as + _ruth_, _i.e._, sorrow. In the common dictionaries of + Shakespeare's time, it was called _herb of grace_. Ophelia wishes + to remind the Queen of the sorrow and contrition she ought to + feel for her unlawful marriage; and that she may wear her rue + with peculiar propriety on Sundays, when she solicits pardon for + the crime which she has so much occasion to _rue_ and repent + of.--MALONE.] + + [Footnote IV.28: _You may wear your rue with a difference._] + _i.e._, to distinguish it from that worn by Ophelia, herself: + because her tears flowed from the loss of a father--those of the + Queen ought to flow for her guilt.] + + [Footnote IV.29: _There's a daisy:_] A daisy signified a warning + to young women, not to trust the fair promises of their lovers.] + + [Footnote IV.30: _I would give you some violets_,] Violets + signified faithfulness.] + + [Footnote IV.31: _For bonny sweet Robin is all my joy,--_] Part + of an old song.] + + [Footnote IV.32: _Thought and affliction_,] Thought here, as in + many other places, means melancholy.] + + [Footnote IV.33: _I must commune with your grief_,] _i.e._, + confer, discuss, or argue with.] + + [Footnote IV.34: _No trophy, sword, nor hatchment o'er his + bones_,] Not only the sword, but the helmet, gauntlet, spurs, and + tabard, (_i.e._, a coat whereon the armorial ensigns were + anciently depicted, from whence the term _coat_ of armour), are + hung over the grave of every knight.] + + [Footnote IV.35: _Cry to be heard_,] All these multiplied + incitements are things which cry, &c.] + + [Footnote IV.36: _Let the great axe fall._] _i.e._, the axe that + is to be laid to the root.] + + [Footnote IV.37: _Naked on your kingdom_,] _i.e._, unprovided and + defenceless.] + + [Footnote IV.38: _'Tis Hamlet's character_,] Peculiar mode of + shaping his letters.] + + [Footnote IV.39: _Made confession of_] Acknowledged.] + + [Footnote IV.40: _In your defence_,] _i.e._, "in your art and + science of defence."] + + [Footnote IV.41: _He, being remiss_,] _i.e._, unsuspicious, not + cautious.] + + [Footnote IV.42: _Peruse the foils_;] Closely inspect them.] + + [Footnote IV.43: _A sword unbated_,] Not blunted, as foils are by + a button fixed to the end.] + + [Footnote IV.44: _In a pass of practice_,] This probably means + some favourite pass, some trick of fencing, with which Hamlet was + inexperienced, and by which Laertes may be sure of success.] + + [Footnote IV.45: _No cataplasm_,] _i.e._, poultice--a healing + application.] + + [Footnote IV.46: _Collected from all simples_,] _i.e._, from all + ingredients in medicine.] + + [Footnote IV.47: _On your cunnings_,] _i.e._, on your dexterity.] + + [Footnote IV.48: _In your motion_] Exercise, rapid evolutions.] + + [Footnote IV.49: _For the nonce_;] _i.e._, present purpose or + design.] + + [Footnote IV.50: _Venom'd stuck_,] Thrust. 
Stuck was a term of + the fencing school.] + + [Footnote IV.51: _Long purples_,] One of the names for a species + of orchis, a common English flower.] + + [Footnote IV.52: _Our trick:_] Our course, or habit; a property + that clings to, or makes a part of, us.] + + [Footnote IV.53: + + _When these are gone_, + _The woman will be out._] + + When these tears are shed, this womanish passion will be over.] + + [Footnote IV.54: _But that this folly drowns it._] _i.e._, my + rage had flamed, if this flood of tears had not extinguished it.] + + + + +ACT V. + +SCENE I.--A CHURCH YARD. + + + _Enter two_ Clowns,[1] _with spades, &c._ (L.H.U.E.) + +_1st Clo._ (R.) Is she to be buried in christian burial that wilfully +seeks her own salvation? + +_2nd Clo._ (L.) I tell thee she is; therefore make her grave +straight:[2] the crowner[3] hath set on her, and finds it christian +burial. + +_1st Clo._ How can that be, unless she drowned herself in her own +defence? + +_2nd Clo._ Why, 'tis found so. + +_1st Clo._ It must be _se offendendo_;[4] it cannot be else. For here +lies the point: If I drown myself wittingly, it argues an act: and an +act hath three branches; it is, to act, to do, and to perform:[5] +argal,[6] she drowned herself wittingly. + +_2nd Clo._ Nay, but hear you, goodman delver.[7] + +_1st Clo._ Give me leave. Here lies the water; good: here stands the +man; good: If the man go to this water, and drown himself, it is, +will he, nill he, he goes,[8] mark you that; but if the water come +to him and drown him, he drowns not himself: argal, he that is not +guilty of his own death shortens not his own life. + +_2nd Clo._ But is this law? + +_1st Clo._ Ay, marry is't; crowner's-quest law.[9] + +_2nd Clo._ Will you ha' the truth on't? If this had not been a +gentlewoman, she should have been buried out of christian burial. + +_1st Clo._ Why, there thou say'st:[10] And the more pity that great +folks should have countenance in this world to drown or hang +themselves, more than their even christian.[11] Come, my spade. There +is no ancient gentlemen but gardeners, ditchers, and grave-makers: +they hold up Adam's profession. + +_2nd Clo._ Was he a gentleman?[12] + +_1st Clo._ He was the first that ever bore arms. I'll put another +question to thee: if thou answerest me not to the purpose, confess +thyself----[13] + +_2nd Clo._ Go to. + +_1st Clo._ What is he that builds stronger than either the mason, the +shipwright, or the carpenter? + +_2nd Clo._ The gallows-maker; for that frame outlives a thousand +tenants. + +_1st Clo._ I like thy wit well, in good faith: the gallows does well; +But how does it well? it does well to those that do ill: now, thou +dost ill to say the gallows is built stronger than the church: argal, +the gallows may do well to thee. To't again, come. + +_2nd Clo._ Who builds stronger than a mason, a shipwright, or a +carpenter? + +_1st Clo._ Ay, tell me that, and unyoke.[14] + +_2nd Clo._ Marry, now I can tell. + +_1st Clo._ To't. + +_2nd Clo._ Mass, I cannot tell. + +_1st Clo._ Cudgel thy brains no more about it,[15] for your dull ass +will not mend his pace with beating; and, when you are asked this +question next, say, a grave-maker, the houses that he makes, last +till doomsday. Go, get thee to Yaughan, and fetch me a stoup of +liquor.[16] + + [_Exit_ 2nd Clown, L.H.U.E.] + + _Enter_ HAMLET _and_ HORATIO (L.H.U.E.) 
+
+ First Clown _digs and sings._
+
+ _In youth, when I did love, did love_,[17]
+ _Methought, it was very sweet_,
+ _To contract, O, the time, for, ah, my behove_
+ _O, methought, there was nothing meet._
+
+_Ham._
+
+ (_Behind the grave._)
+
+Has this fellow no feeling of his business, he sings at grave-making?
+
+_Hor._
+
+ (_On_ HAMLET'S R.)
+
+Custom hath made it in him a property of easiness.
+
+_Ham._ 'Tis e'en so: the hand of little employment hath the daintier
+sense.[18]
+
+_1st Clo._
+ _But age, with his stealing steps_,
+ _Hath clawed me in his clutch_,
+ _And hath shipped me into the land_,
+ _As if I had never been such._
+
+ [_Throws up a skull._]
+
+_Ham._ That skull had a tongue in it, and could sing once: How the
+knave jowls it to the ground, as if it were Cain's jaw-bone, that did
+the first murder! This might be the pate of a politician, which this
+ass now o'er-reaches; one that would circumvent Heaven, might it not?
+
+_Hor._ It might, my lord.
+
+ [_Gravedigger throws up bones._]
+
+_Ham._ Did these bones cost no more the breeding, but to play at
+loggats with them?[19] mine ache to think on't.
+
+_1st Clo._
+
+ [_Sings._]
+
+ _A pick-axe and a spade, a spade_,
+ _For and a shrouding sheet:_[20]
+ _O, a pit of clay for to be made_
+ _For such a guest is meet._
+
+ [_Throws up a skull._]
+
+_Ham._ There's another: Why may not that be the skull of a lawyer?
+Where be his quiddits now, his quillets,[21] his cases, his tenures,
+and his tricks? Why does he suffer this rude knave now to knock him
+about the sconce[22] with a dirty shovel, and will not tell him of
+his action of battery? I will speak to this fellow.--Whose grave's
+this, sirrah?
+
+_1st Clo._ Mine, sir.--
+
+ [_Sings._]
+
+ _O, a pit of clay for to be made_
+ _For such a guest is meet._
+
+_Ham._ (R. _of grave._) I think it be thine, indeed; for thou liest
+in't.
+
+_1st Clo._ You lie out on't, sir, and therefore it is not yours: for
+my part, I do not lie in't, yet it is mine.
+
+_Ham._ Thou dost lie in't, to be in't, and say it is thine: 'tis for
+the dead, not for the quick; therefore thou liest.
+
+_1st Clo._ 'Tis a quick lie, sir; 'twill away again, from me to you.
+
+_Ham._ What man dost thou dig it for?
+
+_1st Clo._ For no man, sir.
+
+_Ham._ What woman, then?
+
+_1st Clo._ For none, neither.
+
+_Ham._ Who is to be buried in't?
+
+_1st Clo._ One that was a woman, sir; but, rest her soul, she's dead.
+
+_Ham._ How absolute the knave is![23] we must speak by the card,[24]
+or equivocation will undo us,
+
+ [_To_ HORATIO, R.]
+
+How long hast thou been a grave-maker?
+
+_1st Clo._ Of all the days i'the year, I came to't that day that our
+last king Hamlet overcame Fortinbras.
+
+_Ham._ How long's that since?
+
+_1st Clo._ Cannot you tell that? every fool can tell that: It was the
+very day that young Hamlet was born,[25] he that is mad, and sent
+into England.
+
+_Ham._ Ay, marry, why was he sent into England?
+
+_1st Clo._ Why, because he was mad: he shall recover his wits there;
+or, if he do not, 'tis no great matter there.
+
+_Ham._ Why?
+
+_1st Clo._ 'Twill not be seen in him there; there the men are as mad
+as he.
+
+_Ham._ How came he mad?
+
+_1st Clo._ Very strangely, they say.
+
+_Ham._ How strangely?
+
+_1st Clo._ 'Faith, e'en with losing his wits.
+
+_Ham._ Upon what ground?
+
+_1st Clo._ Why, here in Denmark: I have been sexton here, man and
+boy, thirty years.
+
+_Ham._ How long will a man lie i'the earth ere he rot?
+ +_1st Clo._ 'Faith, if he be not rotten before he die, he will last +you some eight year or nine year: a tanner will last you nine year. + +_Ham._ Why he more than another? + +_1st Clo._ Why, sir, his hide is so tanned with his trade, that he +will keep out water a great while; and your water is a sore decayer +of your ill-begotten dead body. Here's a skull now, hath lain in the +earth three-and-twenty years. + +_Ham._ Whose was it? + +_1st Clo._ O, a mad fellow's it was: Whose do you think +it was? + +_Ham._ Nay, I know not. + +_1st Clo._ A pestilence on him for a mad rogue! he poured a flagon of +Rhenish on my head once. This same skull, sir, was Yorick's skull, +the king's jester. + +_Ham._ This? + + [_Takes the skull._] + +_1st Clo._ E'en that. + +_Ham._ Alas, poor Yorick! I knew him, Horatio: a fellow of infinite +jest, of most excellent fancy: he hath borne me on his back a +thousand times. Here hung those lips that I have kissed I know not +how oft; and now, how abhorred in my imagination it is! Where be your +gibes now? your gambols? your songs? your flashes of merriment, that +were wont to set the table on a roar? Not one now, to mock your own +grinning? Quite chap-fallen? Now get you to my lady's chamber, and +tell her, let her paint an inch thick, to this favour[26] she must +come; make her laugh at that. Prithee, Horatio, tell me one thing. + +_Hor._ What's that, my lord? + +_Ham._ Dost thou think Alexander look'd o'this fashion i'the earth? + +_Hor._ E'en so. + +_Ham._ And smelt so? pah! + + [_Gives the skull to HORATIO, who returns it to the grave-digger._] + +_Hor._ E'en so, my lord. + +_Ham._ To what base uses may we return, Horatio! Why may not +imagination trace the noble dust of Alexander, till it find it +stopping a bung-hole? + +_Hor._ 'Twere to consider too curiously,[27] to consider so. + +_Ham._ No, faith, not a jot; but to follow him thither with modesty +enough, and likelihood to lead it: As thus; Alexander died, Alexander +was buried, Alexander returneth to dust; the dust is earth; of earth +we make loam; And why of that loam, whereto he was converted, might +they not stop a beer barrel? + + Imperial Cæsar,[28] dead and turn'd to clay, + Might stop a hole to keep the wind away: + O, that the earth, which kept the world in awe, + Should patch a wall to expel the winter's flaw![29] + + But soft! but soft! aside: Here comes the king, + The queen, the courtiers: Who is this they follow? + And with such maimèd rites?[30] This doth betoken + The corse they follow did with desperate hand + Fordo its own life:[31] 'Twas of some estate.[32] + Couch we awhile, and mark. + + [_Retiring with_ HORATIO, R.H.] + + _Enter_ Priests, &c., _in procession; the corpse of_ OPHELIA, + LAERTES _and_ Mourners _following_; KING, QUEEN, _their_ + Trains, _&c._ + + _Laer._ + + (L. _of the grave._) + + What ceremony else? + + _Ham._ (R.) That is Laertes, + A very noble youth. + + _1st Priest._ + + (R. _of the grave._) + + Her obsequies have been as far enlarg'd + As we have warranty: Her death was doubtful; + And, but that great command o'ersways the order,[33] + She should in ground unsanctified have lodged + Till the last trumpet; for charitable prayers, + Shards,[34] flints, and pebbles, should be thrown on her: + Yet here she is allowed her virgin crants,[35] + Her maiden strewments, and the bringing home + Of bell and burial.[36] + + _Laer._ Must there no more be done? 
+ + _1st Priest._ No more be done: + We should profane the service of the dead + To sing a _requiem_,[37] and such rest to her + As to peace-parted souls. + + _Laer._ O, from her fair and unpolluted flesh + May violets spring! I tell thee, churlish priest,[38] + A ministering angel shall my sister be, + When thou liest howling. + + _Ham._ What, the fair Ophelia! + + _Queen._ + + (_Behind the grave_, C. _with the_ KING.) + + Sweets to the sweet: Farewell! + + [_Scattering flowers._] + + I hop'd thou shouldst have been my Hamlet's wife; + I thought thy bride-bed to have deck'd, sweet maid, + And not have strew'd thy grave. + + _Laer._ O, treble woe + Fall ten times treble on that cursed head, + Whose wicked deed thy most ingenious sense[39] + Depriv'd thee of!--Hold off the earth a while, + Till I have caught her once more in mine arms: + + [_Leaps into the grave._] + + Now pile your dust upon the quick and dead, + Till of this flat a mountain you have made, + To o'ertop old Pelion,[40] or the skyish head + Of blue Olympus. + + _Ham._ + + (_Advancing._) + + What is he whose grief + Bears such an emphasis?--whose phrase of sorrow + Conjures the wand'ring stars, and makes them stand + Like wonder-wounded hearers?--this is I, + Hamlet the Dane. + + _Laer._ + + (L., _leaping from the grave._) + + The devil take thy soul! + + [_Grappling with him._] + + _Ham._ (R.C.) Thou pray'st not well. + I prithee, take thy fingers from my throat; + For, though I am not splenetive and rash, + Yet have I in me something dangerous, + Which let thy wisdom fear: Hold off thy hand! + + _King._ Pluck them asunder. + + _Queen._ (C.) Hamlet, Hamlet! + + _Ham._ (R.C.) Why, I will fight with him upon this theme + Until my eyelids will no longer wag. + + _Queen._ O my son, what theme? + + _Ham._ I lov'd Ophelia: forty thousand brothers + Could not, with all their quantity of love, + Make up my sum.--What wilt thou do for her? + + _Queen._ O, he is mad, Laertes. + + _Ham._ Come, show me what thou'lt do: + Wou'lt weep? wou'lt fight? wou'lt fast? wou'lt tear thyself? + I'll do't.--Dost thou come here to whine? + To outface me[41] with leaping in her grave? + Be buried quick with her, and so will I: + And, if thou prate of mountains, let them throw + Millions of acres on us, till our ground,[42] + Singeing his pate against the burning zone, + Make Ossa[43] like a wart! Nay, an thou'lt mouth, + I'll rant as well as thou. + + _Queen._ This is mere madness: + And thus a while the fit will work on him; + Anon, as patient as the female dove, + When that her golden couplets are disclos'd,[44] + His silence will sit drooping. + + _Ham._ Hear you, sir; + What is the reason that you use me thus? + I lov'd you ever: But it is no matter; + Let Hercules himself do what he may, + The cat will mew,[45] and dog will have his day. + + [_Exit_, R.H.] + + _King._ (C.) I pray thee, good Horatio, wait upon him. + + [_Exit_ HORATIO, R.H.] + + Good Gertrude, set some watch over your son, + + [_Exit_ QUEEN, _attended_, R.H.] + + Strengthen your patience in our last night's speech;[46] + + [_To_ LAERTES.] + + We'll put the matter to the present push.-- + This grave shall have a living monument:[47] + An hour of quiet shortly shall we see; + Till then, in patience our proceeding be. + + [_The characters group round the grave._] + + +SCENE II.--HALL IN THE CASTLE. + + _Enter_ HAMLET _and_ HORATIO (R.H.) + + _Ham._ But I am very sorry, good Horatio, + That to Laertes I forgot myself; + For by the image of my cause,[48] I see + The portraiture of his. 
+ + _Hor._ Peace! who comes here? + + _Enter_ OSRIC (L.H.) + + _Osr._ Your lordship is right welcome back to Denmark. + +_Ham._ (C.) I humbly thank you, sir.--Dost know this water-fly?[49] + +_Hor._ (R.) No, my good lord. + +_Ham._ Thy state is the more gracious; for 'tis a vice to know him. + +_Osr._ (L.) Sweet lord, if your lordship were at leisure, I should +impart a thing to you from his majesty. + +_Ham._ I will receive it, sir, with all diligence of spirit.[50] Your +bonnet to his right use; 'tis for the head. + +_Osr._ I thank your lordship, 'tis very hot. + +_Ham._ No, believe me, 'tis very cold; the wind is northerly. + +_Osr._ It is indifferent cold, my lord, indeed. + +_Ham._ But yet, methinks it is very sultry and hot,[51] for my +complexion,-- + +_Osr._ Exceedingly, my lord; it is very sultry, as 'twere,--I cannot +tell how.--But, my lord, his majesty bade me signify to you, that he +has laid a great wager on your head: Sir, this is the matter,-- + +_Ham._ I beseech you, remember---- + + [HAMLET _moves him to put on his hat._] + +_Osr._ Nay, good my lord; for mine ease, in good faith.[52] Sir, here +is newly come to court Laertes; believe me, an absolute gentleman, +full of most excellent differences, of very soft society and great +showing:[53] Indeed, to speak feelingly of him,[54] he is the card or +calendar of gentry,[55] for you shall find in him the continent of +what part a gentleman would see.[56] + +_Ham._ What imports the nomination of this gentleman?[57] + +_Osr._ Of Laertes? + +_Ham._ Of him, sir. + +_Osr._ Sir, you are not ignorant of what excellence Laertes is-- + +_Ham._ I dare not confess that, lest I should compare with him in +excellence; but, to know a man well, were to know himself.[58] + +_Osr._ I mean, sir, for his weapon. + +_Ham._ What is his weapon? + +_Osr._ Rapier and dagger. + +_Ham._ That's two of his weapons: but, well. + +_Osr._ The king, sir, hath wagered with him six Barbary horses: +against the which he has imponed,[59] as I take it, six French +rapiers and poignards, with their assigns, as girdle, hangers,[60] or +so: Three of the carriages, in faith, are very dear to fancy, very +responsive to the hilts, most delicate carriages, and of very liberal +conceit.[61] + +_Ham._ What call you the carriages? + +_Osr._ The carriages, sir, are the hangers. + +_Ham._ The phrase would be more german[62] to the matter, if we could +carry cannon by our sides. + +_Osr._ The king, sir, hath laid, that in a dozen passes between +yourself and him, he shall not exceed you three hits; and it would +come to immediate trial, if your lordship would vouchsafe the +answer.[63] + +_Ham._ How if I answer no?[64] + +_Osr._ I mean, my lord, the opposition of your person in trial. + +_Ham._ Sir, it is the breathing time of day with me; let the foils be +brought, the gentleman willing, and the king hold his purpose, I will +win for him if I can; if not, I will gain nothing but my shame and +the odd hits. + +_Osr._ Shall I deliver you so? + +_Ham._ To this effect, sir; after what flourish your nature will. + +_Osr._ I commend my duty to your lordship. [_Exit_, L.H.] + +_Hor._ (R.) You will lose this wager, my lord. + +_Ham._ (C.) I do not think so; since he went into France, I have been +in continual practice; I shall win at the odds.[65] But thou wouldst +not think how ill all's here about my heart: but it is no matter. + +_Hor._ Nay, good my lord. + +_Ham._ It is but foolery; but it is such a kind of gain-giving,[66] +as would, perhaps, trouble a woman. 
+ +_Hor._ If your mind dislike any thing, obey it:[67] I will forestall +their repair hither, and say, you are not fit. + +_Ham._ Not a whit, we defy augury: there is a special providence in +the fall of a sparrow. + + [_Exeunt_, L.H.] + + +SCENE III.--ROOM IN THE CASTLE. + + KING _and_ QUEEN, _on a dais_, LAERTES (R.), LORDS (R.), + LADIES (L.), OSRIC (R.) _and_ Attendants, _with Foils, &c., + discovered_ (R.H.); _Tables_ (R. _and_ L.)-- + _Flourish of Trumpets._ + + _Enter_ HAMLET _and_ HORATIO (L.H.) + + _King._ Come, Hamlet, come, and take this hand from + me. + + _Ham._ (_offering his hand to_ LAERTES) Give me your + pardon, sir: I have done you wrong; + But pardon it, as you are a gentleman. + Let my disclaiming from a purpos'd evil + Free me so far in your most generous thoughts, + That I have shot my arrow o'er the house, + And hurt my brother. + + _Laer._ (R.) I am satisfied in nature, + Whose motive, in this case, should stir me most + To my revenge. + I do receive your offer'd love like love, + And will not wrong it. + + _Ham._ I embrace it freely: + And will this brother's wager frankly play. + Give us the foils. + + _Laer._ Come, one for me. + + _Ham._ I'll be your foil, Laertes: in mine ignorance + Your skill shall, like a star i'the darkest night, + Stick fiery off indeed.[68] + + _Laer._ You mock me, sir. + + _Ham._ No, by this hand. + + _King._ Give them the foils, young Osric. Cousin Hamlet, + You know the wager? + + _Ham._ Very well, my lord; + Your grace hath laid the odds o'the weaker side. + + _King._ I do not fear it; I have seen you both: + But since he's better'd,[69] we have therefore odds. + + _Laer._ This is too heavy, let me see another. + + _Ham._ This likes me well. These foils have all a length? + + _Osr._ Ay, my good lord. + + _King._ Set me the stoups of wine[70] upon that table.-- + + [Pages _exeunt_ R. _and_ L.] + + If Hamlet give the first or second hit, + Or quit[71] in answer to the third exchange, + Let all the battlements their ordnance fire; + The king shall drink to Hamlet's better breath; + And in the cup an union shall he throw,[72] + Richer than that which four successive kings + In Denmark's crown have worn. + + [Pages _return with wine._] + + Give me the cup; + And let the kettle[73] to the trumpet speak, + The trumpet to the cannoneer without, + The cannons to the heavens, the heaven to earth, + _Now the king drinks to Hamlet._--Come, begin; + And you, the judges, bear a wary eye. + + _Ham._ Come on, sir. + + _Laer._ Come, my lord. + + [_They play._] + + _Ham._ One. + + _Laer._ No. + + _Ham._ Judgment. + + _Osr._ A hit, a very palpable hit. + + _Laer._ Well:--again. + + _King._ Stay; give me drink. Hamlet, this pearl is thine; + + [_Drops poison into the goblet._] + + Here's to thy health. + + [_Pretends to drink._] + [_Trumpets sound; and cannon shot off within._] + + Give him the cup. + + _Ham._ I'll play this bout first; set it by awhile. + + [Page _places the goblet on table_, L.] + + Come. + Another hit; What say you? + + [_They play._] + + _Laer._ A touch, a touch, I do confess. + + _King._ Our son shall win. + + _Queen._ The Queen carouses to thy fortune, Hamlet.[74] + + _Ham._ Good madam!---- + + [_Trumpets sound._] + + _King._ Gertrude, do not drink. + + _Queen._ I have, my lord; I pray you, pardon me. + + _King._ It is the poison'd cup; it is too late. + + [_Aside._] + + _Laer._ I'll hit him now + And yet it is almost against my conscience. 
+ + [_Aside._] + + _Ham._ Come, for the third, Laertes: You do but dally; + I pray you, pass with your best violence; + I am afeard you make a wanton of me.[75] + + _Laer._ Say you so? come on. + + [_They play._] + + [LAERTES _wounds_ HAMLET; _then, in scuffling they change + Rapiers, and_ HAMLET _wounds_ LAERTES.] + + _King._ Part them; they are incensed. + + _Ham._ Nay, come, again. + + [_The_ QUEEN _falls back in her chair._] + + _Osr._ + + (_Supporting_ LAERTES, R.) + + Look to the queen there, ho! + + _Hor._ + + (_Supporting_ HAMLET, L.) + + How is it, my lord? + + _Osr._ How is't, Laertes? + + _Laer._ Why, as a woodcock to my own springe,[76] Osric; + I am justly killed with mine own treachery. + + _Ham._ How does the queen? + + _King._ She swoons to see them bleed. + + _Queen._ No, no, the drink, the drink,--O, my dear Hamlet,-- + The drink, the drink! I am poison'd. + + [_The_ QUEEN _is conveyed off the stage by her attendant_ + Ladies, _in a dying state_, L.H.U.E.] + + _Ham._ O villainy! Ho! let the doors be lock'd: + Treachery! seek it out. + + [LAERTES _falls._] + + _Laer._ (R.) It is here, Hamlet: Hamlet, thou art slain; + No medicine in the world can do thee good, + In thee there is not half an hour's life; + The treacherous instrument is in thy hand, + Unbated and envenom'd:[77] the foul practice[78] + Hath turn'd itself on me; lo, here I lie, + Never to rise again: Thy mother's poison'd: + I can no more: the king, the king's to blame. + + _Ham._ The point + Envenom'd too! Then, venom, to thy work. + Here, thou incestuous, murd'rous, damnèd Dane, + Follow my mother. + + [_Stabs the_ KING, _who is borne away by his attendants, + mortally wounded_, R.H.U.E.] + + _Laer._ He is justly serv'd; + Exchange forgiveness with me, noble Hamlet: + Mine and my father's death come not upon thee, + Nor thine on me! + + [_Dies._] + + _Ham._ (C.) Heaven make thee free of it! I follow thee. + You that look pale and tremble at this chance, + That are but mutes or audience to this act, + Had I but time (as this fell sergeant, death,[79] + Is strict in his arrest), O, I could tell you,-- + But let it be. Horatio, + Report me and my cause aright + To the unsatisfied. + + _Hor._ (L.) Never believe it: + I am more an antique Roman than a Dane: + Here's yet some liquor left. + + [_Seizing the goblet on table_, L.] + + _Ham._ As thou'rt a man,-- + Give me the cup: let go; by heaven, I'll have it. + + [_Dashes the goblet away._] + + O good Horatio, what a wounded name, + Things standing thus unknown, shall live behind me![80] + If thou didst ever hold me in thy heart, + Absènt thee from felicity awhile, + And in this harsh world draw thy breath in pain, + To tell my story.-- + O, I die, Horatio; + The potent poison quite o'er-crows my spirit;[81] + The rest is silence. + + [_Dies_, C., OSRIC _on his_ R., _and_ HORATIO _on his_ L.] + + _Dead March afar off._ + + _Curtain slowly descends._ + +THE END. + + + + +Notes + +Act V + + + [Footnote V.1: _Enter two Clowns_,] These characters are not in + the original story, but are introduced by Shakespeare.] + + [Footnote V.2: _Make her grave straight:_] _i.e._, straightways, + forthwith.] + + [Footnote V.3: _The crowner_] A corruption of coroner.] + + [Footnote V.4: _It must be se offendendo_;] A confusion of things + as well as of terms: used for _se defendendo_, a finding of the + jury in justifiable homicide.] 
+ + [Footnote V.5: _To act, to do, and to perform:_] Warburton says, + this is ridicule on scholastic divisions without distinction, and + of distinctions without difference.] + + [Footnote V.6: _Argal_,] A corruption of the Latin word, _ergo, + therefore_.] + + [Footnote V.7: _Delver._] _i.e._, a digger, one that opens the + ground with a spade.] + + [Footnote V.8: _If the man go to this water,--it is, will he, + nill he, he goes_,] Still floundering and confounding himself. He + means to represent it as a _wilful_ act, and of course without + any mixture of _nill_ or nolens in] it. Had he gone, as stated, + whether he _would or not_, it would not have been of his own + accord, or his act.] + + [Footnote V.9: _Crowner's-quest law._] Crowner's-quest is a + vulgar corruption of coroner's inquest.] + + [Footnote V.10: _Why, there thou say'st_] Say'st something, + speak'st to the purpose.] + + [Footnote V.11: _More than their even christian._] An old English + expression for fellow-christian.] + + [Footnote V.12: _Was he a gentleman?_] Mr. Douce says this is + intended as a ridicule upon heraldry.] + + [Footnote V.13: _Confess thyself----_] Admit, or by + acknowledgment pass sentence upon thyself, as a simpleton? + "Confess, and be hanged," was a proverbial sentence.] + + [Footnote V.14: _Tell me that, and unyoke._] Unravel this, and + your day's work is done, your team may then unharness.] + + [Footnote V.15: _Cudgel thy brains no more about it_;] _i.e._, + beat about thy brains no more.] + + [Footnote V.16: _A stoup of liquor._] A stoup is a jug.] + + [Footnote V.17: _In youth, when I did love, did love._] The three + stanzas sung here by the Grave-Digger, are extracted, with a + slight variation, from a little poem called _The Aged Lover + renounceth Love_, written by Henry Howard, Earl of Surrey, who + was beheaded in 1547. The song is to be found in Dr. Percy's + _Reliques of Ancient English Poetry_.] + + [Footnote V.18: _The hand of little employment hath the daintier + sense._] _i.e._, its "palm less dulled or staled."] + + [Footnote V.19: _But to play at loggats with them?_] A _loggat_ + is a small _log_, or piece of wood; a diminutive from _log_. + Hence _loggats_, as the name of an old game among the common + people, and one of those forbidden by a statute of the 33rd of + Henry VIII. A stake was fixed into the ground, and those who + played threw _loggats_ at it.] + + [Footnote V.20: _For and a shrouding sheet:_] For and is an + ancient expression, answering to _and eke, and likewise_.] + + [Footnote V.21: _Where be his quiddits now, his quillets_,] + Quiddits are subtilties; quillets are nice and frivolous + distinctions.] + + [Footnote V.22: _Knock him about the sconce_] _i.e._, head.] + + [Footnote V.23: _How absolute the knave is!_] Peremptory, + strictly and tyrannously precise.] + + [Footnote V.24: _We must speak by the card_,] The _card_ is the + mariner's compass. Properly the paper on which the points of the + wind are marked. Hence, _to speak by the card_, meant to speak + with great exactness; true to a point.] + + [Footnote V.25: _The very day that young Hamlet was born_,] It + would appear by this that Hamlet was thirty years old, and knew + Yorick well, who had been dead twenty-two years.] + + [Footnote V.26: _Favour_] Feature, countenance, or complexion.] + + [Footnote V.27: _'Twere to consider too curiously_,] Be pressing + the argument with too much critical nicety, to dwell upon mere + possibilities.] 
+ + [Footnote V.28: _Imperial Cæsar_,] In some edition it is + _imperious_ Cæsar. Imperious was a more ancient term, signifying + the same as imperial.] + + [Footnote V.29: _The winter's flaw!_] _i.e._, winter's blast.] + + [Footnote V.30: _Maimèd rites?_] Curtailed, imperfect.] + + [Footnote V.31: _Fordo its own life:_] Destroy.] + + [Footnote V.32: _'Twas of some estate._] _i.e._, of rank or + station.] + + [Footnote V.33: _Command o'ersways the order_,] The course which + ecclesiastical rules prescribe.] + + [Footnote V.34: _Shards_,] _i.e._, broken pots or tiles.] + + [Footnote V.35: _Virgin crants_,] _i.e._, virgin garlands. Nares, + in his Glossary, says that _crants_ is a German word, and + probably Icelandic.] + + [Footnote V.36: _Bringing home of bell and burial_,] Conveying to + her last home with these accustomed forms of the church, and this + sepulture in consecrated ground.] + + [Footnote V.37: _A requiem_,] A mass performed in Popish churches + for the rest of the soul of a person deceased.] + + [Footnote V.38: _Churlish priest_,] Churlish is, figuratively, + ill-humoured, ill-bred, uncourtly, "rustic and rude."] + + [Footnote V.39: _Ingenious sense_] Life and sense.] + + [Footnote V.40: _To o'ertop old Pelion_,] Pelion is one of a + lofty range of mountains in Thessaly. The giants, in their war + with the gods, are said to have attempted to heap Ossa and + Olympus on Pelion, in order to scale Heaven.] + + [Footnote V.41: _Outface me_] _i.e._, brave me.] + + [Footnote V.42: _Our ground_,] The earth about us.] + + [Footnote V.43: _Ossa_] A celebrated mountain in Thessaly, + connected with Pelion, and in the neighbourhood of Mount + Olympus.] + + [Footnote V.44: _Her golden couplets are disclos'd_,] To + disclose, was anciently used for to _hatch_. A pigeon never lays + more than two eggs.] + + [Footnote V.45: _The cat will mew, and dog, &c._] "Things have + their appointed course; nor have we power to divert it," may be + the sense here conveyed.] + + [Footnote V.46: _Strengthen your patience in our last night's + speech_;] Let the consideration of the topics then urged, confirm + your resolution taken of quietly waiting events a little longer.] + + [Footnote V.47: _This grave shall have a living monument:_] There + is an ambiguity in this phrase. It either means an _endurable_ + monument such as will outlive time, or it darkly hints at the + impending fate of Hamlet.] + + [Footnote V.48: _Image of my cause_,] Representation or + character.] + + [Footnote V.49: _Dost know this water-fly?_] Dr. Johnson remarks + that a _water-fly_ skips up and down upon the surface of the + water, without any apparent purpose or reason, and is thence the + proper emblem of a busy trifler.] + + [Footnote V.50: _All diligence of spirit._] "With the whole bent + of my mind." A happy phraseology; in ridicule, at the same time + that it was in conformity with the style of the airy, affected + insect that was playing round him.] + + [Footnote V.51: _Very sultry and hot_,] Hamlet is here playing + over the same farce with Osric which he had formerly done with + Polonius. The idea of this scene is evidently suggested by + Juvenal.] + + [Footnote V.52: _For mine ease, in good faith._] From + contemporary authors this appears to have been the ordinary + language of courtesy in our author's own time.] + + [Footnote V.53: _An absolute--a great showing:_] A finished + gentleman, full of various accomplishments, of gentle manners, + and very imposing appearance.] 
+ + [Footnote V.54: _To speak feelingly of him_,] With insight and + intelligence. + + [Footnote V.55: _Card or calendar of gentry_,] The card by which + a gentleman is to direct his course; the calendar by which he is + to choose his time, that what he does may be both excellent and + seasonable.] + + [Footnote V.56: _The continent of what part a gentleman would + see._] The word continent in this sense is frequently used by + Shakespeare; _i.e._, you shall find him _containing_ and + _comprising_ every quality which a _gentleman_ would desire to + _contemplate_ for imitation.] + + [Footnote V.57: _What imports the nomination, &c._] What is the + object of the introduction of this gentleman's name?] + + [Footnote V.58: _I dare not--lest I should compare--were to know + himself._] No one can have a perfect conception of the measure of + another's excellence, unless he shall himself come up to that + standard. Dr. Johnson says, I dare not pretend to know him, lest + I should pretend to an equality: no man can completely know + another, but by knowing himself, which is the utmost extent of + human wisdom.] + + [Footnote V.59: _He has imponed_,] _i.e._, to lay down as a stake + or wager. Impono.] + + [Footnote V.60: _Hangers_,] That part of the girdle or belt by + which the swords were suspended was, in our poet's time, called + the _hangers_.] + + [Footnote V.61: _Very dear to fancy--very liberal conceit._] Of + exquisite invention, well adapted to their hilts, and in their + conception rich and high fashioned.] + + [Footnote V.62: _More german_] More a-kin.] + + [Footnote V.63: _Vouchsafe the answer._] Condescend to answer, or + meet his wishes.] + + [Footnote V.64: _How if I answer, no?_] Reply.] + + [Footnote V.65: _I shall win at the odds._] I shall succeed with + the advantage that I am allowed.] + + [Footnote V.66: _Gain-giving_,] Misgiving.] + + [Footnote V.67: _If your mind, &c._] If you have any presentiment + of evil, yield to its suggestion.] + + [Footnote V.68: _Like a star i'the darkest night, stick fiery + off_] Be made by the strongest relief to stand brightly + prominent.] + + [Footnote V.69: _Better'd_,] He stands higher in estimation.] + + [Footnote V.70: _Stoups of wine_] Flagons of wine.] + + [Footnote V.71: _Quit in answer_] Make the wager _quit_, or so + far drawn.] + + [Footnote V.72: _An union shall he throw_,] _i.e._, a fine pearl. + To swallow a pearl in a draught seems to have been equally common + to royal and mercantile prodigality. It may be observed that + pearls were supposed to possess an exhilarating quality. It was + generally thrown into the drink as a compliment to some + distinguished guest, and the King in this scene, under the + pretence of throwing a pearl into the cup, drops some poisonous + drug into the wine.] + + [Footnote V.73: _Kettle_] _i.e._, kettle drum.] + + [Footnote V.74: _The Queen carouses to thy fortune, Hamlet._] + _i.e._, drinks to your success.] + + [Footnote V.75: _You make a wanton of me._] _i.e._, you trifle + with me as if you were playing with a child.] + + [Footnote V.76: _As a woodcock to my own springe._] I have run + into a springe like a woodcock, and into such a noose or trap as + a fool only would have fallen into; one of my own setting.] + + [Footnote V.77: _Unbated, and envenom'd:_] _i.e._, having a sharp + point envenomed with poison.] + + [Footnote V.78: _The foul practice_] _i.e._, the wicked trick + which I have practised.] 
+  [Footnote V.79: _Fell sergeant, death_,] _i.e._, cruel
+  sergeant--sergeant being an officer of the law.]
+
+  [Footnote V.80: _Live behind me!_] Survive me.]
+
+  [Footnote V.81: _Quite o'ercrows my spirit_;] Overpowers, exults
+  over; no doubt an image taken from the lofty carriage of a
+  victorious cock.]
diff --git a/python_tutorial_part_1_intro.ipynb b/python_tutorial_part_1_intro.ipynb
new file mode 100644
index 0000000..de6659c
--- /dev/null
+++ b/python_tutorial_part_1_intro.ipynb
@@ -0,0 +1,1378 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "# Quick Python Tutorial"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This tutorial should grow over time.\n",
+    "Python has a number of types. You need to be familiar with some of them as a start; you will learn about more of them as you go. Let's quickly investigate some of these here:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Integers and floats"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 231,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "22\n",
+      "33.33\n",
+      "I am an integer/int: 22\n",
+      "I am a float: 33.33\n"
+     ]
+    }
+   ],
+   "source": [
+    "#---------------------\n",
+    "# Integers and floats:\n",
+    "#---------------------\n",
+    "# You can use Python as a calculator; and when you do, you are interacting with numbers that may have\n",
+    "# \"int\" or \"float\" types. Let's print these with a \"print\" statement.\n",
+    "print(22) # an integer\n",
+    "print(33.33) # a float\n",
+    "\n",
+    "# You can print more than one object with the same print statement, if you use a \",\" (comma) in between.\n",
+    "# (Hint: Both the integer 22 and the float 33.33 are 'objects' in the Python language.\n",
+    "# They are objects of type 'int' and type 'float,' respectively.)\n",
+    "print \"I am an integer/int:\", 22 # an integer\n",
+    "print \"I am a float:\", 33.33 # a float"
+   ]
+  },
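+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A minimal, not-yet-executed sketch of the basic \"calculator\" operators:\n",
+    "print 3 + 4    # 7  (addition)\n",
+    "print 3 - 4    # -1 (subtraction)\n",
+    "print 3 * 4    # 12 (multiplication)\n",
+    "print 2 ** 3   # 8  (exponentiation)\n",
+    "print 7 % 3    # 1  (modulo: the remainder of a division)"
+   ]
+  },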
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1 2 3\n"
+     ]
+    }
+   ],
+   "source": [
+    "# If you use a comma after the print statement/function, it will suppress\n",
+    "# the newline character \"\\n\". You can see this better in a \"for\" loop.\n",
+    "l=[1, 2, 3]\n",
+    "for i in l:\n",
+    "    print i,"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "I am an int division: 0\n",
+      "I am a float division: 0.666666666667\n"
+     ]
+    }
+   ],
+   "source": [
+    "# You can perform operations on ints and floats,\n",
+    "# but be careful about the difference between int division and float division.\n",
+    "my_int= 22\n",
+    "my_new_int=33\n",
+    "my_float= 33.0\n",
+    "print \"I am an int division:\", my_int/my_new_int\n",
+    "print \"I am a float division:\", my_int/my_float\n",
+    "# (Hint: We assigned the numbers to some variables above; more about 'assignment' below.\n",
+    "# You can think of this as storing one thing inside another: it's like you put something in a box and\n",
+    "# you are now just looking at the box from outside. Another metaphor is simply that you gave each of\n",
+    "# the numbers a name and can now interact with the numbers using these names.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 234,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "2.0"
+      ]
+     },
+     "execution_count": 234,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# You can also use the \"float\" function:\n",
+    "20/float(10)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 256,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "22 S. Walnut St.\n",
+      "('This is the list', [1, 2, 3, 3, 4])\n",
+      "('This is the set', set([1, 2, 3, 4]))\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Side note:\n",
+    "# There are also other built-in functions for type-casting.\n",
+    "# Casting an int into a string:\n",
+    "address= str(22)+\" S. Walnut St.\"\n",
+    "print(address)\n",
+    "# Casting a list into a set (notice this also removes the duplicate 3):\n",
+    "num_list=[1, 2, 3, 3, 4]\n",
+    "print(\"This is the list\", num_list)\n",
+    "num_set=set(num_list)\n",
+    "print(\"This is the set\", num_set)"
+   ]
+  },
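+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A minimal, not-yet-executed sketch of how division and casting behave in Python 2:\n",
+    "print 7/2        # 3   -- int/int floors the result\n",
+    "print 7//2       # 3   -- \"//\" is explicit floor division (it works on floats too)\n",
+    "print 7/2.0      # 3.5 -- float division, as soon as one side is a float\n",
+    "print int(33.33) # 33  -- int() truncates the fractional part\n",
+    "# If you want \"/\" to always mean float division, you can opt in at the top of a file:\n",
+    "# from __future__ import division"
+   ]
+  },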
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "# Strings"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Hello world\n"
+     ]
+    }
+   ],
+   "source": [
+    "#--------\n",
+    "# String:\n",
+    "#--------\n",
+    "# The string type is for characters like \"Hello world\". We can print this string:\n",
+    "print \"Hello world\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Hello world\n"
+     ]
+    }
+   ],
+   "source": [
+    "# The above is called a print statement. We can assign the string to a variable.\n",
+    "greeting = \"Hello world\"\n",
+    "# We call the word \"greeting\" a \"variable\" and the string \"Hello world\" a value.\n",
+    "# What we did is \"assign\" the value \"Hello world\" to the variable \"greeting\".\n",
+    "# The \"=\" is called an operator and we use it for \"assignment\". (This is important!)\n",
+    "# We can now print \"greeting\":\n",
+    "print greeting"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The type of the value:  <type 'str'>\n",
+      "The length of the value:  14\n"
+     ]
+    }
+   ],
+   "source": [
+    "# For another example, we can assign another string value to another variable:\n",
+    "feeling=\"I love Python!\"\n",
+    "# Since you love Python, it loves you back and so gives you a number of \"built-in\" functions to work with.\n",
+    "# For more about these take a look here: https://docs.python.org/2/library/functions.html\n",
+    "# For example, the \"type()\" function tells us about the type of an object.\n",
+    "# Similarly, the \"len()\" function operates on some objects, like strings,\n",
+    "# and tells us about their length in characters:\n",
+    "print \"The type of the value: \", type(feeling)\n",
+    "print \"The length of the value: \", len(feeling)"
+   ]
+  },
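+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A minimal, not-yet-executed sketch of a few more things strings can do:\n",
+    "s=\"I love Python!\"\n",
+    "print s.upper()    # I LOVE PYTHON!\n",
+    "print s.split()    # ['I', 'love', 'Python!'] -- a list of the words\n",
+    "print s[0]         # I      -- strings are indexed from zero, like the lists below\n",
+    "print s[2:6]       # love   -- a \"slice\" from index 2 up to, but not including, 6\n",
+    "print s + \" Yes!\"  # I love Python! Yes! -- \"+\" concatenates strings\n",
+    "print [10, 20, 30, 40][1:3]   # [20, 30] -- slicing works the same way on lists"
+   ]
+  },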
an internal list, directly:\n", + "[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]][-1][-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 272, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 272, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Here's the same as above, a bit unfolded:\n", + "x=[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]]\n", + "internal=x[-1]\n", + "internal[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 275, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 275, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# One more example:\n", + "[\"Hello\", 88, 4.0, \"Hey there!\", [\"a\", \"z\"]][4][-2]" + ] + }, + { + "cell_type": "code", + "execution_count": 278, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3\n", + "This is the first item in the list, and its index is zero Apple\n", + "This is the last item in the list Facebook\n", + "Pay him 3000 $\n" + ] + } + ], + "source": [ + "# Length of a list, and more on indexing and slicing:\n", + "#----------------------------------------------------\n", + "# Similar to a string, you can get the length of a list:\n", + "tech_comp=[\"Apple\", \"Google\", \"Facebook\"]\n", + "print len(tech_comp)\n", + "# You can also slice from a list, using the bractets with an integer index.\n", + "# Notice: we start from index \"zero\".\n", + "print \"This is the first item in the list, and its index is zero\", tech_comp[0]\n", + "# You can also access a list from the end, with a minus index\n", + "print \"This is the last item in the list\", tech_comp[-1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is my first tuple: ('Tiger', 'Lion', 'Monkey')\n" + ] + } + ], + "source": [ + "# Tuples:\n", + "#--------\n", + "# A tuple is like a list, but its items are immutable/unchangeable.\n", + "# The syntax is different in that the tuple employs the parathenses \"()\"\n", + "my_animals_tuple=(\"Tiger\", \"Lion\", \"Monkey\")\n", + "print \"This is my first tuple: \", my_tuple" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "My list before changes: ['Tiger', 'Lion', 'Monkey']\n", + "My list after changes: ['Goat', 'Lion', 'Monkey']\n" + ] + } + ], + "source": [ + "# So you can change an item in a list, but not in a tuple:\n", + "my_animals_list=[\"Tiger\", \"Lion\", \"Monkey\"]\n", + "print \"My list before changes: \", my_animals_list\n", + "my_animals_list[0]=\"Goat\"\n", + "print \"My list after changes: \", my_animals_list\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This will give an error!!!\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'my_tuple' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + 
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Trying to change this will give an error:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!!!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmy_tuple\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"Goat\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'my_tuple' is not defined" + ] + } + ], + "source": [ + "# Trying to change this will give an error:\n", + "print \"This will give an error!!!\"\n", + "my_tuple[0]=\"Goat\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('z', 'b')\n" + ] + } + ], + "source": [ + "# You can cast a tuple to a list and make the change, and cast back to a tuple!\n", + "t=(\"a\", \"b\")\n", + "l=list(t)\n", + "l[0]=\"z\"\n", + "t=tuple(l)\n", + "print t" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Dictionaries" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Printing the 'students' dict: {777: 'Mary', 1111: 'John'}\n", + "The length of the 'students' dict is: 2\n", + "The value of the key 777 in the 'students' dict is: Mary\n", + "{777: 'Mary', 555: 'Maggi', 1111: 'John'}\n", + "Maggi\n" + ] + } + ], + "source": [ + "# A Ptthon dictionary is a \"mapping\" type. We map a \"key\" to a \"value\".\n", + "# For example, we can map a \"student_id\" to the \"name\" of a student.\n", + "# The sytax is simple: We use the curly braces, and delimit each key:value pair by the \"colon\"\n", + "students={1111: \"John\", 777: \"Mary\"}\n", + "print \"Printing the 'students' dict: \", students\n", + "print \"The length of the 'students' dict is: \", len(students)\n", + "# This is how you access the value of the key 777\n", + "print \"The value of the key 777 in the 'students' dict is: \", students[777]\n", + "students[555]=\"Maggi\"\n", + "print students\n", + "print students[555]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{777: 'Mary', 555: 'Maggi', 1111: 'John'}\n" + ] + } + ], + "source": [ + "my_dict={777: 'Mary', 555: 'Maggi', 1111: 'John'}\n", + "my_dict[\"XYZ\"]=\"___\"\n", + "#delete an item in the dict\n", + "del my_dict[\"XYZ\"]\n", + "\n", + "print my_dict" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['a', 'b', 'c']\n" + ] + }, + { + "data": { + "text/plain": [ + "'a'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l=[\"a\", \"b\"]\n", + "l.append(\"c\")\n", + "print l\n", + "l[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The value of the key 'CS' in the 'students' dict is: ['John', 'Alex', 'Amanda']\n" + ] + } + ], + "source": [ + 
"# A value in a Python dict can be a string, a list, another dict, etc.\n", + "# So, if \"Alex\" and \"Amanda\" are also students in CS, then we can have the value for the key 'CS' as a list:\n", + "students={\"CS\": [\"John\", \"Alex\", \"Amanda\"] , \"Business\": \"Mary\"}\n", + "# And now when we print, we get all the students in CS as a full list:\n", + "print \"The value of the key 'CS' in the 'students' dict is: \", students['CS']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Files: Just a quick note!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Python is very efficient with files and text processing. Let's see how we open and interact with a file\n", + "my_file=open(\"path_to_my_read_file\", \"r\").read() # Opens for reading and gets you the file content as a string\n", + "my_file=open(\"path_to_my_read_file\", \"r\").readlines() # Opens for reading and gets you the file content as a list\n", + "out_file=open(\"path_to_my_write_file\", \"w\") # Opens for writing\n", + "\n", + "# This is how to print/write to a file\n", + "s=\"This is a line\"\n", + "print>>out_file, s\n", + "out_file.write(s)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I'm a pretty line====\n" + ] + } + ], + "source": [ + "# get red of the \"\\n\" char\n", + "line=\"I'm a pretty line\\n\"\n", + "print line.strip()+\"====\"\n", + "print line[:-1]+\"====\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Loops" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "apple \t--> is a fruit!\n", + "strawberry \t--> is a fruit!\n", + "grapes \t--> is a fruit!\n" + ] + } + ], + "source": [ + "# Loops\n", + "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", + "for fruit in fruits:\n", + " print fruit, \"\\t--> is a fruit!\"\n", + "# \"\\t\" is the tab characters. Also be aware of \"\\n\", \"\\r\", and \"\\r\\n\" and how these work across different platforms." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "xz\n", + "bz\n" + ] + } + ], + "source": [ + "# Printing with conditionals:\n", + "# You can combine this with conditionals\n", + "my_dict={\"aaaa\":10, \"b\":20, \"bz\":20, \"xz\":55, \"ss\":55}\n", + "for k in my_dict:\n", + " if len(k)==2 and (k.endswith(\"z\") or k.startswith(\"x\")):\n", + " print k\n", + " #print my_dict[k]\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Functions" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello Dane !\n", + "Hello Chris !\n", + "Hello Lubna !\n", + "Hello Nora !\n", + "None\n" + ] + } + ], + "source": [ + "# This is a function that only prints something, but does not \n", + "# return anything\n", + "def greet(name):\n", + " print \"Hello\", name, \"!\"\n", + " \n", + "# This is how you call the function:\n", + "greet(\"Dane\")\n", + "greet(\"Chris\")\n", + "greet(\"Lubna\")\n", + "something =greet(\"Nora\")\n", + "print something" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# A sort of an 'empty' function, but it is a valid one!\n", + "# Basically, it's a place-filler and you can go back and fill in the \n", + "# body of the function.\n", + "def place_filler():\n", + " \"\"\"\n", + " This is a place filler.\n", + " \"\"\"\n", + " pass\n", + "\n", + "place_filler()" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<type 'tuple'> ('A coke is $', 2)\n" + ] + } + ], + "source": [ + "# A function that returns some value (returning two comma-separated values actually returns one tuple)\n", + "def coke_vending_machine():\n", + " return \"A coke is $\", 2 \n", + "\n", + "price = coke_vending_machine()\n", + "print type(price), price" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vikas ---> 105000.0\n", + "Revathi ---> 73500.0\n" + ] + } + ], + "source": [ + "# Create a function that calculates a raise for a database of employees\n", + "# key is name of an employee, value is base_salary\n", + "employees={\"Revathi\": 70000, \"Vikas\": 100000} \n", + "\n", + "def yearly_raise(base_salary):\n", + " return base_salary *1.05\n", + "\n", + "\n", + "for k in employees:\n", + " print k,\"--->\" , yearly_raise(employees[k])\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12\n", + "54\n" + ] + } + ], + "source": [ + "# Parallel processing of two lists:\n", + "list_a=[10, 50, 4]\n", + "list_b=[2, 4]\n", + "for i in range(len(list_a)):\n", + " try:\n", + " print list_a[i]+list_b[i]\n", + " except: # an IndexError once list_b runs out; a bare except is quick-and-dirty here\n", + " pass " + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(10, 2), (50, 4)]" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Python gives you a function \"zip\" that returns a list of tuples when passed two lists:\n", + 
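"# (Added note) zip stops at the shorter input, so unlike the try/except loop in the\n", + "# previous cell, no special handling of unequal lengths is needed:\n", +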
"x=[10, 50]\n", + "y=[2, 4]\n", + "zip(x,y)\n", + "# (Hint: There is also \"izip\". What does it do? Can you tell?)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12\n", + "54\n" + ] + } + ], + "source": [ + "# Process with \"zip\"\n", + "x=[10, 50]\n", + "y=[2, 4]\n", + "my_list_of_tuples=zip(x,y)\n", + "\n", + "for pair in my_list_of_tuples:\n", + " print pair[0]+pair[1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[12, 54]" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# On the fly, with list comprehension:\n", + "[i[0]+i[1] for i in zip(x,y)]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[2, 3, 4, 5, 6, 7, 8, 9]" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Above, we used \"range\". It returns a list, possibly starting from a given point \\\n", + "# (as from 2 below), up to but not including another:\n", + "range(2, 10)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# More on functions" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I\n", + "am\n", + "happy\n" + ] + } + ], + "source": [ + "# First look at this:\n", + "s=\"I am happy\"\n", + "words= s.split()\n", + "for w in words:\n", + " print w" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'and': 4, 'learning.': 1, '(the': 1, 'family': 1, 'be': 1, 'other.': 1, 'experience,': 1, 'unknown.Artificial': 1, 'number': 1, 'numeric': 1, 'connections': 1, 'as': 1, 'brain)': 1, 'are': 4, 'learning': 1, 'in': 1, 'based': 1, 'tuned': 1, 'nets': 1, 'networks': 3, '(ANNs)': 1, 'functions': 1, 'depend': 1, 'capable': 1, 'nervous': 1, 'exchange': 1, 'generally': 2, 'approximate': 1, 'artificial': 1, 'machine': 1, 'to': 2, 'systems': 2, 'which': 1, 'between': 1, 'adaptive': 1, '\"neurons\"': 1, 'inputs': 2, 'used': 1, 'that': 2, 'models': 1, 'each': 1, 'animals,': 1, 'particular': 1, 'The': 1, 'estimate': 1, 'by': 1, 'a': 2, 'on': 2, 'central': 1, 'cognitive': 1, 'neural': 4, 'of': 5, 'inspired': 1, 'presented': 1, 'messages': 1, 'science,': 1, 'interconnected': 1, 'large': 1, 'weights': 1, 'can': 2, 'have': 1, 'In': 1, 'biological': 1, 'the': 1, 'or': 1, 'making': 1}\n" + ] + } + ], + "source": [ + "# A function can \"return\" an object.\n", + "# We provide an example here\n", + "\n", + "# text below is from https://en.wikipedia.org/wiki/Artificial_neural_network\n", + "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n", + " are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n", + " in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n", + " number of inputs and are generally unknown.\"\n", + " \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n", + " exchange 
messages between each other. The connections have numeric weights that can be tuned based \\\n", + " on experience, making neural nets adaptive to inputs and capable of learning.\"]\n", + "def get_dict(sentences):\n", + " \"\"\"\n", + " arguments:\n", + " input: @sentences: a list of sentences\n", + " returns: a dictionary of the words in the sentences.\n", + " dict key is a word and value is word frequency\n", + " \"\"\"\n", + " word_freq={}\n", + " for sent in sentences:\n", + " words=sent.split()\n", + " for w in words:\n", + " if w in word_freq:\n", + " word_freq[w]+=1\n", + " else:\n", + " word_freq[w]=1\n", + " return word_freq\n", + " \n", + " \n", + "my_word_freq_dict=get_dict(sentences)\n", + "print my_word_freq_dict" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "set(['it.', 'sure', 'like', 'People', 'I', 'people', 'am', 'do!'])\n", + "set(['it.', 'sure', 'like', 'people', 'i', 'do!', 'am'])\n" + ] + } + ], + "source": [ + "# Sidenote on lowercasing and splitting\n", + "text=\"People like it. I am sure people do!\"\n", + "x1=set(text.split())\n", + "print(x1)\n", + "x2=set(text.lower().split())\n", + "print(x2)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "People like it. I am sure people do\n" + ] + } + ], + "source": [ + "# Cleaning with regex\n", + "text=\"People like it. I am sure people do!\"\n", + "import re\n", + "text=re.sub(\"!\", \"\", text)\n", + "print text\n" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('', 'Ooops! There was text here!')\n", + "('       ', 'Ooops! There was text here!')\n" + ] + } + ], + "source": [ + "# Cleaning with regex, more..\n", + "import re\n", + "# Remove *all* chars (any character, including white space)\n", + "text=\"People like it. I am sure people do!\"\n", + "text=re.sub(\".\", \"\", text)\n", + "print(text, \"Ooops! There was text here!\")\n", + "# Remove all *non-white space* chars (only the seven separating spaces survive)\n", + "text=\"People like it. I am sure people do!\"\n", + "text=re.sub(\"\\S+\", \"\", text)\n", + "print (text, \"Ooops! 
There was text here!\")" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Here're are your keys with values > 2:\n", + "****************************************\n", + "and 4\n", + "are 4\n", + "networks 3\n", + "neural 4\n", + "of 5\n", + "\n", + "Here're are your keys with values > 1 and keys of more than 5 chars:\n", + "**********************************************************************\n", + "networks 3\n", + "generally 2\n", + "systems 2\n", + "inputs 2\n", + "neural 4\n" + ] + } + ], + "source": [ + "# Here's the same function as above, but using python's \"defaultdict\"\n", + "from collections import defaultdict\n", + "sentences=[\"In machine learning and cognitive science, artificial neural networks (ANNs)\\\n", + " are a family of models inspired by biological neural networks (the central nervous systems of animals, \\\n", + " in particular the brain) and are used to estimate or approximate functions that can depend on a large\\\n", + " number of inputs and are generally unknown.\"\n", + " \"Artificial neural networks are generally presented as systems of interconnected \\\"neurons\\\" which \\\n", + " exchange messages between each other. The connections have numeric weights that can be tuned based \\\n", + " on experience, making neural nets adaptive to inputs and capable of learning.\"]\n", + "\n", + "def get_dict(sentences):\n", + " \"\"\"\n", + " arguments:\n", + " input: @sentences: a list of sentences\n", + " returns: a dictionary of the words in the sentences.\n", + " dict key is a word and value is word frequency\n", + " \"\"\"\n", + " word_freq=defaultdict(int)\n", + " for sent in sentences:\n", + " words=sent.split()\n", + " for w in words:\n", + " word_freq[w]+=1\n", + " return word_freq\n", + " \n", + "my_word_freq_dict=get_dict(sentences)\n", + "# Let's print only keys with values > 2 this time\n", + "print \"Here're are your keys with values > 2:\\n\", \"*\"*40\n", + "for k in my_word_freq_dict:\n", + " if my_word_freq_dict[k] > 2:\n", + " print k, my_word_freq_dict[k]\n", + "\n", + "# Let's print only keys whose length > 5 (so keys that have at least 6 characters/letters) and values > 1 \n", + "print \"\\nHere're are your keys with values > 1 and keys of more than 5 chars:\\n\", \"*\"*70\n", + "for k in my_word_freq_dict:\n", + " if my_word_freq_dict[k] > 1 and len(k) > 5:\n", + " print k, my_word_freq_dict[k]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Conditionals" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is such a short list!\n" + ] + } + ], + "source": [ + "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", + "if len(fruits) < 10:\n", + " print \"This is such a short list!\"" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I need an apple!\n" + ] + } + ], + "source": [ + "fruits=[\"apple\", \"strawberry\", \"grapes\"]\n", + "if \"apple\" not in fruits:\n", + " print \"No apples?!\"\n", + "else:\n", + " print \"I need an apple!\"\n", + "#---------------\n" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": 
"stream", + "text": [ + "No apples?!\n" + ] + } + ], + "source": [ + "fruits=[]\n", + "if \"apple\" not in fruits:\n", + " print \"No apples?!\"\n", + "elif \"banana\" in fruits:\n", + " print \"I need a banana!\"\n", + "else:\n", + " print \"I need an apple!\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# List Comprehension" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['s', 't', 'r', 'a', 'w', 'b', 'e', 'r', 'r', 'y']\n" + ] + } + ], + "source": [ + "dessert=\"strawberry\"\n", + "chars=[char for char in dessert]\n", + "print chars" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['s', 't', 'r', 'w', 'b', 'r', 'r']\n" + ] + } + ], + "source": [ + "# With a condition\n", + "dessert=\"strawberry\"\n", + "vowels=[\"a\", \"e\", \"y\"]\n", + "chars=[char for char in dessert if char not in vowels]\n", + "print chars" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": { + "collapsed": false, + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "k\n", + "i\n", + "n\n", + "g\n", + "['i', 'n', 'g']\n" + ] + } + ], + "source": [ + "for c in \"king\":\n", + " print c\n", + " \n", + "x=[c for c in \"king\" if c !=\"k\"]\n", + "print x" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_2.ipynb b/python_tutorial_part_2_nltk.ipynb similarity index 97% rename from python_tutorial_part_2.ipynb rename to python_tutorial_part_2_nltk.ipynb index 50a53f5..21fe6d4 100644 --- a/python_tutorial_part_2.ipynb +++ b/python_tutorial_part_2_nltk.ipynb @@ -150,6 +150,30 @@ "# Can you play with some texts, say from presidential candidates and tell us what you find?" 
] }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "6" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text=\"What interesting ways can you you use\".split()\n", + "len(text)\n", + "len(set(text))" + ] + }, { "cell_type": "code", "execution_count": 35, @@ -172,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -182,6 +206,7 @@ "output_type": "stream", "text": [ "None\n", + "['father', 'man', 'mother', 'woman']\n", "['father', 'man', 'mother', 'woman']\n" ] } ], "source": [ "# Pay attention to the difference between these!\n", "tokens=[\"man\", \"woman\", \"father\", \"mother\"]\n", - "print tokens.sort() # Returns \"None\", but sorts the list in place\n", + "x= tokens.sort() # Returns \"None\", but sorts the list in place\n", + "print x\n", + "print tokens\n", + "print sorted(tokens) # Returns the sorted list\n" ] }, @@ -266,6 +293,32 @@ "print(words)" ] }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "2" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l=[\"a\", \"b\"]\n", + "l[0]\n", + "\n", + "d= {\"Hi\": 44, \"Hello\": 2}\n", + "d[\"Hello\"]" + ] + }, { "cell_type": "code", "execution_count": 62, diff --git a/python_tutorial_part_3_rule_based_classifier.ipynb b/python_tutorial_part_3_rule_based_classifier.ipynb new file mode 100644 index 0000000..4e621d3 --- /dev/null +++ b/python_tutorial_part_3_rule_based_classifier.ipynb @@ -0,0 +1,343 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Rule-Based Sentiment Classifier" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code below was written in class, to teach simple Python concepts.\n", + "No further polishing is provided, and the code is not necessarily linear (so a cell does not necessarily follow from the previous cell)."
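, + "(Added note) The cells that follow first clean a sentiment lexicon (replacing underscores with spaces and stripping newlines), then score tweets by counting lexicon hits and printing a predicted label."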
+ ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally']\n" + ] + } + ], + "source": [ + "import re\n", + "def clean_lexicon():\n", + " positive_words= open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\", \"r\").readlines()\n", + " new_pos_list=[]\n", + " for i in positive_words[:5]:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_pos_list.append(i)\n", + " return new_pos_list\n", + "\n", + "my_positive_list= clean_lexicon()\n", + "print my_positive_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally', 'ethmoidal vein', 'unquestionableness', 'uselessness', 'top-quality', 'good-humoredness']\n", + "['twilight of the gods', 'rumbustious', 'screaming', 'grueling', 'inanimate', 'stern', 'changelessness', 'sugarless', 'order pseudoscorpiones', 'modest']\n" + ] + } + ], + "source": [ + "import re\n", + "\n", + "def clean_lexicon(lex_input):\n", + " lex_file_l=open(lex_input, \"r\").readlines()\n", + " \n", + " new_lex_file_l=[]\n", + " for i in lex_file_l:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_lex_file_l.append(i)\n", + " return new_lex_file_l\n", + "\n", + "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + "print my_positive_list[:10]\n", + "\n", + "my_negative_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + "print my_negative_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "## hello ##\n", + "##hello##\n", + "##hello##\n" + ] + } + ], + "source": [ + "import re\n", + "s = \" hello \"\n", + "print \"##\"+ s + \"##\"\n", + "s2= re.sub(\" \", \"\", s)\n", + "print \"##\"+ s2 + \"##\"\n", + "s3=s.strip()\n", + "print \"##\"+ s3 + \"##\"" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<type 'list'>\n", + "['couthie\\n', 'confidence_man\\n', 'definiteness\\n', 'changelessness\\n', 'morally\\n']\n", + "5440\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", 
+ "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n", + "Predicted Label= POSITIVE\n" + ] + } + ], + "source": [ + "lines=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", + "print type(lines) \n", + "print positive_words[0:5]\n", + "print len(positive_words)\n", + "positive_words=positive_words#+[\"good\"]\n", + "#print lines[0:5]\n", + "pos_counter=0\n", + "for line in lines:\n", + " for entry in positive_words:\n", + " #print i[:-1]\n", + " #break\n", + " if entry in line and \"never\" not in line:\n", + " #print i\n", + " pos_counter+=1\n", + " if pos_counter > 1:\n", + " print(\"Predicted Label= POSITIVE\")\n", + " #else: #pos_counter ==0:\n", + " # print(\"No posiotive words found\")\n", + " pos_counter=0\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "x=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", + "print type(x) " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "@Vivawonderwoman Got it! :)\n", + "Riri flow on Pandora..Christmas tree all done :)\n", + "Ah love feels so great :-)\n", + "@stephhybb okay maybe then but the other stores usually have better ones & okay yeah come after you're done at game stop!:) def!\n", + "@pammpimm haha gpp kok dek :) thanks yaaaa\n", + "@katelittle_ @soph_funari @kaseycreehan @kaylaaajx3 awe Kate I love youuu <333 :)\n" + ] + } + ], + "source": [ + "for l in x[:6]:\n", + " print l[:-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mixed tweet\t@chelvanderbaan well Idk if Thts good or bad for you ??? 
But its kinda nice to hear Haha :)\n", + "\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n", + "positive tweet\n" + ] + } + ], + "source": [ + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "for l in lines:\n", + " if \"good\" in l and \"bad\" in l:\n", + " print \"mixed tweet\\t\", l\n", + " elif \"bad\" in l:\n", + " print \"negative tweet\"\n", + " elif \"good\" in l:\n", + " print \"positive tweet\"\n", + " else:\n", + " pass #print \"\\t\\tobjective tweet\"" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1\n", + "2\n", + "3\n", + "4\n", + "5\n", + "6\n", + "7\n", + "8\n", + "9\n", + "10\n", + "11\n", + "12\n" + ] + } + ], + "source": [ + "import os # needed below: open() does not expand \"~\" by itself\n", + "x=open(os.path.expanduser(\"~/Desktop/posTweets.txt\"), \"r\").readlines()\n", + "lines=x[:201]\n", + "from collections import defaultdict\n", + "d=defaultdict(int)\n", + "\n", + "pos_lex=[\"good\", \"fantastic\", \"wonderful\", \"great\", \"fascinating\", \"pizza\"]\n", + "neg_lex=[\"bad\", \"ugly\", \"boring\", \"disgusting\", \"lazy\"]\n", + "\n", + "count_pos=0\n", + "\n", + "for l in lines:\n", + " for entry in pos_lex:\n", + " if entry in l:\n", + " count_pos+=1\n", + " print count_pos #entry, lines.index(l)\n", + " count_pos=0\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_4.ipynb b/python_tutorial_part_4_numpy.ipynb similarity index 100% rename from python_tutorial_part_4.ipynb rename to python_tutorial_part_4_numpy.ipynb diff --git a/python_tutorial_part_5_gensim.ipynb b/python_tutorial_part_5_gensim.ipynb new file mode 100644 index 0000000..378b0c7 --- /dev/null +++ b/python_tutorial_part_5_gensim.ipynb @@ -0,0 +1,549 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Gensim Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "10\n" + ] + } + ], + "source": [ + "# Corpora and Vector Spaces: https://radimrehurek.com/gensim/tut1.html\n", + "#----------------------------------------------------------------------\n", + "import logging\n", + "logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)\n", + "from gensim import corpora, models, similarities\n", + "# Let's create a small corpus where each document is one sentence as in the gensim tutorial from the link above.\n", + "# Let's take text from Wikipedia article on deep learning: https://en.wikipedia.org/wiki/Deep_learning\n", + "# We have 10 documents (each doc is a sentence).\n", + "documents = [\n", + " \"Deep learning (deep structured learning, hierarchical learning or deep machine learning)\\\n", + " is a branch of machine learning based on a set of algorithms that attempt to model \\\n", + " high-level abstractions in data by using multiple processing layers with complex \\\n", + " structures, 
or otherwise composed of multiple non-linear transformations.[1][2][3][4][5][6]\",\n", + " \"Deep learning is part of a broader family of machine learning methods based on learning representations of data.\",\n", + " \"An observation (e.g., an image) can be represented in many ways such as a vector of intensity values per pixel,\\\n", + " or in a more abstract way as a set of edges, regions of particular shape, etc. Some representations make it \\\n", + " easier to learn tasks (e.g., face recognition or facial expression recognition[7]) \\\n", + " from examples. One of the promises of deep learning is replacing handcrafted features \\\n", + " with efficient algorithms for unsupervised or semi-supervised feature learning and hierarchical \\\n", + " feature extraction.[8]\",\n", + " \"Deep learning is part of a broader family of machine learning methods based on learning \\\n", + " representations of data.\",\n", + " \"An observation (e.g., an image) can be represented in many ways such as a vector of intensity\\\n", + " values per pixel, or in a more abstract way as a set of edges, regions of particular shape, etc.\",\n", + " \"Some representations make it easier to learn tasks (e.g., face recognition or facial expression recognition[7]) from examples.\",\n", + " \"One of the promises of deep learning is replacing handcrafted features with efficient algorithms for unsupervised or semi-supervised \\\n", + " feature learning and hierarchical feature extraction.[8]\",\n", + " \"Research in this area attempts to make better representations and create models to learn these representations\\\n", + " from large-scale unlabeled data.\",\n", + " \"Some of the representations are inspired by advances in neuroscience and are loosely based on interpretation of information processing\\\n", + " and communication patterns in a nervous system, such as neural coding which attempts to define a relationship between various stimuli \\\n", + " and associated neuronal responses in the brain.[9]\",\n", + " \"Various deep learning architectures such as deep neural networks, convolutional deep neural networks, \\\n", + " deep belief networks and recurrent neural networks have been applied to fields like computer vision, automatic\\\n", + " speech recognition, natural language processing, audio recognition and bioinformatics where they have been shown to produce state-of-the-art\\\n", + " results on various tasks.\"]\n", + "\n", + "print len(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'i', u'me', u'my', u'myself', u'we', u'our', u'ours', u'ourselves', u'you', u'your', u'yours', u'yourself', u'yourselves', u'he', u'him', u'his', u'himself', u'she', u'her', u'hers', u'herself', u'it', u'its', u'itself', u'they', u'them', u'their', u'theirs', u'themselves', u'what', u'which', u'who', u'whom', u'this', u'that', u'these', u'those', u'am', u'is', u'are', u'was', u'were', u'be', u'been', u'being', u'have', u'has', u'had', u'having', u'do', u'does', u'did', u'doing', u'a', u'an', u'the', u'and', u'but', u'if', u'or', u'because', u'as', u'until', u'while', u'of', u'at', u'by', u'for', u'with', u'about', u'against', u'between', u'into', u'through', u'during', u'before', u'after', u'above', u'below', u'to', u'from', u'up', u'down', u'in', u'out', u'on', u'off', u'over', u'under', u'again', u'further', u'then', u'once', u'here', u'there', u'when', u'where', u'why', u'how', 
u'all', u'any', u'both', u'each', u'few', u'more', u'most', u'other', u'some', u'such', u'no', u'nor', u'not', u'only', u'own', u'same', u'so', u'than', u'too', u'very', u's', u't', u'can', u'will', u'just', u'don', u'should', u'now']\n" + ] + } + ], + "source": [ + "# Let's remove common words like \"a\", \"the\", etc. in English.\n", + "# These are called stop words and we can use nltk for a list of these in English\n", + "import nltk\n", + "from nltk.corpus import stopwords\n", + "stopwords= stopwords.words('english')\n", + "print stopwords" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[['deep', 'learning', 'deep', 'structured', 'learning', 'hierarchical', 'learning', 'deep', 'machine', 'learning', 'branch', 'machine', 'learning', 'based', 'set', 'algorithms', 'attempt', 'model', 'high', 'level', 'abstractions', 'data', 'using', 'multiple', 'processing', 'layers', 'complex', 'structures', 'otherwise', 'composed', 'multiple', 'non', 'linear', 'transformations', '1', '2', '3', '4', '5', '6']]\n" + ] + } + ], + "source": [ + "# We will need to lowercase text (sometimes we shouldn't do that naively if we care for things like\\\n", + "# named entities, which start with uppercase).\n", + "# NLTK has a number of tokenization options here: http://www.nltk.org/api/nltk.tokenize.html\n", + "# Especially note that NLTK also supports Twitter tokenization, which will be useful for us\n", + "# Look at this line from the link above:\n", + "# from nltk.tokenize import TweetTokenizer\n", + "#-----------------------------------------\n", + "from nltk.tokenize import RegexpTokenizer\n", + "tokenizer = RegexpTokenizer(r'\\w+')\n", + "texts= [[w for w in tokenizer.tokenize(document.lower()) if w not in stopwords] for document in documents]\n", + "print texts[:1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "defaultdict(<type 'int'>, {'interpretation': 1, 'results': 1, 'brain': 1, 'attempts': 2, 'broader': 2, 'networks': 4, 'layers': 1, 'machine': 4, 'based': 4, 'nervous': 1, 'state': 1, 'better': 1, '4': 1, '8': 2, 'pixel': 2, 'non': 1, 'advances': 1, 'facial': 2, 'using': 1, 'like': 1, 'semi': 2, 'level': 1, 'fields': 1, 'loosely': 1, 'shape': 2, 'large': 1, 'vector': 2, 'neuronal': 1, 'automatic': 1, 'vision': 1, 'set': 3, 'art': 1, 'methods': 2, 'intensity': 2, 'computer': 1, 'examples': 2, 'recognition': 6, 'responses': 1, 'shown': 1, 'scale': 1, 'ways': 2, 'per': 2, 'research': 1, 'replacing': 2, '3': 1, 'various': 3, '7': 2, 'linear': 1, 'processing': 3, 'represented': 2, 'g': 4, 'many': 2, 'inspired': 1, 'abstractions': 1, 'etc': 2, 'produce': 1, 'supervised': 2, 'expression': 2, 'otherwise': 1, 'composed': 1, 'tasks': 3, 'features': 2, 'family': 2, 'communication': 1, 'image': 2, 'coding': 1, 'natural': 1, 'one': 2, 'learning': 16, 'neuroscience': 1, 'transformations': 1, 'area': 1, 'create': 1, 'structured': 1, 'system': 1, 'extraction': 2, '2': 1, 'way': 2, '6': 1, 'structures': 1, 'define': 1, 'convolutional': 1, 'relationship': 1, 'hierarchical': 3, 'particular': 2, 'e': 4, 'applied': 1, 'language': 1, 'neural': 4, 'easier': 2, 'regions': 2, 'values': 2, 'learn': 3, 'promises': 2, 'associated': 1, 'abstract': 2, 'speech': 1, 'deep': 11, 'high': 1, 'information': 1, 'efficient': 2, 'make': 3, 'recurrent': 1, 'feature': 4, '1': 1, 
'belief': 1, 'complex': 1, '5': 1, 'branch': 1, '9': 1, 'handcrafted': 2, 'multiple': 2, 'unlabeled': 1, 'models': 1, 'edges': 2, 'architectures': 1, 'bioinformatics': 1, 'representations': 7, 'data': 4, 'attempt': 1, 'observation': 2, 'unsupervised': 2, 'stimuli': 1, 'face': 2, 'patterns': 1, 'part': 2, 'algorithms': 3, 'model': 1, 'audio': 1})\n" + ] + } + ], + "source": [ + "from collections import defaultdict\n", + "word_freq=defaultdict(int)\n", + "from itertools import groupby\n", + "for text in texts:\n", + " for w in text:\n", + " word_freq[w]+=1\n", + "print word_freq" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('1', 1), ('2', 1), ('3', 1), ('4', 1), ('5', 1), ('6', 1), ('7', 2), ('8', 2), ('9', 1), ('abstract', 2), ('abstractions', 1), ('advances', 1), ('algorithms', 3), ('applied', 1), ('architectures', 1), ('area', 1), ('art', 1), ('associated', 1), ('attempt', 1), ('attempts', 2), ('audio', 1), ('automatic', 1), ('based', 4), ('belief', 1), ('better', 1), ('bioinformatics', 1), ('brain', 1), ('branch', 1), ('broader', 2), ('coding', 1), ('communication', 1), ('complex', 1), ('composed', 1), ('computer', 1), ('convolutional', 1), ('create', 1), ('data', 4), ('deep', 11), ('define', 1), ('e', 4), ('easier', 2), ('edges', 2), ('efficient', 2), ('etc', 2), ('examples', 2), ('expression', 2), ('extraction', 2), ('face', 2), ('facial', 2), ('family', 2), ('feature', 4), ('features', 2), ('fields', 1), ('g', 4), ('handcrafted', 2), ('hierarchical', 3), ('high', 1), ('image', 2), ('information', 1), ('inspired', 1), ('intensity', 2), ('interpretation', 1), ('language', 1), ('large', 1), ('layers', 1), ('learn', 3), ('learning', 16), ('level', 1), ('like', 1), ('linear', 1), ('loosely', 1), ('machine', 4), ('make', 3), ('many', 2), ('methods', 2), ('model', 1), ('models', 1), ('multiple', 2), ('natural', 1), ('nervous', 1), ('networks', 4), ('neural', 4), ('neuronal', 1), ('neuroscience', 1), ('non', 1), ('observation', 2), ('one', 2), ('otherwise', 1), ('part', 2), ('particular', 2), ('patterns', 1), ('per', 2), ('pixel', 2), ('processing', 3), ('produce', 1), ('promises', 2), ('recognition', 6), ('recurrent', 1), ('regions', 2), ('relationship', 1), ('replacing', 2), ('representations', 7), ('represented', 2), ('research', 1), ('responses', 1), ('results', 1), ('scale', 1), ('semi', 2), ('set', 3), ('shape', 2), ('shown', 1), ('speech', 1), ('state', 1), ('stimuli', 1), ('structured', 1), ('structures', 1), ('supervised', 2), ('system', 1), ('tasks', 3), ('transformations', 1), ('unlabeled', 1), ('unsupervised', 2), ('using', 1), ('values', 2), ('various', 3), ('vector', 2), ('vision', 1), ('way', 2), ('ways', 2)])\n" + ] + } + ], + "source": [ + "# Side note: OrderedDict in Python\n", + "# Take a look at the documentation of the Python collections module: \n", + "# https://docs.python.org/2/library/collections.html\n", + "from collections import OrderedDict\n", + "# dictionary sorted by key\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: t[0]))" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('learning', 16), ('deep', 11), ('representations', 7), ('recognition', 6), ('networks', 4), ('machine', 4), ('based', 4), ('g', 4), ('e', 4), ('neural', 4), ('feature', 4), 
('data', 4), ('set', 3), ('various', 3), ('processing', 3), ('tasks', 3), ('hierarchical', 3), ('learn', 3), ('make', 3), ('algorithms', 3), ('attempts', 2), ('broader', 2), ('8', 2), ('pixel', 2), ('facial', 2), ('semi', 2), ('shape', 2), ('vector', 2), ('methods', 2), ('intensity', 2), ('examples', 2), ('ways', 2), ('per', 2), ('replacing', 2), ('7', 2), ('represented', 2), ('many', 2), ('etc', 2), ('supervised', 2), ('expression', 2), ('features', 2), ('family', 2), ('image', 2), ('one', 2), ('extraction', 2), ('way', 2), ('particular', 2), ('easier', 2), ('regions', 2), ('values', 2), ('promises', 2), ('abstract', 2), ('efficient', 2), ('handcrafted', 2), ('multiple', 2), ('edges', 2), ('observation', 2), ('unsupervised', 2), ('face', 2), ('part', 2), ('interpretation', 1), ('results', 1), ('brain', 1), ('layers', 1), ('nervous', 1), ('state', 1), ('better', 1), ('4', 1), ('non', 1), ('advances', 1), ('using', 1), ('like', 1), ('level', 1), ('fields', 1), ('loosely', 1), ('large', 1), ('neuronal', 1), ('automatic', 1), ('vision', 1), ('art', 1), ('computer', 1), ('responses', 1), ('shown', 1), ('scale', 1), ('research', 1), ('3', 1), ('linear', 1), ('inspired', 1), ('abstractions', 1), ('produce', 1), ('otherwise', 1), ('composed', 1), ('communication', 1), ('coding', 1), ('natural', 1), ('neuroscience', 1), ('transformations', 1), ('area', 1), ('create', 1), ('structured', 1), ('system', 1), ('2', 1), ('6', 1), ('structures', 1), ('define', 1), ('convolutional', 1), ('relationship', 1), ('applied', 1), ('language', 1), ('associated', 1), ('speech', 1), ('high', 1), ('information', 1), ('recurrent', 1), ('1', 1), ('belief', 1), ('complex', 1), ('5', 1), ('branch', 1), ('9', 1), ('unlabeled', 1), ('models', 1), ('architectures', 1), ('bioinformatics', 1), ('attempt', 1), ('stimuli', 1), ('patterns', 1), ('model', 1), ('audio', 1)])\n" + ] + } + ], + "source": [ + "# dictionary sorted by value, in reverse order\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: t[1], reverse=True))" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('transformations', 1), ('representations', 7), ('interpretation', 1), ('bioinformatics', 1), ('communication', 1), ('convolutional', 1), ('architectures', 1), ('abstractions', 1), ('neuroscience', 1), ('relationship', 1), ('hierarchical', 3), ('unsupervised', 2), ('recognition', 6), ('represented', 2), ('information', 1), ('handcrafted', 2), ('observation', 2), ('processing', 3), ('supervised', 2), ('expression', 2), ('structured', 1), ('extraction', 2), ('structures', 1), ('particular', 2), ('associated', 1), ('algorithms', 3), ('automatic', 1), ('intensity', 2), ('responses', 1), ('replacing', 2), ('otherwise', 1), ('efficient', 2), ('recurrent', 1), ('unlabeled', 1), ('attempts', 2), ('networks', 4), ('advances', 1), ('neuronal', 1), ('computer', 1), ('examples', 2), ('research', 1), ('inspired', 1), ('composed', 1), ('features', 2), ('learning', 16), ('language', 1), ('promises', 2), ('abstract', 2), ('multiple', 2), ('patterns', 1), ('results', 1), ('broader', 2), ('machine', 4), ('nervous', 1), ('loosely', 1), ('methods', 2), ('various', 3), ('produce', 1), ('natural', 1), ('applied', 1), ('regions', 2), ('feature', 4), ('complex', 1), ('attempt', 1), ('stimuli', 1), ('layers', 1), ('better', 1), ('facial', 2), ('fields', 1), ('vector', 2), ('vision', 1), ('linear', 1), ('family', 2), ('coding', 
1), ('create', 1), ('system', 1), ('define', 1), ('neural', 4), ('easier', 2), ('values', 2), ('speech', 1), ('belief', 1), ('branch', 1), ('models', 1), ('brain', 1), ('based', 4), ('state', 1), ('pixel', 2), ('using', 1), ('level', 1), ('shape', 2), ('large', 1), ('shown', 1), ('scale', 1), ('tasks', 3), ('image', 2), ('learn', 3), ('edges', 2), ('model', 1), ('audio', 1), ('like', 1), ('semi', 2), ('ways', 2), ('many', 2), ('area', 1), ('deep', 11), ('high', 1), ('make', 3), ('data', 4), ('face', 2), ('part', 2), ('non', 1), ('set', 3), ('art', 1), ('per', 2), ('etc', 2), ('one', 2), ('way', 2), ('4', 1), ('8', 2), ('3', 1), ('7', 2), ('g', 4), ('2', 1), ('6', 1), ('e', 4), ('1', 1), ('5', 1), ('9', 1)])\n" + ] + } + ], + "source": [ + "# dictionary sorted by length of the key string, in reverse order (So you get longer keys first)\n", + "print OrderedDict(sorted(word_freq.items(), key=lambda t: len(t[0]), reverse=True))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[set(['algorithms',\n", + " 'based',\n", + " 'data',\n", + " 'deep',\n", + " 'hierarchical',\n", + " 'learning',\n", + " 'machine',\n", + " 'multiple',\n", + " 'processing',\n", + " 'set'])]\n" + ] + } + ], + "source": [ + "# Let's remove words of freq < 2 and keep only unique words, using a set\n", + "texts = [set([w for w in text if word_freq[w] > 1]) for text in texts]\n", + "from pprint import pprint\n", + "pprint(texts[:1])" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dictionary(60 unique tokens: [u'promises', u'set', u'features', u'family', u'image']...)\n" + ] + } + ], + "source": [ + "# Let's represent each document as a bag-of-words, where each word is assigned a unique integer id\\\n", + "dictionary = corpora.Dictionary(texts)\n", + "print dictionary\n", + "# You can save this dictionary to disk for future reference, using gensim:\n", + "# dictionary.save('/tmp/word_freq.dict') # " + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{u'promises': 15, u'set': 0, u'features': 17, u'family': 12, u'image': 18, u'deep': 3, u'one': 19, u'shape': 20, u'tasks': 16, u'examples': 22, u'broader': 13, u'networks': 59, u'recognition': 23, u'methods': 14, u'regions': 28, u'based': 1, u'etc': 49, u'efficient': 21, u'make': 25, u'feature': 26, u'per': 27, u'machine': 4, u'extraction': 29, u'vector': 51, u'various': 57, u'supervised': 52, u'7': 30, u'8': 32, u'abstract': 34, u'handcrafted': 35, u'attempts': 56, u'multiple': 8, u'way': 36, u'replacing': 37, u'processing': 2, u'g': 44, u'hierarchical': 5, u'facial': 39, u'particular': 40, u'represented': 41, u'representations': 10, u'data': 9, u'values': 46, u'e': 42, u'observation': 43, u'semi': 31, u'unsupervised': 45, u'many': 33, u'edges': 38, u'neural': 58, u'intensity': 47, u'face': 48, u'ways': 24, u'easier': 50, u'part': 11, u'algorithms': 6, u'learning': 7, u'learn': 53, u'expression': 54, u'pixel': 55}\n" + ] + } + ], + "source": [ + "# You can get each word and its token id:\n", + "print(dictionary.token2id)" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": 
"stream", + "text": [ + "[(3, 2), (7, 2)]\n" + ] + } + ], + "source": [ + "# Let's add a new document and get a sparse vector of it using gensim's \"doc2bow\" \\\n", + "# dictionary attribute:\n", + "new_doc= \"Deep learning? I like deep learning a lot.\"\n", + "tokenized_and_split_doc =tokenizer.tokenize(new_doc.lower())\n", + "new_vec = dictionary.doc2bow(tokenized_and_split_doc) \n", + "# Only the words deep (id 3) and learning (id 7)\n", + "# occur in our previous dictionary, and each of these occur twice in this new document\n", + "print new_vec" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# What does the sparse vector [(3, 2), (7, 2)] mean?\n", + "# Well, all it means is that it has two words, \"deep\" and \"learning\", ids 3 and 7, respectively\n", + "# and that each of them occurs twice in this new_vec vector. This should be clear to you by now." + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[(0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1)], [(1, 1), (3, 1), (4, 1), (7, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1)], [(0, 1), (3, 1), (5, 1), (6, 1), (7, 1), (10, 1), (15, 1), (16, 1), (17, 1), (18, 1), (19, 1), (20, 1), (21, 1), (22, 1), (23, 1), (24, 1), (25, 1), (26, 1), (27, 1), (28, 1), (29, 1), (30, 1), (31, 1), (32, 1), (33, 1), (34, 1), (35, 1), (36, 1), (37, 1), (38, 1), (39, 1), (40, 1), (41, 1), (42, 1), (43, 1), (44, 1), (45, 1), (46, 1), (47, 1), (48, 1), (49, 1), (50, 1), (51, 1), (52, 1), (53, 1), (54, 1), (55, 1)], [(1, 1), (3, 1), (4, 1), (7, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1)], [(0, 1), (18, 1), (20, 1), (24, 1), (27, 1), (28, 1), (33, 1), (34, 1), (36, 1), (38, 1), (40, 1), (41, 1), (42, 1), (43, 1), (44, 1), (46, 1), (47, 1), (49, 1), (51, 1), (55, 1)], [(10, 1), (16, 1), (22, 1), (23, 1), (25, 1), (30, 1), (39, 1), (42, 1), (44, 1), (48, 1), (50, 1), (53, 1), (54, 1)], [(3, 1), (5, 1), (6, 1), (7, 1), (15, 1), (17, 1), (19, 1), (21, 1), (26, 1), (29, 1), (31, 1), (32, 1), (35, 1), (37, 1), (45, 1), (52, 1)], [(9, 1), (10, 1), (25, 1), (53, 1), (56, 1)], [(1, 1), (2, 1), (10, 1), (56, 1), (57, 1), (58, 1)], [(2, 1), (3, 1), (7, 1), (16, 1), (23, 1), (57, 1), (58, 1), (59, 1)]]\n" + ] + } + ], + "source": [ + "# Then you get a sparse vector representation for each document.\n", + "# Remember, each word is represented as an integer and the code \n", + "corpus = [dictionary.doc2bow(text) for text in texts]\n", + "print corpus" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is how you save the dict to desk for later use, using gensim:\n", + "# corpora.MmCorpus.serialize('/tmp/dictionary.mm', corpus) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# To be continued" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Memory friendly iteration over a corpus using Python's \"yield\".\n", + "# Function from tutorial at: https://radimrehurek.com/gensim/tut1.html\n", + "class MyCorpus(object):\n", + " def __iter__(self):\n", + " for line in open('mycorpus.txt'):\n", + " # assume there's one document per line, 
tokens separated by whitespace\n", + " yield dictionary.doc2bow(line.lower().split())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Corpus Streaming & Formats" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Take a look at the parts under the same names from the gensim tutorial at:\n", + "# https://radimrehurek.com/gensim/tut1.html\n", + "# You're now pretty much up and running with gensim. Congrats!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Transformations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Let's get, e.g., a tf*idf (https://en.wikipedia.org/wiki/Tf%E2%80%93idf) transformation of a document" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TfidfModel(num_docs=10, num_nnz=145)\n" + ] + } + ], + "source": [ + "tfidf = models.TfidfModel(corpus) # step 1 -- initialize a model\n", + "print tfidf" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 0.31348023222449883), (1, 0.23857601299713876), (2, 0.31348023222449883), (3, 0.13300444544014492), (4, 0.31348023222449883), (5, 0.31348023222449883), (6, 0.31348023222449883), (7, 0.13300444544014492), (8, 0.5995275865658466), (9, 0.23857601299713876)]\n" + ] + } + ], + "source": [ + "# Now we can apply the transformation to the whole corpus:\n", + "corpus_tfidf = tfidf[corpus]\n", + "for doc in corpus_tfidf[:1]: # Only printing first document transformation\n", + " print doc" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[u'0.262*\"family\" + 0.262*\"broader\" + 0.262*\"methods\" + 0.262*\"part\" + 0.261*\"machine\" + 0.256*\"data\" + 0.248*\"based\" + 0.187*\"attempts\" + 0.173*\"processing\" + 0.170*\"representations\"']" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2) # initialize an LSI transformation\n", + "corpus_lsi = lsi[corpus_tfidf] # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi\n", + "lsi.print_topics(1)" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 0.48641308269852679), (1, 0.11579771159401867)]\n", + "[(0, 0.71628142228181579), (1, 0.59038188262621183)]\n", + "[(0, 0.56480415960439878), (1, -0.78758599492475101)]\n", + "[(0, 0.71628142228181579), (1, 0.59038188262621183)]\n", + "[(0, 0.33106538645556027), (1, -0.56392050838027818)]\n", + "[(0, 0.35299506797325275), (1, -0.42198478055514865)]\n", + "[(0, 0.33026858639927253), (1, -0.40718497786487473)]\n", + "[(0, 0.37419828239791864), (1, -0.025902785063798527)]\n", + "[(0, 0.39929537547357097), (1, 0.086715418602822544)]\n", + "[(0, 0.32548779377635029), (1, -0.037026269019385064)]\n" + ] + } + ], + "source": [ + "for doc in corpus_lsi:\n", + " print doc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": 
true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb new file mode 100644 index 0000000..bad8009 --- /dev/null +++ b/python_tutorial_part_6_vector_space.ipynb @@ -0,0 +1,836 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# A Vector Space Model, with scikit-learn" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is code to build a vector space model, with SVMs on Andrew Maas' \n", + "# distribution of movie review sentiment data." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25000\n", + "200\n", + "200\n" + ] + } + ], + "source": [ + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "7142\n", + "6994\n" + ] + } + ], + "source": [ + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " if w not in word_space: # guard added: re-assigning a repeated word would let it collide with another word's index\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0\n",
+ "200\n",
+ "200\n"
+ ]
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "def get_sparse_vec(data_point, space):\n",
+ "    # create an empty (all-zeros) vector with one slot per word in the space\n",
+ "    sparse_vec = np.zeros((len(space)))\n",
+ "    for w in set(data_point.words):\n",
+ "        # use exception handling so this function can also be used to vectorize\n",
+ "        # data with words not in train (i.e., test and dev data); since space is\n",
+ "        # a plain dict, an unseen word raises a KeyError, which we simply skip\n",
+ "        try:\n",
+ "            sparse_vec[space[w]]=1\n",
+ "        except KeyError:\n",
+ "            continue\n",
+ "    return sparse_vec\n",
+ "\n",
+ "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n",
+ "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n",
+ "#test_vecs= get_sparse_vectors(test_data, word_space)\n",
+ "\n",
+ "#print train_vecs, test_vecs[0]\n",
+ "# note: train_data was already cut down to 200 items above, so this slice is empty\n",
+ "print len(train_data[12500:12600])\n",
+ "print len(train_vecs)\n",
+ "print len(test_vecs)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n",
+ "200\n",
+ "200\n"
+ ]
+ }
+ ],
+ "source": [
+ "# We should usually get tags automatically based on the input data file.\n",
+ "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n",
+ "# negative/0.0; then the next 12500 are positive and the fourth chunk is negative.\n",
+ "# So basically train_data has 25K (with the first half positive and the second half negative)\n",
+ "# and test_data with the same setup for class label.\n",
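The comment this cell opens with says the tags should really be derived from the file automatically. Under the same positional assumption (first 12500 documents of each 25K split positive, the next 12500 negative), here is a hedged sketch of ours of a small helper that keeps the subsampling and the labels in sync, instead of slicing the data and hard-coding the label lists separately:

def subsample(split, n):
    # first 12500 docs in each 25K split are positive, the next 12500 negative
    pos, neg = split[:n], split[12500:12500 + n]
    return pos + neg, [1.0] * len(pos) + [0.0] * len(neg)

# equivalent to the slicing above plus the hard-coded tag lists below:
# train_data, train_tags = subsample(all_data[:25000], 100)
# test_data, test_tags = subsample(all_data[25000:50000], 100)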
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(200, 7142)\n" + ] + } + ], + "source": [ + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from 
sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Below was old code we wrote for emotion detection.\n", + "# Now deprecated!!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This code will be cleaned further and more notes will be added.\n", + "The purpose is to build a vector space model for multi-class text classification.\n", + "We use scikit-learn, but build our own code to vectorize the data.\n", + "The example is based on emotion classification, with the 6 early Paul Ekman types of emotions: Anger, Fear, Happiness, Sadness, Disgust, and Surprise. There are other types of emotions, according to other theories. But the purpose here is to show how to build a vector space model, rather than get deeper into what types of emotions there are.\n", + "\n", + "There are a number of things I will change in the code, including the names of some functions.\n", + "For example, the function with the string \"OneHotVectors\" is a misnomer. A lot of the code was written and run in a couple of class sessions, to teach text classification." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "#!/usr/bin/env python\n", + "# -*- coding: utf-8 -*-\n", + "#######################\n", + "__version__ = \"0.5\"\n", + "__date__ = \"Nov. 
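One loose end before the deprecated listing continues: the cells above build test_vecs and test_tags but never score them; the reported numbers are 5-fold cross-validation on the training half only. A short sketch (ours, not in the notebook) of evaluating the fitted classifier on the held-out test documents:

import numpy as np
from sklearn import metrics

test_vecs = np.array(test_vecs)
test_tags = np.array(test_tags)
predicted_test = clf.predict(test_vecs)
print "test accuracy_score\t", metrics.accuracy_score(test_tags, predicted_test)
print "\ntest classification_report:\n\n", metrics.classification_report(test_tags, predicted_test)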
30, 2015\"\n", + "__author__ = \"Muhammad Abdul-Mageed\"\n", + "####################################\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "n_jobs = 25\n", + "\n", + "def getListOfLines():\n", + " \"\"\"\n", + " Just takes a file and returns a list of its line\n", + " \"\"\"\n", + " # Change path to file\n", + " return codecs.open(\"PathToFile\", \"r\", \"utf-8\").readlines()\n", + " \n", + "def getThreeColumnFormat():\n", + " \"\"\"\n", + " \"\"\"\n", + " infileObject=codecs.open(\"PathToFile\", \"r\", \"utf-8\")\n", + " listOfLines= infileObject.readlines() \n", + " dataTuples=[(line.split(\"\\t\")[1], line.split(\"\\t\")[2].lower()) for line in listOfLines if line.split(\"\\t\")[1] !=\"NO-EMOTION\"]\n", + " return dataTuples\n", + "#####################################\n", + "\n", + "def tagInSecondHalf(tag, tweet):\n", + " \"\"\"\n", + " Conditioning position of tag in tweet.\n", + " P.S. Won't consider a tag like #happyday.\n", + " \"\"\"\n", + " tags= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\"] #\"#scared\"\n", + " tweet=tweet.split()\n", + " if tag not in tweet:\n", + " return False\n", + " midPoint=(len(tweet)/2)\n", + " tagIndex=tweet.index(tag)\n", + " if tagIndex > midPoint:\n", + " return True\n", + " return False\n", + "\n", + "def tagInLastThird(tag, tweet):\n", + " \"\"\"\n", + " Conditioning position of tag in tweet.\n", + " P.S. 
Won't consider a tag like #happyday.\n", + " \"\"\"\n", + " tweet=tweet.split()\n", + " if tag not in tweet:\n", + " return False\n", + " thirdPoint=(len(tweet)/4)\n", + " tagIndex=tweet.index(tag)\n", + " if tagIndex > thirdPoint*3:\n", + " return True\n", + " return False\n", + "\n", + "def pure(tag, tweet):\n", + " tagList= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\", \"#scared\"]\n", + " tagList.remove(tag)\n", + " for t in tagList:\n", + " if t in tweet: \n", + " return False\n", + " return True\n", + "\n", + "def removeSeed(seed, tweet):\n", + " \"\"\"\n", + " \"\"\"\n", + " if type(seed)==str:\n", + " tweet= re.sub(seed, \" \", tweet)\n", + " elif type(seed)==list:\n", + " for t in seed:\n", + " tweet= re.sub(t, \" \", tweet)\n", + " else:\n", + " print type(seed)\n", + " print \"arg1/Tag must be a string or list, you provided \", type(tag), \".\"\n", + " exit()\n", + " # clean\n", + " tweet=re.sub(\"\\s+\", \" \", tweet)\n", + " #tweet=tweet.trim()\n", + " tweet=tweet.rstrip()\n", + " tweet=tweet.lstrip()\n", + " return tweet\n", + "\n", + "def clean(tweet):\n", + " \"\"\"\n", + " \"\"\"\n", + " tweet= re.sub(\".\", \" \", tweet)\n", + " return tweet\n", + "\n", + "def longTweet(tweet):\n", + " \"\"\"\n", + " \"\"\"\n", + " if len(tweet.split()) > 10:\n", + " return True\n", + " return False\n", + " \n", + "#----------------------------------------------\n", + "def getDataDict(emotionLines):\n", + " shuffle(emotionLines)\n", + " #emotionLines=emotionLines[:10000]\n", + " tagLexicon= [\"happy\", \"sad\", \"disgusted\", \"fearful\" , \"surprised\", \"angry\", \"scared\"] #\"#scared\"\n", + " tagDict= {\"happy\": \"HAPPINESS\", \"sad\": \"SADNESS\", \"disgusted\": \"DISGUST\", \"fearful\": \"FEAR\" , \"surprised\": \"SURPRISE\", \"angry\": \"ANGER\", \"scared\": \"FEAR\"} #\"#scared\"\n", + " myData={}\n", + " for cat in tagLexicon:\n", + " tag=\"#\"+cat\n", + " myData[tagDict[cat]]=[tweet for tweet in emotionLines if tag in tweet.split() and pure(tag, tweet)\n", + " and tagInSecondHalf(tag, tweet) and len(tweet.split()) > 4\n", + " and removeSeed(tag, tweet) and clean(tweet) and longTweet(tweet)]\n", + " return myData\n", + "\n", + "def getThreeColumnDataDict(emotionLines):\n", + " shuffle(emotionLines)\n", + " #emotionLines=emotionLines[:10000]\n", + " classes= [\"HAPPINESS\", \"SADNESS\", \"DISGUST\", \"FEAR\" , \"SURPRISE\", \"ANGER\"]\n", + " myData={pair[0]: [] for pair in emotionLines}\n", + " for cat in classes:\n", + " for pair in emotionLines:\n", + " if pair[0]==cat:\n", + " myData[pair[0]].append(pair[1])\n", + " return myData\n", + "\n", + "def getDataStats(myData):\n", + " # Print some stats:\n", + " ##########################\n", + " majorClass=max([len(myData[k]) for k in myData])\n", + " totalCount=sum([len(myData[k]) for k in myData])\n", + " print \"Majority class count: \", majorClass\n", + " print \"Total data point count: \", totalCount\n", + " print \"Majority class % in train data: \", round((majorClass/float(totalCount))*100, 2), \"%\"\n", + " print \"*\"*50, \"\\n\"\n", + "\n", + "def getLabeledDataTuples(myData):\n", + " # At this point \"myData\" is a dict, with each emotion class as a key, and related tweet lines as a list of lines\n", + " ###############################################################\n", + " # The below gets me tweet body only (and filters out rest of each tweet line [e.g., tweetId.])\n", + " # newData will be a list of tuples, each tuple has 0 as an emotion class and 1 as the string/unicode of the 
tweet body\n", + " dataTuples=[(k, \"\".join(myData[k][i]).split(\"\\t\")[-1]) for k in myData for i in range(len(myData[k]))]\n", + " #shuffle(dataTuples)\n", + " #######################################################################\n", + " # See it: \n", + " #print \"The type of newData[0][0] is a: \", type(newData[0][0]), newData[0][0] # --> newData[0] is a string\n", + " #print \"The type of newData[0][1] is a: \", type(newData[0][1]), newData[0][1] # --> newData[1] is a unicode of tweet body\n", + " #######################################################################\n", + " return dataTuples\n", + " \n", + "def getFeatures(dataPoint):\n", + " features=defaultdict()\n", + " # label is class name, of course, and feats is just a list of words in this case.\n", + " label, feats=dataPoint[0], dataPoint[1].split()\n", + " # I could also add some code to remove the seeds from the feature dict instead of the heavy computation in\n", + " # the tweet cleaning in removeSeed\n", + " ###########################################\n", + " # Beautify the below, building \"has(word): True/False\" dict\n", + " for i in feats:\n", + " features[i]=i\n", + " if \"#fearful\" in features:\n", + " del features[\"#fearful\"]\n", + " if \"#scared\" in features:\n", + " del features[\"#scared\"]\n", + " return features, label\n", + "\n", + "#featuresets=[getFeatures(i) for i in newData]\n", + "\n", + "def getLabelsAndVectors(dataTuples):\n", + " \"\"\" \n", + " Input:\n", + " dataTuples is a list of tuples\n", + " Each tuple in the list has\n", + " 0=label\n", + " 1= tweet body as unicode/string\n", + " Returns an array of labels and another array for words \n", + " \"\"\"\n", + " labels=[]\n", + " vectors=[]\n", + " ids=[]\n", + " c=0\n", + " for dataPoint in dataTuples:\n", + " ids.append(c)\n", + " c+=1\n", + " label, vector=dataPoint[0], dataPoint[1].split()\n", + " labels.append(label)\n", + " vectors.append(vector)\n", + "\n", + " return ids, labels, vectors\n", + "\n", + "def getSpace(vectors):\n", + " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", + " space={}\n", + " for dataPoint in vectors:\n", + " words=dataPoint\n", + " for w in words:\n", + " if w not in space:\n", + " space[w]=len(space)\n", + " return space\n", + "\n", + "def augmentSpace(space, featuresList):\n", + " \"\"\"\n", + " Adds a list of features to the bag-of-words dictionary, we named \"space\".\n", + " \"\"\"\n", + " for f in featuresList:\n", + " if f not in space:\n", + " space[f]=len(space) \n", + " return space\n", + "\n", + "def getReducedSpace(vectors, space):\n", + " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", + " reducedSpace=defaultdict(int)\n", + " for dataPoint in vectors:\n", + " words=dataPoint\n", + " for w in words:\n", + " reducedSpace[w]+=1\n", + " for w in space:\n", + " # could parameterize with the threshold, instead of the following\n", + " if reducedSpace[w] < 3:\n", + " del reducedSpace[w]\n", + " reducedSpace={w: reducedSpace[w] for w in reducedSpace}\n", + " return reducedSpace\n", + "\n", + "\n", + "#-------------------------------------------------\n", + "def getOneHotVectors(ids, labels, vectors, space):\n", + " oneHotVectors={}\n", + " triples=zip(ids, labels, vectors)\n", + " vec = np.zeros((len(space)))\n", + " #for dataPoint in vectors:\n", + " for triple in triples:\n", + " idd, label, dataPoint= triple[0], triple[1], triple[2]\n", + " #for t in 
xrange(len(space)):\n", + " # populate a one-dimensional array of zeros of shape/length= len(space)\n", + " vec=np.zeros((len(space))) # ; second argument is domensionality of the array, which is 1\n", + " for w in dataPoint:\n", + " try:\n", + " vec[space[w]]=1\n", + " except:\n", + " continue\n", + " # add emotion lexicon features\n", + " vec=addEmotionLexiconFeatures(vec, dataPoint, space)\n", + " oneHotVectors[idd]=(vec, array(label))\n", + " return oneHotVectors\n", + "\n", + "def getOneHotVectorsAndLabels(oneHotVectorsDict):\n", + " vectors= array([oneHotVectorsDict[k][0] for k in oneHotVectorsDict])\n", + " labels= array([oneHotVectorsDict[k][1] for k in oneHotVectorsDict])\n", + " print \"labels.shape\", labels.shape \n", + " print \"vectors.shape\", vectors.shape \n", + " return vectors, labels\n", + "###############################\n", + "# try:\n", + "# vectors.shape[0]\n", + "# except:\n", + "# vectors=zeros(len(vectors))\n", + "\n", + "# Do grid search\n", + "#######################################\n", + "def SVM_gridSearch(trainVectors, trainLabels, kernel):\n", + " C_range = 10.0 ** arange(-2, 2)\n", + " gamma_range = 10.0 ** arange(-2, 2)\n", + " param_grid = dict(gamma=gamma_range, C=C_range)\n", + " cv = StratifiedKFold(y=trainLabels, n_folds=2)\n", + " grid = GridSearchCV(SVC(kernel=kernel), param_grid=param_grid, cv=cv, n_jobs=n_jobs) #GridSearchCV(SVC(kernel=kernel, class_weight='auto')\n", + " grid.fit(trainVectors, trainLabels)\n", + " ##################################\n", + " ## Estimated best parameters\n", + " C = grid.best_estimator_.C\n", + " gamma = grid.best_estimator_.gamma\n", + " ##################################\n", + " return C, gamma\n", + "#######################################\n", + "\n", + "def getCAndGamma(trainVectors, trainLabels, kernel = 'rbf'):\n", + " C, gamma = SVM_gridSearch(trainVectors, trainLabels, kernel)\n", + " print C\n", + " print gamma\n", + " return C, gamma\n", + "\n", + "def isRetweet(tweet):\n", + " if tweet.lower().split()[0] ==\"re\":\n", + " return True\n", + " return False\n", + "\n", + "\n", + "\n", + "emotionFeatures=[\"hasAngerWord\", \"hasDisgustWord\", \"hasFearWord\", \"hasHappinessWord\", \"hasSadnessWord\", \"hasSurpriseWord\"]\n", + "\n", + "def main():\n", + " #######################################\n", + " # Saima Aman emotion blog data\n", + " dataTuples=getThreeColumnFormat()\n", + " print \"Length of saimaDataTuples is: \", len(dataTuples)\n", + " #shuffle(dataTuples)\n", + " print \"saimaDataTuples\", dataTuples[0]\n", + " trainTuples=dataTuples#[:1000]\n", + " #testTuples=saimaDataTuples[1000:]\n", + "\n", + "# #######################################\n", + " myData=getThreeColumnDataDict(dataTuples)\n", + " totalCount=sum([len(myData[k]) for k in myData])\n", + " print totalCount\n", + "# del trainLines\n", + "# print\"*\"*50\n", + " getDataStats(myData)\n", + "# dataTuples=getLabeledDataTuples(myData)\n", + "# ####################################\n", + "# # Add first 1000 Saima tuples\n", + "# #dataTuples=dataTuples+saimaDataTuples[:1000]\n", + "# print dataTuples[0]\n", + "# del myData\n", + " ids, labels, vectors= getLabelsAndVectors(trainTuples)\n", + " space=getSpace(vectors)\n", + " print \"Total # of features in your space is: \", len(space)\n", + " # augment space with emotion features...\n", + " space= augmentSpace(space, emotionFeatures)\n", + " #reducedSpace=getReducedSpace(vectors, space)\n", + " print \"Total # of features in your augmented space is: \", len(space)\n", + " #print \"Total # of 
features in your reducedSpace is: \", len(reducedSpace)\n", + " oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", + " vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", + " del oneHotVectors\n", + " trainVectors = vectors\n", + " trainLabels = labels\n", + " del vectors\n", + " del labels\n", + " #C, gamma = getCAndGamma(trainVectors, trainLabels, kernel = 'rbf')\n", + " # Train classifier\n", + " #clf = OneVsOneClassifier(SVC(C=C, kernel=kernel, class_weight='auto', gamma=gamma, verbose= True, probability=True))\n", + " clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + " clf.fit(trainVectors, trainLabels)\n", + " print \"\\nDone fitting classifier on training data...\\n\"\n", + " #del trainVectors\n", + " #del trainLabels\n", + "# dataTuples=getSAIMAThreeColumnFormat()\n", + "# print \"Length of dataTuples is: \", len(dataTuples)\n", + "# shuffle(dataTuples)\n", + "# print \"saimaDataTuples\", dataTuples[0]\n", + "# ids, labels, vectors= getLabelsAndVectors(testTuples)\n", + "# oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", + "# vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", + "# del oneHotVectors\n", + "# testVectors = vectors\n", + "# testLabels = labels\n", + "# predicted_testLabels = clf.predict(testVectors)\n", + " #------------------------------------------------------------------------------------------\n", + " print \"=\"*50, \"\\n\"\n", + " print \"Results with 5-fold cross validation:\\n\"\n", + " print \"=\"*50, \"\\n\"\n", + " #------------------------------------------------------------------------------------------\n", + " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=5)\n", + " print \"*\"*20\n", + " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", + " print \"*\"*20\n", + " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", + " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", + " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", + " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", + " \n", + " #\"------------------------------------------------------------------------------------------\n", + " print \"=\"*50, \"\\n\"\n", + " print \"Results with 10-fold cross validation:\\n\"\n", + " print \"=\"*50, \"\\n\"\n", + " #------------------------------------------------------------------------------------------\n", + " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=10)\n", + " print \"*\"*20\n", + " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", + " print \"*\"*20\n", + " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", + " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", + " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", + " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", + " \n", + " #------------------------------------------------------------------------------------------\n", + " # Take a look at the metrics module at: http://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics\n", + " #------------------------------------------------------------------------------------------\n", + "\n", + "if __name__ == 
\"__main__\":\n",
+ "    print \"Hello!!\"\n",
+ "    main()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.10"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/python_tutorial_part_8_collections_module.ipynb b/python_tutorial_part_8_collections_module.ipynb
new file mode 100644
index 0000000..3d8b234
--- /dev/null
+++ b/python_tutorial_part_8_collections_module.ipynb
@@ -0,0 +1,388 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "# Python's collections module"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "# Documentation: https://docs.python.org/2/library/collections.html\n",
+ "# Per documentation, \"this module implements specialized container datatypes\\\n",
+ "# providing alternatives to Python’s general purpose built-in containers, dict, list, set, and tuple\"."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza'])\n",
+ "DataDoc(tag='POSITIVE', words=['i', 'like', 'apple'])\n",
+ "DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking'])\n",
+ "DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling'])\n",
+ "DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code'])\n",
+ "DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late'])\n",
+ "DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired'])\n",
+ "DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])\n"
+ ]
+ }
+ ],
+ "source": [
+ "# namedtuple(): factory function for creating tuple subclasses with named fields\n",
+ "# Named tuples assign a name to each position in a tuple, thus enabling accessing\n",
+ "# fields by name instead of position index.\n",
+ "#-----------------------------------------------\n",
+ "# namedtuple(typename, field_names[, verbose=False][, rename=False])\n",
+ "# Returns a new tuple subclass named typename. \n",
+ "# The new subclass is used to create tuple-like objects that have fields accessible \n",
+ "# by attribute lookup as well as being indexable and iterable. \n",
+ "\n",
+ "from collections import namedtuple\n",
+ "# We create a named tuple with two fields, tag and words.\n",
+ "# tag will be a string\n",
+ "# words will be a list of words\n",
+ "DataDoc= namedtuple('DataDoc', 'tag words')\n",
+ "# we create a list, and each item in the list will be a namedtuple with the two fields \"tag\" and \"words\"\n",
+ "my_data=[]\n",
+ "# We have a list of documents. Each document is a single sentence. \n",
+ "# The first word in each sentence/document is a tag from the set {POSITIVE, NEGATIVE}, so this is a sentiment analysis task.\n",
+ "documents = [\"POSITIVE I love pizza\", \"POSITIVE I like Apple\", \"POSITIVE I enjoy hiking\",\\\n",
+ "             \"POSITIVE I am passionate about traveling\", \"POSITIVE We had fun writing this code\",\\\n",
+ "             \"NEGATIVE I don't like to stay up late\", \"NEGATIVE I am tired\", \"NEGATIVE He feels sick\"]\n",
+ "\n",
+ "# Now we loop over the documents and populate my_data, which is basically our container for the \n",
+ "# instances and their labels. From each document/sentence, we get the tag and the list of words\n",
+ "for line_no, doc in enumerate(documents):\n",
+ "    label=doc.split()[0]\n",
+ "    word_list=doc.lower().split()[1:]\n",
+ "    my_data.append(DataDoc(label, word_list))\n",
+ "    print my_data[line_no]\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[DataDoc(tag='POSITIVE', words=['i', 'love', 'pizza']), DataDoc(tag='POSITIVE', words=['i', 'like', 'apple']), DataDoc(tag='POSITIVE', words=['i', 'enjoy', 'hiking']), DataDoc(tag='POSITIVE', words=['i', 'am', 'passionate', 'about', 'traveling']), DataDoc(tag='POSITIVE', words=['we', 'had', 'fun', 'writing', 'this', 'code']), DataDoc(tag='NEGATIVE', words=['i', \"don't\", 'like', 'to', 'stay', 'up', 'late']), DataDoc(tag='NEGATIVE', words=['i', 'am', 'tired']), DataDoc(tag='NEGATIVE', words=['he', 'feels', 'sick'])]\n"
+ ]
+ }
+ ],
+ "source": [
+ "print my_data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "POSITIVE\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Now you can access the tag of each instance\n",
+ "print my_data[0].tag"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "['i', 'love', 'pizza']\n"
+ ]
+ }
+ ],
+ "source": [
+ "# You can also access the instance word list itself\n",
+ "print my_data[0].words"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "# We don't need this next function for the toy data, but we provide it as an example.\n",
+ "# (Note: train_data below is the IMDB list of DataDoc namedtuples loaded in the part-6\n",
+ "# vector space tutorial; we assume it is still in memory for the rest of this notebook.)\n",
+ "def get_text_list(data):\n",
+ "    \"\"\"\n",
+ "    input is a list of namedtuples (either train, dev, or test)\n",
+ "    returns a list of lists, each inner list is just the list of words belonging to a given data point\n",
+ "    Used to get train_text, dev_text, or test_text\n",
+ "    \"\"\"\n",
+ "    \n",
+ "    text_list=[]\n",
+ "    for i in range(len(data)):\n",
+ "        text_list.append(data[i].words)\n",
+ "    return text_list\n",
+ "\n",
+ "train_text= get_text_list(train_data)\n",
+ "print train_text[0][:6]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using gpu device 0: GeForce GT 750M\n"
+ ]
+ }
+ ],
+ "source": [
+ "from gensim import corpora\n",
+ "# Now let's use Gensim to get a dictionary of the words in the train data:\n",
+ "# We only need this dict from the training data (Can you think why? Because we\n",
+ "# only learn features from the training data.)\n",
+ "dictionary = corpora.Dictionary(train_text)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
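The IMDB dictionary built here is too big to eyeball, so here is a sketch of ours of the same corpora.Dictionary call on the small toy corpus from the top of this notebook, where the mapping is easy to inspect:

from gensim import corpora

toy_dictionary = corpora.Dictionary([doc.words for doc in my_data])
print toy_dictionary                      # e.g. Dictionary(26 unique tokens: ...)
print toy_dictionary.token2id['love']     # a small integer id, like our get_space indices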
"metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dictionary(113562 unique tokens: [u'fawn', u'tsukino', u'nunnery', u'gah', u\"zuniga's\"]...)\n" + ] + } + ], + "source": [ + "print dictionary" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We can do the below to get the id of each word in the dict\n", + "#print(dictionary.token2id)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0, 1), (1, 1), (2, 1), (3, 1), (4, 5), (5, 1), (6, 6), (7, 1), (8, 1), (9, 1), (10, 1), (11, 2), (12, 1), (13, 1), (14, 1), (15, 4), (16, 1), (17, 1), (18, 1), (19, 1), (20, 1), (21, 1), (22, 3), (23, 1), (24, 1), (25, 1), (26, 1), (27, 1), (28, 1), (29, 4), (30, 1), (31, 1), (32, 1), (33, 1), (34, 1), (35, 1), (36, 1), (37, 1), (38, 1), (39, 1), (40, 4), (41, 1), (42, 27), (43, 2), (44, 1), (45, 1), (46, 3), (47, 1), (48, 1), (49, 1), (50, 4), (51, 1), (52, 1), (53, 1), (54, 1), (55, 1), (56, 1), (57, 2), (58, 1), (59, 1), (60, 1), (61, 1), (62, 2), (63, 1), (64, 1), (65, 1), (66, 1), (67, 1), (68, 4), (69, 2), (70, 4), (71, 2), (72, 2), (73, 2), (74, 1), (75, 1), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 1), (82, 1), (83, 1), (84, 1), (85, 2), (86, 1), (87, 2), (88, 1), (89, 1), (90, 4), (91, 1), (92, 1), (93, 4), (94, 1), (95, 1), (96, 9)]\n" + ] + } + ], + "source": [ + "# Now let's vectorize the training data\n", + "train_vecs= [dictionary.doc2bow(doc) for doc in train_text]\n", + "print train_vecs[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['david', \"bryce's\", 'comments', 'nearby', 'are', 'exceptionally', 'well', 'written', 'and', 'informative']\n" + ] + }, + { + "data": { + "text/plain": [ + "[(0, 1),\n", + " (1, 1),\n", + " (4, 16),\n", + " (6, 4),\n", + " (16, 2),\n", + " (28, 1),\n", + " (29, 5),\n", + " (37, 2),\n", + " (39, 1),\n", + " (40, 2)]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Now let's get vectors for the test data:\n", + "# First we need the text of each data point in test, let's use the function we developed above\n", + "test_text= get_text_list(test_data)\n", + "print test_text[-1][:10]\n", + "# We can now use the test_text to get test_vecs\n", + "test_vecs= [dictionary.doc2bow(doc) for doc in test_text]\n", + "test_vecs[-1][:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0 [(0, 1), (4, 9), (6, 2), (8, 1), (16, 1), (29, 5), (32, 1), (33, 1), (37, 1), (39, 2)]\n" + ] + } + ], + "source": [ + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. 
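If you want to feed these gensim bag-of-words vectors to scikit-learn, as we did with our own vectors in the part-6 tutorial, gensim's matutils module can convert them into a scipy sparse matrix. A sketch, assuming matutils.corpus2csc and its terms-by-documents orientation:

from gensim import matutils

# corpus2csc returns a (num_terms x num_docs) sparse matrix,
# so transpose to the (num_docs x num_terms) layout sklearn expects
train_X = matutils.corpus2csc(train_vecs, num_terms=len(dictionary)).T.tocsr()
test_X = matutils.corpus2csc(test_vecs, num_terms=len(dictionary)).T.tocsr()
print train_X.shape    # (25000, 113562)
# estimators such as sklearn's LinearSVC accept these sparse matrices directly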
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TAG: 1.0, Vector: [(0, 1), (1, 1), (2, 1), (3, 1), (4, 5), (5, 1), (6, 6), (7, 1), (8, 1), (9, 1)]\n", + "TAG: 1.0, Vector: [(0, 1), (1, 1), (4, 21), (6, 20), (11, 1), (13, 1), (17, 2), (20, 5), (28, 1), (29, 5)]\n", + "TAG: 1.0, Vector: [(4, 8), (6, 2), (17, 1), (26, 1), (29, 2), (31, 1), (37, 1), (40, 2), (42, 11), (56, 1)]\n", + "TAG: 1.0, Vector: [(4, 9), (6, 1), (16, 1), (21, 2), (28, 1), (29, 6), (42, 7), (44, 1), (50, 2), (60, 2)]\n", + "TAG: 1.0, Vector: [(0, 2), (4, 8), (6, 5), (16, 2), (17, 1), (29, 1), (40, 4), (42, 6), (43, 1), (44, 1)]\n" + ] + } + ], + "source": [ + "# You can loop over the data to get the tags and vectors easily now:\n", + "for i in range(5): # len(train_tage)\n", + " print(\"TAG: %s, Vector: %s\" % (train_tags[i], train_vecs[i][:10]))" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "DataDoc(tag='_*0', words=['bromwell', 'high', 'is', 'a', 'cartoon', 'comedy', '.', 'it', 'ran', 'at', 'the', 'same', 'time', 'as', 'some', 'other', 'programs', 'about', 'school', 'life', ',', 'such', 'as', '\"', 'teachers', '\"', '.', 'my', '35', 'years', 'in', 'the', 'teaching', 'profession', 'lead', 'me', 'to', 'believe', 'that', 'bromwell', \"high's\", 'satire', 'is', 'much', 'closer', 'to', 'reality', 'than', 'is', '\"', 'teachers', '\"', '.', 'the', 'scramble', 'to', 'survive', 'financially', ',', 'the', 'insightful', 'students', 'who', 'can', 'see', 'right', 'through', 'their', 'pathetic', \"teachers'\", 'pomp', ',', 'the', 'pettiness', 'of', 'the', 'whole', 'situation', ',', 'all', 'remind', 'me', 'of', 'the', 'schools', 'i', 'knew', 'and', 'their', 'students', '.', 'when', 'i', 'saw', 'the', 'episode', 'in', 'which', 'a', 'student', 'repeatedly', 'tried', 'to', 'burn', 'down', 'the', 'school', ',', 'i', 'immediately', 'recalled', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'at', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'high', '.', 'a', 'classic', 'line', ':', 'inspector', ':', \"i'm\", 'here', 'to', 'sack', 'one', 'of', 'your', 'teachers', '.', 'student', ':', 'welcome', 'to', 'bromwell', 'high', '.', 'i', 'expect', 'that', 'many', 'adults', 'of', 'my', 'age', 'think', 'that', 'bromwell', 'high', 'is', 'far', 'fetched', '.', 'what', 'a', 'pity', 'that', 'it', \"isn't\", '!'])\n" + ] + } + ], + "source": [ + "print train_data[0]" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + 
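namedtuple is only one of the containers this module offers. Counter, a dict subclass for tallying hashable items, is another that fits the toy corpus above; a small sketch of ours:

from collections import Counter

word_counts = Counter()
for doc in my_data:
    word_counts.update(doc.words)    # add this document's words to the running tally
print word_counts['i']               # 6
print word_counts.most_common(2)     # [('i', 6), ('like', 2)] (order among ties may vary)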
"nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From c9ed0be63a7c188e3a7a7be1a338bd31a9394338 Mon Sep 17 00:00:00 2001 From: mageed Date: Fri, 29 Jan 2016 16:06:22 -0500 Subject: [PATCH 15/36] resuflling, updating --- ...orial_part_6_vector_space-checkpoint.ipynb | 393 ++++++++++++++++ python_tutorial_part_6_vector_space.ipynb | 443 ------------------ 2 files changed, 393 insertions(+), 443 deletions(-) create mode 100644 .ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb diff --git a/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb new file mode 100644 index 0000000..4d41aaf --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb @@ -0,0 +1,393 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# A Vector Space Model, with scikit-learn" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is code to build a vector space model, with SVMs on Andrew Mass' \n", + "# distribution of movie review sentiment data." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25000\n", + "200\n", + "200\n" + ] + } + ], + "source": [ + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "7142\n", + "6994\n" + ] + } + ], + "source": [ + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print 
len(word_space)\n", + "print word_space[\"love\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0\n", + "200\n", + "200\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + "200\n", + "200\n" + ] + } + ], + "source": [ + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. 
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(200, 7142)\n" + ] + } + ], + "source": [ + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from 
sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index bad8009..4d41aaf 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -359,449 +359,6 @@ " \n" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Below was old code we wrote for emotion detection.\n", - "# Now deprecated!!" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This code will be cleaned further and more notes will be added.\n", - "The purpose is to build a vector space model for multi-class text classification.\n", - "We use scikit-learn, but build our own code to vectorize the data.\n", - "The example is based on emotion classification, with the 6 early Paul Ekman types of emotions: Anger, Fear, Happiness, Sadness, Disgust, and Surprise. There are other types of emotions, according to other theories. 
But the purpose here is to show how to build a vector space model, rather than get deeper into what types of emotions there are.\n", - "\n", - "There are a number of things I will change in the code, including the names of some functions.\n", - "For example, the function with the string \"OneHotVectors\" is a misnomer. A lot of the code was written and run in a couple of class sessions, to teach text classification." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "#!/usr/bin/env python\n", - "# -*- coding: utf-8 -*-\n", - "#######################\n", - "__version__ = \"0.5\"\n", - "__date__ = \"Nov. 30, 2015\"\n", - "__author__ = \"Muhammad Abdul-Mageed\"\n", - "####################################\n", - "import argparse\n", - "import codecs\n", - "import time\n", - "import sys\n", - "import os, re, glob\n", - "import nltk\n", - "from collections import defaultdict\n", - "from random import shuffle, randint\n", - "import numpy as np\n", - "from numpy import array, arange, zeros, hstack, argsort\n", - "import unicodedata\n", - "from scipy.sparse import csr_matrix\n", - "from sklearn.svm import SVC\n", - "from sklearn import preprocessing\n", - "from sklearn.cross_validation import StratifiedKFold\n", - "from sklearn.grid_search import GridSearchCV\n", - "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", - "from sklearn import metrics\n", - "from sklearn.cross_validation import train_test_split\n", - "from sklearn.decomposition import TruncatedSVD\n", - "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", - "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.linear_model import LogisticRegression\n", - "from sklearn import cross_validation\n", - "n_jobs = 25\n", - "\n", - "def getListOfLines():\n", - " \"\"\"\n", - " Just takes a file and returns a list of its line\n", - " \"\"\"\n", - " # Change path to file\n", - " return codecs.open(\"PathToFile\", \"r\", \"utf-8\").readlines()\n", - " \n", - "def getThreeColumnFormat():\n", - " \"\"\"\n", - " \"\"\"\n", - " infileObject=codecs.open(\"PathToFile\", \"r\", \"utf-8\")\n", - " listOfLines= infileObject.readlines() \n", - " dataTuples=[(line.split(\"\\t\")[1], line.split(\"\\t\")[2].lower()) for line in listOfLines if line.split(\"\\t\")[1] !=\"NO-EMOTION\"]\n", - " return dataTuples\n", - "#####################################\n", - "\n", - "def tagInSecondHalf(tag, tweet):\n", - " \"\"\"\n", - " Conditioning position of tag in tweet.\n", - " P.S. Won't consider a tag like #happyday.\n", - " \"\"\"\n", - " tags= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\"] #\"#scared\"\n", - " tweet=tweet.split()\n", - " if tag not in tweet:\n", - " return False\n", - " midPoint=(len(tweet)/2)\n", - " tagIndex=tweet.index(tag)\n", - " if tagIndex > midPoint:\n", - " return True\n", - " return False\n", - "\n", - "def tagInLastThird(tag, tweet):\n", - " \"\"\"\n", - " Conditioning position of tag in tweet.\n", - " P.S. 
Won't consider a tag like #happyday.\n", - " \"\"\"\n", - " tweet=tweet.split()\n", - " if tag not in tweet:\n", - " return False\n", - " thirdPoint=(len(tweet)/4)\n", - " tagIndex=tweet.index(tag)\n", - " if tagIndex > thirdPoint*3:\n", - " return True\n", - " return False\n", - "\n", - "def pure(tag, tweet):\n", - " tagList= [\"#happy\", \"#sad\", \"#disgusted\", \"#fearful\" , \"#surprised\", \"#angry\", \"#scared\"]\n", - " tagList.remove(tag)\n", - " for t in tagList:\n", - " if t in tweet: \n", - " return False\n", - " return True\n", - "\n", - "def removeSeed(seed, tweet):\n", - " \"\"\"\n", - " \"\"\"\n", - " if type(seed)==str:\n", - " tweet= re.sub(seed, \" \", tweet)\n", - " elif type(seed)==list:\n", - " for t in seed:\n", - " tweet= re.sub(t, \" \", tweet)\n", - " else:\n", - " print type(seed)\n", - " print \"arg1/Tag must be a string or list, you provided \", type(tag), \".\"\n", - " exit()\n", - " # clean\n", - " tweet=re.sub(\"\\s+\", \" \", tweet)\n", - " #tweet=tweet.trim()\n", - " tweet=tweet.rstrip()\n", - " tweet=tweet.lstrip()\n", - " return tweet\n", - "\n", - "def clean(tweet):\n", - " \"\"\"\n", - " \"\"\"\n", - " tweet= re.sub(\".\", \" \", tweet)\n", - " return tweet\n", - "\n", - "def longTweet(tweet):\n", - " \"\"\"\n", - " \"\"\"\n", - " if len(tweet.split()) > 10:\n", - " return True\n", - " return False\n", - " \n", - "#----------------------------------------------\n", - "def getDataDict(emotionLines):\n", - " shuffle(emotionLines)\n", - " #emotionLines=emotionLines[:10000]\n", - " tagLexicon= [\"happy\", \"sad\", \"disgusted\", \"fearful\" , \"surprised\", \"angry\", \"scared\"] #\"#scared\"\n", - " tagDict= {\"happy\": \"HAPPINESS\", \"sad\": \"SADNESS\", \"disgusted\": \"DISGUST\", \"fearful\": \"FEAR\" , \"surprised\": \"SURPRISE\", \"angry\": \"ANGER\", \"scared\": \"FEAR\"} #\"#scared\"\n", - " myData={}\n", - " for cat in tagLexicon:\n", - " tag=\"#\"+cat\n", - " myData[tagDict[cat]]=[tweet for tweet in emotionLines if tag in tweet.split() and pure(tag, tweet)\n", - " and tagInSecondHalf(tag, tweet) and len(tweet.split()) > 4\n", - " and removeSeed(tag, tweet) and clean(tweet) and longTweet(tweet)]\n", - " return myData\n", - "\n", - "def getThreeColumnDataDict(emotionLines):\n", - " shuffle(emotionLines)\n", - " #emotionLines=emotionLines[:10000]\n", - " classes= [\"HAPPINESS\", \"SADNESS\", \"DISGUST\", \"FEAR\" , \"SURPRISE\", \"ANGER\"]\n", - " myData={pair[0]: [] for pair in emotionLines}\n", - " for cat in classes:\n", - " for pair in emotionLines:\n", - " if pair[0]==cat:\n", - " myData[pair[0]].append(pair[1])\n", - " return myData\n", - "\n", - "def getDataStats(myData):\n", - " # Print some stats:\n", - " ##########################\n", - " majorClass=max([len(myData[k]) for k in myData])\n", - " totalCount=sum([len(myData[k]) for k in myData])\n", - " print \"Majority class count: \", majorClass\n", - " print \"Total data point count: \", totalCount\n", - " print \"Majority class % in train data: \", round((majorClass/float(totalCount))*100, 2), \"%\"\n", - " print \"*\"*50, \"\\n\"\n", - "\n", - "def getLabeledDataTuples(myData):\n", - " # At this point \"myData\" is a dict, with each emotion class as a key, and related tweet lines as a list of lines\n", - " ###############################################################\n", - " # The below gets me tweet body only (and filters out rest of each tweet line [e.g., tweetId.])\n", - " # newData will be a list of tuples, each tuple has 0 as an emotion class and 1 as the string/unicode of the 
tweet body\n", - " dataTuples=[(k, \"\".join(myData[k][i]).split(\"\\t\")[-1]) for k in myData for i in range(len(myData[k]))]\n", - " #shuffle(dataTuples)\n", - " #######################################################################\n", - " # See it: \n", - " #print \"The type of newData[0][0] is a: \", type(newData[0][0]), newData[0][0] # --> newData[0] is a string\n", - " #print \"The type of newData[0][1] is a: \", type(newData[0][1]), newData[0][1] # --> newData[1] is a unicode of tweet body\n", - " #######################################################################\n", - " return dataTuples\n", - " \n", - "def getFeatures(dataPoint):\n", - " features=defaultdict()\n", - " # label is class name, of course, and feats is just a list of words in this case.\n", - " label, feats=dataPoint[0], dataPoint[1].split()\n", - " # I could also add some code to remove the seeds from the feature dict instead of the heavy computation in\n", - " # the tweet cleaning in removeSeed\n", - " ###########################################\n", - " # Beautify the below, building \"has(word): True/False\" dict\n", - " for i in feats:\n", - " features[i]=i\n", - " if \"#fearful\" in features:\n", - " del features[\"#fearful\"]\n", - " if \"#scared\" in features:\n", - " del features[\"#scared\"]\n", - " return features, label\n", - "\n", - "#featuresets=[getFeatures(i) for i in newData]\n", - "\n", - "def getLabelsAndVectors(dataTuples):\n", - " \"\"\" \n", - " Input:\n", - " dataTuples is a list of tuples\n", - " Each tuple in the list has\n", - " 0=label\n", - " 1= tweet body as unicode/string\n", - " Returns an array of labels and another array for words \n", - " \"\"\"\n", - " labels=[]\n", - " vectors=[]\n", - " ids=[]\n", - " c=0\n", - " for dataPoint in dataTuples:\n", - " ids.append(c)\n", - " c+=1\n", - " label, vector=dataPoint[0], dataPoint[1].split()\n", - " labels.append(label)\n", - " vectors.append(vector)\n", - "\n", - " return ids, labels, vectors\n", - "\n", - "def getSpace(vectors):\n", - " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", - " space={}\n", - " for dataPoint in vectors:\n", - " words=dataPoint\n", - " for w in words:\n", - " if w not in space:\n", - " space[w]=len(space)\n", - " return space\n", - "\n", - "def augmentSpace(space, featuresList):\n", - " \"\"\"\n", - " Adds a list of features to the bag-of-words dictionary, we named \"space\".\n", - " \"\"\"\n", - " for f in featuresList:\n", - " if f not in space:\n", - " space[f]=len(space) \n", - " return space\n", - "\n", - "def getReducedSpace(vectors, space):\n", - " # get the dictionary of all words in train; we call it the space as it is the space of features for bag of words\n", - " reducedSpace=defaultdict(int)\n", - " for dataPoint in vectors:\n", - " words=dataPoint\n", - " for w in words:\n", - " reducedSpace[w]+=1\n", - " for w in space:\n", - " # could parameterize with the threshold, instead of the following\n", - " if reducedSpace[w] < 3:\n", - " del reducedSpace[w]\n", - " reducedSpace={w: reducedSpace[w] for w in reducedSpace}\n", - " return reducedSpace\n", - "\n", - "\n", - "#-------------------------------------------------\n", - "def getOneHotVectors(ids, labels, vectors, space):\n", - " oneHotVectors={}\n", - " triples=zip(ids, labels, vectors)\n", - " vec = np.zeros((len(space)))\n", - " #for dataPoint in vectors:\n", - " for triple in triples:\n", - " idd, label, dataPoint= triple[0], triple[1], triple[2]\n", - " #for t in 
xrange(len(space)):\n", - " # populate a one-dimensional array of zeros of shape/length= len(space)\n", - " vec=np.zeros((len(space))) # ; second argument is domensionality of the array, which is 1\n", - " for w in dataPoint:\n", - " try:\n", - " vec[space[w]]=1\n", - " except:\n", - " continue\n", - " # add emotion lexicon features\n", - " vec=addEmotionLexiconFeatures(vec, dataPoint, space)\n", - " oneHotVectors[idd]=(vec, array(label))\n", - " return oneHotVectors\n", - "\n", - "def getOneHotVectorsAndLabels(oneHotVectorsDict):\n", - " vectors= array([oneHotVectorsDict[k][0] for k in oneHotVectorsDict])\n", - " labels= array([oneHotVectorsDict[k][1] for k in oneHotVectorsDict])\n", - " print \"labels.shape\", labels.shape \n", - " print \"vectors.shape\", vectors.shape \n", - " return vectors, labels\n", - "###############################\n", - "# try:\n", - "# vectors.shape[0]\n", - "# except:\n", - "# vectors=zeros(len(vectors))\n", - "\n", - "# Do grid search\n", - "#######################################\n", - "def SVM_gridSearch(trainVectors, trainLabels, kernel):\n", - " C_range = 10.0 ** arange(-2, 2)\n", - " gamma_range = 10.0 ** arange(-2, 2)\n", - " param_grid = dict(gamma=gamma_range, C=C_range)\n", - " cv = StratifiedKFold(y=trainLabels, n_folds=2)\n", - " grid = GridSearchCV(SVC(kernel=kernel), param_grid=param_grid, cv=cv, n_jobs=n_jobs) #GridSearchCV(SVC(kernel=kernel, class_weight='auto')\n", - " grid.fit(trainVectors, trainLabels)\n", - " ##################################\n", - " ## Estimated best parameters\n", - " C = grid.best_estimator_.C\n", - " gamma = grid.best_estimator_.gamma\n", - " ##################################\n", - " return C, gamma\n", - "#######################################\n", - "\n", - "def getCAndGamma(trainVectors, trainLabels, kernel = 'rbf'):\n", - " C, gamma = SVM_gridSearch(trainVectors, trainLabels, kernel)\n", - " print C\n", - " print gamma\n", - " return C, gamma\n", - "\n", - "def isRetweet(tweet):\n", - " if tweet.lower().split()[0] ==\"re\":\n", - " return True\n", - " return False\n", - "\n", - "\n", - "\n", - "emotionFeatures=[\"hasAngerWord\", \"hasDisgustWord\", \"hasFearWord\", \"hasHappinessWord\", \"hasSadnessWord\", \"hasSurpriseWord\"]\n", - "\n", - "def main():\n", - " #######################################\n", - " # Saima Aman emotion blog data\n", - " dataTuples=getThreeColumnFormat()\n", - " print \"Length of saimaDataTuples is: \", len(dataTuples)\n", - " #shuffle(dataTuples)\n", - " print \"saimaDataTuples\", dataTuples[0]\n", - " trainTuples=dataTuples#[:1000]\n", - " #testTuples=saimaDataTuples[1000:]\n", - "\n", - "# #######################################\n", - " myData=getThreeColumnDataDict(dataTuples)\n", - " totalCount=sum([len(myData[k]) for k in myData])\n", - " print totalCount\n", - "# del trainLines\n", - "# print\"*\"*50\n", - " getDataStats(myData)\n", - "# dataTuples=getLabeledDataTuples(myData)\n", - "# ####################################\n", - "# # Add first 1000 Saima tuples\n", - "# #dataTuples=dataTuples+saimaDataTuples[:1000]\n", - "# print dataTuples[0]\n", - "# del myData\n", - " ids, labels, vectors= getLabelsAndVectors(trainTuples)\n", - " space=getSpace(vectors)\n", - " print \"Total # of features in your space is: \", len(space)\n", - " # augment space with emotion features...\n", - " space= augmentSpace(space, emotionFeatures)\n", - " #reducedSpace=getReducedSpace(vectors, space)\n", - " print \"Total # of features in your augmented space is: \", len(space)\n", - " #print \"Total # of 
features in your reducedSpace is: \", len(reducedSpace)\n", - " oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", - " vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", - " del oneHotVectors\n", - " trainVectors = vectors\n", - " trainLabels = labels\n", - " del vectors\n", - " del labels\n", - " #C, gamma = getCAndGamma(trainVectors, trainLabels, kernel = 'rbf')\n", - " # Train classifier\n", - " #clf = OneVsOneClassifier(SVC(C=C, kernel=kernel, class_weight='auto', gamma=gamma, verbose= True, probability=True))\n", - " clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", - " clf.fit(trainVectors, trainLabels)\n", - " print \"\\nDone fitting classifier on training data...\\n\"\n", - " #del trainVectors\n", - " #del trainLabels\n", - "# dataTuples=getSAIMAThreeColumnFormat()\n", - "# print \"Length of dataTuples is: \", len(dataTuples)\n", - "# shuffle(dataTuples)\n", - "# print \"saimaDataTuples\", dataTuples[0]\n", - "# ids, labels, vectors= getLabelsAndVectors(testTuples)\n", - "# oneHotVectors=getOneHotVectors(ids, labels, vectors, space)\n", - "# vectors, labels=getOneHotVectorsAndLabels(oneHotVectors)\n", - "# del oneHotVectors\n", - "# testVectors = vectors\n", - "# testLabels = labels\n", - "# predicted_testLabels = clf.predict(testVectors)\n", - " #------------------------------------------------------------------------------------------\n", - " print \"=\"*50, \"\\n\"\n", - " print \"Results with 5-fold cross validation:\\n\"\n", - " print \"=\"*50, \"\\n\"\n", - " #------------------------------------------------------------------------------------------\n", - " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=5)\n", - " print \"*\"*20\n", - " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", - " print \"*\"*20\n", - " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", - " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", - " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", - " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", - " \n", - " #\"------------------------------------------------------------------------------------------\n", - " print \"=\"*50, \"\\n\"\n", - " print \"Results with 10-fold cross validation:\\n\"\n", - " print \"=\"*50, \"\\n\"\n", - " #------------------------------------------------------------------------------------------\n", - " predicted = cross_validation.cross_val_predict(clf, trainVectors, trainLabels, cv=10)\n", - " print \"*\"*20\n", - " print \"\\t accuracy_score\\t\", metrics.accuracy_score(trainLabels, predicted)\n", - " print \"*\"*20\n", - " print \"precision_score\\t\", metrics.precision_score(trainLabels, predicted)\n", - " print \"recall_score\\t\", metrics.recall_score(trainLabels, predicted)\n", - " print \"\\nclassification_report:\\n\\n\", metrics.classification_report(trainLabels, predicted)\n", - " print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(trainLabels, predicted)\n", - " \n", - " #------------------------------------------------------------------------------------------\n", - " # Take a look at the metrics module at: http://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics\n", - " #------------------------------------------------------------------------------------------\n", - "\n", - "if __name__ == 
\"__main__\":\n", - " print \"Hello!!\"\n", - " main()" - ] - }, { "cell_type": "code", "execution_count": null, From d002d47aea29bb8e807c8285dbf50822fdf5fa95 Mon Sep 17 00:00:00 2001 From: mageed Date: Fri, 5 Feb 2016 10:31:46 -0500 Subject: [PATCH 16/36] updating with some organization --- ...utorial_part_3_rule_based_classifier.ipynb | 386 +++++++++++++++--- 1 file changed, 334 insertions(+), 52 deletions(-) diff --git a/python_tutorial_part_3_rule_based_classifier.ipynb b/python_tutorial_part_3_rule_based_classifier.ipynb index 4e621d3..c9e87f6 100644 --- a/python_tutorial_part_3_rule_based_classifier.ipynb +++ b/python_tutorial_part_3_rule_based_classifier.ipynb @@ -50,7 +50,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -60,11 +60,18 @@ "output_type": "stream", "text": [ "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally', 'ethmoidal vein', 'unquestionableness', 'uselessness', 'top-quality', 'good-humoredness']\n", + "**************************************************\n", "['twilight of the gods', 'rumbustious', 'screaming', 'grueling', 'inanimate', 'stern', 'changelessness', 'sugarless', 'order pseudoscorpiones', 'modest']\n" ] } ], "source": [ + "# Let's make this function more general so that we can use it to read lexical files,\n", + "# whether they are positive or negative. To do that, we simply parameterize the function.\n", + "# What this means is that we make it work with a parameter, which will be a file name that we pass to\n", + "# the function when we are calling it. Now, this parameter can be either the name of the positive lexicon file\n", + "# or the name of the negative lexicon file. So, that is a desirable change.\n", + "\n", "import re\n", "\n", "def clean_lexicon(lex_input):\n", @@ -80,14 +87,75 @@ "\n", "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", "print my_positive_list[:10]\n", + "print \"*\"*50\n", + "my_neg_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + "print my_neg_list[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false, + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "% of positive: 0.0864\n", + "% of negative: 0.1653\n" + ] + } + ], + "source": [ + "# What if we wanted to know the percentages of positive and negative words to the overall words (tokens) in a file.\n", + "# Let's write some code to do that based on the positive and negative entries we acquired from SentiWordNet:\n", + "import re\n", "\n", - "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", - "print my_positive_list[:10]" + "def clean_lexicon(lex_input):\n", + " lex_file_l=open(lex_input, \"r\").readlines()\n", + " \n", + " new_lex_file_l=[]\n", + " for i in lex_file_l:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_lex_file_l.append(i)\n", + " return new_lex_file_l\n", + "\n", + "# Change the path to your local path:\n", + "pos_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + "neg_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + "\n", + "\n", + "# Determine the percentage of positive words in a file:\n", + "def get_sentiment_diversity(pos_lex, neg_lex, input_file):\n", + " '''\n", + " just returns some 
stats about % of pos and neg sentiment in a file...\n", + " '''\n", + " input_string=open(input_file, \"r\").read().lower()\n", + " len_words= float(len(input_string.split()))\n", + " pos_count=0\n", + " neg_count=0\n", + " for w in pos_lex:\n", + " pos_count+= input_string.count(w)\n", + " for w in neg_lex:\n", + " neg_count += input_string.count(w)\n", + " return pos_count, neg_count, len_words\n", + " \n", + "# Call the function...\n", + "input_file=\"/Users/mam/CORE/TEACHING/ssa/git_hub/python_tutorial/hamlet.txt\"\n", + "pos_count, neg_count, len_words= get_sentiment_diversity(pos_lex, neg_lex, input_file)\n", + "#-------------------------\n", + "print \"% of positive: \", round(pos_count/len_words, 4) \n", + "print \"% of negative: \", round(neg_count/len_words, 4)" ] }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -96,13 +164,30 @@ "name": "stdout", "output_type": "stream", "text": [ - "## hello ##\n", - "##hello##\n", - "##hello##\n" + "['!', '\"', '#', '$', '%', '&', \"'\", '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '[', '\\\\', ']', '^', '_', '`', '{', '|', '}', '~']\n" ] } ], "source": [ + "import string\n", + "punc = [char for char in string.punctuation]\n", + "print punc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# What if we wanted to remove all punctuation marks from a file?\n", + "# There are many ways to do this.\n", + "# As an introduction to regular expressions and the \"string\" module, let's do something along the following lines:\n", + "#----------------\n", + "# Let's take a look at the \"re\" module first. Here's an example:\n", + "\n", "import re\n", "s = \" hello \"\n", "print \"##\"+ s + \"##\"\n", @@ -112,6 +197,239 @@ "print \"##\"+ s3 + \"##\"" ] }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hey there 654%$21!!!...? $& + ___ | %\n", + "Hey there 65421 \n" + ] + } + ], + "source": [ + "\n", + "import string\n", + "import re\n", + "\n", + "def clean(to_filter_list, text):\n", + " '''\n", + " input: \n", + " a. list of undesirable items we want to remove from text\n", + " b. text we want to clean\n", + " output:\n", + " cleaned text\n", + " '''\n", + " for i in to_filter_list:\n", + " #print i\n", + " i=\"\\\\\"+i\n", + " text=re.sub(i, \"\", text)\n", + " return text\n", + "\n", + "#----------------------\n", + "# Call the function...\n", + "punc = [char for char in string.punctuation]\n", + "text=\"Hey there 654%$21!!!...? $& + ___ | %\"\n", + "\n", + "new=clean(punc, text)\n", + "print text\n", + "print new\n", + "#print punc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Detecting Retweets & Removing Duplicates..." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 58,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['@alex Did you make it to the meeting?']\n",
+      "['@alex Did you make it to the meeting?']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Some work on filtering out undesirable content, for example retweets, from a file.\n",
+    "# The first step is to do some analysis and understand the structure of a retweet.\n",
+    "# Below we assume simply that a retweet is just a tweet that starts with either \"RT\" or \"rt\"\n",
+    "#--------------------------------------------\n",
+    "# How do we get rid of retweets, for example?\n",
+    "# Let's say we have the following list of lines, returned from a file we opened\n",
+    "lines=[\"RT @abhi I like #soccer!!!!\", \"rt @abhi I cooked lentil soup\",\\\n",
+    "       \"@alex Did you make it to the meeting?\"]\n",
+    "\n",
+    "new_list=[]\n",
+    "for line in lines:\n",
+    "    if not line.startswith(\"RT\") and not line.startswith(\"rt\"):\n",
+    "        #print line\n",
+    "        new_list.append(line)\n",
+    "    \n",
+    "print new_list\n",
+    "\n",
+    "clean_list=[line for line in lines if not line.startswith(\"RT\") and not line.startswith(\"rt\")]\n",
+    "print clean_list"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 59,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "set(['RT @abhi I like #soccer!!!!', '@alex Did you maaaaake it to the meeting?', '@alex Did you make it there to the meeting?', '@alex Did you make it to the meeting?', 'rt @abhi I cooked lentil soup', '@alex Did you make it to the meeting...', '@alex did you make it to the meeting?'])\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Now, let's filter out duplicates:\n",
+    "# Note that set() only removes exact duplicates; near-duplicates (like the slight variants below) survive,\n",
+    "# as you can see in the output.\n",
+    "lines=[\"RT @abhi I like #soccer!!!!\", \"rt @abhi I cooked lentil soup\",\\\n",
+    "       \"@alex Did you make it to the meeting?\",\\\n",
+    "       \"@alex Did you make it to the meeting...\",\\\n",
+    "       \"@alex Did you make it there to the meeting?\",\\\n",
+    "       \"@alex did you make it to the meeting?\",\\\n",
+    "       \"@alex Did you maaaaake it to the meeting?\"]\n",
+    "\n",
+    "print set(lines)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Using a Main function"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Why don't we now use a main function to call the code we wrote so far?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 55,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Welcome to the sentiment statistician!!!\n",
+      "% of positive: 0.0864\n",
+      "% of negative: 0.1673\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "import re\n",
+    "import string\n",
+    "punc = [char for char in string.punctuation]\n",
+    "\n",
+    "def clean(to_filter_list, text):\n",
+    "    '''\n",
+    "    input: \n",
+    "    a. list of undesirable items we want to remove from text\n",
+    "    b. 
text we want to clean\n", + " output:\n", + " cleaned text\n", + " '''\n", + " for i in to_filter_list:\n", + " #print i\n", + " i=\"\\\\\"+i\n", + " text=re.sub(i, \"\", text)\n", + " return text\n", + "\n", + "\n", + "def clean_lexicon(lex_input):\n", + " lex_file_l=open(lex_input, \"r\").readlines()\n", + " \n", + " new_lex_file_l=[]\n", + " for i in lex_file_l:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_lex_file_l.append(i)\n", + " return new_lex_file_l\n", + "\n", + "\n", + "# Determine the percentage of positive words in a file:\n", + "def get_sentiment_diversity(pos_lex, neg_lex, input_file):\n", + " '''\n", + " just returns some stats about % of pos and neg sentiment in a file...\n", + " '''\n", + " input_string=open(input_file, \"r\").read().lower()\n", + " input_string= clean(punc, input_string)\n", + " len_words= float(len(input_string.split()))\n", + " pos_count=0\n", + " neg_count=0\n", + " for w in pos_lex:\n", + " pos_count+= input_string.count(w)\n", + " for w in neg_lex:\n", + " neg_count += input_string.count(w)\n", + " return pos_count, neg_count, len_words\n", + " \n", + "def main():\n", + " # Call the code...\n", + " #------------------\n", + " print(\"Welcome to the sentiment statistician!!!\")\n", + " # Get the lexicon:\n", + " pos_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + " neg_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + " # Read the hamlet file\n", + " input_file=\"/Users/mam/CORE/TEACHING/ssa/git_hub/python_tutorial/hamlet.txt\"\n", + " # get sentiment stats\n", + " pos_count, neg_count, len_words= get_sentiment_diversity(pos_lex, neg_lex, input_file)\n", + " #-------------------------\n", + " print \"% of positive: \", round(pos_count/len_words, 4) \n", + " print \"% of negative: \", round(neg_count/len_words, 4)\n", + "\n", + " \n", + "if __name__ == \"__main__\":\n", + " main()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Miscellaneous code to loop over lines from a file, and do something (e.g., counting positive and negative words)" + ] + }, { "cell_type": "code", "execution_count": 51, @@ -189,51 +507,6 @@ " " ] }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "x=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", - "print type(x) " - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "@Vivawonderwoman Got it! 
:)\n",
-    "Riri flow on Pandora..Christmas tree all done :)\n",
-    "Ah love feels so great :-)\n",
-    "@stephhybb okay maybe then but the other stores usually have better ones & okay yeah come after you're done at game stop!:) def!\n",
-    "@pammpimm haha gpp kok dek :) thanks yaaaa\n",
-    "@katelittle_ @soph_funari @kaseycreehan @kaylaaajx3 awe Kate I love youuu <333 :)\n"
-   ]
-  }
- ],
- "source": [
-    "for l in x[:6]:\n",
-    "    print l[:-1]"
-   ]
-  },
   {
    "cell_type": "code",
    "execution_count": 34,
    "        print count_pos #entry, lines.index(l)\n",
    "        count_pos=0\n"
   ]
  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
 ],
 "metadata": {

From a10424f1006c0fdc5b6272d752dc00c4c21e58af Mon Sep 17 00:00:00 2001
From: mageed
Date: Sun, 7 Feb 2016 09:35:07 -0500
Subject: [PATCH 17/36] Adding Theano-based logistic regression example

---
 logistic_regression_theano.ipynb              | 969 ++++++++++++++++++
 ...utorial_part_3_rule_based_classifier.ipynb |  17 +-
 2 files changed, 971 insertions(+), 15 deletions(-)
 create mode 100644 logistic_regression_theano.ipynb

diff --git a/logistic_regression_theano.ipynb b/logistic_regression_theano.ipynb
new file mode 100644
index 0000000..3104e2f
--- /dev/null
+++ b/logistic_regression_theano.ipynb
@@ -0,0 +1,969 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Logistic Regression for Sentiment Analysis with Theano"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This is a tutorial on logistic regression using Theano, based on the MNIST \n",
+    "logistic regression code provided at http://deeplearning.net/tutorial/logreg.html.\n",
+    "We write some code to read Andrew Maas' distribution of the IMDB data\n",
+    "and make some changes in the original MNIST Theano tutorial as needed. 
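As a refresher, binary logistic regression models the probability of the positive class as P(y=1|x) = 1 / (1 + exp(-(w.x + b))); for instance, an input scoring w.x + b = 0 gets probability exactly 0.5. The code below uses the multi-class (softmax) generalization of the same idea.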
\n", + "You will need to familiarize yourself with Theano to understand this tutorial well.\n", + "Take a look here: http://deeplearning.net/software/theano/tutorial/" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Simple Example" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It will probably help if you consider this simpler code for logistic regression,\n", + "This code is provided at: http://deeplearning.net/software/theano/tutorial/examples.html.\n", + "Maybe try changing values of \"N\" and the number of \"training_steps\" and see what you get.\n", + "Later, we will use logistic regression too, but with a technique called \"stochastic gradient descent.\" " + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + "Final model:\n", + "target values for D:\n", + "[1 1 1 1 1 0 1 0 1 1 0 1 0 1 0 1 0 1 1 0 1 0 1 1 1 1 1 0 1 1 0 1 1 0 1 1 1\n", + " 0 1 1 0 0 0 0 1 1 1 1 0 0 1 1 1 0 0 1 1 1 0 0 0 1 1 0 1 1 0 0 1 1 1 1 1 0\n", + " 1 0 1 0 1 0 1 1 1 0 0 1 1 1 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 1 0 1 0 0 1 0 1\n", + " 1 1 1 1 0 0 0 1 0 1 1 0 0 1 0 0 0 1 1 0 1 1 1 0 0 1 0 0 1 1 1 0 0 1 1 1 0\n", + " 0 1 0 1 0 0 1 1 0 0 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 1 0 0 0 0 1 1 0 0 0 1 0\n", + " 1 0 0 0 0 0 1 0 0 1 0 0 1 0 1 0 1 0 0 1 0 0 1 1 0 0 0 1 1 1 1 0 1 1 1 1 1\n", + " 1 1 0 1 1 0 1 0 1 1 1 1 1 1 0 1 1 1 0 0 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 1\n", + " 0 0 1 0 1 1 1 1 0 0 1 0 0 1 1 1 0 1 1 1 0 1 0 1 0 1 0 0 0 1 0 0 0 0 1 1 1\n", + " 1 1 0 1 0 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 0 0 1 1 0 1 1 1 1 1 1 0 1 0 1 1\n", + " 1 1 0 0 0 0 0 1 0 0 1 1 1 1 1 0 0 1 0 0 0 0 1 1 1 0 1 0 0 0 1 0 0 0 1 1 0\n", + " 0 0 1 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 1 1 0 1 0]\n", + "prediction on D:\n", + "[0 1 1 1 0 1 1 0 1 1 1 1 1 0 0 0 0 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 1 1 0 1 1\n", + " 1 1 0 1 0 0 0 1 0 1 0 0 1 0 0 0 1 1 1 0 1 0 1 1 0 0 0 0 0 1 0 1 0 1 1 0 0\n", + " 1 0 1 1 0 0 0 1 1 1 0 0 0 1 1 1 0 0 1 1 0 0 0 1 1 0 1 1 0 1 0 1 0 1 0 0 0\n", + " 0 1 1 1 0 1 0 1 1 0 1 1 0 1 1 0 1 0 1 0 1 1 1 1 1 1 1 0 1 0 1 1 0 1 0 1 0\n", + " 1 0 1 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 1 1 1 0 1 0 0 0 0 1\n", + " 1 0 0 0 0 0 1 1 0 0 0 1 1 1 1 0 1 1 1 1 0 1 1 1 1 0 1 0 0 0 1 0 1 0 1 0 0\n", + " 0 0 0 1 0 0 0 1 1 1 0 0 1 0 0 1 1 1 0 0 0 0 1 1 1 1 0 1 1 0 0 0 0 0 0 1 0\n", + " 1 1 0 1 1 1 1 1 1 1 1 0 0 0 0 0 1 0 0 1 0 0 0 1 0 1 0 1 1 0 0 1 1 0 1 0 0\n", + " 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 1 0 1 0 1 0 1 1 0 0 1 0 0 0 0 1 0 0 0 1\n", + " 0 1 1 1 1 0 0 0 1 1 0 0 1 0 1 1 0 0 1 0 1 1 0 1 1 0 1 0 1 1 1 1 0 1 0 0 1\n", + " 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 1 0 1 1 1 0]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "\n", + "N = 400 # training sample size\n", + "feats = 784 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 100\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and 
the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Now try the code with different values of \"training_steps\" and see what you get.\n", + "# For example, you can try:\n", + "# training_steps= 100, training_steps=500, training_steps=10000" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Logistic Regression with Theano, using Stochastic Gradient Descent" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "space_len: 3047\n", + "train_vecs.shape: 25000, 3047\n", + "dev_vecs.shape: 5000, 3047\n", + "test_vecs.shape: 20000, 3047\n", + "Subtensor{int64}.0\n", + "... building the model\n", + "... 
training the model\n", + "epoch 1, minibatch 41/41, validation error 52.083333 %\n", + " epoch 1, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 2, minibatch 41/41, validation error 52.083333 %\n", + "epoch 3, minibatch 41/41, validation error 52.062500 %\n", + " epoch 3, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 4, minibatch 41/41, validation error 51.895833 %\n", + " epoch 4, minibatch 41/41, test error of best model 50.297980 %\n", + "epoch 5, minibatch 41/41, validation error 50.937500 %\n", + " epoch 5, minibatch 41/41, test error of best model 49.560606 %\n", + "epoch 6, minibatch 41/41, validation error 49.416667 %\n", + " epoch 6, minibatch 41/41, test error of best model 48.267677 %\n", + "epoch 7, minibatch 41/41, validation error 47.708333 %\n", + " epoch 7, minibatch 41/41, test error of best model 46.616162 %\n", + "epoch 8, minibatch 41/41, validation error 45.958333 %\n", + " epoch 8, minibatch 41/41, test error of best model 44.853535 %\n", + "epoch 9, minibatch 41/41, validation error 44.395833 %\n", + " epoch 9, minibatch 41/41, test error of best model 43.176768 %\n", + "epoch 10, minibatch 41/41, validation error 42.875000 %\n", + " epoch 10, minibatch 41/41, test error of best model 41.651515 %\n", + "epoch 11, minibatch 41/41, validation error 41.458333 %\n", + " epoch 11, minibatch 41/41, test error of best model 40.393939 %\n", + "epoch 12, minibatch 41/41, validation error 39.979167 %\n", + " epoch 12, minibatch 41/41, test error of best model 39.333333 %\n", + "epoch 13, minibatch 41/41, validation error 38.520833 %\n", + " epoch 13, minibatch 41/41, test error of best model 38.378788 %\n", + "epoch 14, minibatch 41/41, validation error 37.708333 %\n", + " epoch 14, minibatch 41/41, test error of best model 37.489899 %\n", + "epoch 15, minibatch 41/41, validation error 36.583333 %\n", + " epoch 15, minibatch 41/41, test error of best model 36.686869 %\n", + "epoch 16, minibatch 41/41, validation error 35.562500 %\n", + " epoch 16, minibatch 41/41, test error of best model 36.025253 %\n", + "epoch 17, minibatch 41/41, validation error 34.937500 %\n", + " epoch 17, minibatch 41/41, test error of best model 35.272727 %\n", + "epoch 18, minibatch 41/41, validation error 34.250000 %\n", + " epoch 18, minibatch 41/41, test error of best model 34.631313 %\n", + "epoch 19, minibatch 41/41, validation error 33.604167 %\n", + " epoch 19, minibatch 41/41, test error of best model 33.989899 %\n", + "epoch 20, minibatch 41/41, validation error 33.166667 %\n", + " epoch 20, minibatch 41/41, test error of best model 33.479798 %\n", + "epoch 21, minibatch 41/41, validation error 32.562500 %\n", + " epoch 21, minibatch 41/41, test error of best model 33.010101 %\n", + "epoch 22, minibatch 41/41, validation error 32.208333 %\n", + " epoch 22, minibatch 41/41, test error of best model 32.555556 %\n", + "epoch 23, minibatch 41/41, validation error 31.770833 %\n", + " epoch 23, minibatch 41/41, test error of best model 32.171717 %\n", + "epoch 24, minibatch 41/41, validation error 31.375000 %\n", + " epoch 24, minibatch 41/41, test error of best model 31.772727 %\n", + "epoch 25, minibatch 41/41, validation error 31.083333 %\n", + " epoch 25, minibatch 41/41, test error of best model 31.500000 %\n", + "epoch 26, minibatch 41/41, validation error 30.770833 %\n", + " epoch 26, minibatch 41/41, test error of best model 31.267677 %\n", + "epoch 27, minibatch 41/41, validation error 30.437500 %\n", + " epoch 27, minibatch 41/41, test error of 
best model 31.065657 %\n", + "epoch 28, minibatch 41/41, validation error 30.062500 %\n", + " epoch 28, minibatch 41/41, test error of best model 30.792929 %\n", + "epoch 29, minibatch 41/41, validation error 29.875000 %\n", + " epoch 29, minibatch 41/41, test error of best model 30.570707 %\n", + "epoch 30, minibatch 41/41, validation error 29.479167 %\n", + " epoch 30, minibatch 41/41, test error of best model 30.328283 %\n", + "epoch 31, minibatch 41/41, validation error 29.291667 %\n", + " epoch 31, minibatch 41/41, test error of best model 30.050505 %\n", + "epoch 32, minibatch 41/41, validation error 29.083333 %\n", + " epoch 32, minibatch 41/41, test error of best model 29.858586 %\n", + "epoch 33, minibatch 41/41, validation error 29.000000 %\n", + " epoch 33, minibatch 41/41, test error of best model 29.691919 %\n", + "epoch 34, minibatch 41/41, validation error 28.770833 %\n", + " epoch 34, minibatch 41/41, test error of best model 29.474747 %\n", + "epoch 35, minibatch 41/41, validation error 28.625000 %\n", + " epoch 35, minibatch 41/41, test error of best model 29.222222 %\n", + "epoch 36, minibatch 41/41, validation error 28.437500 %\n", + " epoch 36, minibatch 41/41, test error of best model 28.989899 %\n", + "epoch 37, minibatch 41/41, validation error 28.333333 %\n", + " epoch 37, minibatch 41/41, test error of best model 28.813131 %\n", + "epoch 38, minibatch 41/41, validation error 28.104167 %\n", + " epoch 38, minibatch 41/41, test error of best model 28.656566 %\n", + "epoch 39, minibatch 41/41, validation error 27.770833 %\n", + " epoch 39, minibatch 41/41, test error of best model 28.484848 %\n", + "epoch 40, minibatch 41/41, validation error 27.645833 %\n", + " epoch 40, minibatch 41/41, test error of best model 28.343434 %\n", + "epoch 41, minibatch 41/41, validation error 27.458333 %\n", + " epoch 41, minibatch 41/41, test error of best model 28.252525 %\n", + "epoch 42, minibatch 41/41, validation error 27.312500 %\n", + " epoch 42, minibatch 41/41, test error of best model 28.166667 %\n", + "epoch 43, minibatch 41/41, validation error 27.312500 %\n", + "epoch 44, minibatch 41/41, validation error 27.125000 %\n", + " epoch 44, minibatch 41/41, test error of best model 27.964646 %\n", + "epoch 45, minibatch 41/41, validation error 26.979167 %\n", + " epoch 45, minibatch 41/41, test error of best model 27.803030 %\n", + "epoch 46, minibatch 41/41, validation error 26.791667 %\n", + " epoch 46, minibatch 41/41, test error of best model 27.717172 %\n", + "epoch 47, minibatch 41/41, validation error 26.729167 %\n", + " epoch 47, minibatch 41/41, test error of best model 27.606061 %\n", + "epoch 48, minibatch 41/41, validation error 26.541667 %\n", + " epoch 48, minibatch 41/41, test error of best model 27.555556 %\n", + "epoch 49, minibatch 41/41, validation error 26.395833 %\n", + " epoch 49, minibatch 41/41, test error of best model 27.500000 %\n", + "epoch 50, minibatch 41/41, validation error 26.354167 %\n", + " epoch 50, minibatch 41/41, test error of best model 27.378788 %\n", + "epoch 51, minibatch 41/41, validation error 26.291667 %\n", + " epoch 51, minibatch 41/41, test error of best model 27.287879 %\n", + "epoch 52, minibatch 41/41, validation error 26.187500 %\n", + " epoch 52, minibatch 41/41, test error of best model 27.267677 %\n", + "epoch 53, minibatch 41/41, validation error 26.041667 %\n", + " epoch 53, minibatch 41/41, test error of best model 27.196970 %\n", + "epoch 54, minibatch 41/41, validation error 25.937500 %\n", + " epoch 54, minibatch 
41/41, test error of best model 27.106061 %\n", + "epoch 55, minibatch 41/41, validation error 25.791667 %\n", + " epoch 55, minibatch 41/41, test error of best model 27.060606 %\n", + "epoch 56, minibatch 41/41, validation error 25.750000 %\n", + " epoch 56, minibatch 41/41, test error of best model 26.979798 %\n", + "epoch 57, minibatch 41/41, validation error 25.645833 %\n", + " epoch 57, minibatch 41/41, test error of best model 26.944444 %\n", + "epoch 58, minibatch 41/41, validation error 25.666667 %\n", + "epoch 59, minibatch 41/41, validation error 25.562500 %\n", + " epoch 59, minibatch 41/41, test error of best model 26.823232 %\n", + "epoch 60, minibatch 41/41, validation error 25.562500 %\n", + "epoch 61, minibatch 41/41, validation error 25.541667 %\n", + " epoch 61, minibatch 41/41, test error of best model 26.636364 %\n", + "epoch 62, minibatch 41/41, validation error 25.520833 %\n", + " epoch 62, minibatch 41/41, test error of best model 26.595960 %\n", + "epoch 63, minibatch 41/41, validation error 25.479167 %\n", + " epoch 63, minibatch 41/41, test error of best model 26.520202 %\n", + "epoch 64, minibatch 41/41, validation error 25.500000 %\n", + "epoch 65, minibatch 41/41, validation error 25.416667 %\n", + " epoch 65, minibatch 41/41, test error of best model 26.409091 %\n", + "epoch 66, minibatch 41/41, validation error 25.375000 %\n", + " epoch 66, minibatch 41/41, test error of best model 26.343434 %\n", + "epoch 67, minibatch 41/41, validation error 25.354167 %\n", + " epoch 67, minibatch 41/41, test error of best model 26.287879 %\n", + "epoch 68, minibatch 41/41, validation error 25.333333 %\n", + " epoch 68, minibatch 41/41, test error of best model 26.202020 %\n", + "epoch 69, minibatch 41/41, validation error 25.312500 %\n", + " epoch 69, minibatch 41/41, test error of best model 26.202020 %\n", + "epoch 70, minibatch 41/41, validation error 25.291667 %\n", + " epoch 70, minibatch 41/41, test error of best model 26.156566 %\n", + "epoch 71, minibatch 41/41, validation error 25.250000 %\n", + " epoch 71, minibatch 41/41, test error of best model 26.106061 %\n", + "epoch 72, minibatch 41/41, validation error 25.270833 %\n", + "epoch 73, minibatch 41/41, validation error 25.250000 %\n", + "epoch 74, minibatch 41/41, validation error 25.250000 %\n", + "epoch 75, minibatch 41/41, validation error 25.187500 %\n", + " epoch 75, minibatch 41/41, test error of best model 25.904040 %\n", + "epoch 76, minibatch 41/41, validation error 25.208333 %\n", + "epoch 77, minibatch 41/41, validation error 25.208333 %\n", + "epoch 78, minibatch 41/41, validation error 25.187500 %\n", + "epoch 79, minibatch 41/41, validation error 25.229167 %\n", + "epoch 80, minibatch 41/41, validation error 25.187500 %\n", + "epoch 81, minibatch 41/41, validation error 25.145833 %\n", + " epoch 81, minibatch 41/41, test error of best model 25.696970 %\n", + "epoch 82, minibatch 41/41, validation error 25.125000 %\n", + " epoch 82, minibatch 41/41, test error of best model 25.656566 %\n", + "epoch 83, minibatch 41/41, validation error 25.041667 %\n", + " epoch 83, minibatch 41/41, test error of best model 25.636364 %\n", + "epoch 84, minibatch 41/41, validation error 25.041667 %\n", + "epoch 85, minibatch 41/41, validation error 25.062500 %\n", + "epoch 86, minibatch 41/41, validation error 25.062500 %\n", + "epoch 87, minibatch 41/41, validation error 25.041667 %\n", + "epoch 88, minibatch 41/41, validation error 25.041667 %\n", + "epoch 89, minibatch 41/41, validation error 25.000000 %\n", + " 
epoch 89, minibatch 41/41, test error of best model 25.474747 %\n", + "epoch 90, minibatch 41/41, validation error 25.041667 %\n", + "epoch 91, minibatch 41/41, validation error 24.958333 %\n", + " epoch 91, minibatch 41/41, test error of best model 25.434343 %\n", + "epoch 92, minibatch 41/41, validation error 24.958333 %\n", + " epoch 92, minibatch 41/41, test error of best model 25.414141 %\n", + "epoch 93, minibatch 41/41, validation error 24.895833 %\n", + " epoch 93, minibatch 41/41, test error of best model 25.409091 %\n", + "epoch 94, minibatch 41/41, validation error 24.895833 %\n", + "epoch 95, minibatch 41/41, validation error 24.895833 %\n", + "epoch 96, minibatch 41/41, validation error 24.854167 %\n", + " epoch 96, minibatch 41/41, test error of best model 25.318182 %\n", + "epoch 97, minibatch 41/41, validation error 24.854167 %\n", + "epoch 98, minibatch 41/41, validation error 24.833333 %\n", + " epoch 98, minibatch 41/41, test error of best model 25.303030 %\n", + "epoch 99, minibatch 41/41, validation error 24.833333 %\n", + "epoch 100, minibatch 41/41, validation error 24.812500 %\n", + " epoch 100, minibatch 41/41, test error of best model 25.257576 %\n", + "epoch 101, minibatch 41/41, validation error 24.812500 %\n", + "epoch 102, minibatch 41/41, validation error 24.791667 %\n", + " epoch 102, minibatch 41/41, test error of best model 25.227273 %\n", + "epoch 103, minibatch 41/41, validation error 24.770833 %\n", + " epoch 103, minibatch 41/41, test error of best model 25.207071 %\n", + "epoch 104, minibatch 41/41, validation error 24.770833 %\n", + "epoch 105, minibatch 41/41, validation error 24.770833 %\n", + "epoch 106, minibatch 41/41, validation error 24.770833 %\n", + "epoch 107, minibatch 41/41, validation error 24.750000 %\n", + " epoch 107, minibatch 41/41, test error of best model 25.161616 %\n", + "epoch 108, minibatch 41/41, validation error 24.750000 %\n", + "epoch 109, minibatch 41/41, validation error 24.750000 %\n", + "epoch 110, minibatch 41/41, validation error 24.729167 %\n", + " epoch 110, minibatch 41/41, test error of best model 25.070707 %\n", + "epoch 111, minibatch 41/41, validation error 24.729167 %\n", + "epoch 112, minibatch 41/41, validation error 24.708333 %\n", + " epoch 112, minibatch 41/41, test error of best model 25.030303 %\n", + "epoch 113, minibatch 41/41, validation error 24.666667 %\n", + " epoch 113, minibatch 41/41, test error of best model 25.010101 %\n", + "epoch 114, minibatch 41/41, validation error 24.625000 %\n", + " epoch 114, minibatch 41/41, test error of best model 24.984848 %\n", + "epoch 115, minibatch 41/41, validation error 24.625000 %\n", + "epoch 116, minibatch 41/41, validation error 24.604167 %\n", + " epoch 116, minibatch 41/41, test error of best model 24.934343 %\n", + "epoch 117, minibatch 41/41, validation error 24.583333 %\n", + " epoch 117, minibatch 41/41, test error of best model 24.924242 %\n", + "epoch 118, minibatch 41/41, validation error 24.562500 %\n", + " epoch 118, minibatch 41/41, test error of best model 24.914141 %\n", + "epoch 119, minibatch 41/41, validation error 24.562500 %\n", + "epoch 120, minibatch 41/41, validation error 24.541667 %\n", + " epoch 120, minibatch 41/41, test error of best model 24.893939 %\n", + "epoch 121, minibatch 41/41, validation error 24.541667 %\n", + "Optimization complete with best validation score of 24.541667 %,with test performance 24.893939 %\n", + "The code run for 122 epochs, with 4.817208 epochs/sec\n", + "The code for file best_model.pkl ran for 
25.3s\n", + "Now predicting...\n", + "Predicted values for the first 10 examples in test set:\n", + "[0 0 0 1 1 1 1 0 0 0]\n" + ] + } + ], + "source": [ + "\"\"\"\n", + "This tutorial introduces logistic regression using Theano and stochastic\n", + "gradient descent.\n", + "\n", + "Logistic regression is a probabilistic, linear classifier. It is parametrized\n", + "by a weight matrix :math:`W` and a bias vector :math:`b`. Classification is\n", + "done by projecting data points onto a set of hyperplanes, the distance to\n", + "which is used to determine a class membership probability.\n", + "\n", + "Mathematically, this can be written as:\n", + "\n", + ".. math::\n", + " P(Y=i|x, W,b) &= softmax_i(W x + b) \\\\\n", + " &= \\frac {e^{W_i x + b_i}} {\\sum_j e^{W_j x + b_j}}\n", + "\n", + "\n", + "The output of the model or prediction is then done by taking the argmax of\n", + "the vector whose i'th element is P(Y=i|x).\n", + "\n", + ".. math::\n", + "\n", + " y_{pred} = argmax_i P(Y=i|x,W,b)\n", + "\n", + "\n", + "This tutorial presents a stochastic gradient descent optimization method\n", + "suitable for large datasets.\n", + "\n", + "\n", + "References:\n", + "\n", + " - textbooks: \"Pattern Recognition and Machine Learning\" -\n", + " Christopher M. Bishop, section 4.3.2\n", + "\n", + "\"\"\"\n", + "from collections import namedtuple, defaultdict\n", + "from random import shuffle, randint\n", + "#----------------------------------------------------\n", + "__docformat__ = 'restructedtext en'\n", + "\n", + "import cPickle\n", + "import gzip\n", + "import os\n", + "import sys\n", + "import timeit\n", + "\n", + "import numpy\n", + "import numpy as np\n", + "import theano\n", + "import theano.tensor as T\n", + "#----------------------------------------------------\n", + "\n", + "def get_data():\n", + " '''\n", + " \n", + " '''\n", + " all_data = [] \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " train_data = all_data[:25000]\n", + " dev_data = all_data[25000:27500]+all_data[47500:50000]\n", + " test_data=all_data[27500:47500]\n", + " # labels\n", + " train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + " dev_tags= [ 1.0 for i in range(2500)] + [ 0.0 for i in range(2500)]\n", + " test_tags= [ 1.0 for i in range(10000)] + [ 0.0 for i in range(10000)]\n", + " return train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " #--------------------------------------------------\n", + "#train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + "########################\n", + "\n", + "\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for 
doc in train_data:\n",
+    "        for w in doc.words:\n",
+    "            # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n",
+    "            # but that doesn't matter.\n",
+    "            word_space[w]+=1\n",
+    "    return word_space\n",
+    "\n",
+    "# train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n",
+    "# word_space=get_space(train_data)\n",
+    "# word_space={w: word_space[w] for w in word_space if word_space[w] > 500}\n",
+    "# space_len=len(word_space)\n",
+    "# print \"space_len: \", space_len\n",
+    "def get_sparse_vec(data_point, space):\n",
+    "    \"\"\"Return a binary (1/0) bag-of-words vector for one document.\"\"\"\n",
+    "    # create empty vector\n",
+    "    sparse_vec = np.zeros((len(space)))\n",
+    "    for w in set(data_point.words):\n",
+    "        # use exception handling such that this function can also be used to vectorize \n",
+    "        # data with words not in train (i.e., test and dev data)\n",
+    "        try:\n",
+    "            sparse_vec[space[w]]=1\n",
+    "        except:\n",
+    "            continue\n",
+    "    return sparse_vec\n",
+    "\n",
+    " \n",
+    "# train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n",
+    "# test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n",
+    "# dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n",
+    "# #---------------------------\n",
+    "# train_vecs=np.array(train_vecs)\n",
+    "# train_tags=np.array(train_tags)\n",
+    "# dev_vecs=np.array(dev_vecs)\n",
+    "# dev_tags=np.array(dev_tags)\n",
+    "# test_vecs=np.array(test_vecs)\n",
+    "# test_tags=np.array(test_tags)\n",
+    "# print train_vecs.shape\n",
+    "# print dev_vecs.shape\n",
+    "# print test_vecs.shape\n",
+    "\n",
+    "\n",
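+    "# A quick illustration of get_sparse_vec (hypothetical toy values, not run against\n",
+    "# the real data): with a two-word space, it sets the indicator positions of the\n",
+    "# words it knows and silently skips unseen words via the except clause:\n",
+    "#\n",
+    "# toy_space = {\"good\": 0, \"movie\": 1}\n",
+    "# toy_doc = DataDoc(\"1\", [\"a\", \"good\", \"movie\"])\n",
+    "# get_sparse_vec(toy_doc, toy_space)  # --> array([ 1.,  1.]); \"a\" is not in the space\n",
+    "\n",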
+    "def load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags):\n",
+    "    #------------------------------\n",
+    "    # Modified from the Theano tutorial.\n",
+    "    # I basically pass data_x, data_y instead of data_xy\n",
+    "    def shared_dataset(data_x, data_y, borrow=True):\n",
+    "        \"\"\" Function that loads the dataset into shared variables\n",
+    "\n",
+    "        The reason we store our dataset in shared variables is to allow\n",
+    "        Theano to copy it into the GPU memory (when code is run on GPU).\n",
+    "        Since copying data into the GPU is slow, copying a minibatch every time\n",
+    "        it is needed (the default behaviour if the data is not in a shared\n",
+    "        variable) would lead to a large decrease in performance.\n",
+    "        \"\"\"\n",
+    "        shared_x = theano.shared(numpy.asarray(data_x,\n",
+    "                                               dtype=theano.config.floatX), borrow=borrow)\n",
+    "        shared_y = theano.shared(numpy.asarray(data_y,\n",
+    "                                               dtype=theano.config.floatX),\n",
+    "                                 borrow=borrow)\n",
+    "        # When storing data on the GPU it has to be stored as floats\n",
+    "        # therefore we will store the labels as ``floatX`` as well\n",
+    "        # (``shared_y`` does exactly that). But during our computations\n",
+    "        # we need them as ints (we use labels as index, and if they are\n",
+    "        # floats it doesn't make sense) therefore instead of returning\n",
+    "        # ``shared_y`` we will have to cast it to int. This little hack\n",
+    "        # lets us get around this issue\n",
+    "        return shared_x, T.cast(shared_y, 'int32')\n",
+    "    #-----------------------------------------------------------------\n",
+    "    train_set_x, train_set_y = shared_dataset(train_vecs, train_tags)\n",
+    "    valid_set_x, valid_set_y = shared_dataset(dev_vecs, dev_tags)\n",
+    "    test_set_x, test_set_y = shared_dataset(test_vecs, test_tags)\n",
+    "\n",
+    "    rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),\n",
+    "            (test_set_x, test_set_y)]\n",
+    "    return rval\n",
+    "\n",
+    "#rval=load_data(train_vecs, train_tags)\n",
+    "#print rval\n",
+    "\n",
+    "\n",
+    "class LogisticRegression(object):\n",
+    "    \"\"\"Multi-class Logistic Regression Class\n",
+    "\n",
+    "    The logistic regression is fully described by a weight matrix :math:`W`\n",
+    "    and bias vector :math:`b`. Classification is done by projecting data\n",
+    "    points onto a set of hyperplanes, the distance to which is used to\n",
+    "    determine a class membership probability.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    def __init__(self, input, n_in, n_out):\n",
+    "        \"\"\" Initialize the parameters of the logistic regression\n",
+    "\n",
+    "        :type input: theano.tensor.TensorType\n",
+    "        :param input: symbolic variable that describes the input of the\n",
+    "                      architecture (one minibatch)\n",
+    "\n",
+    "        :type n_in: int\n",
+    "        :param n_in: number of input units, the dimension of the space in\n",
+    "                     which the datapoints lie\n",
+    "\n",
+    "        :type n_out: int\n",
+    "        :param n_out: number of output units, the dimension of the space in\n",
+    "                      which the labels lie\n",
+    "\n",
+    "        \"\"\"\n",
+    "        # start-snippet-1\n",
+    "        # initialize with 0 the weights W as a matrix of shape (n_in, n_out)\n",
+    "        self.W = theano.shared(\n",
+    "            value=numpy.zeros(\n",
+    "                (n_in, n_out),\n",
+    "                dtype=theano.config.floatX\n",
+    "            ),\n",
+    "            name='W',\n",
+    "            borrow=True\n",
+    "        )\n",
+    "        # initialize the biases b as a vector of n_out 0s\n",
+    "        self.b = theano.shared(\n",
+    "            value=numpy.zeros(\n",
+    "                (n_out,),\n",
+    "                dtype=theano.config.floatX\n",
+    "            ),\n",
+    "            name='b',\n",
+    "            borrow=True\n",
+    "        )\n",
+    "\n",
+    "        # symbolic expression for computing the matrix of class-membership\n",
+    "        # probabilities\n",
+    "        # Where:\n",
+    "        # W is a matrix where column-k represent the separation hyperplane for\n",
+    "        # class-k\n",
+    "        # x is a matrix where row-j represents input training sample-j\n",
+    "        # b is a vector where element-k represent the free parameter of\n",
+    "        # hyperplane-k\n",
+    "        self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)\n",
+    "\n",
+    "        # symbolic description of how to compute prediction as class whose\n",
+    "        # probability is maximal\n",
+    "        self.y_pred = T.argmax(self.p_y_given_x, axis=1)\n",
+    "        # end-snippet-1\n",
+    "\n",
+    "        # parameters of the model\n",
+    "        self.params = [self.W, self.b]\n",
+    "\n",
+    "        # keep track of model input\n",
+    "        self.input = input\n",
+    "\n",
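+    "    # A note on shapes, using this tutorial's own numbers (an illustrative aside,\n",
+    "    # not part of the original Theano code): with n_in=space_len=3047 and n_out=2,\n",
+    "    # W is a 3047 x 2 matrix and b a length-2 vector, so for a minibatch of shape\n",
+    "    # (batch_size, 3047), T.dot(input, self.W) + self.b has shape (batch_size, 2)\n",
+    "    # and the softmax turns each row into the two class probabilities, e.g.\n",
+    "    # softmax([0., 1.]) = [1/(1+e), e/(1+e)], roughly [0.27, 0.73].\n",
+    "\n",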
+    "    def negative_log_likelihood(self, y):\n",
+    "        \"\"\"Return the mean of the negative log-likelihood of the prediction\n",
+    "        of this model under a given target distribution.\n",
+    "\n",
+    "        .. math::\n",
+    "\n",
+    "            \\frac{1}{|\\mathcal{D}|} \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D}) =\n",
+    "            \\frac{1}{|\\mathcal{D}|} \\sum_{i=0}^{|\\mathcal{D}|}\n",
+    "            \\log(P(Y=y^{(i)}|x^{(i)}, W,b)) \\\\\n",
+    "            \\ell (\\theta=\\{W,b\\}, \\mathcal{D})\n",
+    "\n",
+    "        :type y: theano.tensor.TensorType\n",
+    "        :param y: corresponds to a vector that gives for each example the\n",
+    "                  correct label\n",
+    "\n",
+    "        Note: we use the mean instead of the sum so that\n",
+    "        the learning rate is less dependent on the batch size\n",
+    "        \"\"\"\n",
+    "        # start-snippet-2\n",
+    "        # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n",
+    "        # number of examples (call it n) in the minibatch\n",
+    "        # T.arange(y.shape[0]) is a symbolic vector which will contain\n",
+    "        # [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of\n",
+    "        # Log-Probabilities (call it LP) with one row per example and\n",
+    "        # one column per class LP[T.arange(y.shape[0]),y] is a vector\n",
+    "        # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n",
+    "        # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n",
+    "        # the mean (across minibatch examples) of the elements in v,\n",
+    "        # i.e., the mean log-likelihood across the minibatch.\n",
+    "        return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])\n",
+    "        # end-snippet-2\n",
+    "\n",
+    "    def errors(self, y):\n",
+    "        \"\"\"Return a float representing the number of errors in the minibatch\n",
+    "        over the total number of examples of the minibatch ; zero one\n",
+    "        loss over the size of the minibatch\n",
+    "\n",
+    "        :type y: theano.tensor.TensorType\n",
+    "        :param y: corresponds to a vector that gives for each example the\n",
+    "                  correct label\n",
+    "        \"\"\"\n",
+    "\n",
+    "        # check if y has same dimension of y_pred\n",
+    "        if y.ndim != self.y_pred.ndim:\n",
+    "            raise TypeError(\n",
+    "                'y should have the same shape as self.y_pred',\n",
+    "                ('y', y.type, 'y_pred', self.y_pred.type)\n",
+    "            )\n",
+    "        # check if y is of the correct datatype\n",
+    "        if y.dtype.startswith('int'):\n",
+    "            # the T.neq operator returns a vector of 0s and 1s, where 1\n",
+    "            # represents a mistake in prediction\n",
+    "            return T.mean(T.neq(self.y_pred, y))\n",
+    "        else:\n",
+    "            raise NotImplementedError()\n",
+    "\n",
+    "\n",
+    "def sgd_optimization(learning_rate=0.13, n_epochs=1000,\n",
+    "                     batch_size=600):\n",
+    "    \"\"\"\n",
+    "    Demonstrate stochastic gradient descent optimization of a log-linear\n",
+    "    model\n",
+    "\n",
+    "    Originally demonstrated on MNIST in the Theano tutorial; here we run it\n",
+    "    on the IMDB bag-of-words vectors built above.\n",
+    "\n",
+    "    :type learning_rate: float\n",
+    "    :param learning_rate: learning rate used (factor for the stochastic\n",
+    "                          gradient)\n",
+    "\n",
+    "    :type n_epochs: int\n",
+    "    :param n_epochs: maximal number of epochs to run the optimizer\n",
+    "\n",
+    "    \"\"\"\n",
+    "    datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n",
+    "    train_set_x, train_set_y = datasets[0]\n",
+    "    valid_set_x, valid_set_y = datasets[1]\n",
+    "    test_set_x, test_set_y = datasets[2]\n",
+    "    # note: this prints a symbolic expression (\"Subtensor{int64}.0\"), not a number\n",
+    "    print train_set_x.shape[0]\n",
+    "    # compute number of minibatches for training, validation and testing\n",
+    "    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size\n",
+    "    n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size\n",
+    "    n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size\n",
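+    "    # Sanity check on the minibatch arithmetic (an added aside): with the 25000\n",
+    "    # training vectors above and the default batch_size=600, integer division gives\n",
+    "    # 25000 / 600 = 41 minibatches per epoch, which matches the \"minibatch 41/41\"\n",
+    "    # lines in the logged output of this cell.\n",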
"\n", + " ######################\n", + " # BUILD ACTUAL MODEL #\n", + " ######################\n", + " print '... building the model'\n", + "\n", + " # allocate symbolic variables for the data\n", + " index = T.lscalar() # index to a [mini]batch\n", + "\n", + " # generate symbolic variables for input (x and y represent a\n", + " # minibatch)\n", + " x = T.matrix('x') # data, presented as rasterized images\n", + " y = T.ivector('y') # labels, presented as 1D vector of [int] labels\n", + "\n", + " # construct the logistic regression class\n", + " # Each MNIST image has size 28*28\n", + " # MAM: We change size: n_in=space_len\n", + " classifier = LogisticRegression(input=x, n_in=space_len, n_out=2)\n", + "\n", + " # the cost we minimize during training is the negative log likelihood of\n", + " # the model in symbolic format\n", + " cost = classifier.negative_log_likelihood(y)\n", + "\n", + " # compiling a Theano function that computes the mistakes that are made by\n", + " # the model on a minibatch\n", + " test_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: test_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: test_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " validate_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: valid_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: valid_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " # compute the gradient of cost with respect to theta = (W,b)\n", + " g_W = T.grad(cost=cost, wrt=classifier.W)\n", + " g_b = T.grad(cost=cost, wrt=classifier.b)\n", + "\n", + " # start-snippet-3\n", + " # specify how to update the parameters of the model as a list of\n", + " # (variable, update expression) pairs.\n", + " updates = [(classifier.W, classifier.W - learning_rate * g_W),\n", + " (classifier.b, classifier.b - learning_rate * g_b)]\n", + "\n", + " # compiling a Theano function `train_model` that returns the cost, but in\n", + " # the same time updates the parameter of the model based on the rules\n", + " # defined in `updates`\n", + " train_model = theano.function(\n", + " inputs=[index],\n", + " outputs=cost,\n", + " updates=updates,\n", + " givens={\n", + " x: train_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: train_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + " # end-snippet-3\n", + "\n", + " ###############\n", + " # TRAIN MODEL #\n", + " ###############\n", + " print '... 
training the model'\n", + " # early-stopping parameters\n", + " patience = 5000 # look as this many examples regardless\n", + " patience_increase = 2 # wait this much longer when a new best is\n", + " # found\n", + " improvement_threshold = 0.995 # a relative improvement of this much is\n", + " # considered significant\n", + " validation_frequency = min(n_train_batches, patience / 2)\n", + " # go through this many\n", + " # minibatche before checking the network\n", + " # on the validation set; in this case we\n", + " # check every epoch\n", + "\n", + " best_validation_loss = numpy.inf\n", + " test_score = 0.\n", + " start_time = timeit.default_timer()\n", + "\n", + " done_looping = False\n", + " epoch = 0\n", + " while (epoch < n_epochs) and (not done_looping):\n", + " epoch = epoch + 1\n", + " for minibatch_index in xrange(n_train_batches):\n", + "\n", + " minibatch_avg_cost = train_model(minibatch_index)\n", + " # iteration number\n", + " iter = (epoch - 1) * n_train_batches + minibatch_index\n", + "\n", + " if (iter + 1) % validation_frequency == 0:\n", + " # compute zero-one loss on validation set\n", + " validation_losses = [validate_model(i)\n", + " for i in xrange(n_valid_batches)]\n", + " this_validation_loss = numpy.mean(validation_losses)\n", + "\n", + " print(\n", + " 'epoch %i, minibatch %i/%i, validation error %f %%' %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " this_validation_loss * 100.\n", + " )\n", + " )\n", + "\n", + " # if we got the best validation score until now\n", + " if this_validation_loss < best_validation_loss:\n", + " #improve patience if loss improvement is good enough\n", + " if this_validation_loss < best_validation_loss * \\\n", + " improvement_threshold:\n", + " patience = max(patience, iter * patience_increase)\n", + "\n", + " best_validation_loss = this_validation_loss\n", + " # test it on the test set\n", + "\n", + " test_losses = [test_model(i)\n", + " for i in xrange(n_test_batches)]\n", + " test_score = numpy.mean(test_losses)\n", + "\n", + " print(\n", + " (\n", + " ' epoch %i, minibatch %i/%i, test error of'\n", + " ' best model %f %%'\n", + " ) %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " test_score * 100.\n", + " )\n", + " )\n", + "\n", + " # save the best model\n", + " with open('best_model.pkl', 'w') as f:\n", + " cPickle.dump(classifier, f)\n", + "\n", + " if patience <= iter:\n", + " done_looping = True\n", + " break\n", + "\n", + " end_time = timeit.default_timer()\n", + " print(\n", + " (\n", + " 'Optimization complete with best validation score of %f %%,'\n", + " 'with test performance %f %%'\n", + " )\n", + " % (best_validation_loss * 100., test_score * 100.)\n", + " )\n", + " print 'The code run for %d epochs, with %f epochs/sec' % (\n", + " epoch, 1. 
* epoch / (end_time - start_time))\n", + " print ('The code for file ' +\n", + " 'best_model.pkl' +\n", + " ' ran for %.1fs' % ((end_time - start_time)))\n", + "\n", + "\n", + "def predict():\n", + " \"\"\"\n", + " An example of how to load a trained model and use it\n", + " to predict labels.\n", + " \"\"\"\n", + "\n", + " # load the saved model\n", + " classifier = cPickle.load(open('best_model.pkl'))\n", + "\n", + " # compile a predictor function\n", + " predict_model = theano.function(\n", + " inputs=[classifier.input],\n", + " outputs=classifier.y_pred)\n", + "\n", + " # We can test it on some examples from test test\n", + " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", + " #train_set_x, train_set_y = datasets[0]\n", + " #valid_set_x, valid_set_y = datasets[1]\n", + " test_set_x, test_set_y = datasets[2]\n", + " test_set_x = test_set_x.get_value()\n", + " predicted_values = predict_model(test_set_x[:10])\n", + " print (\"Predicted values for the first 10 examples in test set:\")\n", + " print predicted_values\n", + "\n", + "\n", + "if __name__ == '__main__':\n", + " train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + " word_space=get_space(train_data)\n", + " # We only use words with a give frequency threshold. This also helps much with memory.\n", + " # Note that I am using a *very* high threshold here, since the goal is to demonstrate \n", + " # the technique and its utility, rather than higher accuracy. I also had some memory issues.\n", + " word_space={w: word_space[w] for w in word_space if word_space[w] > 150} \n", + " space_len=len(word_space)\n", + " print(\"space_len: %d\" % space_len)\n", + " train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + " test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + " dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", + " #del word_space\n", + " #---------------------------\n", + " train_vecs=np.array(train_vecs)\n", + " train_tags=np.array(train_tags)\n", + " dev_vecs=np.array(dev_vecs)\n", + " dev_tags=np.array(dev_tags)\n", + " test_vecs=np.array(test_vecs)\n", + " test_tags=np.array(test_tags)\n", + " #del train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " print('train_vecs.shape: %d, %d' % train_vecs.shape)\n", + " print('dev_vecs.shape: %d, %d' % dev_vecs.shape)\n", + " print('test_vecs.shape: %d, %d' % test_vecs.shape)\n", + " sgd_optimization()\n", + " #------------------------------------------------------\n", + " print('Now predicting...')\n", + " predict()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_3_rule_based_classifier.ipynb b/python_tutorial_part_3_rule_based_classifier.ipynb index c9e87f6..2f2ff62 100644 --- a/python_tutorial_part_3_rule_based_classifier.ipynb +++ b/python_tutorial_part_3_rule_based_classifier.ipynb @@ -420,12 +420,8 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], + "cell_type": "markdown", + "metadata": {}, "source": [ "# Miscellaneous 
code to loop over lines from a file, and do something (e.g., counting positive and negative words)"
    ]
   },
@@ -590,15 +586,6 @@
     "    print count_pos #entry, lines.index(l)\n",
     "    count_pos=0\n"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
   }
  ],
  "metadata": {

From 6970587565681cc5f905cdf2f3cab064f74db7fc Mon Sep 17 00:00:00 2001
From: mageed
Date: Sun, 7 Feb 2016 09:44:26 -0500
Subject: [PATCH 18/36] Adding Theano-based logistic regression example

---
 ...ogistic_regression_theano-checkpoint.ipynb |  969 ++++++
 ...art_2_forward_propagation-checkpoint.ipynb |  945 ++++++
 ...t_3_rule_based_classifier-checkpoint.ipynb |  389 ++-
 ...orial_part_6_vector_space-checkpoint.ipynb |  208 +-
 ...orial_part_9_neural_net_a-checkpoint.ipynb |  668 +++++
 .../theano_tutorial-checkpoint.ipynb          | 2650 +++++++++++++++++
 best_model.pkl                                |  857 ++++++
 ...mystified_part_2_forward_propagation.ipynb |  982 ++++++
 python_tutorial_part_6_vector_space.ipynb     |  214 +-
 python_tutorial_part_9_neural_net_a.ipynb     |  677 +++++
 theano_tutorial.ipynb                         | 2160 ++++++++++++++
 11 files changed, 10506 insertions(+), 213 deletions(-)
 create mode 100644 .ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb
 create mode 100644 .ipynb_checkpoints/neural_net_demystified_part_2_forward_propagation-checkpoint.ipynb
 create mode 100644 .ipynb_checkpoints/python_tutorial_part_9_neural_net_a-checkpoint.ipynb
 create mode 100644 .ipynb_checkpoints/theano_tutorial-checkpoint.ipynb
 create mode 100644 best_model.pkl
 create mode 100644 neural_net_demystified_part_2_forward_propagation.ipynb
 create mode 100644 python_tutorial_part_9_neural_net_a.ipynb
 create mode 100644 theano_tutorial.ipynb

diff --git a/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb b/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb
new file mode 100644
index 0000000..3104e2f
--- /dev/null
+++ b/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb
@@ -0,0 +1,969 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Logistic Regression for Sentiment Analysis with Theano"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This is a tutorial on logistic regression using Theano, based on the MNIST\n",
+    "logistic regression code provided at http://deeplearning.net/tutorial/logreg.html.\n",
+    "We add some code to read Andrew Maas's distribution of the IMDB data\n",
+    "and make some changes to the original MNIST Theano tutorial as needed. 
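\n",
+    "\n",
+    "As a quick, self-contained warm-up (added here for illustration; it is not part of the original tutorial), this is the define-then-compile style that all of the Theano code below follows:\n",
+    "\n",
+    "```python\n",
+    "import theano\n",
+    "import theano.tensor as T\n",
+    "\n",
+    "x = T.dscalar('x')           # a symbolic scalar, no value yet\n",
+    "y = x ** 2 + 1               # an expression graph, not a number\n",
+    "f = theano.function([x], y)  # compile the graph into a callable\n",
+    "print f(3.0)                 # -> 10.0\n",
+    "```\n",
+    "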
\n", + "You will need to familiarize yourself with Theano to understand this tutorial well.\n", + "Take a look here: http://deeplearning.net/software/theano/tutorial/" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Simple Example" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It will probably help if you consider this simpler code for logistic regression,\n", + "This code is provided at: http://deeplearning.net/software/theano/tutorial/examples.html.\n", + "Maybe try changing values of \"N\" and the number of \"training_steps\" and see what you get.\n", + "Later, we will use logistic regression too, but with a technique called \"stochastic gradient descent.\" " + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + "Final model:\n", + "target values for D:\n", + "[1 1 1 1 1 0 1 0 1 1 0 1 0 1 0 1 0 1 1 0 1 0 1 1 1 1 1 0 1 1 0 1 1 0 1 1 1\n", + " 0 1 1 0 0 0 0 1 1 1 1 0 0 1 1 1 0 0 1 1 1 0 0 0 1 1 0 1 1 0 0 1 1 1 1 1 0\n", + " 1 0 1 0 1 0 1 1 1 0 0 1 1 1 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 1 0 1 0 0 1 0 1\n", + " 1 1 1 1 0 0 0 1 0 1 1 0 0 1 0 0 0 1 1 0 1 1 1 0 0 1 0 0 1 1 1 0 0 1 1 1 0\n", + " 0 1 0 1 0 0 1 1 0 0 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 1 0 0 0 0 1 1 0 0 0 1 0\n", + " 1 0 0 0 0 0 1 0 0 1 0 0 1 0 1 0 1 0 0 1 0 0 1 1 0 0 0 1 1 1 1 0 1 1 1 1 1\n", + " 1 1 0 1 1 0 1 0 1 1 1 1 1 1 0 1 1 1 0 0 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 1\n", + " 0 0 1 0 1 1 1 1 0 0 1 0 0 1 1 1 0 1 1 1 0 1 0 1 0 1 0 0 0 1 0 0 0 0 1 1 1\n", + " 1 1 0 1 0 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 0 0 1 1 0 1 1 1 1 1 1 0 1 0 1 1\n", + " 1 1 0 0 0 0 0 1 0 0 1 1 1 1 1 0 0 1 0 0 0 0 1 1 1 0 1 0 0 0 1 0 0 0 1 1 0\n", + " 0 0 1 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 1 1 0 1 0]\n", + "prediction on D:\n", + "[0 1 1 1 0 1 1 0 1 1 1 1 1 0 0 0 0 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 1 1 0 1 1\n", + " 1 1 0 1 0 0 0 1 0 1 0 0 1 0 0 0 1 1 1 0 1 0 1 1 0 0 0 0 0 1 0 1 0 1 1 0 0\n", + " 1 0 1 1 0 0 0 1 1 1 0 0 0 1 1 1 0 0 1 1 0 0 0 1 1 0 1 1 0 1 0 1 0 1 0 0 0\n", + " 0 1 1 1 0 1 0 1 1 0 1 1 0 1 1 0 1 0 1 0 1 1 1 1 1 1 1 0 1 0 1 1 0 1 0 1 0\n", + " 1 0 1 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 1 1 1 0 1 0 0 0 0 1\n", + " 1 0 0 0 0 0 1 1 0 0 0 1 1 1 1 0 1 1 1 1 0 1 1 1 1 0 1 0 0 0 1 0 1 0 1 0 0\n", + " 0 0 0 1 0 0 0 1 1 1 0 0 1 0 0 1 1 1 0 0 0 0 1 1 1 1 0 1 1 0 0 0 0 0 0 1 0\n", + " 1 1 0 1 1 1 1 1 1 1 1 0 0 0 0 0 1 0 0 1 0 0 0 1 0 1 0 1 1 0 0 1 1 0 1 0 0\n", + " 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 1 0 1 0 1 0 1 1 0 0 1 0 0 0 0 1 0 0 0 1\n", + " 0 1 1 1 1 0 0 0 1 1 0 0 1 0 1 1 0 0 1 0 1 1 0 1 1 0 1 0 1 1 1 1 0 1 0 0 1\n", + " 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 1 0 1 1 1 0]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "\n", + "N = 400 # training sample size\n", + "feats = 784 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 100\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and 
the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Now try the code with different values of \"training_steps\" and see what you get.\n", + "# For example, you can try:\n", + "# training_steps= 100, training_steps=500, training_steps=10000" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Logistic Regression with Theano, using Stochastic Gradient Descent" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "space_len: 3047\n", + "train_vecs.shape: 25000, 3047\n", + "dev_vecs.shape: 5000, 3047\n", + "test_vecs.shape: 20000, 3047\n", + "Subtensor{int64}.0\n", + "... building the model\n", + "... 
training the model\n", + "epoch 1, minibatch 41/41, validation error 52.083333 %\n", + " epoch 1, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 2, minibatch 41/41, validation error 52.083333 %\n", + "epoch 3, minibatch 41/41, validation error 52.062500 %\n", + " epoch 3, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 4, minibatch 41/41, validation error 51.895833 %\n", + " epoch 4, minibatch 41/41, test error of best model 50.297980 %\n", + "epoch 5, minibatch 41/41, validation error 50.937500 %\n", + " epoch 5, minibatch 41/41, test error of best model 49.560606 %\n", + "epoch 6, minibatch 41/41, validation error 49.416667 %\n", + " epoch 6, minibatch 41/41, test error of best model 48.267677 %\n", + "epoch 7, minibatch 41/41, validation error 47.708333 %\n", + " epoch 7, minibatch 41/41, test error of best model 46.616162 %\n", + "epoch 8, minibatch 41/41, validation error 45.958333 %\n", + " epoch 8, minibatch 41/41, test error of best model 44.853535 %\n", + "epoch 9, minibatch 41/41, validation error 44.395833 %\n", + " epoch 9, minibatch 41/41, test error of best model 43.176768 %\n", + "epoch 10, minibatch 41/41, validation error 42.875000 %\n", + " epoch 10, minibatch 41/41, test error of best model 41.651515 %\n", + "epoch 11, minibatch 41/41, validation error 41.458333 %\n", + " epoch 11, minibatch 41/41, test error of best model 40.393939 %\n", + "epoch 12, minibatch 41/41, validation error 39.979167 %\n", + " epoch 12, minibatch 41/41, test error of best model 39.333333 %\n", + "epoch 13, minibatch 41/41, validation error 38.520833 %\n", + " epoch 13, minibatch 41/41, test error of best model 38.378788 %\n", + "epoch 14, minibatch 41/41, validation error 37.708333 %\n", + " epoch 14, minibatch 41/41, test error of best model 37.489899 %\n", + "epoch 15, minibatch 41/41, validation error 36.583333 %\n", + " epoch 15, minibatch 41/41, test error of best model 36.686869 %\n", + "epoch 16, minibatch 41/41, validation error 35.562500 %\n", + " epoch 16, minibatch 41/41, test error of best model 36.025253 %\n", + "epoch 17, minibatch 41/41, validation error 34.937500 %\n", + " epoch 17, minibatch 41/41, test error of best model 35.272727 %\n", + "epoch 18, minibatch 41/41, validation error 34.250000 %\n", + " epoch 18, minibatch 41/41, test error of best model 34.631313 %\n", + "epoch 19, minibatch 41/41, validation error 33.604167 %\n", + " epoch 19, minibatch 41/41, test error of best model 33.989899 %\n", + "epoch 20, minibatch 41/41, validation error 33.166667 %\n", + " epoch 20, minibatch 41/41, test error of best model 33.479798 %\n", + "epoch 21, minibatch 41/41, validation error 32.562500 %\n", + " epoch 21, minibatch 41/41, test error of best model 33.010101 %\n", + "epoch 22, minibatch 41/41, validation error 32.208333 %\n", + " epoch 22, minibatch 41/41, test error of best model 32.555556 %\n", + "epoch 23, minibatch 41/41, validation error 31.770833 %\n", + " epoch 23, minibatch 41/41, test error of best model 32.171717 %\n", + "epoch 24, minibatch 41/41, validation error 31.375000 %\n", + " epoch 24, minibatch 41/41, test error of best model 31.772727 %\n", + "epoch 25, minibatch 41/41, validation error 31.083333 %\n", + " epoch 25, minibatch 41/41, test error of best model 31.500000 %\n", + "epoch 26, minibatch 41/41, validation error 30.770833 %\n", + " epoch 26, minibatch 41/41, test error of best model 31.267677 %\n", + "epoch 27, minibatch 41/41, validation error 30.437500 %\n", + " epoch 27, minibatch 41/41, test error of 
best model 31.065657 %\n", + "epoch 28, minibatch 41/41, validation error 30.062500 %\n", + " epoch 28, minibatch 41/41, test error of best model 30.792929 %\n", + "epoch 29, minibatch 41/41, validation error 29.875000 %\n", + " epoch 29, minibatch 41/41, test error of best model 30.570707 %\n", + "epoch 30, minibatch 41/41, validation error 29.479167 %\n", + " epoch 30, minibatch 41/41, test error of best model 30.328283 %\n", + "epoch 31, minibatch 41/41, validation error 29.291667 %\n", + " epoch 31, minibatch 41/41, test error of best model 30.050505 %\n", + "epoch 32, minibatch 41/41, validation error 29.083333 %\n", + " epoch 32, minibatch 41/41, test error of best model 29.858586 %\n", + "epoch 33, minibatch 41/41, validation error 29.000000 %\n", + " epoch 33, minibatch 41/41, test error of best model 29.691919 %\n", + "epoch 34, minibatch 41/41, validation error 28.770833 %\n", + " epoch 34, minibatch 41/41, test error of best model 29.474747 %\n", + "epoch 35, minibatch 41/41, validation error 28.625000 %\n", + " epoch 35, minibatch 41/41, test error of best model 29.222222 %\n", + "epoch 36, minibatch 41/41, validation error 28.437500 %\n", + " epoch 36, minibatch 41/41, test error of best model 28.989899 %\n", + "epoch 37, minibatch 41/41, validation error 28.333333 %\n", + " epoch 37, minibatch 41/41, test error of best model 28.813131 %\n", + "epoch 38, minibatch 41/41, validation error 28.104167 %\n", + " epoch 38, minibatch 41/41, test error of best model 28.656566 %\n", + "epoch 39, minibatch 41/41, validation error 27.770833 %\n", + " epoch 39, minibatch 41/41, test error of best model 28.484848 %\n", + "epoch 40, minibatch 41/41, validation error 27.645833 %\n", + " epoch 40, minibatch 41/41, test error of best model 28.343434 %\n", + "epoch 41, minibatch 41/41, validation error 27.458333 %\n", + " epoch 41, minibatch 41/41, test error of best model 28.252525 %\n", + "epoch 42, minibatch 41/41, validation error 27.312500 %\n", + " epoch 42, minibatch 41/41, test error of best model 28.166667 %\n", + "epoch 43, minibatch 41/41, validation error 27.312500 %\n", + "epoch 44, minibatch 41/41, validation error 27.125000 %\n", + " epoch 44, minibatch 41/41, test error of best model 27.964646 %\n", + "epoch 45, minibatch 41/41, validation error 26.979167 %\n", + " epoch 45, minibatch 41/41, test error of best model 27.803030 %\n", + "epoch 46, minibatch 41/41, validation error 26.791667 %\n", + " epoch 46, minibatch 41/41, test error of best model 27.717172 %\n", + "epoch 47, minibatch 41/41, validation error 26.729167 %\n", + " epoch 47, minibatch 41/41, test error of best model 27.606061 %\n", + "epoch 48, minibatch 41/41, validation error 26.541667 %\n", + " epoch 48, minibatch 41/41, test error of best model 27.555556 %\n", + "epoch 49, minibatch 41/41, validation error 26.395833 %\n", + " epoch 49, minibatch 41/41, test error of best model 27.500000 %\n", + "epoch 50, minibatch 41/41, validation error 26.354167 %\n", + " epoch 50, minibatch 41/41, test error of best model 27.378788 %\n", + "epoch 51, minibatch 41/41, validation error 26.291667 %\n", + " epoch 51, minibatch 41/41, test error of best model 27.287879 %\n", + "epoch 52, minibatch 41/41, validation error 26.187500 %\n", + " epoch 52, minibatch 41/41, test error of best model 27.267677 %\n", + "epoch 53, minibatch 41/41, validation error 26.041667 %\n", + " epoch 53, minibatch 41/41, test error of best model 27.196970 %\n", + "epoch 54, minibatch 41/41, validation error 25.937500 %\n", + " epoch 54, minibatch 
41/41, test error of best model 27.106061 %\n", + "epoch 55, minibatch 41/41, validation error 25.791667 %\n", + " epoch 55, minibatch 41/41, test error of best model 27.060606 %\n", + "epoch 56, minibatch 41/41, validation error 25.750000 %\n", + " epoch 56, minibatch 41/41, test error of best model 26.979798 %\n", + "epoch 57, minibatch 41/41, validation error 25.645833 %\n", + " epoch 57, minibatch 41/41, test error of best model 26.944444 %\n", + "epoch 58, minibatch 41/41, validation error 25.666667 %\n", + "epoch 59, minibatch 41/41, validation error 25.562500 %\n", + " epoch 59, minibatch 41/41, test error of best model 26.823232 %\n", + "epoch 60, minibatch 41/41, validation error 25.562500 %\n", + "epoch 61, minibatch 41/41, validation error 25.541667 %\n", + " epoch 61, minibatch 41/41, test error of best model 26.636364 %\n", + "epoch 62, minibatch 41/41, validation error 25.520833 %\n", + " epoch 62, minibatch 41/41, test error of best model 26.595960 %\n", + "epoch 63, minibatch 41/41, validation error 25.479167 %\n", + " epoch 63, minibatch 41/41, test error of best model 26.520202 %\n", + "epoch 64, minibatch 41/41, validation error 25.500000 %\n", + "epoch 65, minibatch 41/41, validation error 25.416667 %\n", + " epoch 65, minibatch 41/41, test error of best model 26.409091 %\n", + "epoch 66, minibatch 41/41, validation error 25.375000 %\n", + " epoch 66, minibatch 41/41, test error of best model 26.343434 %\n", + "epoch 67, minibatch 41/41, validation error 25.354167 %\n", + " epoch 67, minibatch 41/41, test error of best model 26.287879 %\n", + "epoch 68, minibatch 41/41, validation error 25.333333 %\n", + " epoch 68, minibatch 41/41, test error of best model 26.202020 %\n", + "epoch 69, minibatch 41/41, validation error 25.312500 %\n", + " epoch 69, minibatch 41/41, test error of best model 26.202020 %\n", + "epoch 70, minibatch 41/41, validation error 25.291667 %\n", + " epoch 70, minibatch 41/41, test error of best model 26.156566 %\n", + "epoch 71, minibatch 41/41, validation error 25.250000 %\n", + " epoch 71, minibatch 41/41, test error of best model 26.106061 %\n", + "epoch 72, minibatch 41/41, validation error 25.270833 %\n", + "epoch 73, minibatch 41/41, validation error 25.250000 %\n", + "epoch 74, minibatch 41/41, validation error 25.250000 %\n", + "epoch 75, minibatch 41/41, validation error 25.187500 %\n", + " epoch 75, minibatch 41/41, test error of best model 25.904040 %\n", + "epoch 76, minibatch 41/41, validation error 25.208333 %\n", + "epoch 77, minibatch 41/41, validation error 25.208333 %\n", + "epoch 78, minibatch 41/41, validation error 25.187500 %\n", + "epoch 79, minibatch 41/41, validation error 25.229167 %\n", + "epoch 80, minibatch 41/41, validation error 25.187500 %\n", + "epoch 81, minibatch 41/41, validation error 25.145833 %\n", + " epoch 81, minibatch 41/41, test error of best model 25.696970 %\n", + "epoch 82, minibatch 41/41, validation error 25.125000 %\n", + " epoch 82, minibatch 41/41, test error of best model 25.656566 %\n", + "epoch 83, minibatch 41/41, validation error 25.041667 %\n", + " epoch 83, minibatch 41/41, test error of best model 25.636364 %\n", + "epoch 84, minibatch 41/41, validation error 25.041667 %\n", + "epoch 85, minibatch 41/41, validation error 25.062500 %\n", + "epoch 86, minibatch 41/41, validation error 25.062500 %\n", + "epoch 87, minibatch 41/41, validation error 25.041667 %\n", + "epoch 88, minibatch 41/41, validation error 25.041667 %\n", + "epoch 89, minibatch 41/41, validation error 25.000000 %\n", + " 
epoch 89, minibatch 41/41, test error of best model 25.474747 %\n", + "epoch 90, minibatch 41/41, validation error 25.041667 %\n", + "epoch 91, minibatch 41/41, validation error 24.958333 %\n", + " epoch 91, minibatch 41/41, test error of best model 25.434343 %\n", + "epoch 92, minibatch 41/41, validation error 24.958333 %\n", + " epoch 92, minibatch 41/41, test error of best model 25.414141 %\n", + "epoch 93, minibatch 41/41, validation error 24.895833 %\n", + " epoch 93, minibatch 41/41, test error of best model 25.409091 %\n", + "epoch 94, minibatch 41/41, validation error 24.895833 %\n", + "epoch 95, minibatch 41/41, validation error 24.895833 %\n", + "epoch 96, minibatch 41/41, validation error 24.854167 %\n", + " epoch 96, minibatch 41/41, test error of best model 25.318182 %\n", + "epoch 97, minibatch 41/41, validation error 24.854167 %\n", + "epoch 98, minibatch 41/41, validation error 24.833333 %\n", + " epoch 98, minibatch 41/41, test error of best model 25.303030 %\n", + "epoch 99, minibatch 41/41, validation error 24.833333 %\n", + "epoch 100, minibatch 41/41, validation error 24.812500 %\n", + " epoch 100, minibatch 41/41, test error of best model 25.257576 %\n", + "epoch 101, minibatch 41/41, validation error 24.812500 %\n", + "epoch 102, minibatch 41/41, validation error 24.791667 %\n", + " epoch 102, minibatch 41/41, test error of best model 25.227273 %\n", + "epoch 103, minibatch 41/41, validation error 24.770833 %\n", + " epoch 103, minibatch 41/41, test error of best model 25.207071 %\n", + "epoch 104, minibatch 41/41, validation error 24.770833 %\n", + "epoch 105, minibatch 41/41, validation error 24.770833 %\n", + "epoch 106, minibatch 41/41, validation error 24.770833 %\n", + "epoch 107, minibatch 41/41, validation error 24.750000 %\n", + " epoch 107, minibatch 41/41, test error of best model 25.161616 %\n", + "epoch 108, minibatch 41/41, validation error 24.750000 %\n", + "epoch 109, minibatch 41/41, validation error 24.750000 %\n", + "epoch 110, minibatch 41/41, validation error 24.729167 %\n", + " epoch 110, minibatch 41/41, test error of best model 25.070707 %\n", + "epoch 111, minibatch 41/41, validation error 24.729167 %\n", + "epoch 112, minibatch 41/41, validation error 24.708333 %\n", + " epoch 112, minibatch 41/41, test error of best model 25.030303 %\n", + "epoch 113, minibatch 41/41, validation error 24.666667 %\n", + " epoch 113, minibatch 41/41, test error of best model 25.010101 %\n", + "epoch 114, minibatch 41/41, validation error 24.625000 %\n", + " epoch 114, minibatch 41/41, test error of best model 24.984848 %\n", + "epoch 115, minibatch 41/41, validation error 24.625000 %\n", + "epoch 116, minibatch 41/41, validation error 24.604167 %\n", + " epoch 116, minibatch 41/41, test error of best model 24.934343 %\n", + "epoch 117, minibatch 41/41, validation error 24.583333 %\n", + " epoch 117, minibatch 41/41, test error of best model 24.924242 %\n", + "epoch 118, minibatch 41/41, validation error 24.562500 %\n", + " epoch 118, minibatch 41/41, test error of best model 24.914141 %\n", + "epoch 119, minibatch 41/41, validation error 24.562500 %\n", + "epoch 120, minibatch 41/41, validation error 24.541667 %\n", + " epoch 120, minibatch 41/41, test error of best model 24.893939 %\n", + "epoch 121, minibatch 41/41, validation error 24.541667 %\n", + "Optimization complete with best validation score of 24.541667 %,with test performance 24.893939 %\n", + "The code run for 122 epochs, with 4.817208 epochs/sec\n", + "The code for file best_model.pkl ran for 
25.3s\n", + "Now predicting...\n", + "Predicted values for the first 10 examples in test set:\n", + "[0 0 0 1 1 1 1 0 0 0]\n" + ] + } + ], + "source": [ + "\"\"\"\n", + "This tutorial introduces logistic regression using Theano and stochastic\n", + "gradient descent.\n", + "\n", + "Logistic regression is a probabilistic, linear classifier. It is parametrized\n", + "by a weight matrix :math:`W` and a bias vector :math:`b`. Classification is\n", + "done by projecting data points onto a set of hyperplanes, the distance to\n", + "which is used to determine a class membership probability.\n", + "\n", + "Mathematically, this can be written as:\n", + "\n", + ".. math::\n", + " P(Y=i|x, W,b) &= softmax_i(W x + b) \\\\\n", + " &= \\frac {e^{W_i x + b_i}} {\\sum_j e^{W_j x + b_j}}\n", + "\n", + "\n", + "The output of the model or prediction is then done by taking the argmax of\n", + "the vector whose i'th element is P(Y=i|x).\n", + "\n", + ".. math::\n", + "\n", + " y_{pred} = argmax_i P(Y=i|x,W,b)\n", + "\n", + "\n", + "This tutorial presents a stochastic gradient descent optimization method\n", + "suitable for large datasets.\n", + "\n", + "\n", + "References:\n", + "\n", + " - textbooks: \"Pattern Recognition and Machine Learning\" -\n", + " Christopher M. Bishop, section 4.3.2\n", + "\n", + "\"\"\"\n", + "from collections import namedtuple, defaultdict\n", + "from random import shuffle, randint\n", + "#----------------------------------------------------\n", + "__docformat__ = 'restructedtext en'\n", + "\n", + "import cPickle\n", + "import gzip\n", + "import os\n", + "import sys\n", + "import timeit\n", + "\n", + "import numpy\n", + "import numpy as np\n", + "import theano\n", + "import theano.tensor as T\n", + "#----------------------------------------------------\n", + "\n", + "def get_data():\n", + " '''\n", + " \n", + " '''\n", + " all_data = [] \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " train_data = all_data[:25000]\n", + " dev_data = all_data[25000:27500]+all_data[47500:50000]\n", + " test_data=all_data[27500:47500]\n", + " # labels\n", + " train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + " dev_tags= [ 1.0 for i in range(2500)] + [ 0.0 for i in range(2500)]\n", + " test_tags= [ 1.0 for i in range(10000)] + [ 0.0 for i in range(10000)]\n", + " return train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " #--------------------------------------------------\n", + "#train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + "########################\n", + "\n", + "\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for 
doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]+=1\n", + " return word_space\n", + "\n", + "# train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + "# word_space=get_space(train_data)\n", + "# word_space={w: word_space[w] for w in word_space if word_space[w] > 500}\n", + "# space_len=len(word_space)\n", + "# print \"space_len: \", space_len\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "# train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "# test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "# dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", + "# #---------------------------\n", + "# train_vecs=np.array(train_vecs)\n", + "# train_tags=np.array(train_tags)\n", + "# dev_vecs=np.array(dev_vecs)\n", + "# dev_tags=np.array(dev_tags)\n", + "# test_vecs=np.array(test_vecs)\n", + "# test_tags=np.array(test_tags)\n", + "# print train_vecs.shape\n", + "# print dev_vecs.shape\n", + "# print test_vecs.shape\n", + "\n", + "\n", + "def load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags):\n", + " #------------------------------\n", + " # Modified from Theano tutorial.\n", + " # I basically pass data_x, data_y instead of data_xy\n", + " def shared_dataset(data_x, data_y, borrow=True):\n", + " \"\"\" Function that loads the dataset into shared variables\n", + "\n", + " The reason we store our dataset in shared variables is to allow\n", + " Theano to copy it into the GPU memory (when code is run on GPU).\n", + " Since copying data into the GPU is slow, copying a minibatch everytime\n", + " is needed (the default behaviour if the data is not in a shared\n", + " variable) would lead to a large decrease in performance.\n", + " \"\"\"\n", + " shared_x = theano.shared(numpy.asarray(data_x,\n", + " dtype=theano.config.floatX), borrow=borrow)\n", + " shared_y = theano.shared(numpy.asarray(data_y,\n", + " dtype=theano.config.floatX),\n", + " borrow=borrow)\n", + " # When storing data on the GPU it has to be stored as floats\n", + " # therefore we will store the labels as ``floatX`` as well\n", + " # (``shared_y`` does exactly that). But during our computations\n", + " # we need them as ints (we use labels as index, and if they are\n", + " # floats it doesn't make sense) therefore instead of returning\n", + " # ``shared_y`` we will have to cast it to int. 
This little hack\n", + " # lets ous get around this issue\n", + " return shared_x, T.cast(shared_y, 'int32')\n", + " #-----------------------------------------------------------------\n", + " train_set_x, train_set_y = shared_dataset(train_vecs, train_tags)\n", + " valid_set_x, valid_set_y = shared_dataset(dev_vecs, dev_tags)\n", + " test_set_x, test_set_y = shared_dataset(test_vecs, test_tags)\n", + "\n", + " rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),\n", + " (test_set_x, test_set_y)]\n", + " return rval\n", + "\n", + "#rval=load_data(train_vecs, train_tags)\n", + "#print rval\n", + "\n", + "\n", + "class LogisticRegression(object):\n", + " \"\"\"Multi-class Logistic Regression Class\n", + "\n", + " The logistic regression is fully described by a weight matrix :math:`W`\n", + " and bias vector :math:`b`. Classification is done by projecting data\n", + " points onto a set of hyperplanes, the distance to which is used to\n", + " determine a class membership probability.\n", + " \"\"\"\n", + "\n", + " def __init__(self, input, n_in, n_out):\n", + " \"\"\" Initialize the parameters of the logistic regression\n", + "\n", + " :type input: theano.tensor.TensorType\n", + " :param input: symbolic variable that describes the input of the\n", + " architecture (one minibatch)\n", + "\n", + " :type n_in: int\n", + " :param n_in: number of input units, the dimension of the space in\n", + " which the datapoints lie\n", + "\n", + " :type n_out: int\n", + " :param n_out: number of output units, the dimension of the space in\n", + " which the labels lie\n", + "\n", + " \"\"\"\n", + " # start-snippet-1\n", + " # initialize with 0 the weights W as a matrix of shape (n_in, n_out)\n", + " self.W = theano.shared(\n", + " value=numpy.zeros(\n", + " (n_in, n_out),\n", + " dtype=theano.config.floatX\n", + " ),\n", + " name='W',\n", + " borrow=True\n", + " )\n", + " # initialize the biases b as a vector of n_out 0s\n", + " self.b = theano.shared(\n", + " value=numpy.zeros(\n", + " (n_out,),\n", + " dtype=theano.config.floatX\n", + " ),\n", + " name='b',\n", + " borrow=True\n", + " )\n", + "\n", + " # symbolic expression for computing the matrix of class-membership\n", + " # probabilities\n", + " # Where:\n", + " # W is a matrix where column-k represent the separation hyperplane for\n", + " # class-k\n", + " # x is a matrix where row-j represents input training sample-j\n", + " # b is a vector where element-k represent the free parameter of\n", + " # hyperplane-k\n", + " self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)\n", + "\n", + " # symbolic description of how to compute prediction as class whose\n", + " # probability is maximal\n", + " self.y_pred = T.argmax(self.p_y_given_x, axis=1)\n", + " # end-snippet-1\n", + "\n", + " # parameters of the model\n", + " self.params = [self.W, self.b]\n", + "\n", + " # keep track of model input\n", + " self.input = input\n", + "\n", + " def negative_log_likelihood(self, y):\n", + " \"\"\"Return the mean of the negative log-likelihood of the prediction\n", + " of this model under a given target distribution.\n", + "\n", + " .. 
math::\n", + "\n", + " \\frac{1}{|\\mathcal{D}|} \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D}) =\n", + " \\frac{1}{|\\mathcal{D}|} \\sum_{i=0}^{|\\mathcal{D}|}\n", + " \\log(P(Y=y^{(i)}|x^{(i)}, W,b)) \\\\\n", + " \\ell (\\theta=\\{W,b\\}, \\mathcal{D})\n", + "\n", + " :type y: theano.tensor.TensorType\n", + " :param y: corresponds to a vector that gives for each example the\n", + " correct label\n", + "\n", + " Note: we use the mean instead of the sum so that\n", + " the learning rate is less dependent on the batch size\n", + " \"\"\"\n", + " # start-snippet-2\n", + " # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n", + " # number of examples (call it n) in the minibatch\n", + " # T.arange(y.shape[0]) is a symbolic vector which will contain\n", + " # [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of\n", + " # Log-Probabilities (call it LP) with one row per example and\n", + " # one column per class LP[T.arange(y.shape[0]),y] is a vector\n", + " # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n", + " # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n", + " # the mean (across minibatch examples) of the elements in v,\n", + " # i.e., the mean log-likelihood across the minibatch.\n", + " return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])\n", + " # end-snippet-2\n", + "\n", + " def errors(self, y):\n", + " \"\"\"Return a float representing the number of errors in the minibatch\n", + " over the total number of examples of the minibatch ; zero one\n", + " loss over the size of the minibatch\n", + "\n", + " :type y: theano.tensor.TensorType\n", + " :param y: corresponds to a vector that gives for each example the\n", + " correct label\n", + " \"\"\"\n", + "\n", + " # check if y has same dimension of y_pred\n", + " if y.ndim != self.y_pred.ndim:\n", + " raise TypeError(\n", + " 'y should have the same shape as self.y_pred',\n", + " ('y', y.type, 'y_pred', self.y_pred.type)\n", + " )\n", + " # check if y is of the correct datatype\n", + " if y.dtype.startswith('int'):\n", + " # the T.neq operator returns a vector of 0s and 1s, where 1\n", + " # represents a mistake in prediction\n", + " return T.mean(T.neq(self.y_pred, y))\n", + " else:\n", + " raise NotImplementedError()\n", + "\n", + "\n", + "def sgd_optimization(learning_rate=0.13, n_epochs=1000,\n", + " batch_size=600):\n", + " \"\"\"\n", + " Demonstrate stochastic gradient descent optimization of a log-linear\n", + " model\n", + "\n", + " This is demonstrated on MNIST.\n", + "\n", + " :type learning_rate: float\n", + " :param learning_rate: learning rate used (factor for the stochastic\n", + " gradient)\n", + "\n", + " :type n_epochs: int\n", + " :param n_epochs: maximal number of epochs to run the optimizer\n", + "\n", + " :type dataset: string\n", + " :param dataset: the path of the MNIST dataset file from\n", + " http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz\n", + "\n", + " \"\"\"\n", + " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", + " train_set_x, train_set_y = datasets[0]\n", + " valid_set_x, valid_set_y = datasets[1]\n", + " test_set_x, test_set_y = datasets[2]\n", + " print train_set_x.shape[0]\n", + " # compute number of minibatches for training, validation and testing\n", + " n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size\n", + " n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size\n", + " n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size\n", + 
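"\n",
+    "    # --- Aside (added; not in the original tutorial): the `train_model(index)`\n",
+    "    # function compiled below uses `givens` to pull out one minibatch; in\n",
+    "    # plain numpy terms it is roughly equivalent to\n",
+    "    #     batch_x = train_vecs[index * batch_size : (index + 1) * batch_size]\n",
+    "    #     batch_y = train_tags[index * batch_size : (index + 1) * batch_size]\n",
+    "    # except the slice is taken inside the graph, so minibatches need not be\n",
+    "    # copied into GPU memory one call at a time. Note that the integer\n",
+    "    # division above floors, so a final partial minibatch is silently dropped:\n",
+    "    assert n_train_batches * batch_size <= train_set_x.get_value(borrow=True).shape[0]\n",
+    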
"\n", + " ######################\n", + " # BUILD ACTUAL MODEL #\n", + " ######################\n", + " print '... building the model'\n", + "\n", + " # allocate symbolic variables for the data\n", + " index = T.lscalar() # index to a [mini]batch\n", + "\n", + " # generate symbolic variables for input (x and y represent a\n", + " # minibatch)\n", + " x = T.matrix('x') # data, presented as rasterized images\n", + " y = T.ivector('y') # labels, presented as 1D vector of [int] labels\n", + "\n", + " # construct the logistic regression class\n", + " # Each MNIST image has size 28*28\n", + " # MAM: We change size: n_in=space_len\n", + " classifier = LogisticRegression(input=x, n_in=space_len, n_out=2)\n", + "\n", + " # the cost we minimize during training is the negative log likelihood of\n", + " # the model in symbolic format\n", + " cost = classifier.negative_log_likelihood(y)\n", + "\n", + " # compiling a Theano function that computes the mistakes that are made by\n", + " # the model on a minibatch\n", + " test_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: test_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: test_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " validate_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: valid_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: valid_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " # compute the gradient of cost with respect to theta = (W,b)\n", + " g_W = T.grad(cost=cost, wrt=classifier.W)\n", + " g_b = T.grad(cost=cost, wrt=classifier.b)\n", + "\n", + " # start-snippet-3\n", + " # specify how to update the parameters of the model as a list of\n", + " # (variable, update expression) pairs.\n", + " updates = [(classifier.W, classifier.W - learning_rate * g_W),\n", + " (classifier.b, classifier.b - learning_rate * g_b)]\n", + "\n", + " # compiling a Theano function `train_model` that returns the cost, but in\n", + " # the same time updates the parameter of the model based on the rules\n", + " # defined in `updates`\n", + " train_model = theano.function(\n", + " inputs=[index],\n", + " outputs=cost,\n", + " updates=updates,\n", + " givens={\n", + " x: train_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: train_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + " # end-snippet-3\n", + "\n", + " ###############\n", + " # TRAIN MODEL #\n", + " ###############\n", + " print '... 
training the model'\n", + " # early-stopping parameters\n", + " patience = 5000 # look as this many examples regardless\n", + " patience_increase = 2 # wait this much longer when a new best is\n", + " # found\n", + " improvement_threshold = 0.995 # a relative improvement of this much is\n", + " # considered significant\n", + " validation_frequency = min(n_train_batches, patience / 2)\n", + " # go through this many\n", + " # minibatche before checking the network\n", + " # on the validation set; in this case we\n", + " # check every epoch\n", + "\n", + " best_validation_loss = numpy.inf\n", + " test_score = 0.\n", + " start_time = timeit.default_timer()\n", + "\n", + " done_looping = False\n", + " epoch = 0\n", + " while (epoch < n_epochs) and (not done_looping):\n", + " epoch = epoch + 1\n", + " for minibatch_index in xrange(n_train_batches):\n", + "\n", + " minibatch_avg_cost = train_model(minibatch_index)\n", + " # iteration number\n", + " iter = (epoch - 1) * n_train_batches + minibatch_index\n", + "\n", + " if (iter + 1) % validation_frequency == 0:\n", + " # compute zero-one loss on validation set\n", + " validation_losses = [validate_model(i)\n", + " for i in xrange(n_valid_batches)]\n", + " this_validation_loss = numpy.mean(validation_losses)\n", + "\n", + " print(\n", + " 'epoch %i, minibatch %i/%i, validation error %f %%' %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " this_validation_loss * 100.\n", + " )\n", + " )\n", + "\n", + " # if we got the best validation score until now\n", + " if this_validation_loss < best_validation_loss:\n", + " #improve patience if loss improvement is good enough\n", + " if this_validation_loss < best_validation_loss * \\\n", + " improvement_threshold:\n", + " patience = max(patience, iter * patience_increase)\n", + "\n", + " best_validation_loss = this_validation_loss\n", + " # test it on the test set\n", + "\n", + " test_losses = [test_model(i)\n", + " for i in xrange(n_test_batches)]\n", + " test_score = numpy.mean(test_losses)\n", + "\n", + " print(\n", + " (\n", + " ' epoch %i, minibatch %i/%i, test error of'\n", + " ' best model %f %%'\n", + " ) %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " test_score * 100.\n", + " )\n", + " )\n", + "\n", + " # save the best model\n", + " with open('best_model.pkl', 'w') as f:\n", + " cPickle.dump(classifier, f)\n", + "\n", + " if patience <= iter:\n", + " done_looping = True\n", + " break\n", + "\n", + " end_time = timeit.default_timer()\n", + " print(\n", + " (\n", + " 'Optimization complete with best validation score of %f %%,'\n", + " 'with test performance %f %%'\n", + " )\n", + " % (best_validation_loss * 100., test_score * 100.)\n", + " )\n", + " print 'The code run for %d epochs, with %f epochs/sec' % (\n", + " epoch, 1. 
* epoch / (end_time - start_time))\n", + " print ('The code for file ' +\n", + " 'best_model.pkl' +\n", + " ' ran for %.1fs' % ((end_time - start_time)))\n", + "\n", + "\n", + "def predict():\n", + " \"\"\"\n", + " An example of how to load a trained model and use it\n", + " to predict labels.\n", + " \"\"\"\n", + "\n", + " # load the saved model\n", + " classifier = cPickle.load(open('best_model.pkl'))\n", + "\n", + " # compile a predictor function\n", + " predict_model = theano.function(\n", + " inputs=[classifier.input],\n", + " outputs=classifier.y_pred)\n", + "\n", + " # We can test it on some examples from test test\n", + " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", + " #train_set_x, train_set_y = datasets[0]\n", + " #valid_set_x, valid_set_y = datasets[1]\n", + " test_set_x, test_set_y = datasets[2]\n", + " test_set_x = test_set_x.get_value()\n", + " predicted_values = predict_model(test_set_x[:10])\n", + " print (\"Predicted values for the first 10 examples in test set:\")\n", + " print predicted_values\n", + "\n", + "\n", + "if __name__ == '__main__':\n", + " train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + " word_space=get_space(train_data)\n", + " # We only use words with a give frequency threshold. This also helps much with memory.\n", + " # Note that I am using a *very* high threshold here, since the goal is to demonstrate \n", + " # the technique and its utility, rather than higher accuracy. I also had some memory issues.\n", + " word_space={w: word_space[w] for w in word_space if word_space[w] > 150} \n", + " space_len=len(word_space)\n", + " print(\"space_len: %d\" % space_len)\n", + " train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + " test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + " dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", + " #del word_space\n", + " #---------------------------\n", + " train_vecs=np.array(train_vecs)\n", + " train_tags=np.array(train_tags)\n", + " dev_vecs=np.array(dev_vecs)\n", + " dev_tags=np.array(dev_tags)\n", + " test_vecs=np.array(test_vecs)\n", + " test_tags=np.array(test_tags)\n", + " #del train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " print('train_vecs.shape: %d, %d' % train_vecs.shape)\n", + " print('dev_vecs.shape: %d, %d' % dev_vecs.shape)\n", + " print('test_vecs.shape: %d, %d' % test_vecs.shape)\n", + " sgd_optimization()\n", + " #------------------------------------------------------\n", + " print('Now predicting...')\n", + " predict()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/neural_net_demystified_part_2_forward_propagation-checkpoint.ipynb b/.ipynb_checkpoints/neural_net_demystified_part_2_forward_propagation-checkpoint.ipynb new file mode 100644 index 0000000..4f816c0 --- /dev/null +++ b/.ipynb_checkpoints/neural_net_demystified_part_2_forward_propagation-checkpoint.ipynb @@ -0,0 +1,945 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Populating the interactive namespace from numpy and matplotlib\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%pylab inline\n", + "# Neural Networks Demystified\n", + "# Part 1: Data + Architecture\n", + "\n", + "from IPython.display import YouTubeVideo\n", + "YouTubeVideo('bxe2T-V8XRs')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#Import code from last time\n", + "\n", + "#\n", + "from IPython.display import YouTubeVideo\n", + "YouTubeVideo('bxe2T-V8XRs')\n", + "# Supporting code for short YouTube series on artificial neural networks.\n", + "#\n", + "# Stephen Welch\n", + "# @stephencwelch\n", + "\n", + "import numpy as np\n", + "\n", + "# X = (hours sleeping, hours studying), y = Score on test\n", + "X = np.array(([3,5], [5,1], [10,2]), dtype=float)\n", + "y = np.array(([75], [82], [93]), dtype=float)\n", + "\n", + "# Normalize (by dividing by the maximum value in each array)\n", + "X = X/np.amax(X, axis=0)\n", + "y = y/100 #Max test score is 100" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(3, 2) (3, 1)\n" + ] + } + ], + "source": [ + "print X.shape, y.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.3 1. ]\n", + " [ 0.5 0.2]\n", + " [ 1. 
0.4]]\n", + "[[ 0.75]\n", + " [ 0.82]\n", + " [ 0.93]]\n" + ] + } + ], + "source": [ + "print X\n", + "print y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "class Neural_Network(object):\n", + " def __init__(self): \n", + " #Define Hyperparameters\n", + " self.inputLayerSize = 2\n", + " self.outputLayerSize = 1\n", + " self.hiddenLayerSize = 3\n", + " \n", + " def forward(self, X):\n", + " #Propagate inputs though network" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def sigmoid(z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAEACAYAAACuzv3DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHwZJREFUeJzt3XucHHWZ7/HPlwkkCnKTFRWiCXIXUFACBxQjgkYW5Iju\nsllxBd0jqwZvKxoQIYAKiBdwUQEJRMUFXS8ILoq4muMFhEQJFwmQoFESDiAqilwT8pw/fhXSGWam\na2aqu7p+/X2/Xv2a/k3X9DxPKnlS/VT9fqWIwMzMmmu9ugMwM7PxcSE3M2s4F3Izs4ZzITczazgX\ncjOzhnMhNzNruLaFXNKFku6VdPMI23xW0hJJN0ravdoQzcxsJGWOyC8CZgz3oqSDgG0jYjvg7cAX\nKorNzMxKaFvII+KnwJ9H2OR1wJeKba8DNpW0ZTXhmZlZO1X0yLcC7moZLwe2ruB9zcyshKpOdmrQ\n2PP+zcy6ZEIF77ECmNwy3rr43jokubibmY1BRAw+WF5HFYX8cmAWcKmkvYEHIuLesQTTZJLmRMSc\nuuPolJzzyzk36L/8JNYDnkVq+2496OtWxWt/B2zB+Grgo8BDxePhER5rXn+k+JnHgceKr63Ph/mq\nxe0CaZuEpEuAVwBbSLoLOAlYHyAizouIKyUdJGlpEfBRpf4I8jOl7gA6bErdAXTQlLoD6LApdQdQ\nNYkNgW2AF8Cefy/xLGDbNGYyRY0q4UHgfuAPLY8/Ag8Afxni8dc1zyNYWVlCI1CJw9+2hTwiZpbY\nZla5kMzMypOYBOwIvBDYpeUxZe1WOwO8dNCP/pHU4l0+6OsK4F5Swb4/gkc7GH7XVNFasWRe3QF0\n2Ly6A+igeXUH0GHz6g6gDIn1SUV6GrBn8XVnYGCIzVcCvwXuhP0fBX6SnnMnsCyCh7sSdI9Qt24s\nISly7pGb2egU7ZF9gFeS2rd7AJMGbbYaWALcMuixNIJV3Yu2PmVqp4/IKyJpekTMrzuOTsk5v5xz\ng97JT2ICsDdppvh00hH34F72EmABcH3xdVG7o+teya9OLuRm1jESmwKvAQ4BXgts3vLyamAhML94\nXBvBn7ocYhbcWjGzSkk8AzgUmAm8mnUPGO8Evgv8EPhZBA90P8JmcWvFzLqiaJscBLwZOJi1ve7V\npKPt7xaPOyI887tqXo+8IpKm1x1DJ+WcX865QWfzk3i+xCnA74DvAG8kFfGfAu8CnhPBKyP4VAS3\nd6KI577/yvARuZmNioRIJyv/nXQUvuZj/xLgAuCSiHUW0rMOc4/czEop2idvBD4AvKT49mPAN4Hz\ngZ+4bVI998jNbNyKAv5m4ETWzqj8A/AfwBciuL+m0KzgHnlFcu/T5ZxfzrnB2POTWE9iJnArcCGp\niN8BHA08P4JTe6GI577/yvARuZk9hcT+wFnArsW3lgJzgEsjeKKuuGxo7pGb2ZMkpgKfBA4rvvV7\n4BTgy91a7c/W5R65mZUiMRE4HvgQMJG0JPXHgU/nskJgztwjr0jufbqc88s5N2ifn8Q+wCLSycyJ\nwMXADhF8vAlFPPf9V4aPyM36lMRGpKPuWaRrwW8H/jWCn9UamI2ae+RmfUjiJcAlwHbAE8AZwKlN\nOALvN+6Rm9k6ivtZvp90JL4+aW3vN0ewqNbAbFzcI69I7n26nPPLOTdYm5/EFsD3gDNJRfwcYFrT\ni3ju+68MH5Gb9QGJPYBvA88j3c/yqAiuqDcqq4p75GaZk3gTaTGrSaQ777whguX1RmVllamdbq2Y\nZaqYYn8m6XLCScBcYD8X8fy4kFck9z5dzvnlmJvEJOBrwAfgx6uAdwD/J4LH6o2sejnuv9FyITfL\njMTmwNWkJWf/Ct/5YATneonZfLlHbpYRiecD3wd2BJYDB0Vwc71R2Xj4OnKzPiLxAuBHpCtTbiYV\ncffD+4BbKxXJvU+Xc3455CaxA/ATUhG/lpaTmjnkN5Lc8yvDhdys4SReCPxf4LmkYv6aCB6oNyrr\nJvfIzRpMYidS8d4C+CFwaAQP1xuVVcnXkZtlrLgJxA9JRfwq4BAX8f7kQl6R3Pt0OefXxNwknkMq\n4mvaKYcNt3JhE/MbjdzzK8OF3KxhJJ5Juk58G2AhPhLve+6RmzWIxNNIlxjuTbq7/St64U721jnu\nkZtlpFhL/CukIv574NUu4gYu5JXJvU+Xc34Nyu104A3AX0mTfVaU+aEG5TcmuedXhgu5WQNIHA0c\nC6wiLUP765pDsh7StkcuaQZwFjAAXBARZwx6fRPSMpmTSVP+PxkR84Z4H/fIzcZA4gDS+ikDwNsi\nuLDmkKyLytTOEQu5pAHSnbUPAFYAC4CZEbG4ZZvjgWdExHGStii23zIiVo02GDNbl8QU4JfA5sDp\nERxXb0TWbVWc7JwGLI2IZRGxErgUOHTQNquBjYvnGwN/HFzE+0Hufbqc8+vV3CSeTro92+bAlcAJ\nY3uf3syvKrnnV0a7Qr4VcFfLeHnxvVbnADtLuhu4EXhPdeGZ9ScJAecBLwbuBI6I4Il6o7Je1W4Z\n2zIXmc8AfhURr5T0AuBqSS+KiAcHbyhpHrCsGD4ALIqI+cVr0wGaOl
7zvV6Jx/mVH0fE/F6KBwDO\nOhtefARMfwj436AXSfnkl/v+G8+4eH4kyTJKaNcj3xuYExEzivFxwOrWE56SvgucFhE/L8b/A3wo\nIhYOei/3yM1KkNgL+BnpQOsfI/ivmkOyGlXRI18IbCdpiqQNgMOBywdt83vSyVAkbQnsAPxmbCE3\nV+59upzz66XcJDYlnYuaAJxVRRHvpfw6Iff8yhixtRIRqyTNIq2sNgDMjYjFko4uXj8POBWYJ+km\nQMAHI+JPHY7bLDtFX/x8ePJKldm1BmSN4bVWzHpEMennXOBBYI8IltYckvUAr7Vi1hASu5Im3gEc\n7SJuo+FCXpHc+3Q551d3bhITga8Ck4C5EVxS7fvnu+8g//zKcCE3q98cYFdgKZ6HYWPgHrlZjST2\nAX5aDF8ewTV1xmO9xz1ysx4msSHwJdK/wzNdxG2sXMgrknufLuf8asztE8C2wM3ASZ36JTnvO8g/\nvzJcyM1qUCxN+05gJfDmCB6rOSRrMPfIzbqsaKncDEwFTojgYzWHZD3MPXKz3nQyqYjfSGqvmI2L\nC3lFcu/T5ZxfN3OTeAnwPtI6/v8awcrO/8589x3kn18ZLuRmXSKxPnAB6d/d2REsbPMjZqW4R27W\nJRIfBM4grTG9SwQP1RuRNUGZ2ulCbtYFEi8AbiFNw58RwVU1h2QN4ZOdXZR7ny7n/DqdW7E87edJ\nRfyr3S7iOe87yD+/MlzIzTrv9cCrSbc3fH/NsViG3Fox6yCJpwOLgecBsyL4XM0hWcO4tWJWv9mk\nIr6IdNMIs8q5kFck9z5dzvl1KrfiBOcHi+GsCJ7oxO9pH0e++w7yz68MF3KzzjkLmAh8OYKf1x2M\n5cs9crMOkDgYuAL4K7BDBPfUHJI1lHvkZjWQmAScXQxPchG3TnMhr0jufbqc8+tAbu8GtgF+DfVf\npZLzvoP88yvDhdysQhLPAj5cDN/XjUWxzNwjN6uQxOeBdwDfi+CguuOx5vNaK2ZdJLEzcFMx3C2C\nW+uMx/Lgk51dlHufLuf8KsztTGAAOL+XinjO+w7yz68MF3KzCki8GjgIeBCYU2801m/cWjEbJ4kB\n4AZgV2B2BGfUHJJlxK0Vs+44ilTEf8fa68fNusaFvCK59+lyzm88uUlsCJxaDGdH8GglQVUo530H\n+edXhgu52fi8B3g2sAD4Ws2xWJ9yj9xsjCSeCfwG2Bh4VQQ/qjkky5B75GadNZtUxK92Ebc6uZBX\nJPc+Xc75jSU3ia2BY4rhcZUGVLGc9x3kn18ZLuRmY3MSaa3xr0fwy7qDsf7WtkcuaQZpgfwB4IKI\neMo1ssX/iJ8B1gfuj4jpQ2zjHrllQWJH0sqGAewcwR01h2QZK1M7J7R5gwHgHOAAYAWwQNLlEbG4\nZZtNSUt1viYilkvaYvyhm/W0j5I+zZ7vIm69oF1rZRqwNCKWRcRK4FLg0EHb/DPwzYhYDhAR91cf\nZu/LvU+Xc36jyU1iT+ANwKPAKZ2KqUo57zvIP78y2hXyrYC7WsbLi++12g7YXNKPJS2U9OYqAzTr\nFRICTi+GZ0ewos54zNYYsbVC6gG2sz6wB/Aq4OnAtZJ+ERFLxhtck0TE/Lpj6KSc8xtFbgcA+wMP\nQHPWU8l530H++ZXRrpCvACa3jCeTjspb3UU6wfkI8IiknwAvAp5SyCXNA5YVwweARWt2wpqPRx57\n3Jvjgelw2dlwCMAnQC+Seik+j3MZF8+PJFlGCSNetSJpAnA76Wj7buB6YOagk507kk6IvoZ0OdZ1\nwOERceug98r6qhVJ03M+Msg5vzK5SbwWuBK4H5gawd+6EVsVct530Bf5je+qlYhYJWkWcBXp8sO5\nEbFY0tHF6+dFxG2Svk+6M8pq4IuDi7hZkxW98TUnNs9oUhG3/uC1VszakDgYuAK4D9gmgodqDsn6\niNdaMRun4mj85GJ4uou49SIX8orkfi1rzvm1ye11pKuy7gHO7UpAFct530H++ZXhQm42jOJofE4x\nPD2CR2oMx2xY7pGbDUPi9cC3SFdsbetCbnVwj9xsjCTWY21v/DQXcetlLuQVyb1Pl3N+w+R2GOmG\nyiuAC7oaUMVy3neQf35luJCbDVIcjc8phh/rxRsqm7Vyj9xsEInDSSt93gVsF8FjNYdkfcw9crNR\nkhgg3f0H4KMu4tYELuQVyb1Pl3N+g3I7HNgJ+B0wr454qpbzvoP88yvDhdysUByNn1gMPxrB43XG\nY1aWe+RmBYkjgK8AvwV2iGBlzSGZuUduVpbEBNb2xk91EbcmcSGvSO59upzzK3J7E7AtcCfpqDwb\nOe87yD+/MlzIzdh4APhIMTglglV1RmM2Wu6RW9+TeCswl3R7wp1dyK2XuEdu1obE+sAJxfBkF3Fr\nIhfyiuTep8s4v7fA/KnAbaTZnNnJeN8B+edXhgu59S2JDVi3N/5EnfGYjZV75Na3JI4m3fXnVmA3\nF3LrRe6Rmw1DYiLw4WJ4sou4NZkLeUVy79NlmN/bgMnALbDBH+oOppMy3HfryD2/MlzIre9ITAKO\nL4ZzYGV3+otmHeIeufUdiWOAzwI3AbtHsLrmkMyGVaZ2upBbX5F4Gmka/nOAwyL4ds0hmY3IJzu7\nKPc+XUb5HU0q4jcAl0FWuQ3J+eXPhdz6hsSGwHHF8MQI3Bu3LLi1Yn1D4gPAmcACYC8XcmsC98jN\nChIbkW4YsQXw2gi+X3NIZqW4R95FuffpMshvFqmI/wK4qvWFDHIbkfPLnwu5ZU9iY+DYYujeuGXH\nrRXLnsQJwKnAz4D9XMitSdwjt74nsQmwDNgU2D+CH9cbkdnouEfeRbn36Rqc33tJRXz+cEW8wbmV\n4vzy50Ju2ZLYDHh/MTypzljMOsmtFcuWxKmk27j9MIID647HbCwqaa1ImiHpNklLJH1ohO32lLRK\n0mFjCdasShLPJLVVwEfjlrkRC7mkAeAcYAawMzBT0k7DbHcG8H2gL4+6c+/TNTC/Y4GNgKsiuGak\nDRuY26g4v/y1OyKfBiyNiGURsZJ0c9pDh9juGOAbQNYL9FszSDwbeHcxPLHOWMy6oV0h3wq4q2W8\nvPjekyRtRSruXyi+1ZfX6EbE/Lpj6KSG5XcC8DTgsgiub7dxw3IbNeeXv3aFvExRPguYHemsqejT\n1or1BompwNtJf3c/UnM4Zl0xoc3rK0j3NVxjMumovNVLgEslQbEgkaSVEXH54DeTNI80OQPgAWDR\nmv9N1/S5Gjx+b2b5NDI/iLcC68OlP4CZW6w5Fhnp51t7rHXH34mx82vWuHh+ZJHSMkoY8fJDSROA\n24FXAXcD1wMzI2LxMNtfBFwREd8a4rWsLz+UND3nj3hNyE9iF9Lt21YBO0Tw23I/1/u5jYfza7Yy\ntXPEI/KIWCVpFmm1uAFgbkQslnR08fp5lUXbcDn/RYLG5PdRUmvv/LJFHBqT25g5v/x5QpBlQWIv\n0hK1jwDbRHBPzSGZVcJrr
XRR7teyNiC/jxdfzx5tEW9AbuPi/PLnQm6NJ3EAsD/pBPonag7HrOvc\nWrFGkxBwHbAncHwEp9UcklmlytROF3JrNInDSTOO7wG2jeChmkMyq5R75F2Ue5+uF/OTmAicXgxP\nHGsR78XcquT88udCbk32LmAKcCtwUb2hmNXHrRVrJInNgaXAZsDBEfx3zSGZdYRbK5azD5OK+I+A\nK2uOxaxWLuQVyb1P10v5SWwDzCqGx0aMb8XNXsqtE5xf/lzIrYk+DmwAfCWCX9UdjFnd3CO3RmmZ\niv8YsH0Ev685JLOOco/cslJM/vlUMfyMi7hZ4kJekdz7dD2S30xgX+A+1l4/Pm49klvHOL/8uZBb\nI0hsBJxZDI+L4C91xmPWS9wjt0aQ+BhwPLAQ2CuC1TWHZNYVXmvFsiDxAtLszQ2AfSK4tuaQzLrG\nJzu7KPc+Xc35fYq1lxtWXsS975ot9/zKcCG3niZxIHAo8BAwu+ZwzHqSWyvWsyQ2ABYBOwGzIzij\n5pDMus6tFWu6D5CK+BLgrJpjMetZLuQVyb1P1+38ivVUPlIM3xnBY537Xd53TZZ7fmW4kFvPKWZw\nfg6YBHw1gh/WHJJZT3OP3HqOxD8AXyfdTHnHCO6tOSSz2rhHbo0jsTFwdjGc7SJu1p4LeUVy79N1\nMb+PAs8hrXD4xW78Qu+7Zss9vzJcyK1nSOxLumHEE8C/eRq+WTnukVtPkHga6Zrx7YGPRXBCzSGZ\n9QT3yK1JTiEV8V8Dp9Yci1mjuJBXJPc+XSfzK+76835gNXBUJ68ZH/r3e981We75leFCbrWSmARc\nRPq7+MkIFtQcklnjuEdutZI4jbQY1u3AiyN4tOaQzHqK1yO3niaxHzAfCODlEVxTb0RmvccnO7so\n9z5d1flJbAp8BRBwWp1F3Puu2XLPrwwXcqvL54DnAQuAk2uOxazR3FqxrpN4E3Ax6WYRu0ewpOaQ\nzHqWWyvWcySmAJ8vhu91ETcbv1KFXNIMSbdJWiLpQ0O8/iZJN0q6SdLPJe1Wfai9Lfc+XRX5SUwE\n/gvYGPg2MHe871kF77tmyz2/MtoWckkDwDnADGBnYKaknQZt9htgv4jYjTQr7/yqA7UsfBp4KbAM\neFsE3enrmWWubY9c0v8CToqIGcV4NkBEnD7M9psBN0fE1oO+7x55H5P4J+AS4HFg3wgW1hySWSNU\n1SPfCrirZby8+N5w3gZcWeJ9rU9I7ARcUAzf6yJuVq0JJbYp/fFX0iuBtwL7DvP6PNLHakh3f1kU\nEfOL16YDNHj83szyqSQ/iF8B34D5G8K9/wOHn9sj+Tw5bu2x9kI8zq+/8yueH1mktIwSyrRW9gbm\ntLRWjgNWR8QZg7bbDfgWMCMilg7xPlm3ViRNX7NTcjSW/CQGSCc1DwEWA9Mi+FsHwhsX77tm64P8\nxj9FX9IE0joYrwLuBq4HZkbE4pZtngf8CDgiIn4x1mAsLy3rqPyZVMSf8h+8mY2sTO1s21qJiFWS\nZgFXAQPA3IhYLOno4vXzgBOBzYAvSAJYGRHTxpuANVcx6Wc26W4/b3QRN+scz+ysSB98vCudn8Q0\n4CfARGBWBJ/rZGzj5X3XbH2Qn2d2WndJbAd8l1TEz2PtLE4z6xAfkVtlJLYErgWmklpxh0Swst6o\nzJrNR+TWNRLPIM0fmAosJPXFXcTNusCFvCK5r/cwUn7FGirfBPYA7gT+vhcvMxxOP++7HOSeXxku\n5DYuEusDXwMOBO4DXhPBffVGZdZf3CO3MSuK+CXAG0gzdfeP4IZ6ozLLi3vk1jESE0i3ansD8Bfg\nQBdxs3q4kFck9z5da37FkfiXgMOBB0ntlMYuhNVP+y5HuedXRplFs8yeJPE04OvAwcDfgBkRXFdv\nVGb9zT1yK01iE+ByYD/gT8BrI7i+3qjM8lbJWitmABLPAr5HusTwblJP/NZ6ozIzcI+8Mjn36SR2\ngatuZO114vvmVMRz3nfg/PqBC7mNSGIGcA1MfDZpCeOXRZRb7N7MusM9chuShIBjgM+Q/sP/OnBk\nBI/UGphZn/F15DYmEhsBFwNnk/6OnALMdBE3600u5BXJpU8nsTOphfLPwEOkAn4SaL96I+ucXPbd\ncJxf/lzIDUitFIm3AAuAnYBbgT0juLTeyMysHffIDYm/I90E4vXFty4G/i2Ch+qLyszAPXIrQeJQ\n4BZSEX8QOAr4Fxdxs+ZwIa9I0/p0Es+V+BpwGfAs4MfArhHMi+ApH9Oalt9o5JwbOL9+4ELeZyQG\nJGYBi4F/BB4G3gccEMHvag3OzMbEPfI+IjEd+BRphibAFcAxLuBmvctrrRjw5CWFZ5BWLARYTprs\n852h2ihm1ixurVSkF/t0EttIXADczNplZz8C7BjBZaMp4r2YX1Vyzg2cXz/wEXmGJLYHjgeOAAaA\nJ4BzgTkR3FtnbGZWPffIM1GsjbIP8B7S7dfWIxXwi4GPR3BHjeGZ2Ri5R94HJCaRbrn2btaexFwJ\nzAVOj+A3dcVmZt3hHnlFutmnK6bT7yHxWdKJy3mkIn4/8DFgagRvr7KI59yHzDk3cH79wEfkDSIx\nmXTt91uAXVteugH4LHBpBI/WEZuZ1cc98h4nsS2p530YMK3lpfuBrwLzIlhUR2xm1nnukTdQcZf6\nlwGvBmYAu7S8/DDpvpkXA1dG8Hj3IzSzXuNCXhFJ0yNi/uh/jg1I/e2XAQcArwAmtWzyV9IMzG8C\nV0Xw8PijHb2x5tcEOecGzq8fuJB3WbFk7DRgX1Lx3pN1CzfAIuAHwFXAzyN4rKtBmlmjuEfeIRLr\nAVOB3YEXF4/dgecOsfltwM+B+cDVnrRjZmu4R94FEpsA2wM7FI8di6/b8dQjbUjT5G8AriEV72si\n+GN3ojWzHLUt5JJmAGeRpnpfEBFnDLHNZ4HXkk7GHRkRN1QdaB2K/vWWpKPo5xeP5w16vmnaej4w\nffBb/D9Sm2QRqXgvAu6MYHWHQ69czn3InHMD59cPRizkkgaAc0gn4VYACyRdHhGLW7Y5CNg2IraT\ntBfwBWDvDsY8JkWrY2NS4d2seGwKbE4q1kM9Nivx1o8AS+C/V8P07wK3F487IvhL1XnU6MWk/61y\nlHNu4Pyy1+6IfBqwNCKWAUi6FDiUdFOCNV4HfAkgIq6TtKmkLSNi1H1eiQFgYvHYoOX5RFKbYiNg\nw5bHSOONWLdgb8LoZ7I+AdxHOrL+XfH4/aDn90cQ0ifnRJw5Z7Q5N8imdQfQQTnnBs4ve+0K+VbA\nXS3j5cBeJbbZGp56wk5iIesW58EFe2AUsY/Fg8ADwJ+Lx5rn9w7z+GMT2yBm1l/aFfKyl7QMPqM6\n3M+9pMTvewx4vPja+ngUeIh0svChlsffRnjeWrT/EsGqkvmMxZQOvncvmFJ3AB00pe4AOmxK3QF0\n2JS6A6hbu0K+ApjcMp5MOuIeaZuti+8Noe3VhyK1UIa62mPc1OGLHyW9pbO/oV4555
dzbuD8cteu\nkC8EtpM0BbibtFzqzEHbXA7MAi6VtDfwwFD98X66htzMrJtGLOQRsUrSLNIMwwFgbkQslnR08fp5\nEXGlpIMkLSW1NI7qeNRmZvakrs3sNDOzzujqjSUkHSNpsaRbJD1lYlEOJP27pNWSNq87lipJOrPY\ndzdK+pakTeqOqQqSZki6TdISSR+qO54qSZos6ceSfl38m3t33TFVTdKApBskXVF3LFUrLuX+RvHv\n7taidT2krhVySa8kXXO+W0TsAnyyW7+7WyRNBg4kXWOemx8AL4yIFwF3AMfVHM+4tUx4mwHsDMyU\ntFO9UVVqJfC+iHghaZLeuzLLD9I9am+l/BV2TXI2cGVE7ATsxrrzd9bRzSPydwCnRcRKgIj4Qxd/\nd7d8Gvhg3UF0QkRcHRFrrqm/jnR1UtM9OeGt+Hu5ZsJbFiLinohYVDz/G6kQDLVoWyNJ2ho4CLiA\nEpfENUnxifflEXEhpPOVETHsTPFuFvLtgP0k/ULSfEkv7eLv7jhJhwLLI+KmumPpgrcCV9YdRAWG\nmsy2VU2xdFRx5dnupP+Ec/EZ4FjIctLeVOAPki6S9CtJX5T09OE2rnT1Q0lXA88e4qUPF79rs4jY\nW9KewNeBbar8/Z3WJr/jSHf1eXLzrgRVoRHyOz4irii2+TDweET8Z1eD64wcP44/haSNgG8A7ymO\nzBtP0sHAfRFxQ6Y3X55AuuHMrIhYIOksYDZw4nAbVyYiDhzuNUnvAL5VbLegOCH4zIhozBKuw+Un\naRfS/6A3Ks062hr4paRpEXFfF0Mcl5H2H4CkI0kfZV/VlYA6r8yEt0aTtD7p7lIXR8RldcdToX2A\n1xWL9k0CNpb05Yj4l5rjqspy0if8BcX4G6RCPqRutlYuA/YHkLQ9sEGTivhIIuKWiNgyIqZGxFTS\nTtijSUW8nWI542OBQyPi0brjqciTE94kbUCa8HZ5zTFVRumoYi5wa0ScVXc8VYqI4yNicvHv7Z+A\nH2VUxImIe4C7iloJaQXaXw+3fTdvLHEhcKGkm0lrqWTzhz6EHD+y/wdpgbOri08d10bEO+sNaXyG\nm/BWc1hV2hc4ArhJ0pp7BBwXEd+vMaZOyfHf3DHAV4uDjDsZYbKlJwSZmTVcVycEmZlZ9VzIzcwa\nzoXczKzhXMjNzBrOhdzMrOFcyM3MGs6F3Mys4VzIzcwa7v8DRBNbVaWemOYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "testInput = np.arange(-6,6,0.01)\n", + "plot(testInput, sigmoid(testInput), linewidth= 2)\n", + "grid(1)\n", + "legend(['sigmoid'])" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.7310585786300049" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(1)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.26894142, 0.5 , 0.73105858])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(np.array([-1,0,1]))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.46600501, 0.37731874, 0.5415919 ],\n", + " [ 0.23157348, 0.41235015, 0.55084673],\n", + " [ 0.37693986, 0.10342644, 0.711002 ]])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(np.random.randn(3,3))" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Whole Class with additions:\n", + "class Neural_Network(object):\n", + " def __init__(self): \n", + " #Define Hyperparameters\n", + " self.inputLayerSize = 2\n", + " self.outputLayerSize = 1\n", + " self.hiddenLayerSize = 3\n", + " \n", + " #Weights (parameters)\n", + " self.W1 = np.random.randn(self.inputLayerSize,self.hiddenLayerSize)\n", + " self.W2 = np.random.randn(self.hiddenLayerSize,self.outputLayerSize)\n", + " \n", + " def forward(self, X):\n", + " #Propogate inputs though network\n", + " self.z2 = np.dot(X, self.W1)\n", + " self.a2 = self.sigmoid(self.z2)\n", + " self.z3 = np.dot(self.a2, self.W2)\n", + " yHat = self.sigmoid(self.z3) \n", + " return yHat\n", + " \n", + " def sigmoid(self, z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))\n", + " \n", + " def 
sigmoidPrime(self,z):\n", + " #Gradient of sigmoid\n", + " return np.exp(-z)/((1+np.exp(-z))**2)\n", + " \n", + " def costFunction(self, X, y):\n", + " #Compute cost for given X,y, use weights already stored in class.\n", + " self.yHat = self.forward(X)\n", + " J = 0.5*sum((y-self.yHat)**2)\n", + " return J\n", + " \n", + " def costFunctionPrime(self, X, y):\n", + " #Compute derivative with respect to W and W2 for a given X and y:\n", + " self.yHat = self.forward(X)\n", + " \n", + " delta3 = np.multiply(-(y-self.yHat), self.sigmoidPrime(self.z3))\n", + " dJdW2 = np.dot(self.a2.T, delta3)\n", + " \n", + " delta2 = np.dot(delta3, self.W2.T)*self.sigmoidPrime(self.z2)\n", + " dJdW1 = np.dot(X.T, delta2) \n", + " \n", + " return dJdW1, dJdW2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.48736193]\n", + " [ 0.54813314]\n", + " [ 0.54630022]]\n" + ] + } + ], + "source": [ + "NN= Neural_Network()\n", + "yHat=NN.forward(X)\n", + "print yHat" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.75]\n", + " [ 0.82]\n", + " [ 0.93]]\n" + ] + } + ], + "source": [ + "print y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Third part" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAF4hJREFUeJzt3X+MVeWdx/H3V/yBjoBULLMCdax1V9iklsVFss1SmnZb\nJFqSxeiincWupmazLNv+o9vNZqvpH5umm6axboyuFpqQFNMf21AEdLMpWdO0tliVWtBIGlaQQJFS\nQKTALN/9496h43Xm3jvPPHfOfb7zeSXEe+595vA8fJzvnPnec88xd0dERGI5r+oJiIhIfiruIiIB\nqbiLiASk4i4iEpCKu4hIQCruIiIBtSzuZvYNMztoZr9oMuYhM3vNzF4ys/l5pygiIqPVzpH7WmDp\nSC+a2TLgA+5+LfBZ4JFMcxMRkUQti7u7PwscaTLkU8A362OfAy4zs5l5piciIily9NxnAXuHbO8D\nZmfYr4iIJMr1hqo1bOuaBiIiFTo/wz7eAOYM2Z5df+4dzEwFX0Qkgbs3HkC3lKO4bwRWAxvMbBHw\nW3c/ONzAlAmWwswecPcHqp5Hp0ReX+S1gdZXutQD45bF3cy+BXwEmGFme4EvAhcAuPuj7r7ZzJaZ\n2W7gBPCZlIkE0Ff1BDqsr+oJdFBf1RPosL6qJ9BhfVVPoBu1LO7uvrKNMavzTEdERHLQJ1TzWVf1\nBDpsXdUT6KB1VU+gw9ZVPYEOW1f1BLqRjdfNOszMI/fcRUQ6IbV25nhDVQAzW+Lu26qeR6dEXl/k\ntUF3rU9nzTWX8wBYxV1ExpVu7Tk8s7yNDbVlRGTc1OtA1dPoSmY27JF7au3UG6oiIgGpuGdiZkuq\nnkMnRV5f5LVB/PXJ8FTcRUQCUs9dRMbNcD33efNu4MSJzv2dPT2wc+f2zv0FmeTuuetsGRGp1IkT\ncMUVnSu+hw7d0LF9dzO1ZTKJ3teMvL7Ia4P468vhK1/5Crfeeus7nluzZg2f+9znKprR2OnIXSS8\nyevMequeRFfr7+/nwQcf5OjRo0ybNo2BgQGefPJJtm7dWvXUkqm4Z9ItnwDslMjri7y2mmnAgTer\nnkWNXVX1DIbT29vL4sWL+fa3v80999zD1q1bueKKK5g/f37VU0umtoyICLBq1SrWr18PwPr16+nv\n7694RmOj4p5J9L5m5PVFXlvNqclVz6AEy5cvZ8eOHbz88ss89dRT3HnnnVVPaUxU3EVEgMmTJ3Pr\nrbdyxx13cOONNzJ79uyqpzQm6rlnEr1vG3l9kddWc9Hvqp5BMz09nT1dsaen/bGrVq3i8ccfZ+3a\ntR2bz3hRcReRSnXTB4zmzJnDxRdfzIoVK6qeypipLZNJ9L5t5PVFXluNeu7tOHv2LF/96ldZuXIl\nl156adXTGTMduYvIhHfixAlmzpzJ1VdfXfS57UPp2jIiwZn17umi89wX6Hruw9P13EVEpCUV90yi\n920jry/y2mrUc5+IVNxFRAJSz10kOPXcy6Ceu4iItKTinkn0vm3k9UVeW4167hORznMXkUrdMG8e\nnb7P3vadO5O/fMmSJfT393P33Xefe27btm309/ezd+/ell8/mrE5qbhnEv36JJHXF3ltNd19bRlO\nnGD7FVd0bPc3HDo0pq83M8zKe7tQbRkRmfDGepu9tWvXMm/ePKZOnco111zDY489BtQ++XrTTTex\nf/9+pkyZwtSpUzlw4ED2+Q9HxT2T6H3byOuLvLYa9dxb6e/vZ+vWrRw9ehTg3G32Vq1aBUCrM3xm\nzpzJU089xbFjx1i7di2f//zneeGFF+jp6WHr1q1ceeWVHD9+nGPHjtHbOz63PFRxF5EJb+ht9oB3\n3GbP3VmzZg3Tp08/9+eWW255R6tm2bJlXH311QAsXryYT3ziEzz77LNA6x8MnaLinkn0vm3k9UVe\nW02X99y7xEi32TMzvv71r3PkyJFzfzZt2vSOor1lyxYWLVrE5ZdfzvTp09m8eTOHDx+uZB2DVNxF\nRBjdbfaGFvZTp06xYsUK7rvvPn79619z5MgRli1bdm5MVW/GqrhnEr1vG3l9kddWo557O5rdZq9Z\na+X06dOcPn2aGTNmcN5557FlyxaeeeaZc6/PnDmTw4cPc+zYsY7Ov5FOhRSRavX0jPl0xVb7b9dI\nt9kb7uh78LkpU6bw0EMPcdttt3Hq1CluueUWli9ffm7cddddx8qVK3n/+9/P2bNn2blz57i8qdry\n2jJmthT4GjAJeNzdv9zw+jRgPTCH2g+Lf3P3dcPsR9eWKZjZlJ3Qc0nV8xjeibfdj8+rehbdSteW\nad/rr7/O3LlzOXjw4LjfjSn3tWWaHrmb2STgYeDjwBvAz8xso7vvGjLs74CX3f0WM5sBvGpm6919\nYLSTkW7Wc0n3FIhGvTOqnoGUb6LdZm8hsNvd9wCY2QZgOTC0uJ8FptYfTwUOT8TCbmZLYp91Ebdv\nq+wk4m32WhX3WcDQCyLsA25sGPMw8AMz2w9MAW7LNz0Rkc7r6enhrbfeqnoaWbUq7u00x5YCP3f3\nj5rZNcB/mdn17n68caCZrQP21Dd/C7w4eMQ0eMZCqduDz3XLfDqxPtg0BW4+/vvH0C3bY1mfu2+r\n+t+3s9sX/a7qfH6/La3Us7urvrkneT/N3twws0XAA+6+tL79BeDs0DdVzWwT8K/u/qP69n8D97v7\n9oZ96Q3VgnXXm3KNeme4H+irehbdqruy6+43VKs03jfr2A5ca2Z9ZnYhcDuwsWHM69TecMXMZgJ/\nBPxqtBMpnc6VLpeyk4iatmXcfcDMVgNPUzsV8gl332Vm99ZffxT4ErDOzHYABtzn7r/p8LxFpFAl\nXj63RLqHqrSlu361b6S2TDPKrmy6h6qIiJyj4p6J+rblUnZli59fGhV3EZGAVNwzif0JR4h8TXBl\nV7b4+aVRcRcRCUjFPZP4fb+4fVtlV7b4+aUZ1+u5107J6ka6ZKyIxDLON+vo3nNtx7qH+H2/uH1b\nZVe2+PmlUVtGRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM\n4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVd\nRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z\n4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf9\n4vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCSg\nlsXdzJaa2Stm9pqZ3T/CmCVm9oKZvWxm27LPsgDx+35x+7bKrmzx80tzfrMXzWwS8DDwceAN4Gdm\nttHddw0Zcxnw78An3X2fmc3o5IRFRKS1VkfuC4Hd7r7H
3c8AG4DlDWPuAL7r7vsA3P3N/NPsfvH7\nfnH7tsqubPHzS9OquM8C9g7Z3ld/bqhrgfeY2Q/NbLuZ9eecoIiIjF7TtgzgbezjAuBPgI8BlwA/\nNrOfuPtrY51cSeL3/eL2bZVd2eLnl6ZVcX8DmDNkew61o/eh9gJvuvtJ4KSZ/Q9wPTBMcV/RB1ed\nrj2eNgALTsLNx2vbm6bU/lvN9uCvdoP/o2j7ndu1X+03TemWvJTfaPPrrrx+v11T9b9PN23XH99V\n/6fZQyJzH/ng3MzOB16ldlS+H/gpsLLhDdXrqL3p+kngIuA54HZ339mwLwd/PnWindU7w/1A31j2\nYGZLIh9BmE0/AEcaf7B3ibHlp+yqpO+9VszM3d1G+3VNj9zdfcDMVgNPA5OAJ9x9l5ndW3/9UXd/\nxcy2AjuAs8B/NBZ2EREZX02P3LP+RcGP3KMz690DB7r0TCjl14yyK1vqkbs+oSoiEpCKeybxz7WN\ne660sitb/PzSqLiLiASk4p5J5Hfra+KeK63syhY/vzQq7iIiAam4ZxK/7xe3b6vsyhY/vzQq7iIi\nAbW6/IC0KX7fr3v7tpfw5pW9ZntSv34m0GujPo24LSfg7ePu8zqy87Z1b3Y5xP/eS6PiLsW7BLcD\n0JUf0ukF3d+gibH+YO6k7vjBnE7FPZPo17eI3LfdBFNuhuNVz6Nzuje7HD+YO5Vf6T+Y1XMXEQlI\nxT2T2EftELlvG/uoHSJnBxMhvzQq7iIiAam4ZxL/XNvu7duO1SaY0npUyeJmBxMhvzQq7iIiAam4\nZ6Kee7ni92zjZgcTIb80Ku4iIgGpuGeinnu54vds42YHEyG/NCruIiIBqbhnop57ueL3bONmBxMh\nvzQq7iIiAenaMuS5eNEpmHwRZD9C6p6LF8Xt2+raMmWLn18aFXd08SIRiUdtmUziHznE7dsqu7LF\nzy+NiruISEAq7pnEP9c2bt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8v\nbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJS\ncc8kft8vbt9W2ZUtfn5pWhZ3M1tqZq+Y2Wtmdn+TcX9qZgNm9pd5pygiIqPVtLib2STgYWApMA9Y\naWZzRxj3ZWArYB2YZ9eL3/eL27dVdmWLn1+aVkfuC4Hd7r7H3c8AG4Dlw4z7e+A7wKHM8xMRkQSt\nivssYO+Q7X31584xs1nUCv4j9ac82+wKEr/vF7dvq+zKFj+/NK2KezuF+mvAP7q7U2vJTMi2jIhI\nN2l1m703gDlDtudQO3ofagGwwcygdku4m8zsjLtvfPfuVvTBVadrj6cNwIKTcHP9p+6met+smu3B\nvt3gUcBotx+E9y6Ak6lfP9L2IDNbAuDu26rYhremwqZT3ZJXzvyG/lvHzO/U5KrzGXm7vtWF+Z2C\nc+9VjGde9cd31f/qPSSy2gH3CC+anQ+8CnwM2A/8FFjp7rtGGL8W+IG7f2+Y1xz8+dSJdtIMJl1/\niLMvjWUfnbyH6gH3vtz7HS2z6QfgSOMP9q4w1vw6eYPlbsgvcnYwEb73zN191B2Rpkfu7j5gZquB\np4FJwBPuvsvM7q2//mjSbAOK3/eL27dVdmWLn1+aVm0Z3H0LsKXhuWGLurt/JtO8RERkDPQJ1Uzi\nn2sb91xpZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEX\nEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2\n+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/\nuH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlI\nxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+Pml\nUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1b\nZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQmoreJuZkvN7BUze83M7h/m9TvN7CUz22Fm\nPzKzD+afaneL3/eL27dVdmWLn1+alsXdzCYBDwNLgXnASjOb2zDsV8Bid/8g8CXgsdwTFRGR9rVz\n5L4Q2O3ue9z9DLABWD50gLv/2N2P1jefA2bnnWb3i9/3i9u3VXZli59fmnaK+yxg75DtffXnRnI3\nsHkskxIRkbE5v40x3u7OzOyjwN8AHx5+xIo+uOp07fG0AVhwEm6u/9TdVO+bVbM92LcbPAoY7faD\n8N4FcDL160faHmRmSwDcfVsV2/DWVNh0qlvyypnf0H/rmPmdmlx1PiNv17e6ML9TcO69ivHMq/74\nrvpfvYdE5t68dpvZIuABd19a3/4CcNbdv9ww7oPA94Cl7r57mP04+POpE+2kGUy6/hBnXxrLPjbB\nlE78etgLMw649+Xe72iZTT8AR/ZVPY/hjDW/TmUH3ZFf5OxgInzvmbu7jfbr2mnLbAeuNbM+M7sQ\nuB3Y2PCXv49aYf/0cIV9Iojf94vbt1V2ZYufX5qWbRl3HzCz1cDTwCTgCXffZWb31l9/FPgXYDrw\niJkBnHH3hZ2btoiINNNOzx133wJsaXju0SGP7wHuyTu1snTyV/vuEPdcaWVXtvj5pdEnVEVEAlJx\nzyT+kUPcvq2yK1v8/NKouIuIBKTinkn861vE7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+N\niruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsq\nu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE\n7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruI\nSEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LF\nzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF\n7dsqu7LFzy+NiruISEAti7uZLTWzV8zsNTO7f4QxD9Vff8nM5uefZveL3/eL27dVdmWLn1+apsXd\nzCYBDwNLgXnASjOb2zBmGfABd78W+CzwSIfm2tWeh4urnkNnnbmw6hl0irIrW/z80rQ6cl8I7Hb3\nPe5+BtgALG8
Y8yngmwDu/hxwmZnNzD7TLncUzq96Dp3lYVt4yq5s8fNL0yr0WcDeIdv76s+1GjN7\n7FMTEZFUrYq7t7kfS/y6MP4XQv/qC/8X9uhI2ZUtfn5pWoX+BjBnyPYcakfmzcbMrj83DFswuumN\njzcBgzHPzeDyDNN5937NuuSHZdz8OpUddEt+cbODifC9N3qtivt24Foz6wP2A7cDKxvGbARWAxvM\nbBHwW3c/2Lgjd288uhcRkQ5pWtzdfcDMVgNPA5OAJ9x9l5ndW3/9UXffbGbLzGw3cAL4TMdnLSIi\nTZl7sb91iIjICLKfIhX5Q0+t1mZmS8zsqJm9UP/zz1XMM4WZfcPMDprZL5qMKTI3aL2+krMDMLM5\nZvZDM/ulmb1sZmtGGFdkhu2sr9QMzWyymT1nZi/W1/bACONGl527Z/tDrXWzG+gDLgBeBOY2jFkG\nbK4/vhH4Sc45dOpPm2tbAmyseq6J6/tzYD7wixFeLzK3Uayv2Ozq8+8FPlR/fCnwapTvvVGsr9gM\ngUvq/z0f+Alw41izy33kHvlDT+2sDd59WmgR3P1Z4EiTIaXmBrS1Pig0OwB3P+DuL9YfvwXsAq5s\nGFZshm2uDwrN0N3frj+8kNrB49mGIaPOLndxj/yhp3bW5sCf1X9t2mxm88Ztdp1Xam7tCpNd/ey2\n+cBzDS+FyLDJ+orN0MzOM7MXgYPAM+7+s4Yho84u94cbIn/oqZ05/hyY4+5vm9lNwPeBP+zstMZV\nibm1K0R2ZnYp8B3gH+pHuO8a0rBdVIYt1ldshu5+FviQmU0D/tPM/tjdf9kwbFTZ5T5yz/yhp67S\ncm3ufnzw1yt33wJcYGbvGb8pdlSpubUlQnZmdgHwXWC9u39/mCFFZ9hqfREydPejwA+pXaxxqFFn\nl7u4n/vQk5ldSO1DTxsbxmwE/hqg2YeeulDLtZnZTDOz+uOF1E41/c34T7UjSs2tLaVnV5/7E8BO\nd//aCMOKzbCd9ZWaoZnNMLPL6o8vBv6C2nsKQ406u6xtGQ/8oad21gbcCvytmQ0AbwN/VdmER8nM\nvgV8BJhhZnuBL1J7Y6fo3Aa1Wh8FZ1f3YeDTwA4ze6H+3D8B74MQGbZcH+Vm+AfAN612ifXzgCfr\nWY2pbupDTCIiAYW+zrOIyESl4i4iEpCKu4hIQCruIiIBqbiLiASk4i4iEpCKu4hIQCruIiIB/T96\nyfLdDqRLRwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#Compare estimate, yHat, to actually score\n", + "bar([0,1,2], y, width = 0.35, alpha=0.8)\n", + "bar([0.35,1.35,2.35],yHat, width = 0.35, color='r', alpha=0.8)\n", + "grid(1)\n", + "legend(['y', 'yHat'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Fourth part: https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%204%20Backpropagation.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def sigmoid(z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))\n", + "\n", + "def sigmoidPrime(z):\n", + " #Derivative of sigmoid function\n", + " return np.exp(-z)/((1+np.exp(-z))**2)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXIAAAEACAYAAACuzv3DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYFNXV+PHvYTaGTTZZhl1ABQy+bohoDK6gUYxbEKO+\naFRcUGN+JgJGetoNiTFRI0YElbhE44tR0dc1Iq/7QkRRBAWVHRUQZR+28/vj1gzDMMz0DNVdXXfO\n53nmgeouus+hZs5Un6p7r6gqxhhj4qte1AEYY4zZPVbIjTEm5qyQG2NMzFkhN8aYmLNCbowxMWeF\n3BhjYq7aQi4iD4jItyLySRX73CUic0XkYxE5INwQjTHGVCWVM/IHgYG7elJETgS6qWp34GLgbyHF\nZowxJgXVFnJVfQNYVcUug4C/B/u+BzQVkdbhhGeMMaY6YfTI2wGLym0vBtqH8LrGGGNSENbFTqmw\nbeP+jTEmQ3JDeI0lQIdy2+2Dx3YgIlbcjTGmFlS14snyDsIo5FOA4cDjItIX+EFVv61NMHEmIsWq\nWhx1HOnic34+5wZ1Lz8R6gGtcG3f9hX+bBc8tyfQktrVwPGqXLKbYacslZPgapMQkceAnwEtRWQR\nkADyAFR1vKo+LyInisg8YB1w/u6FHVudow4gzTpHHUAadY46gDTrHHUAYROhIbAX0BUO+bkIrYBu\nbpsOBDUqBWuAFcDycl8rcTd4/Aj8EHyV//v34WUSjmoLuaoOSWGf4eGEY4wx24lQH9gX6AXsV+6r\n8/a9egIcXOGfrsS1eBdX+HMJ8C2uYK9QZWMaw8+YMForxpkUdQBpNinqANJoUtQBpNmkqANIhQh5\nuCLdBzgk+LMnkFPJ7puBr4Ev4eiNwOvu73wJzFdlfUaCzhKSqYUlRER97pEbY2omaI/0A47CtW8P\nBOpX2G0bMBf4tMLXPFW2ZC7a6KRSO62Qh0RE+qvqtKjjSBef88vW3OxOr7qnshqZSu201ooxWcyW\nYqw7RGp/nmtn5MZkqeBnJuowTIaISK3PyG0aW2OMiTkr5CERkf5Rx5BOPufnc26mbrBCbowJzZgx\nY7jooouy7n07d+7Mq6++msGIMst65MZkKeuRh6dLly7cf//9HH300VGHskvWIzfGmDrMCnlIfO+z\n+pyfz7ml09ixY2nfvj1NmjRh3333ZerUqRQXF3PuueeW7fPQQw/RqVMnWrZsyU033UTnzp2ZOnUq\nAMXFxZx55pmce+65NGnShN69ezN37lzGjBlD69at6dixI6+88krZay1dupRBgwbRokULunfvzsSJ\nE8ueq/i+Dz/8cNn73nLLLRn434iWFXJjYkoknK/a+Pzzzxk3bhzTp09n9erVvPzyy3Tu3HmHe6E/\n++wzLr/8ch577DGWLVvGjz/+yNKlS3d4neeee47zzjuPVatWccABBzBgwADAFe3Ro0czbNiwsn3P\nOussOnbsyLJly5g8eTKjRo3itddeC/4vdnzfyy67jEcffZSlS5eycuVKFi9eXLtEY8IKeUiycWRg\nmHzOz+fc0iUnJ4eSkhJmzZrF5s2b6dixI3vttdcOA5gmT57MoEGD6NevH3l5edxwww07DXo58sgj\nOe6448jJyeGMM85g+fLljBgxgpycHAYPHsz8+fNZvXo1ixYt4u2332bs2LHk5+ez//77c+GFF/LQ\nQw8B7PS+J598MkcccQT5+fnceOON1Kvnd6nzOztjPKYazldtdOvWjTvuuIPi4mJat27NkCFDWLZs\n2Q77LF26lPbtt6/6WFhYSIsWLXbYp1WrVjs837Jly7JiX1hYCMDatWtZunQpzZs3p2HDhmX7d+zY\nkSVLdlrDZqf3bdCgwU7v6xsr5CHxvc/qc34+55ZOQ4YM4Y033mDBggWICNdee+0OZ9xFRUU7tDQ2\nbNjAypUra/VeRUVFfP/996xdu7bssYULF+5QsMvvu2jR9mWE169fX+v3jQsr5MaYGvviiy+YOnUq\nJSUlFBQUUL9+fXJydpxt9vTTT+fZZ5/lnXfeYdOmTRQXF9d67pgOHTrQr18/Ro4cSUlJCTNnzuSB\nBx7gnHPO2Wnf008/neeee4633nqLTZs2MXr0aLZt21ar940LK+Qh8b3P6nN+PueWLiUlJYwcOZI9\n99yTtm3bsmLFCsaMGQNsv/DYq1cv/vrXv3LWWWdRVFRE48aNadWqFQUFBWX7VeyZV7X92GOPMX/+\nfIqKijjttNO44YYbyu4LL/9avXr1Yty4cZx99tkUFRXRvHlzOnTogM9sQJAxWcq3AUFr166lWbNm\nzJs3j06dOkUdTtaxAUFZwPc+q8/5+Zxb1J599lnWr1/PunXruOaaa+jdu7cV8TSwQm6MSZspU6bQ\nrl072rVrx5dffsnjjz8edUhestaKMVnKt9aKqZq1Vowxpg6zQh4S3/usPufnc26mbrBCbowxMWc9\ncmOylPXI6xbrkRtjTB1mhTwkvvdZfc7P59wyzcel3i699FJuuumm2oaWEdZaCYmI9Pd5qLfP+WVr\nbtZaCU/5pd6Ki4u5+eabqV+/Prm5ufTs2ZPbb7+dvn37RhqjtVayQDYWgjD5nJ/PuZmdiQhDhgxh\nzZo1LF++nCOOOILTTjut0n3jMtmWFXJjTK3Edak3VS2bhTE3N5fzzjuPb775hpUrVzJ06FAuvfRS\nTjzxRBo1asRrr73G0KFDuf766wGYNm0a7du357bbbqN169YUFRXxzDPP8Pzzz7PPPvvQokWLssnD\nSt/r1ltvpVu3brRs2ZLBgwezatWqEI+Ckxv6K9ZR2frxPCw+5xfX3CQZTqdSEzVv35Rf6q1NmzYs\nXLiQLVu28MYbb5TtU7rU20svvcQhhxzCqFGjKl3qbcqUKUyaNIkLLriAAQMGcNFFF7F06VIefPBB\nhg0bxldffQW4pd569+7N5MmTmT17Nscddxxdu3blqKOOqnSptxdeeIE+ffowcuTIXS71VlJSwqRJ\nk+jYsWPZ4hOPPfYYL7zwAocddhglJSU88sgjO7z+t99+S0lJSVmMF154IQMGDODDDz9kwYIFHHzw\nwZx99tl06tSJu+66iylTpvD666+z5557csUVV3D55Zfzj3/8o8b/51WxM3JjTI3Ffam3J554gmbN\nmtGxY0dmzJjBU089VfbcL37xCw477DCAsil3y79+Xl4e1113XVmMK1eu5KqrrqJhw4b07NmTnj17\n8vHHHwNw7733ctNNN1FUVEReXh6JRILJkyeH3rKxM/KQxPGMriZ8zi+uudXmTDos5Zd6mzVrFgMG\nDODPf/7zDvtkYqm36dOn7xRbKku9DR48uOyXQHkiUumqQ+W1aNFipxhbt269Qx6lKxktWLCAU089\ndYdfJLm5uXz77be0bdu2yvepCTsjN8bUSpyXeqvp3UAVP0mkqmPHjrz44ousWrWq7Gv9+vWhFnGw\nQh4a3+9F9jk/n3NLF1+XeqssvvIXR2vqkksuYdSoUSxc
uBCA5cuXM2XKlFq9VlWskBtjaizOS71V\n9r5VPVfxsepiLu+qq65i0KBBHH/88TRp0oTDDjuM999/f5f711a1A4JEZCBwB5ADTFTVsRWe3wN4\nBOiA67n/SVUnVfI6Xg8IMiZsvg0IsqXeqpa2AUEikgPcDQwEegJDRKRHhd0uBz5V1f8C+gO3i4hd\nRDXG2FJvGVJda6UPME9V56vqZuBx4JQK+2wDmgR/bwKsVNUt4YaZ/Xzvs/qcn8+5Rc2WesuM6s6c\n2wGLym0vBg6tsM/dwLMishRoDPwyvPCMqZtE8KINOWHCBCZMmBB1GN6rrpCn0qAbCHyoqkeJSFfg\nFRHZX1XXVNxRRCYB84PNH4CPSu/hLT0riut26WPZEo/ll/q2qk7LpngA4I47MXVS8L0wNNicn9K/\nqepiioj0BYpVdWCwPRLYVv6Cp4g8B4xR1beC7VeBa1V1eoXXsoudxqRAhEOBN0FyfbrYaaqWztkP\npwPdRaSziOQDg4GKN0EuBI4N3rA1sA/wVarB+8L3PqvP+WVTbiI0xV2LshsGTMqq/GZR1S0iMhx4\nCXf74f2qOltEhgXPjwduBCaJyExAgN+r6vdpjtsY7wR98fuAzsB/gINqO6LQ1C22sIQxWUKEYcC9\nwBrgQFXmRRySyQK2sIQxMSHCT3AD7wCGWRE3NWGFPCTZ1GdNB5/zizo3EQqAR4H6wP2qPBbu6/t7\n7MD//FJhhdyY6BUDPwHmAVdFG4qJI+uRGxMhEfoBpcvq/FSVt6OMx2Qf65Ebk8VEaAj8HfdzeJsV\ncVNbVshD4nufzuf8Isztj0A34BMgka438fnYgf/5pcIKuTEREOFY4DJgM3CuKiURh2RizHrkxmRY\n0FL5BOgC/EGVmyMOyWQx65Ebk52SuCL+Ma69YsxusUIeEt/7dD7nl8ncRDgIuBo3j/+FqmxO/3v6\ne+zA//xSYYXcmAwRIQ+YiPu5u1OV6dX8E2NSYj1yYzJEhN8DY3FzTO+nyrpoIzJxkErttEJuTAaI\n0BX4FDcMf6AqL0UckokJu9iZQb736XzOL925BdPT3oMr4o9muoj7fOzA//xSYYXcmPQ7FTget7zh\nbyOOxXjIWivGpJEIDYDZQEdguCrjIg7JxIy1VoyJ3ghcEf8It2iEMaGzQh4S3/t0PueXrtyCC5y/\nDzaHq7I1He9TfRz+HjvwP79UWCE3Jn3uAAqAh1R5K+pgjL+sR25MGohwEvAssBrYR5VvIg7JxJT1\nyI2JgAj1gTuDzYQVcZNuVshD4nufzuf80pDblcBewCyI/i4Vn48d+J9fKqyQGxMiEVoB1wWbV2di\nUixjrEduTIhEuAe4FHhBlROjjsfEn821YkwGidATmBls9lblsyjjMX6wi50Z5Hufzuf8QsztNiAH\nuC+birjPxw78zy8VVsiNCYEIxwMnAmuA4mijMXWNtVaM2U0i5AAzgJ8AI1QZG3FIxiPWWjEmM87H\nFfEFbL9/3JiMsUIeEt/7dD7ntzu5idAQuDHYHKHKxlCCCpHPxw78zy8VVsiN2T1XAW2AD4B/RhyL\nqaOsR25MLYnQAvgKaAIco8rUiEMyHrIeuTHpNQJXxF+xIm6iZIU8JL736XzOrza5idAeuCLYHBlq\nQCHz+diB//mlwgq5MbWTwM01/oQq/4k6GFO3VdsjF5GBuAnyc4CJqrrTPbLBb8S/AHnAClXtX8k+\n1iM3XhBhX9zMhgr0VOWLiEMyHkulduZW8wI5wN3AscAS4AMRmaKqs8vt0xQ3VecAVV0sIi13P3Rj\nstpNuE+z91kRN9mgutZKH2Ceqs5X1c3A48ApFfY5G3hSVRcDqOqK8MPMfr736XzOrya5iXAIcDqw\nEbghXTGFyedjB/7nl4rqCnk7YFG57cXBY+V1B5qLyGsiMl1Ezg0zQGOyhQgC3Bps3qnKkijjMaZU\nla0VXA+wOnnAgcAxQAPgHRF5V1Xn7m5wcaKq06KOIZ18zq8GuR0LHA38APGZT8XnYwf+55eK6gr5\nEqBDue0OuLPy8hbhLnBuADaIyOvA/sBOhVxEJgHzg80fgI9KD0LpxyPbtu3s3M7pD0/fCScD/BFk\nf5Fsis+2fdkO/j4UZz4pqPKuFRHJBT7HnW0vBd4HhlS42Lkv7oLoANztWO8Bg1X1swqv5fVdKyLS\n3+czA5/zSyU3EU4AngdWAF1UWZuJ2MLg87GDOpHf7t21oqpbRGQ48BLu9sP7VXW2iAwLnh+vqnNE\n5EXcyijbgAkVi7gxcRb0xksvbI6NUxE3dYPNtWJMNUQ4CXgW+A7YS5V1EYdk6hCba8WY3RScjSeD\nzVutiJtsZIU8JL7fy+pzftXkNgh3V9Y3wL0ZCShkPh878D+/VFghN2YXgrPx4mDzVlU2RBiOMbtk\nPXJjdkGEU4F/4e7Y6maF3ETBeuTG1JII9djeGx9jRdxkMyvkIfG9T+dzfrvI7TTcgspLgIkZDShk\nPh878D+/VFghN6aC4Gy8ONi8ORsXVDamPOuRG1OBCINxM30uArqrUhJxSKYOsx65MTUkQg5u9R+A\nm6yImziwQh4S3/t0PudXIbfBQA9gATApinjC5vOxA//zS4UVcmMCwdn46GDzJlU2RRmPMamyHrkx\nARHOAR4Gvgb2UWVzxCEZYz1yY1IlQi7be+M3WhE3cWKFPCS+9+l8zi/I7VdAN+BL3Fm5N3w+duB/\nfqmwQm4MTXKA64ONG1TZEmU0xtSU9chNnSfCBcD9uOUJe1ohN9nEeuTGVEOEPOAPwWbSiriJIyvk\nIfG9T+dxfv8N07oAc3CjOb3j8bED/M8vFVbITZ0lQj479sa3RhmPMbVlPXJTZ4kwDLfqz2dAbyvk\nJhtZj9yYXRChALgu2ExaETdxZoU8JL736TzM79dAB+BTyF8edTDp5OGx24Hv+aXCCrmpc0SoD4wK\nNothc2b6i8akifXITZ0jwhXAXcBM4ABVtkUckjG7lErttEJu6hQRCnHD8NsCp6nyVMQhGVMlu9iZ\nQb736TzKbxiuiM8AngavcquU5ec/K+SmzhChITAy2BytivXGjRestWLqDBGuAW4DPgAOtUJu4sB6\n5MYERGiEWzCiJXCCKi9GHJIxKbEeeQb53qfzIL/huCL+LvBS+Sc8yK1Klp//rJAb74nQBPhdsGm9\nceMda60Y74nwB+BG4E3gSCvkJk6sR27qPBH2AOYDTYGjVXkt2oiMqRnrkWeQ7326GOf3G1wRn7ar\nIh7j3FJi+fnPCrnxlgjNgN8Gm4koYzEmnay1Yrwlwo24Zdz+rcpxUcdjTG2E0loRkYEiMkdE5orI\ntVXsd4iIbBGR02oTrDFhEqEFrq0CdjZuPFdlIReRHOBuYCDQExgiIj12sd9Y4EWgTp51+96ni2F+\nvwMaAS+p8nZVO8Ywtxqx/PxX3Rl5H2Ceqs5X1c24xWlPqWS/K4DJgNcT9Jt4EKENcGWwOTrKWIzJ\nhOoKeTtgUbn
txcFjZUSkHa64/y14qE7eo6uq06KOIZ1ilt8fgELgaVXer27nmOVWY5af/6or5KkU\n5TuAEequmgp1tLVisoMIXYCLcd+710ccjjEZkVvN80tw6xqW6oA7Ky/vIOBxEYFgQiIR2ayqUyq+\nmIhMwg3OAPgB+Kj0t2lpnyvG27/xLJ9Y5gd6AZAHj78MQ1qWnotU9e/L91ijjj8d25ZfvLaDvw8N\nUppPCqq8/VBEcoHPgWOApcD7wBBVnb2L/R8EnlXVf1XynNe3H4pIf58/4sUhPxH2wy3ftgXYR5Wv\nU/t32Z/b7rD84i2V2lnlGbmqbhGR4bjZ4nKA+1V1togMC54fH1q0MefzNxLEJr+bcK29+1It4hCb\n3GrN8vOfDQgyXhDhUNwUtRuAvVT5JuKQjAmFzbWSQb7fyxqD/G4J/ryzpkU8BrntFsvPf1bITeyJ\ncCxwNO4C+h8jDseYjLPWiok1EQR4DzgEGKXKmIhDMiZUqdROK+Qm1kQYjBtx/A3QTZV1EYdkTKis\nR55BvvfpsjE/EQqAW4PN0bUt4tmYW5gsP/9ZITdxdjnQGfgMeDDaUIyJjrVWTCyJ0ByYBzQDTlLl\nfyMOyZi0sNaK8dl1uCI+FXg+4liMiZQV8pD43qfLpvxE2AsYHmz+TnX3ZtzMptzSwfLznxVyE0e3\nAPnAw6p8GHUwxkTNeuQmVsoNxS8B9lZlYcQhGZNW1iM3XgkG/9webP7FirgxjhXykPjep8uS/IYA\nhwPfsf3+8d2WJbmljeXnPyvkJhZEaATcFmyOVOXHKOMxJptYj9zEggg3A6OA6cChqmyLOCRjMsLm\nWjFeEKErbvRmPtBPlXciDsmYjLGLnRnke58u4vxuZ/vthqEXcTt28eZ7fqmwQm6ymgjHAacA64AR\nEYdjTFay1orJWiLkAx8BPYARqoyNOCRjMs5aKybursEV8bnAHRHHYkzWskIeEt/7dJnOL5hP5fpg\n8zJVStL3Xnbs4sz3/FJhhdxknWAE5zigPvCoKv+OOCRjspr1yE3WEeFM4AncYsr7qvJtxCEZExnr\nkZvYEaEJcGewOcKKuDHVs0IeEt/7dBnM7yagLW6GwwmZeEM7dvHme36psEJusoYIh+MWjNgKXGLD\n8I1JjfXITVYQoRB3z/jewM2q/CHikIzJCtYjN3FyA66IzwJujDgWY2LFzshDIiL9VXVa1HGkSzrz\nC1b9eTvY7KvKB6G+flJygP2Ag4GDgK5AR6ANUMDXFNCFEtxdMiuBecAc3CeENzShS8OMJ9PsezPe\nUqmduZkKxpjKiFAfeBD36fCPYRVxSUpD4KTgayDQspp/Uhh8tcUV/fKvNQ+YAjwJvKsJtd69ySp2\nRm4iJcIY3GRYnwP/pcrGWr9WUgToC/wa+CXQuNzTC3B3wkwHZgMLgaXABmAzUAA0BVrjWjz7Aofi\nViRqVO51FuPuppmgCV1W21iNSZXNR26ymghHAtMABX6qWtZeqdnrJKUecDJwLXBYuafeBSYDzwNz\nNFHzb3ZJSi6uoJ8OnAF0CJ7aErz2LZrQT2oTtzGpsEKeQXWgTxdqfiI0BT7G9aprdZdKcAb+C+Bm\n3ORaAKuAicCDmtDZqcWSWm7B+x0NXIabWjcneOpJ4AZN6MwaJZAh9r0Zb9YjN9lsHK6IfwAka/qP\nJSn9cGt49gseWgT8GZioCV0bVpDlBWf0rwKvSlI6AL8DLsadrZ8mSXkQuE4T+k063t+YXbEzcpNx\nIvwKeAS3WMQBqsxN+d8mpQ2uYA8JHlqO+0UwQRO6KexYU4inLa6lcxmQB6zFfUL4iyY0bTM2mrrD\nWism64jQGddSaQJcpMrElP6d64NfBNyKuyi5AfgT8CdN6Or0RJs6SUr3IJ5BwUOzgQs1obXq+xtT\nKrQBQSIyUETmiMhcEbm2kud/JSIfi8hMEXlLRHrXNui48n2+hzDyE6EA+B9cEX8KuD+lf5eUHsCb\nwL24Iv4C0EsTOjqMIh5GbprQuZrQU4DjgS9wPfs3JSl/laQ0rvpfp5d9b/qv2kIuIjnA3bh7cXsC\nQ0SkR4XdvgKOVNXeuFF594UdqPHCn3GDcuYDv1alyo+DkpR6kpQrgQ9xd6N8g7ut8Oea0K/THGut\naEJfAfYHbsHNGTMc+FSS8rNIAzNeq7a1IiKHAQlVHRhsjwBQ1Vt3sX8z4BNVbV/hcWut1GEinAU8\nBmwCDldlepX7J6U9MAk4JnhoEnC1JvSHNIYZKknK/rhPHQfhbrH8IzA6il6+ia+wWivtcHcElFoc\nPLYrv8bdt2sMACL0gLJe+G9SKOJDgE9wRXwFcJom9Pw4FXEATejHuE8SN+IK+bXAu0GryJjQpHL7\nYcpXQ0XkKOAC3Gi4yp6fhPtYDW5ei49K7/8s7XPFePs3nuUTSn6gHwKTYVpD+PZVGHzvLvdvTAH/\nj8HA+XwNbOAdenKaJvSbdOZXvseapv+/0XKwfEsPrqMbBwAfygC5h1d4Vrel//hlIL9It33LL/j7\n0CCl+aQgldZKX6C4XGtlJLBNVcdW2K838C9goKrOq+R1vG6t1IFBCTXOT4Qc3EXNk3F3cfRRpdJ7\nvCUp++IuhO4HbASuwt1SmPbbqjJ17CQppasfDQ0eegwYpgldk9b3te/NWAvl9kMRycXNg3EMbm6K\n94EhqttHzYlIR2AqcI6qvlvbYIxfys2jsgpXxHf6BQ8gSfkVMB5oiPteO9PnYe8V8v0Cl29Wjgo1\n0QvtPnIROQG4Azck+X5VHSMiwwBUdbyITAROxU1EBLBZVfvUNBjjj3KDfrYCx6sydad9klKIO0O9\nKHjoH8Al6T5DzQaSlH1wn0B+gvsEciVuVGpmBnaY2LABQRlUBz7epZyfCH2A13EzCg5XZdxO+yRl\nb+AJ3K16JbhClpFWyk6xRHTsKvlF9ijuF1moUwzY92a8hTYgyJhUidAdeA5XxMcD9+y0T1IGA//B\nFfF5QF9N6H117WxUE7pBE3oxcA5uuoJfAdMlKT+JNjITN3ZGbkIjQmvgHaAL8BJwsiqby55PSgFu\nUNBlwUNPABdlwxD7qFW42LsBuEwTOinSoExWsNaKyRgRGuPmFj8Qt3jDUeXvUJGkdMEVqoNwg4Ku\nBv5W187CqyJJaYAbRX1+8NAk4HJN6PrIgjKRs9ZKBvk+30NV+QVzqDyJK+JfAj+vUMQH4YbZH4S7\nL/ZwTeg92VLEs+XYaULXa0IvwBXyDbjbFN8LztZrLVvySxff80uFFXKzW0TIA/4JHAd8BwxQ5TsA\nSUqeJOWPwDO4ya6mAAdqQqsc2VnXBS2VPrhbMffD9c3PjjQok9WstWJqLSjij+EWVvgBOFqVGQCS\nlHbA48ARuFsQR+KmnM2Ks/A4kKQ0wl0wLi3i44HfaEJrva6piR/rkZu0ESEXd5/4YOBH4NjSOVQk\nKcfi7gnfEzeI7CxN6BtRxRpnwfJyF+NuUywAPsINIKp0cJXxj/XIM8j3
Pl35/IIz8b/jivgaXDtl\nuiQlR5IyGngZV8RfBQ7I9iKezcdOE6qa0PG4ybe+BP4L+I8k5fRUXyOb8wuD7/mlwgq5qRERCnFz\n6pyNW9ZsoCrvBWtYvsr29TeTwABN6HfRROoXTegM3MXiJ3ELc0yWpNwpScmPNjKTDay1YlImwh64\nC5ZHAt8DJ6jyviTlVNy8281wiz+cFyywYEIWtFqGA7fj1gj9ANe6+irSwEzaWI/chEaEVrgl1g7E\n9b2Po1jm4wb4DAt2+1/gfE3o8kiCrEMkKX1wA6o64T4ZXQlMsovJ/rEeeQb53KcTYT946WO23yd+\nOMVSejY4DDfA5yrg5DgW8TgeO03o+7jjMRloBDwAPClJaVlx3zjmVxO+55cKK+SmSiIMBN6GgjbA\n+7T6pD/Fci5u9GZPYA7QRxN6l50NZpYm9HvcGqb/jbvofCrwiSTlhEgDMxlnrRVTKREEuAL4C+4X\n/hNcsfdYWsy9D3fRDWAccK0mdF1EYZqAJKUz8BDw0+Chv+GOjfdTAvvOeuSmVkTYcSBKvU038ofC\nddTbdgOQDywALtCE7jTHuImOJCUHuAa3Rmgebn2AYZrQFyMNzOwW65FnkC99OhF64laBOhtYx8H3\njGR0wQlIFF/BAAAL4UlEQVQs2HYrrohPAHr7VMR9OXaa0K2a0LHAIbhpgjsCL8gZ8pIkpUW00aWP\nL8dvd1ghN4BrpYjw37gLmD0o+GEOV3d4kpMuvxk4mG0sB07QhF5s085mN03ox0Bf4PfARhpxPDBb\nkjIkuH3ReMZaKwYR9sS1Uk4FhUPG/R8nXtkN0XbANtwyf4mwV64x6SdJ6Yb7FNU/eOg14EpN6KeR\nBWVqxHrkploinALcB7Si9cx1DBk0n6YLegVPf4Drsc6ILkKzuyQp9YALgFuBFrhJzMbhfjn/EGVs\npnrWI8+guPXpRCgS4Z/A0zT8rhVn/HIpl+xfGBTxH3B3rBxWWsTjll9N+JwbAMUcqQmdCOyNK+CC\nG0D0hSTlYklKbqTx7Sbvj18KrJDXMSLkiDAcmE3e+l9yxC2bubrDRvb7nyIExa1Q000TercmdGvE\n4ZoQaUK/14QOxw0kegPKWmqzJClnWP88vqy1UoeI0B+4ndyNB3LQeOifLKFwVUHw9PPANZrQ2dFF\naDIlKNpnAjcD3YKHpwMjNKGvRhaY2Yn1yA1QdkvhWHJKTuKAB+FnN2yl8bKc4OnpwHWa0JcjDNFE\nRJKSB/waSABtgodfB24BXrbRutGzQp5BItJfVadFHUd5IuwFjKL+qvM56L569L1DafxN6TGYCVwP\nPJvKD2s25hcWn3OD1PKTpDTE9c1/j1uWD9y96LcAT2tCt6U1yN1QB45ftbUz1hc5TOVE2BsYRZOF\n59D3zhwOug8K1oK7yPUJbuTfk9n8w2kyK5hmYYwkZRxwKfBbts9/PkeSchfwsN2Cmp3sjNwTwdwo\n/ZCtV9Hl1dM5eHw99n0G6pVdr5wK3Aa8ZB+XTXUkKYW4WxZ/jxshCrAaeBAYpwmdG1VsdY21VuoA\nEeoDg2m09Lfs/0hvDhoPzYM1BpStCP8D3KYJ/TDKOE08BT3003C3ox5e7qmXcUX9GU3ohihiqyus\nkGdQJvt0wdn3ARSuvJC9nzuXXk80ouvLkLPF7bAtZwn1tv4NeEATuiyc9/S3D+lzbhBefpKUA3Cr\nE50N1A8e/hH4JzAJeDeKT3t14PhZIc+UTHwzidCBwhVn0/Xfl9D9fzvT4ynID2aQVdmG1nuRelvv\nAV4M+x5wn39YfM4Nws8vmIDrLGAocHC5p77Grec6GXg/U9dg6sDxs0IedyJ0o/kX59LtxXPoMm0v\nur4E+eu371DSaCYFaycAT9hCxybTJCm9cAtbnAO0LffUElxRfwZ4UxNaEkF4XrBCHkMiFNJgeX+6\nvXAubT4+hvbvtqL9u1Cv3MnN+uZfkr/2YXI3PaIJ/TK6aI1xgrnQDwPOwPXUO5R7ej0wDXgReAmY\naxfcU2eFPINq+/FOhHyaft2Xvf59Js3nHUfrmd3p9Ea9spYJwLacbaxtM5OC1ZMoWPOkJnRxiKGn\nGKe/H199zg0yn18wavQQXEE/AehdYZeFuCkC3gDeBGbvThumDhw/K+SZkuo3k+Sv3ZOuL/+cVrMG\n0fzLPrScXUTbGULO5h13XNN2BRua/R9NljxK/R9fjXoOcJ9/WHzODaLPT5LSFjgeGAgch5uBsbzv\ngbeA94APgf/UpE0YdX7pZoU8QiLUo+iDHnR4+yT2WPRTGi/pRdMFRbScnU9hhZlDVWB1+1Wsb/kR\nBasn0/zLp8K628SYbBK0YPYDjsCtL/pToKiSXZfgRpZ+CHwKfAbM04RurmRfr1khzwBpNasZnf7v\nZzRe1o8GK3rTcHlXGn7Thj0WNWSPRYJU8v+7odlmVrdbxMam71L4/ZO0+uxVTeiPmY/emGgFbZjO\nuMJ+EG5mxgOARpXsvgWYC8zGFfYvcHfKfA0s83WkciiFXEQG4laIyQEmqurYSva5C9cLWw8MVd15\nIYI4FnLp8loBLed0p8GK3hSs/gkFq/em/o+dqP9DawpXNqPhd4U0XlqPnC3uW6lLhRfYmgtr2q1l\nTdsFlDSeQb1tU2n74Yt66/exO9v2+eOrz7lB/PILFsLozvai3jP46oybZmJH7mevBLco+FfBIwuB\npRW+fozjRdbdLuQikgN8DhyL+6jzATBEdftUpyJyIjBcVU8UkUOBO1W1b22CSSfp9UQ9Gi9rTcGa\njuStb0+9TUXkbmpN7sa25G4oIm9da/LXNadgdRMKVjeg8Pt8GqyQskE2VVnbagtv5Gxgnx5z2dRo\nDlvzp1O46nX2enWmLx8FReQ3qnpH1HGkg8+5gT/5SVIaAPvginoP3PS7e/EmPTmChim8xAa2F/Xv\ngJXAiir+XJ0NZ/lhTJrVB5inqvODF3wcOAX30abUIODvAKr6nog0FZHWqvptrSPfDXLsqMNp/84/\nydtQSN76+uSvyadgTQ6n/5haUa5oYxNlQ4sS1rf4kY1Nl7Op0SI2F85jW/4scjbOoNObM/X2xRtF\npFjfXVocekLZo2n1u8SWz7mBJ/lpQtcDM4KvMiJSzBHcjjtj7xJ8tcf13ku/2gENga7BV0pvKUlZ\ngxu9Wv5riiZ0/O7mE6bqCnk7YFG57cXAoSns0x6IpJBTb1N9ukxrV+lzmwthY5OtbGq8hc0NStjc\nYANbCteyqeFKNjf4lq0FS9hSsIBtuV8h2+bQ5uMvdMK7GzOcgTGmhjSha3Aze36yq30kKY3ZXthb\nBl8tqvizSbmv8vfFZ92EYdUV8lT7SRVP+6PrQ2nODD789V/YmvcdmrOMrXmLqbd1EYXfL9YnH1lf\n/QvUWuc0vnY26Bx1AGnUOeoA0qxz1AGkWedUdgqK/efBV7WCO2waA3uU+2qK68Vnlep65H2BYlUd\nGGyPBLaVv+ApIvcC01T18WB
7DvCziq0Vkcpu3zDGGFOd3e2RTwe6i0hn3AWCwcCQCvtMwc2I9nhQ\n+H+orD8etztWjDEmLqos5Kq6RUSG4+ZHyAHuV9XZIjIseH68qj4vIieKyDxgHXB+2qM2xhhTJmMD\ngowxxqRHvUy+mYhcISKzReRTEdlpYJEPROT/icg2EWkedSxhEpHbgmP3sYj8S0T2iDqmMIjIQBGZ\nIyJzReTaqOMJk4h0EJHXRGRW8DN3ZdQxhU1EckRkhog8G3UsYQtu5Z4c/Nx9FrSuK5WxQi4iR+Hu\nOe+tqvsBf8rUe2eKiHTATQqUdVe1Q/Ay0EtV98cNjR4ZcTy7LRjwdjduMqeewBAR6RFtVKHaDFyt\nqr2AvsDlnuUHcBVuuL6PrYU7gedVtQduBsnZu9oxk2fklwJjVN1IR1VdnsH3zpQ/4xar9Y6qvqJa\nNsrtPdxYgbgrG/AWfF+WDnjzgqp+o6ofBX9fiysElU1QFUsi0h44EZhIZUP3Yyz4xPtTVX0A3PVK\n1V3Px5TJQt4dOFJE3hWRaSJycLX/IkZE5BRgsarOjDqWDLgAeD7qIEJQ2WC2ygeTxVxw59kBuF/C\nvvgL8Dsg8mH0adAFWC4iD4rIhyIyQUQa7Grn6m4/rBEReQVoU8lT1wXv1UxV+4rIIcATwF5hvn+6\nVZPfSNycy2W7ZySoEFWR3yhVfTbY5zpgk6r+I6PBpYePH8d3IiKNcOtoXhWcmceeiJwEfKeqM0Sk\nf9TxpEEubtKw4ar6gYjcAYwARu9q59Co6nG7ek5ELsWt4UcQ2DYRaaGqK8OMIZ12lZ+I7If7Dfqx\niIBrO/xHRPqoxmcdzaqOH4CIDMV9lD0mIwGl3xJ2HHrdAXdW7g0RyQOeBB5R1aejjidE/YBBwaR9\n9YEmIvKQqp4XcVxhWYz7hP9BsD0ZV8grlcnWytPA0QAisjeQH6ciXhVV/VRVW6tqF1XtgjsIB8ap\niFcnmM74d8ApqurL/DNlA95EJB834G1KxDGFRtxZxf3AZz7Mflieqo5S1Q7Bz9tZwFSPijiq+g2w\nKKiV4GagnbWr/UM9I6/GA8ADIvIJsAnw5j+9Ej5+ZP8rkA+8EnzqeEdVL4s2pN2zqwFvEYcVpsNx\nq9vPFJHSGQNHquqLEcaULj7+zF0BPBqcZHxJFYMtbUCQMcbEXEYHBBljjAmfFXJjjIk5K+TGGBNz\nVsiNMSbmrJAbY0zMWSE3xpiYs0JujDExZ4XcGGNi7v8DzCg4W/oGfTYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "testValues = np.arange(-5,5,0.01)\n", + "plot(testValues, sigmoid(testValues), linewidth=2)\n", + "plot(testValues, sigmoidPrime(testValues), linewidth=2)\n", + "grid(1)\n", + "legend(['sigmoid', 'sigmoidPrime'])\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.00916908, -0.00390022, -0.00285075],\n", + " [-0.00483125, -0.00184321, -0.0016088 ]])" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN = Neural_Network()\n", + "cost1 = NN.costFunction(X,y)\n", + "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", + "dJdW1" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.02903 ],\n", + " [-0.02442499],\n", + " [-0.04018761]])" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dJdW2" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0175075086056 0.0293176938671\n" + ] + } + ], + "source": [ + "scalar = 3\n", + "NN.W1 = NN.W1 + scalar*dJdW1\n", + "NN.W2 = NN.W2 + scalar*dJdW2\n", + "cost2 = NN.costFunction(X,y)\n", + "print cost1, cost2" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0293176938671 0.0133981591651\n" + ] + } + ], + "source": [ + "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", + "NN.W1 = NN.W1 - scalar*dJdW1\n", + "NN.W2 = NN.W2 - scalar*dJdW2\n", + "cost3 = NN.costFunction(X, y)\n", + "print cost2, cost3" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + 
}, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Neural Networks Demystified\n", + "# Part 2: Forward Propagation " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%202%20Forward%20Propagation.ipynb\n", + "from IPython.display import YouTubeVideo\n", + "YouTubeVideo('UJwK6jAStmg')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Visualizations:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEACAYAAAC9Gb03AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHLRJREFUeJzt3Xu0XXV57vHvQxKuGgiICZdIUKBFAUEQImgJFzVFbqLc\nBtqknlM4heCl53ggaIVaWxVti1qHh1OuHkWgaCUKiDEQpAoYCgEkIEaIISAIQgJBEQLv+WPOHXZ2\n9mXtvdaavzl/6/mMscdac6+593xfJnmz8qx5UURgZmb52iB1AWZm1l0e9GZmmfOgNzPLnAe9mVnm\nPOjNzDLnQW9mlrm2B72kiyQ9LumeYdb5sqRfSrpL0l7tbtPMzFrXiXf0FwMzh3pR0mHAThGxM3Ay\n8LUObNPMzFrU9qCPiJuBp4dZ5Ujg0nLd24AtJE1ud7tmZtaaKjL67YCH+y2vALavYLtmZkZ1H8Zq\nwLKvu2BmVpHxFWzjEWBqv+Xty++tQ5KHv5nZGETEwDfT66hi0M8D5gCXS5oOrIyIxwdbcaRim0zS\nORFxTuo6uiHn3qB3+pOYBBwAvBV4M7AnsMMIP/4H4LHy63HgKWAVsLJ87Hv+DPAc8Hz5M31fzwPP\nR7Cmw22t1QP7b8Q3yW0PeknfAg4EXiPpYeBsYAJARJwfEddKOkzSUood/ZftbrOhpqUuoIumpS6g\ny6alLqAbJDYGDoaDjpd4L7A768esfwSWAEuBB/t9LQd+A6yOqH0UOy11Aam1Pegj4sQW1pnT7nbM\nrH0SmwBHA+8H3g1sBq/re/kF4DbgFmAxcBfwQDffbVs1qohurHBJ6gK66JLUBXTZJakLaJfEXhTn\nsZwIbN7vpTtg/3uAi4CfRfB8ivq67JLUBaSmutx4RFLknNGbVU1CwCHAGcCh/V5aBHwduDpinUOf\nrYFamZ1+R18RSTMiYmHqOroh596gmf1JvA34IrB/+a3VwIXABRH8fN1169Wfj8Ab2ljfDHvQm2VE\nYlvgX4Djym89AZwHfC1i2DPYa6UuSUOdSGMPPBzdmGWgjGlmUwz5zSkOW/wn4NwInklY2qiVsyB1\nGbUjadB39I5uzHpAefz7pcAR5beuAU6NYHm6qqxOfD36ikiakbqGbsm5N6h3fxJ7A3dQDPmngQ8A\nR4xmyNe5P+sMD3qzhpJ4P/ATihOC/gt4SwTfbMAJTFbaYIMNePDBB7u/na5vwQCo01ENnZZzb1DP\n/iROB64ENgIuAN4ewbKx/K469ldn06ZN44Ybbkhdxqh40Js1jMQngS9TXK7gLODkTE90qqXyQ9HU\nZYyKB31Fcs5Bc+4N6tWfxMeBvwdeBj4UwWfbjWrq1F/dffCDH2T58uUcccQRvPrVr+YLX/gCxx57\nLNtssw1bbLEFBx54IEuWLFm7/uzZsznttNM4/PDDmThxItOnT18vqpk/fz677LILkyZNYs6cLl0t\nJiJq8VWUkr6OLvY3I3UN7q3Z/UGcBhHl16zc+utXTwyn33+Dtr7Gatq0abFgwYK1yxdffHGsXr06\nXnjhhfjoRz8ae+6559rXZs2aFVtttVUsWrQo1qxZEyeddFKccMIJa1+XFEcccUSsWrUqli9fHltv\nvXX84Ac/GKJvYuB/q4jWZqff0VckMs5Bc+4N6tGfxHso4hqAUyKK23N2Qh36a7LZs2ez2WabMWHC\nBM4++2zuuusunn32WaCIeY455hj22Wcfxo0bx0knncTixYvX+fkzzzyTiRMnMnXqVA466KD1Xu8E\nD3qzmpPYA7ic4s/rORH838QlJdWp9/Sd8NJLL3HmmWey0047sfnmm7PjjjsC8OSTT65dZ/LkV26R\nvckmm7B69ep1fseUKVPWPt90003Xe70TPOgrknMOmnNvkLY/iS0pbt7zKuAy4NOd30be+6/T+l+K\n4LLLLmPevHksWLCAVatW8dBDDwH1u4SD
B71ZTZWXNbiI4i5PPwP+W4SPkU9t8uTJ/OpXvwLg2Wef\nZaONNmLLLbfkueee46yzzlpn3dEO/G79BeFBX5Gcc9Cce4Ok/Z0OHEVxO77jo0uHUOa+/zpt7ty5\nfOYzn2HSpEk8/fTT7LDDDmy33XbstttuvO1tb1vnHb+k9S5GNvD1ga+1c/GyofiiZmY1JLE7xdmu\nE4D3RfCdxCVVxhc1G1w7FzXzO/qK5JyD5twbVN+fxHjgYoohf363h3zu+8886M3q6G+AvSluwP3x\nxLVYBhzdmNWIxM7A3cDGwMwIrk9cUuUc3QzO0Y1ZPs6jGPJf78Uhb93hQV+RnHPQnHuD6vqTOAw4\nDHiGCiOb3PefedCb1YLEBOCfy8W/i+C3KeuxvDijN6sBiY9QxDYPALtH8ELikpKRVI+hVENjzeg9\n6M0Sk3g18CDwGorbAH4/cUnWIP4wtkZyzkFz7g0q6e90iiH/U4obe1fK+y9/HvRmCUlswSsfvP6t\nr2Vj3eDoxiwhib8DPgXcGMHBqeux5nFGb1ZjEhOBh4GJFDf3/knikqyBnNHXSM45Yc69QVf7O5li\nyC9MOeS9//LnQW+WgMSGwEfLxS+krMXy5+jGLAGJWcAlwM+BPfwhrI2VoxuzGirvHNV3pM0XPeSt\n2zzoK5JzTphzb9CV/g4B3gQ8Cnyrw7971Lz/8udBb1a9U8vHr/XypQ6sOs7ozSoksT3wa+BlYGoE\njyUuyRrOGb1Z/ZxM8efu2x7yVhUP+orknBPm3Bt0rr/ykMq/Khe/1onf2Qnef/nzoDerzpHAFGAJ\n8OPEtVgPcUZvVhGJ7wPvAT4WwXmp67E8+Fo3ZjUhMQVYAQSwbQRPJC7JMuEPY2sk55ww596gY/2d\nBIwDrqnbkPf+y58HvVmXlWfCzi4XL0lXifUqRzdmXSaxN3A78DuK2MYnSVnHOLoxq4dZ5eM3PeQt\nBQ/6iuScE+bcG7TXn8R44MRy8dKOFNRh3n/5a3vQS5op6X5Jv5R0xiCvz5C0StKd5dcn292mWYMc\nTHHj718AdyauxXrU+HZ+WNI44F+BQ4FHgEWS5kXEfQNWvSkijmxnW00XEQtT19AtOfcGbfd3XPl4\nRV0vR+z9l79239HvCyyNiGUR8SJwOXDUIOv5Q1brOeUlD44pF69MWYv1tnYH/XYUNzfus6L8Xn8B\n7C/pLknXSnpjm9tspJxzwpx7g7b6OxSYBNwbwb2dq6izvP/y11Z0Ay39U/QOYGpE/F7SnwPfBXYZ\nbEVJlwDLysWVwOK+f3b17aymLgN7SqpNPV6uYvmyD5efw15Rj3q8nMNy+Xw2hWW0oK3j6CVNB86J\niJnl8lzg5Yj4/DA/8xCwd0Q8NeD7Po7esiGxEfA4sDnwJxE8kLgky1QVx9HfDuwsaZqkDYHjgXkD\nipgsSeXzfSn+cnlq/V9llpV3UQz5xR7yllpbgz4i1gBzgOspLr16RUTcJ+kUSaeUq70fuEfSYuA8\n4IR2ttlUOeeEOfcGY+7vveXjVR0spSu8//LXbkZPRFwHXDfge+f3e/5V4KvtbsesKSTGAYeXi1en\nrMUMfK0bs46TeDtwM/ArYOe6Hj9veagiozez9fWdS3K1h7zVgQd9RXLOCXPuDUbXX3lJ4qPLxUbE\nNt5/+fOgN+usXYGdKC5J/NPEtZgBzujNOkpiLvCPwKURa09qMesaZ/Rm1VubzyetwqwfD/qK5JwT\n5twbtN6fxDbAfsDzwA+7WVMnef/lz4PerHNmlo8LInguaSVm/TijN+sQiSuBY4E5ET5J0KrRyuz0\noDfrgPKWgU8AWwBviODBxCVZj/CHsTWSc06Yc2/Qcn/TKYb8A00b8t5/+fOgN+uMPy8frxt2LbME\nHN2YdYDEHcBewMwIrk9dj/UOZ/RmFZCYAvwG+AOwZQTPJy7Jeogz+hrJOSfMuTdoqb++wypvbOKQ\n9/7Lnwe9Wfv68vkfJK3CbAiObszaMOCwyp0jWJq4JOsxjm7Muu+tFEN+qYe81ZUHfUVyzglz7g1G\n7O/g8nF+BaV0RY/vv57gQW/Wnr5Bf0PSKsyG4YzebIwkNgaeBjYGto7gycQlWQ9yRm/WXdMphvzd\nHvJWZx70Fck5J8y5Nxi2v4PKx0bHNj28/3qGB73Z2PXl8zcmrcJsBM7ozcZAYjOKfH4csFUEKxOX\nZD3KGb1Z9xwATADu8JC3uvOgr0jOOWHOvcGQ/WVzWGWP7r+e4kFvNjZ9H8Q6n7fac0ZvNkoSmwNP\nAS8DkyJYnbgk62HO6M264x0Uf3Z+5iFvTeBBX5Gcc8Kce4NB+8smn4ee3H89x4PebPScz1ujOKM3\nGwWJrYAngT9S5PN/SFyS9Thn9Gadd2D5eIuHvDWFB31Fcs4Jc+4N1usvq3weem7/9SQPerPRyeJC\nZtZbnNGbtUhiCvAb4PcU+fwLiUsyc0Zv1mF97+Zv9pC3JvGgr0jOOWHOvcE6/WV5WGUP7b+e5UFv\n1rrsPoi13uCM3qwFEq8Dfg08Q3H9+TWJSzIDnNGbdVJfbHOTh7w1jQd9RXLOCXPuDdb2l2U+Dz2z\n/3qaB73ZiDYA5/PWYM7ozUYg8QZgKfA74LURvJy4JLO1nNGbdUbfu/mFHvLWRB70Fck5J8y5t8IV\nJ5RPssvnIf/9l3t/rWh70EuaKel+Sb+UdMYQ63y5fP0uSXu1u02zqkgItuz7f9b5vDVSWxm9pHHA\nL4BDgUeARcCJEXFfv3UOA+ZExGGS9gO+FBHTB/ldzuitdiTeCNwLPAZsG0E9PtQyK1WR0e8LLI2I\nZRHxInA5cNSAdY4ELgWIiNuALSRNbnO7ZlVZe1ilh7w1VbuDfjvg4X7LK8rvjbTO9m1ut3Fyzglz\n7g04GBZCxrFN5vsv+/5aMb7Nn2/1Hc7Af1YM+nOSLgGWlYsrgcURsbB8bQZAU5eBPSXVph4vt7I8\nQfDCDAB4z++la2fUqz4v9+Jy+Xw2hWW0oN2MfjpwTkTMLJfnAi9HxOf7rfN/gIURcXm5fD9wYEQ8\nPuB3OaO3WpHYE7gTWA5Mc3RjdVRFRn87sLOkaZI2BI4H5g1YZx7wF2VB04GVA4e8WU05n7cstDXo\nI2INMAe4HlgCXBER90k6RdIp5TrXAg9KWgqcD5zaZs2NlHNOmHFv5YlS//ho2jK6K+P9B+TfXyva\nzeiJiOuA6wZ87/wBy3Pa3Y5ZlSTGA39WLM2/E85KWo9ZO3ytG7NBSOwL3AYsjWDn1PWYDcXXujEb\nu77r22R52QPrLR70Fck5J8y0t74PYm/ItL+13F/+POjNBpDYEHh7ubgwYSlmHeGM3mwAibcDNwNL\nInhT6nrMhuOM3mxsfDcpy4oHfUVyzgkz7G2d+8Nm2N863F/+POjN+pHYBNif4npMNyUux6wjnNGb\n9SNxMLAAWByBb5JjteeM3mz0fPy8ZceDviI554SZ9XZI+big7xuZ9bce95c/D3qzksRE4K3AS8CP\
nE5dj1jHO6M1KEocD3wNuiWD/1PWYtcIZvdnorBfbmOXAg74iOeeEGfU26KDPqL9Bub/8edCbARKv\nBXYHngduTVyOWUc5ozcDJI4HLgd+FME7U9dj1ipn9Gatcz5v2fKgr0jOOWEmvfWdKLXeoM+kvyG5\nv/x50FvPk9gBeAOwCrgjcTlmHeeM3nqexIeAC4GrIzg6dT1mo+GM3qw1Q8Y2ZjnwoK9Izjlhk3uT\nEK98EDvojUaa3F8r3F/+POit1+0KTAEeA5YkrsWsK5zRW0+TOB34MnBZBCelrsdstJzRm43s3eXj\n/KRVmHWRB31Fcs4Jm9qbxEa8cn/YHw69XjP7a5X7y58HvfWyA4BNgXsieDR1MWbd4kFfkYhYmLqG\nbmlwb32xzfXDrdTg/lri/vLnQW+9rKVBb9Z0HvQVyTknbGJvEtsAbwb+APzn8Os2r7/RcH/586C3\nXvWu8nFhBM8nrcSsy3wcvfUkicuAE4GPRvCl1PWYjZWPozcbhMQGsPbmIs7nLXse9BXJOSdsYG9v\nAV4D/Br4xUgrN7C/UXF/+fOgt140s3y8PoJ6ZJdmXeSM3nqOxK3AfsDREVyduh6zdrQyOz3oradI\nTAZ+A7wAbBXBc4lLMmuLP4ytkZxzwob1dhgg4IZWh3zD+hs195c/D3rrNYeXj99PWoVZhRzdWM8o\nr1b5JPAqYIcIlicuyaxtjm7M1jWDYsjf7SFvvcSDviI554QN6q0vtvneaH6oQf2NifvLnwe99YTy\nJuBHlIvO562nOKO3niCxO3A38AQwJYKXE5dk1hHO6M1e8d7y8Xse8tZrPOgrknNO2JDe3l8+fnu0\nP9iQ/sbM/eVv/Fh/UNKWwBXADsAy4LiIWDnIesuAZ4CXgBcjYt+xbtNsLCR2BnYHVgELEpdjVrkx\nZ/SSzgWejIhzJZ0BTIqIMwdZ7yFg74h4aoTf54zeukLiTOCzwDci+GDqesw6qdsZ/ZHApeXzS4Gj\nh6ulje2YtWvMsY1ZDtoZ9JMj4vHy+ePA5CHWC+BHkm6X9FdtbK/Rcs4J69ybxDRgb+A5xniTkTr3\n1wnuL3/DZvSS5gNTBnnpE/0XIiIkDZUBHRARv5G0NTBf0v0RcfMQ27uEIu8HWAksjoiF5Wszym01\nchnYU1Jt6umVZYi9i8crfwbH7wf1qs/LXh7tcvl8NoVltKCdjP5+YEZEPCZpG+DGiPjTEX7mbGB1\nRPzTIK85o7eOk7gFmA4cF8G/p67HrNO6ndHPA2aVz2cB3x2kgE0lvbp8vhnwLuCeNrZp1jKJN1AM\n+eeAaxOXY5ZMO4P+c8A7JT0AHFwuI2lbSdeU60wBbpa0GLgN+H5E/LCdgpsq55ywxr2dVD7+Rzs3\nGKlxfx3h/vI35uPoy8MlDx3k+48C7ymfPwjsOebqzMaovLZN36D/RspazFLztW4sSxL7AIuA3wLb\nRbAmcUlmXeFr3Vgv+0D5+C0Peet1HvQVyTknrFtvEuOBE8rFb7b/++rVX6e5v/x50FuODqU4ge8B\n4PbEtZgl54zesiNxFfA+4JMR/EPqesy6qZXZ6UFvWZGYDKyguL7S6yJ4NHFJZl3lD2NrJOecsGa9\nzaI4bPiaTg35mvXXce4vfx70lo3y2Pn/Xi7+W8pazOrE0Y1lQ+JAYCHwCDDNh1VaL3B0Y73m1PLx\nYg95s1d40Fck55ywDr1JTKU40mYNcH5nf3f6/rrJ/eXPg95ycRowDrgqghWpizGrE2f01ngSmwEP\nA5OA6RHclrgks8o4o7de8QGKIX+rh7zZ+jzoK5JzTpiyN4lxwN+Ui1/qzjby3Xfg/nqBB7013bHA\nLsBDwFWJazGrJWf01lgSGwB3AbsBJ0f4JCnrPc7oLXdHUgz5FcDXE9diVlse9BXJOSdM0Vt5uYNP\nlovnRvDH7m0r330H7q8XeNBbU70f2Bt4DLggcS1mteaM3hpHYkPgXmAn4H9EdPZMWLMmcUZvuTqZ\nYsj/ArgwcS1mtedBX5Gcc8Iqe5OYCHyqXJxbxcXLct534P56gQe9Nc2nga2BnwLfTVyLWSM4o7fG\nkNiLV272vU8Ed6asx6wOnNFbNsqTo75G8f/sVzzkzVrnQV+RnHPCino7FdgPeJRXMvpK5LzvwP31\nAg96qz2JPwHOLRdPj+CZlPWYNY0zeqs1iQnAT4C3Al+PYFbiksxqxRm95eBTFEN+OfDhxLWYNZIH\nfUVyzgm71ZvEkRTXs3kZmBXBqm5sZ+Q68t134P56gQe91VKZy/+/cvGsCBYmLMes0ZzRW+1IbAX8\nJ/CnFDcTOS6CevyPalYzrcxOD3qrlfJG3z8CpgP3AAdE8Gzaqszqyx/G1kjOOWGneiuvSnkFxZBf\nDsysw5DPed+B++sFHvRWCxIbA/8BvAf4HfDuCB5NW5VZHhzdWHISr6K4QNkhFEP+nb7EgVlrWpmd\n46sqxmwwEjsA84A9KO4WdWgE96atyiwvjm4qknNOONbeJN4B/IxiyD8AvKOOQz7nfQfurxd40Fvl\nJMZLfBpYCLyW8iibCJYmLcwsU87orVISewDnUxxZE8DngLMjeDFpYWYN5YzeakNic+BsiuvVjANW\nAB/0Ga9m3efopiI554TD9SbxKom5wEPAxwABXwF2a8qQz3nfgfvrBX5Hb10hsS1wCvDXFPd4Bfgx\n8LEI7khWmFkPckZvHSOxETAT+ABwNK+8kbgV+Ftgga9ZY9ZZzuit6yS2pDjR6TDgvcDm5UsvAf8O\nfBX4sQe8WTpjzuglHSvpXkkvSXrLMOvNlHS/pF9KOmOs22u6XHJCie0kjpH4nMRtwJOw8EpgNsWQ\nXwycAewQwXER3NT0IZ/LvhuK+8tfO+/o76F4B3f+UCtIGgf8K3Ao8AiwSNK8iLivje021Z7QlA8f\nEUWuvguwK8XlgncF3gxsO2D1F+FHy2DGRcDVEeS4bxuz78bI/WVuzIM+Iu4HkIaNhvYFlkbEsnLd\ny4GjIMthMJItUm5cYhzFO+6tgC0HPE4Gtgemll/bAxsN8atWAosozmi9BbgJ/uF/RXzmc11tIK2k\n+64C7i9z3c7otwMe7re8Ativy9sclsTWwOspDvPr/8Ug3xvN1wg//45dJd43wu/YAJhQfo3v93zg\n8sDXNgQ2AzYd5nHTfnW2YiXwS+B+ir+Y7wfuBZZG8PKA/6ZmVmPDDnpJ84Epg7x0VkR8r4XfX8ds\n9kjgguo3+3qA46rf7jpWAk9RXCHyqX7Pn6D4C3lF32MEq0fxe6d1tszamZa6gC6blrqALpuWuoDU\nhh30EfHONn//IxRRQJ+pFMNkUJLq+BdDB12auoAtyq/Xj7TiaN+lS5o1tpKawf01W+79jaRT0c1Q\nY+F2YGdJ04BHgeOBEwdb0cfQm5l1RzuHV75X0sMUF6e6RtJ15fe3lXQNQESsAeYA1wNLgCt69Igb\nM7NkanNmrJmZdUetLmom6XRJ90n6uaTPp66nGyT9T0kv
S9oydS2dJOkL5b67S9J3JG0+8k/VX84n\n/EmaKunG8sTHn0v6cOqaOk3SOEl3Smrl4JFGkbSFpKvKP3dLJE0fat3aDHpJB1EcEbNHROwGfDFx\nSR0naSrwTuDXqWvpgh8Cb4qIN1PcLWpu4nra1u+Ev5nAG4ETJe2atqqOehH4WES8iSKCPS2z/gA+\nQhEb5xhdfAm4NiJ2pbhL25CxeG0GPcVVDj8bES8CRMQTievphn8G/nfqIrohIuZHRN/x9bdRnHTV\ndGtP+Cv/v+w74S8LEfFYRCwun6+mGBQDz3xuLEnbU1yD6QJGdw5J7ZX/Yn5HRFwExeehEbFqqPXr\nNOh3Bv5M0q2SFkraJ3VBnSTpKGBFRNydupYKfAi4NnURHTDYCX/bJaqlq8oj4/ai+Es6F/8CfBzW\nPcEvEzsCT0i6WNIdkv5N0qZDrVzp1SuHOQHrE2UtkyJiuqS3AlfSwvHedTJCf3OBd/VfvZKiOqiV\nE+gkfQJ4ISIuq7S47sjxn/vrkfQq4CrgI+U7+8aTdDjw24i4M9OLmo0H3gLMiYhFks4DzgQ+NdTK\nlRnuBCxJfw18p1xvUfmB5VYR8bvKCmzTUP1J2o3ib+C7ymsDbQ/8l6R9I+K3FZbYlpFOoJM0m+Kf\nyodUUlD3jeqEvyaSNAH4NvCNiPhu6no6aH/gSEmHARsDEyV9PSL+InFdnbKCIiFYVC5fRTHoB1Wn\n6Oa7wMEAknYBNmzSkB9ORPw8IiZHxI4RsSPFTnpLk4b8SCTNpPhn8lER8Xzqejpk7Ql/kjakOOFv\nXuKaOkbFu44LgSURcV7qejopIs6KiKnln7cTgBsyGvJExGPAw+WshOIKwfcOtX6dbjxyEXCRpHuA\nF4BsdsogcowEvkJxcbX55b9abomIU9OW1J6IWCOp74S/ccCFmZ3wdwDF3cDulnRn+b25EfGDhDV1\nS45/5k4Hvlm+CfkV8JdDregTpszMMlen6MbMzLrAg97MLHMe9GZmmfOgNzPLnAe9mVnmPOjNzDLn\nQW9mljkPejOzzP1/WZbWrJSu7KcAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import numpy as np\n", + "testValues = np.arange(-5,5,0.01)\n", + "\n", + "plot(testValues, np.tanh(testValues), linewidth=2)\n", + "grid(1)\n", + "legend(['tanh'])\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def ReLU(x):\n", + " return x * (x > 0)\n", + "# See here: http://stackoverflow.com/questions/32109319/how-to-implement-the-relu-function-in-numpy" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWgAAAEACAYAAACeQuziAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFKlJREFUeJzt3X2MXPV1xvHnxDZJKTgxDXFNsLtEclQMpW5KCHUKbBpC\nLcsEJJAwUh2ZKm9uQoAGKC8qdaUqBhUVV6mqVMH2UjlNWiBAzFvsOCyltARIvCHYDi2tjWw3Di81\n4JBYLPj0j7nr7LJ3996duW+/3/1+pBW+O7Mz5+yVD7Nnn7k2dxcAoHneVncBAIB0DGgAaCgGNAA0\nFAMaABqKAQ0ADcWABoCGmp7nTma2S9Krkt6UNOzup5VZFAAg54CW5JL63f3/yiwGAPBLU1lxWGlV\nAADGyTugXdJ3zOxJM/tUmQUBADryrjg+7O4/MbNjJW02sx+7+yNlFgYAbZdrQLv7T5L/vmBmd0k6\nTdIjkmRmXMwDALrg7pOujjMHtJkdKWmaux8ws1+VdI6kv5zKk4TMzFa5+6q66ygL/YUt5v6q6M1M\nsyTtkDRb0ifdtbbM5xv73NkvbvO8gp4t6S4zG7n/19x9U4+1haSv7gJK1ld3ASXrq7uAkvXVXUCJ\n+ip4jtXqzLhHJK2v4PmmJHNAu/tOSQsrqAUAKmOmRZI+I2lY0mfddajmksbhnYTZBuouoGQDdRdQ\nsoG6CyjZQN0FlGigrAc20wxJ/5Ac3uSu7WU9Vy+s1wv2m5nHvIMGEB8zXaPOeuNZSae46xfV15A9\nOxnQGcys390H666jLPQXtqb0R5prcmkzMs/szJuDBoBJ8c/npUsCFt19La+gAfQqmQN1l9FIZtb1\nK2h+SQgADcWAzmBm/XXXUCb6C1vs/bUdAxoAGooBnaEJvyEvE/2FLfb+itDX16cjjzxSRx99tObM\nmaNLLrlEr732WubX9ff3a+3a8e/87uvr05YtW8Z8bmBgQGeccUZhNY9gQAOImpnp3nvv1YEDBzQ0\nNKStW7dq9erVub4uLYEx0efLwIDOEPuOj/7CFnt/RZs9e7bOOeccDQ0NSZIee+wxLVq0SLNmzdLC\nhQv18MMP11zhWAxoAKUzK+6jGyMRwD179ujBBx/U/PnztXfvXi1dulQ33HCD9u/fr5tvvlkXXHCB\nXnrppQI77w0DOkPsOz76C1vs/RXB3XX++edr5syZmjdvnmbPnq1Vq1Zpw4YNWrJkiRYvXixJOvvs\ns3Xqqafqvvvuq7niX2JAAyide3EfU2Vmuueee/Tqq69qcHBQO3bs0IsvvqjnnntOt99+u2bNmnX4\n49FHH9W+ffsmfbzp06dreHh4zOeGh4c1Y8aMqReXgQGdIfYdH/2FLfb+inbmmWdqxYoVuvLKKzVv\n3jwtX75c+/fvP/xx4MABXX311ZM+xrx587Rz584xn9u5c6f6+voKr5cBDaBVLr/8cm3evFmLFi3S\nxo0btWnTJr355ps6ePCgBgcHtXfv3sP3HR4e1sGDBw9/DA8P66KLLtKaNWv0zDPPyN315JNPav36\n9Vq2bFnxxbp7Tx+dh+jtMfjgg4+wP5I50Eh9fX2+ZcuWMZ9buXKlX3jhhf7444/7WWed5cccc4wf\ne+yxvnTpUt+9e7e7u/f397uZjflYvny5Hzp0yG+88UafP3++z5w50xcsWODr1q2b8PknmpF5ZicX\nSwLQMy6WNDEullSi2Hd89Be22PtrOwY0ADQUKw4APWPFMTFWHAAQIQZ0hth3fPQXttj7azsGNAA0\nFDtoAD3jX/WeXLc7aP5VbwA9q+NFmpmukbRa0rOSTnHXL6quoWysODLEvuOjv7DF3N9kvZnpfZL+\nIjlcGeNwlhjQAAJjJpP095LeIWmDu75Tc0mlYQcNIChmWibp65L2S/pNdz1fc0ldIQcNICpmmiVp\nTXJ4VajDOS8GdIaYd3wS/YUu5v4m6G21pNmSHpG0vtKCasCABhAEMy2S9BlJw5I+665DNZdUOnbQ\nABrPTDMk/UDSyZL+yl1/XnNJPWMHDSAWX1RnOD8r6Us111IZBnSGmHd8Ev2FLub+RnprS+Y5DQMa\nQGO1KfOcJtcO2symSXpS0h53P/ctt7GDBlCKWDLPaYrcQV8mabskLogCoBJtyzynyRzQZna8pCWS\nbpXUulfKMe/4JPoLXdz9bbhNLco8p8nzCvoWSVdJ8WcOATRDJ/N8/LlqUeY5zaSXGzWzpZKed/et\nk19ZygYk7UoOX5Y05O6DyW39khTq8cjnmlIP/dFf/P3NnCa9skbql/TVb0iffo/k25tTX3fHyZ9X\nqGOXcpj0l4Rm9iVJyyW9oc5vUWdKutPdPzHqPvySEEBh2nCdZ6mAXxK6+3XuPtfdT5C0TNJ3Rw/n\nNoh7x0d/oYutv7GZ5z/9SqzDOa+p5qBJcQAoxVszz9It36+5pNpxLQ4AjRBz5jkN1+IAEAQyz+kY\n0Bli2/G9Ff2FLaL+xl3nOaLeusaABlCrNl7nOS920ABqE+N1nvNiBw2g6Vp5nee8GNAZYt+D0V/Y\nQu4v6zrPIfdWFAY0gMq1/TrPebGDBlC5tmWe07CDBtA4ZJ7zY0BniH0PRn9hC7S/cZnnNIH2VigG\nNIDKkHmeGnbQACrR5sxzGnbQAJqEzPMUMaAzxL4Ho7+whdJfVuY5/WvC6K1MDGgApSLz3D120ABK\nReY5HTtoALUi89wbBnSG2Pdg9Be2APrLlXlOE0BvpWNAAygFmefesYMGUDgyz9nYQQOoC5nnAjCg\nM8S+B6O/sDWxv24yz+mP07zeqsaABlAYMs/FYgcNoDBknvNjBw2gMmSei8eAzhD7Hoz+wtaw/rrO\nPKdpWG+1YEAD6BmZ53KwgwbQEzLP3WEHDaAKZJ5LwoDOEPsejP7CVnd/RWWe0x877nOXBwMaQFfI\nPJePHTSArpB57g07aAClIPNcDQZ0htj3YPQXthr7KzTznCb2c5cHAxrAlJB5rk7mDtrM3iHpYUlv\nlzRd0h3uvmrU7eyggZYg81ycPLNzetaDuPtBM/uIu//czKZL+jcze8Ddv1dYpQBCQea5QrlWHO7+\n8+SPR0iaIbXnR5rY92D0F7Yq+ysz85z+fHGfuzxyDWgze5uZDUn6qaRN7v5EuWUBaBIyz/WYUg7a\nzN4p6S5Jl7r7tuRz7KCByJF5Ll4hO+jR3P0VM3tI0mJJ20Y90YCkXcnhy5KG3H0wua0/+VqOOeY4\nyOMFR0nbkszzTbdK1yyQ/Pnm1BfGcfLnFZ3v4+F5Oak8KY53S3rD3V82s1+R9G1JN7r7/cntUb+C\nNrP+kW92jOgvbFX0Z6avqBOre0RSf1Wxuhacu0JeQc+RdJuZTVNnZ/3PI8MZQNzIPNeLa3EASEXm\nuVxciwNAL8g814wBnSH2LCb9ha2s/qrOPKfXEPe5y4MBDWAMMs/NwQ4awBhknqvBDhrAlHCd52Zh\nQGeIfQ9Gf2Erob/Sr/OcV+znLg8GNABJZJ6biB00ADLP
NWAHDSAvMs8NxIDOEPsejP7CVkR/Tcg8\np4n93OXBgAZajMxzs7GDBlqMzHN92EEDmBCZ5+ZjQGeIfQ9Gf2Hrsb/GZJ7TxH7u8mBAAy1E5jkM\n7KCBliHz3AzsoAGkIfMcCAZ0htj3YPQXtqn219TMc5rYz10eDGigJcg8h4cdNNASZJ6bhR00AElk\nnkPFgM4Q+x6M/sI2hf4anXlOE/u5y4MBDUSOzHO42EEDESPz3FzsoAGQeQ4YAzpD7Hsw+gvbZP2F\nlHlOE/u5y4MBDUSIzHMc2EEDESLz3HzsoIEWIvMcDwZ0htj3YPQXtgn6Cy7znCb2c5cHAxqICJnn\nuLCDBiJB5jks7KCBdiHzHBkGdIbY92D0F7aR/kLPPKeJ/dzlwYAGAkfmOV7soIHAkXkOUyE7aDOb\na2YPmdk2M3vazL5QXIkAekHmOW55VhzDkq5w95MknS7pc2Z2YrllNUfsezD6C92G2xRB5jlN/Ocu\nW+aAdvd97j6U/PlnknZIOq7swgBMrpN5Pv5ckXmO1pR20GbWJ+lhSSclw5odNFADMs/hKzQHbWZH\nSbpD0mUjwxlAbcg8t8D0PHcysxmS7pS0wd3vTrl9QNKu5PBlSUPuPpjc1i9JAR9fHlk/9Nes+ro4\nXjxHeiDJPH9kszT4IalJ9RVzPHoH3YR6CupnRdLSLuWQueIwM5N0m6SX3P2KlNujXnGYWf/INztG\n9BeWJPP8gKQ/lLRBsrUx9TdabOfurfLMzjwD+vcl/aukpySN3Plad38w75MAKAaZ53gUMqCLeBIA\nvUsyzzvUidV90l1ray4JPeBiSQWIPYtJf0EZd53nyPobI+be8mJAAwHgOs/txIoDaDgyz3FixQHE\ngcxzSzGgM8S+B6O/Zsu6znPo/U0m5t7yYkADDcV1nsEOGmgoMs9xYwcNBIrrPENiQGeKfQ9Gf401\nLvOcJuD+MsXcW14MaKBhyDxjBDtooEHIPLcHO2ggPGSecRgDOkPsezD6a46szHP614TT31TF3Fte\nDGigAcg8Iw07aKAByDy3DztoIABknjERBnSG2Pdg9NcIuTLPaQLprysx95YXAxqoEZlnTIYdNFAT\nMs/txg4aaDYyz5gUAzpD7Hsw+qtHN5nn9MdpZn9FiLm3vBjQQMXIPCMvdtBAxcg8Q2IHDTQOmWdM\nBQM6Q+x7MPqrXNeZ5zQN7K8wMfeWFwMaqAiZZ0wVO2igAmSe8VbsoIHmIPOMKWNAZ4h9D0Z/VdRQ\nTOY5/bHr768sMfeWFwMaKBGZZ/SCHTRQIjLPmAg7aKBGZJ7RKwZ0htj3YPRXqkIzz2liPn8x95YX\nAxooAZlnFIEdNFAwMs/Igx00UA8yzyhE5oA2s3Vm9lMz+1EVBTVN7Hsw+iv6+crLPKc/X7znL+be\n8srzCnq9pMVlFwKEjswzipZrB21mfZI2uvtvpdzGDhoQmWdMDTtooCJknlGG6UU8iJkNSNqVHL4s\nacjdB5Pb+iUp4OPLI+uH/kp4PsmXSZotfesp6cL/kV5XTP3VcTx6B92EegrqZ0XS0i7lwIojg5n1\nj3yzY0R/RTyHFkl6VJ3M80J3bS/z+cY+d7znL+bepHyzkwEN9IDMM7pVyA7azL4u6d8lvd/MdpvZ\nJUUVCESAzDNKkzmg3f1idz/O3d/u7nPdvZRrCjRV7FlM+uvlsavNPKfXEO/5i7m3vEhxAF0g84wq\ncC0OoAtkntErctBACcg8oyoM6Ayx78HoryulX+c5r5jPX8y95cWABqaA6zyjSuyggZzIPKNI7KCB\nYpF5RqUY0Bli34PRX97HqT/znCbm8xdzb3kxoIEMZJ5RF3bQQAYyzygDO2igR2SeUScGdIbY92D0\nl6kxmec0MZ+/mHvLiwENTIDMM+rGDhpIQeYZZWMHDXSPzDNqx4DOEPsejP7SvqaZmec0MZ+/mHvL\niwENjELmGU3CDhoYhcwzqsIOGpgCMs9oGgZ0htj3YPQ3RqMzz2liPn8x95YXAxoQmWc0EztotB6Z\nZ9SBHTSQD5lnNBIDOkPse7C29xdS5jlNzOcv5t7yYkCjtcg8o+nYQaO1yDyjTuyggQmQeUYIGNAZ\nYt+Dtbi/4DLPaWI+fzH3lhcDGq1D5hmhYAeNViHzjKZgBw2MR+YZwWBAZ4h9D9am/kLPPKeJ+fzF\n3FteDGi0AplnhIgdNFqBzDOahh00IDLPCFfmgDazxWb2YzP7LzP7syqKapLY92At6S+KzHOamM9f\nzL3lNemANrNpkv5O0mJJCyRdbGYnVlFYgyysu4CSRd7f752vuDPPMZ+/mHvLJesV9GmSnnX3Xe4+\nLOkbks4rv6xGeVfdBZQs2v46medFFyeHN7lre60FlSPa86e4e8tlesbt75W0e9TxHkkfKq+cyZnp\nNyS9u9pnXTDHTL9b7XNWKer+LpSOeo/IPCNQWQO6t4hH8a6X9Klqn/KDkvTpap+zSrH3t0uKJPM8\ngb66CyhRX90F1G3SmJ2ZnS5plbsvTo6vlXTI3W8adZ+mDXEACEJWzC5rQE+X9Iykj0r6X0mPS7rY\n3XcUWSQAYLxJVxzu/oaZfV7StyVNk7SW4QwA1ej5nYQAgHIU9k5CM7vUzHaY2dNmdlP2V4THzL5o\nZofM7Ji6aymSmf11cu5+aGbfNLN31l1Tr2J+g5WZzTWzh8xsW/L37Qt111QGM5tmZlvNbGPdtRTN\nzN5lZnckf++2J7/vG6eQAW1mH5H0cUmnuPvJkm4u4nGbxMzmSvqYpOfqrqUEmySd5O6/Lek/JV1b\ncz09acEbrIYlXeHuJ0k6XdLnIutvxGWStqt5abIi/K2k+939REmnSEpdHRf1CnqlpNXJm1nk7i8U\n9LhN8jeSrq67iDK4+2Z3H3mH3fckHV9nPQWI+g1W7r7P3YeSP/9Mnb/cx9VbVbHM7HhJSyTdKimq\ni7ElP6Ge4e7rpM7v+tz9lbT7FjWg50s608weM7NBMzu1oMdtBDM7T9Ied3+q7loq8MeS7q+7iB6l\nvcHqvTXVUioz65P0O+r8jzUmt0i6SorurfmSdIKkF8xsvZn9wMy+amZHpt0x640qh5nZZkm/nnLT\n9cnjzHL3083sg5L+RdL7uii8Nhn9XSvpnNF3r6SoAk3S33XuvjG5z/WSXnf3f6q0uOLF+CPxOGZ2\nlKQ7JF2WvJKOgpktlfS8u2+N9IJJ0yV9QNLn3f0JM1sj6RpJN6TdMRd3/9hEt5nZSknfTO73RPKL\ntF9z95emXHpNJurPzE5W5/94PzQzqfPj//fN7DR3D+aylZOdP0kysxXq/Ej50UoKKtdeSXNHHc9V\n51V0NMxshqQ7JW1w97vrrqdgiyR93MyWqPMPLMw0s39090/UXFdR9qjzE/kTyfEd6gzocYpacdwt\n6Q8kyczeL+mIkIbzZNz9aXef7e4nuPsJ6nxzPxDScM5iZovV+XHyPHc/WHc9BXhS0nwz6zOzIyRd\nJOlbNddUGOu
8Ulgrabu7r8m6f2jc/Tp3n5v8fVsm6bsRDWe5+z5Ju5NZKUlnS9qWdt/cr6AzrJO0\nzsx+JOl1SdF8M1PE+OPzlyUdIWlz8lPCf7j7n9RbUvda8AarD0v6I0lPmdnW5HPXuvuDNdZUphj/\nzl0q6WvJC4j/lnRJ2p14owoANBT/5BUANBQDGgAaigENAA3FgAaAhmJAA0BDMaABoKEY0ADQUAxo\nAGio/wfTqr8t10n9mwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot(testValues, ReLu(testValues), linewidth=2)\n", + "grid(1)\n", + "legend(['ReLU'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb index 2d63135..c9e87f6 100644 --- a/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb @@ -13,7 +13,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The code below was written in class, to teach simple Python concepts" + "The code below was written 
in class, to teach simple Python concepts.\n",
+    "No further polishing is provided and the code is not necessarily linear (so a cell does not necessarily follow from the previous cell)."
   ]
  },
  {
@@ -49,7 +50,7 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 54,
+   "execution_count": 2,
   "metadata": {
    "collapsed": false
   },
@@ -59,11 +60,18 @@
     "output_type": "stream",
     "text": [
      "['couthie', 'confidence man', 'definiteness', 'changelessness', 'morally', 'ethmoidal vein', 'unquestionableness', 'uselessness', 'top-quality', 'good-humoredness']\n",
+      "**************************************************\n",
      "['twilight of the gods', 'rumbustious', 'screaming', 'grueling', 'inanimate', 'stern', 'changelessness', 'sugarless', 'order pseudoscorpiones', 'modest']\n"
     ]
    }
   ],
   "source": [
+    "# Let's make this function more general so that we can use it to read lexicon files,\n",
+    "# whether they are positive or negative. To do that, we simply parameterize the function.\n",
+    "# What this means is that we make it work with a parameter, which will be a file name that we pass to\n",
+    "# the function when we are calling it. Now, this parameter can be either the name of the positive lexicon file\n",
+    "# or the name of the negative lexicon file. So, that is a desirable change.\n",
+    "\n",
    "import re\n",
    "\n",
    "def clean_lexicon(lex_input):\n",
@@ -79,14 +87,75 @@
    "\n",
    "my_positive_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n",
    "print my_positive_list[:10]\n",
+    "print \"*\"*50\n",
+    "my_neg_list= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n",
+    "print my_neg_list[:10]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "% of positive: 0.0864\n",
+      "% of negative: 0.1653\n"
+     ]
+    }
+   ],
+   "source": [
+    "# What if we wanted to know the percentages of positive and negative words relative to the overall words (tokens) in a file?\n",
+    "# Let's write some code to do that based on the positive and negative entries we acquired from SentiWordNet:\n",
+    "import re\n",
+    "\n",
+    "def clean_lexicon(lex_input):\n",
+    "    lex_file_l=open(lex_input, \"r\").readlines()\n",
+    "    \n",
+    "    new_lex_file_l=[]\n",
+    "    for i in lex_file_l:\n",
+    "        i=i.strip()\n",
+    "        #i= i[:-1] # i is a word in the list\n",
+    "        i= re.sub(\"_\", \" \", i)\n",
+    "        new_lex_file_l.append(i)\n",
+    "    return new_lex_file_l\n",
+    "\n",
+    "# Change the path to your local path:\n",
+    "pos_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n",
+    "neg_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n",
+    "\n",
+    "\n",
+    "# Determine the percentage of positive words in a file:\n",
+    "def get_sentiment_diversity(pos_lex, neg_lex, input_file):\n",
+    "    '''\n",
+    "    just returns some stats about % of pos and neg sentiment in a file...\n",
+    "    '''\n",
+    "    input_string=open(input_file, \"r\").read().lower()\n",
+    "    len_words= float(len(input_string.split()))\n",
+    "    pos_count=0\n",
+    "    neg_count=0\n",
+    "    for w in pos_lex:\n",
+    "        pos_count+= input_string.count(w)\n",
+    "    for w in neg_lex:\n",
+    "        neg_count += input_string.count(w)\n",
+    "    return pos_count, neg_count, len_words\n",
+    "    \n",
+    "# Call the function...\n",
+    
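+    "# (Side note: str.count() counts substring matches, not whole words, so a lexicon\n",
+    "# entry like 'stern' is also counted inside 'western'. That is good enough for a\n",
+    "# rough percentage, but not for exact token counts.)\n",
+    "# Change the hamlet.txt path below to wherever you keep your own copy:\n",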
"input_file=\"/Users/mam/CORE/TEACHING/ssa/git_hub/python_tutorial/hamlet.txt\"\n", + "pos_count, neg_count, len_words= get_sentiment_diversity(pos_lex, neg_lex, input_file)\n", + "#-------------------------\n", + "print \"% of positive: \", round(pos_count/len_words, 4) \n", + "print \"% of negative: \", round(neg_count/len_words, 4)" ] }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -95,13 +164,30 @@ "name": "stdout", "output_type": "stream", "text": [ - "## hello ##\n", - "##hello##\n", - "##hello##\n" + "['!', '\"', '#', '$', '%', '&', \"'\", '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '[', '\\\\', ']', '^', '_', '`', '{', '|', '}', '~']\n" ] } ], "source": [ + "import string\n", + "punc = [char for char in string.punctuation]\n", + "print punc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# What if we wanted to remove all punctuation marks from a file?\n", + "# There are many ways to do this.\n", + "# As an introduction to regular expressions and the \"string\" module, let's do something along the following lines:\n", + "#----------------\n", + "# Let's take a look at the \"re\" module first. Here's an example:\n", + "\n", "import re\n", "s = \" hello \"\n", "print \"##\"+ s + \"##\"\n", @@ -111,6 +197,239 @@ "print \"##\"+ s3 + \"##\"" ] }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hey there 654%$21!!!...? $& + ___ | %\n", + "Hey there 65421 \n" + ] + } + ], + "source": [ + "\n", + "import string\n", + "import re\n", + "\n", + "def clean(to_filter_list, text):\n", + " '''\n", + " input: \n", + " a. list of undesirable items we want to remove from text\n", + " b. text we want to clean\n", + " output:\n", + " cleaned text\n", + " '''\n", + " for i in to_filter_list:\n", + " #print i\n", + " i=\"\\\\\"+i\n", + " text=re.sub(i, \"\", text)\n", + " return text\n", + "\n", + "#----------------------\n", + "# Call the function...\n", + "punc = [char for char in string.punctuation]\n", + "text=\"Hey there 654%$21!!!...? $& + ___ | %\"\n", + "\n", + "new=clean(punc, text)\n", + "print text\n", + "print new\n", + "#print punc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Detecting Retweets & Removing Duplicates..." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 58,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['@alex Did you make it to the meeting?']\n",
+      "['@alex Did you make it to the meeting?']\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Some work on filtering out undesirable content, for example retweets from a file.\n",
+    "# The first step is to do some analysis and understand the structure of a retweet.\n",
+    "# Below we simply assume that a retweet is just a tweet that starts with either \"RT\" or \"rt\"\n",
+    "#--------------------------------------------\n",
+    "# How do we get rid of retweets, for example?\n",
+    "# Let's say we have the following list of lines, returned from a file we opened\n",
+    "lines=[\"RT @abhi I like #soccer!!!!\", \"rt @abhi I cooked lentil soup\",\\\n",
+    "       \"@alex Did you make it to the meeting?\"]\n",
+    "\n",
+    "new_list=[]\n",
+    "for line in lines:\n",
+    "    if not line.startswith(\"RT\") and not line.startswith(\"rt\"):\n",
+    "        #print line\n",
+    "        new_list.append(line)\n",
+    "        \n",
+    "print new_list\n",
+    "\n",
+    "clean_list=[line for line in lines if not line.startswith(\"RT\") and not line.startswith(\"rt\")]\n",
+    "print clean_list"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 59,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "set(['RT @abhi I like #soccer!!!!', '@alex Did you maaaaake it to the meeting?', '@alex Did you make it there to the meeting?', '@alex Did you make it to the meeting?', 'rt @abhi I cooked lentil soup', '@alex Did you make it to the meeting...', '@alex did you make it to the meeting?'])\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Now, let's filter out duplicates:\n",
+    "lines=[\"RT @abhi I like #soccer!!!!\", \"rt @abhi I cooked lentil soup\",\\\n",
+    "       \"@alex Did you make it to the meeting?\",\\\n",
+    "       \"@alex Did you make it to the meeting...\",\\\n",
+    "       \"@alex Did you make it there to the meeting?\",\\\n",
+    "       \"@alex did you make it to the meeting?\",\\\n",
+    "       \"@alex Did you maaaaake it to the meeting?\"]\n",
+    "\n",
+    "print set(lines)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Using a Main function"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Why don't we now use a main function to call the code we wrote so far?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 55,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Welcome to the sentiment statistician!!!\n",
+      "% of positive: 0.0864\n",
+      "% of negative: 0.1673\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "import re\n",
+    "import string\n",
+    "punc = [char for char in string.punctuation]\n",
+    "\n",
+    "def clean(to_filter_list, text):\n",
+    "    '''\n",
+    "    input: \n",
+    "    a. list of undesirable items we want to remove from text\n",
+    "    b. 
text we want to clean\n", + " output:\n", + " cleaned text\n", + " '''\n", + " for i in to_filter_list:\n", + " #print i\n", + " i=\"\\\\\"+i\n", + " text=re.sub(i, \"\", text)\n", + " return text\n", + "\n", + "\n", + "def clean_lexicon(lex_input):\n", + " lex_file_l=open(lex_input, \"r\").readlines()\n", + " \n", + " new_lex_file_l=[]\n", + " for i in lex_file_l:\n", + " i=i.strip()\n", + " #i= i[:-1] # i is a word in the list\n", + " i= re.sub(\"_\", \" \", i)\n", + " new_lex_file_l.append(i)\n", + " return new_lex_file_l\n", + "\n", + "\n", + "# Determine the percentage of positive words in a file:\n", + "def get_sentiment_diversity(pos_lex, neg_lex, input_file):\n", + " '''\n", + " just returns some stats about % of pos and neg sentiment in a file...\n", + " '''\n", + " input_string=open(input_file, \"r\").read().lower()\n", + " input_string= clean(punc, input_string)\n", + " len_words= float(len(input_string.split()))\n", + " pos_count=0\n", + " neg_count=0\n", + " for w in pos_lex:\n", + " pos_count+= input_string.count(w)\n", + " for w in neg_lex:\n", + " neg_count += input_string.count(w)\n", + " return pos_count, neg_count, len_words\n", + " \n", + "def main():\n", + " # Call the code...\n", + " #------------------\n", + " print(\"Welcome to the sentiment statistician!!!\")\n", + " # Get the lexicon:\n", + " pos_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\")\n", + " neg_lex= clean_lexicon(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/neg.swn.txt\")\n", + " # Read the hamlet file\n", + " input_file=\"/Users/mam/CORE/TEACHING/ssa/git_hub/python_tutorial/hamlet.txt\"\n", + " # get sentiment stats\n", + " pos_count, neg_count, len_words= get_sentiment_diversity(pos_lex, neg_lex, input_file)\n", + " #-------------------------\n", + " print \"% of positive: \", round(pos_count/len_words, 4) \n", + " print \"% of negative: \", round(neg_count/len_words, 4)\n", + "\n", + " \n", + "if __name__ == \"__main__\":\n", + " main()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Miscellaneous code to loop over lines from a file, and do something (e.g., counting positive and negative words)" + ] + }, { "cell_type": "code", "execution_count": 51, @@ -188,51 +507,6 @@ " " ] }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "x=open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/posTweets.txt\", \"r\").readlines()\n", - "print type(x) " - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "@Vivawonderwoman Got it! 
:)\n", - "Riri flow on Pandora..Christmas tree all done :)\n", - "Ah love feels so great :-)\n", - "@stephhybb okay maybe then but the other stores usually have better ones & okay yeah come after you're done at game stop!:) def!\n", - "@pammpimm haha gpp kok dek :) thanks yaaaa\n", - "@katelittle_ @soph_funari @kaseycreehan @kaylaaajx3 awe Kate I love youuu <333 :)\n" - ] - } - ], - "source": [ - "for l in x[:6]:\n", - " print l[:-1]" - ] - }, { "cell_type": "code", "execution_count": 34, @@ -316,6 +590,15 @@ " print count_pos #entry, lines.index(l)\n", " count_pos=0\n" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb index 4d41aaf..d09e61f 100644 --- a/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb @@ -33,8 +33,8 @@ "output_type": "stream", "text": [ "25000\n", - "200\n", - "200\n" + "25000\n", + "25000\n" ] } ], @@ -54,8 +54,8 @@ "test_data = all_data[25000:50000]\n", "print len(train_data)\n", "\n", - "train_data=train_data[:100]+train_data[12500:12600]\n", - "test_data=test_data[:100]+test_data[12500:12600]\n", + "#train_data=train_data[:100]+train_data[12500:12600]\n", + "#test_data=test_data[:100]+test_data[12500:12600]\n", "print len(train_data)\n", "print len(test_data)\n" ] @@ -71,8 +71,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "7142\n", - "6994\n" + "113562\n", + "113538\n" ] } ], @@ -103,15 +103,6 @@ "print word_space[\"love\"]" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": 3, @@ -123,9 +114,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "0\n", - "200\n", - "200\n" + "25000\n", + "25000\n", + "25000\n" ] } ], @@ -151,20 +142,11 @@ "#test_vecs= get_sparse_vectors(test_data, word_space)\n", "\n", "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", + "print len(train_data)\n", "print len(train_vecs)\n", "print len(test_vecs)" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": 4, @@ -177,8 +159,8 @@ "output_type": "stream", "text": [ "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0.]\n", - "200\n", - "200\n" + "25000\n", + "25000\n" ] } ], @@ -195,8 +177,8 @@ "from random import shuffle, randint\n", "\n", "\n", - "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", "\n", "\n", "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", @@ -228,7 +210,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "(200, 7142)\n" + "(25000, 113562)\n" ] } ], @@ -249,56 +231,11 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\n", - "\n", - "Done fitting classifier on training data...\n", - "\n", - "================================================== \n", - "\n", - "Results with 5-fold cross validation:\n", - "\n", - "================================================== \n", - "\n", - "********************\n", - "\t accuracy_score\t0.715\n", - "********************\n", - "precision_score\t0.765432098765\n", - "recall_score\t0.62\n", - "\n", - "classification_report:\n", - "\n", - " precision recall f1-score support\n", - "\n", - " 0.0 0.68 0.81 0.74 100\n", - " 1.0 0.77 0.62 0.69 100\n", - "\n", - "avg / total 0.72 0.71 0.71 200\n", - "\n", - "\n", - "confusion_matrix:\n", - "\n", - "[[81 19]\n", - " [38 62]]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using gpu device 0: GeForce GT 750M\n" - ] - } - ], + "outputs": [], "source": [ "# Classification with scikit-learn\n", "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", @@ -331,7 +268,7 @@ "from sklearn.linear_model import LogisticRegression\n", "from sklearn import cross_validation\n", "import gensim\n", - "n_jobs = 2\n", + "n_jobs = 4\n", "\n", "#train_vecs=array(train_vecs)\n", "train_vecs=np.array(train_vecs)\n", @@ -355,8 +292,113 @@ "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", - " \n" + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "X= train_vecs\n", + "y=train_tags\n", + "y=y.astype(int)\n", + "num_examples = len(X) # training set size\n", + "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", + "nn_output_dim = 2 # output layer dimensionality\n", + "\n", + "# Gradient descent parameters (I picked these by hand)\n", + "epsilon = 0.01 # learning rate for gradient descent\n", + "reg_lambda = 0.01 # regularization strength \n", + "\n", + "\n", + "def forward(W1, b1, W2, b2, x):\n", + " z1 = x.dot(W1) + b1\n", + " a1 = np.tanh(z1)\n", + " z2 = a1.dot(W2) + b2\n", + " exp_scores = np.exp(z2)\n", + " # softmax\n", + " y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", + " return y_hat, z1, a1, z2\n", + "\n", + "def 
predict(model, x):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", + " return np.argmax(y_hat, axis=1)\n", + "\n", + "def calculate_loss(model):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", + " correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", + " data_loss = np.sum(correct_logprobs)\n", + " return 1./num_examples * data_loss\n", + "\n", + "\n", + "# This function learns parameters for the neural network and returns the model.\n", + "# - nn_hdim: Number of nodes in the hidden layer\n", + "# - num_passes: Number of passes through the training data for gradient descent\n", + "# - print_loss: If True, print the loss every 1000 iterations\n", + "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", + " \n", + " # Initialize the parameters to random values. We need to learn these.\n", + " np.random.seed(0)\n", + " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", + " b1 = np.zeros((1, nn_hdim))\n", + " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", + " b2 = np.zeros((1, nn_output_dim))\n", + "\n", + " # This is what we return at the end\n", + " model = {}\n", + " \n", + " # Gradient descent. For each batch...\n", + " for i in range(0, num_passes):\n", + " # feedforward\n", + " y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", + " \n", + " # Backpropagation\n", + " delta3 = y_hat\n", + " delta3[range(num_examples), y] -= 1\n", + " #print [range(num_examples), y]\n", + " dW2 = (a1.T).dot(delta3)\n", + " db2 = np.sum(delta3, axis=0, keepdims=True)\n", + " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", + " dW1 = np.dot(X.T, delta2)\n", + " db1 = np.sum(delta2, axis=0)\n", + "\n", + " # Gradient descent parameter update\n", + " W1 += -epsilon * dW1\n", + " b1 += -epsilon * db1\n", + " W2 += -epsilon * dW2\n", + " b2 += -epsilon * db2\n", + " \n", + " # Assign new parameters to the model\n", + " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", + " \n", + " # Optionally print the loss.\n", + " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", + " if print_loss and i % 1000 == 0:\n", + " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", + " #print y_hat[:2]\n", + " \n", + " \n", + " return model\n", + "\n", + "# Build a model with a 3-dimensional hidden layer\n", + "model = build_model(3, print_loss=True)\n", + "\n", + "\n" ] }, { diff --git a/.ipynb_checkpoints/python_tutorial_part_9_neural_net_a-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_9_neural_net_a-checkpoint.ipynb new file mode 100644 index 0000000..c983d40 --- /dev/null +++ b/.ipynb_checkpoints/python_tutorial_part_9_neural_net_a-checkpoint.ipynb @@ -0,0 +1,668 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# A Vector Space Model, with scikit-learn" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# This is code to build a vector space model, with SVMs on Andrew Mass' \n", + "# distribution of movie review sentiment data." 
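+    ,"\n"
+    ,"# (That is Andrew Maas's IMDB review collection; in the 'alldata-id.txt' layout\n"
+    ,"# used below, only the first 50,000 lines carry sentiment labels.)"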
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25000\n", + "200\n", + "200\n", + "7142\n", + "6994\n", + "0\n", + "200\n", + "200\n", + "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + "200\n", + "200\n", + "(200, 7142)\n", + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + 
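+    "# Quick sanity check on a toy space: a one-word doc over a two-word vocabulary\n",
+    "# should set exactly one cell to 1, e.g.\n",
+    "#   get_sparse_vec(DataDoc('1', ['love']), {\"love\": 0, \"hate\": 1})  # -> array([ 1.,  0.])\n",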
"\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)\n", + "#-------------------------\n", + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. \n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)\n", + "#--------------------\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape\n", + "#--------------------------------\n", + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print 
type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# A lot of code taken from this tutorial: \n", + "# https://github.com/dennybritz/nn-from-scratch/blob/master/nn-from-scratch.ipynb\n", + "# Package imports\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import sklearn\n", + "import sklearn.datasets\n", + "import sklearn.linear_model\n", + "import matplotlib\n", + "\n", + "# Display plots inline and change default figure size\n", + "%matplotlib inline\n", + "matplotlib.rcParams['figure.figsize'] = (10.0, 8.0)" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 67, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd8XXX5wPHPc865M0mbjnSme0JpaUtZpVAoIEumKFtR\n8YcIOFmKCiooIvyAnyKIiIIosmRpmZVNoVDK6t5N90qbcW/uOOf5/ZHbmiY3bZo0Scfzfr36ejX3\nnPM9z7m5bZ58x/MVVcUYY4wxxuxaTnsHYIwxxhizN7IkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xp\nBZZkGWOMMca0AkuyjDHGGGNaQYuSLBHpIyKvishMEflMRL7dyHn/JyLzReRjERnTknsaY4wxxuwJ\nvBZenwG+p6ofiUghMF1EXlbV2VtOEJGTgcGqOkREDgXuAQ5r4X2NMcYYY3ZrLerJUtXVqvpR7u9V\nwGygV73TTgMezJ3zHlAsIt1bcl9jjDHGmN3dLpuTJSL9gTHAe/UO9QbK6ny9HCjdVfc1xhhjjNkd\ntXS4EIDcUOETwHdyPVoNTqn3dYO9fETE9vcxxhhjzB5DVevnN9tocZIlIiHgSeBhVX06zykrgD51\nvi7NvdbAjoI1uycRuVFVb2zvOEzz2Pdvz2bfvz2Xfe/2bE3pHGrp6kIB/gTMUtU7GzntWeDLufMP\nAzap6pqW3NcYY4wxZnfX0p6sI4ALgU9EZEbutR8BfQFU9Q+qOllEThaRBUA18NUW3tMYY4wxZrfX\noiRLVd+iCb1hqnpFS+5jdnuvtXcApkVea+8ATIu81t4BmGZ7rb0DMK1LVHeP+eYiojYnyxhjjDF7\ngqbkLbatjjHGGGNMK7AkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNa\ngSVZxhhjjDGtwJIsY4wxxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZ\nY4wxxrQCS7KMMcYYY1qBJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY\n0wosyTLGGGOMaQWWZBljjDHGtAJLsowxxhhjWoElWcYYY4wxrcCSLGOMMcaYVmBJljHGGGNMK7Ak\nyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNagSVZxhhjjDGtwJIsY4wx\nxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZY4wxxrQCS7KMMcYYY1qB\nJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMuYvZTktHccxhizr7Iky5i9jIgcWSCh1wWyAtkC\nCb0uIhPaOy5jjNnXiKq2dwwAiIiqqv3WbUwLiMgZUdy/nc/Q+MF0A2Aaa3iE+Yka/PNV9Zl2DtEY\nY/YKTclbLMkyZi8hIuEwzpqrGVM8SDpuc2yBbuY2ZpSnCbqraqadQjTGmL1GU/IWGy40Zu9xbHfi\nTv0EC2CwdKSEmAtMavuwjDFm32RJljF7j64lxBr9rapb7bEubRiPMcbs0yzJMmbvMXsBm50gzxSA\nQJUFbHaA2W0fljHG7JssyTJm7zE9jb9kCsuD+gdepszPECxS1RntEZgxxuyLbOK7MXsRERkUxnln\nOJ0KjqBngaK8zerquZRXpQnGq+qi9o7RGGP2Bra60Jh9kIgUARcW4H0JoJrsY8DDqlrZvpEZY8ze\nw5IsY4wxxphWYCUcjDHGGGPaiSVZxhhjjDGtoMVJlog8ICJrROTTRo4fLSKbRWRG7s+PW3pPY4wx\nxpjdnbcL2vgz8Fvgoe2c87qqnrYL7mWMMcYYs0docU+Wqr4JlO/gNJvQbowxxph9SlvMyVJgvIh8\nLCKTRWT/NrinMcYYY0y72hXDhTvyIdBHVRMichLwNDA034kicmOdL19T1ddaPzxjjDHGmO0TkaOB\no3fqml1RJ0tE+gPPqerIJpy7GDhIVTfWe93qZBljjDFmj7Bb1MkSke4iIrm/H0JtYrdxB5cZY4wx\nxuzRWjxcKCKPABOBriJSBtwAhABU9Q/A2cBlIpIFEsC5Lb2nMcYYY8zuzrbVMcYYY4zZSbvFcKEx\nxhhjzL7IkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY0wraouK7MaYJRCQEHAuUALNU\ndfoubLsUGAisVtV5u6pdY4wxjbOeLGNyRMRzRK6Ii7fYEycVE2+VJ85PRKSgDe59ehhnTR8KHx1L\nyd0dCL8eE+8zERnUwnZ7xSX0SgR3fh8Kn4njzYhL6GMRGb2rYjfGGJOf1ckyBhARN4r7XC8KJp7N\noHg/ilhNgmdZkpxL+bwa/PGqmmile0+I4r74A0bHB0lHAAJVprA8eJKF69MEQ1S1ohntFkVwZx1P\nnx6n0M+LiIuvAVNZow8ztypNMFZVF+zyBzLGmH2A1ckypunO7Ez0yOsYGx8unYiJxwDpwLcZGRtC\n8VAHubK1bhzHu+k8hmxNsAAcEY6XPs5wOhUIXNScdgUuHkZx57NkoBcRFwBXHCZITzmBvvEo7o93\nzRO0PxEZ6oj8xBPnNhH5Um7o1Rhj2pUlWcYABXhXnkK/Qk+2/SchIpxK/1gE5/LWuK+ISJLskYfQ\nPe/xCfQsiON9qTltx/EuOpre8XzHjqSn66NnNafd3YmIODHx7o/ifjSJ0p+cwYAfDKDD/WGcMhEZ\n0d7xGWP2bTbx3ZhaPbsRy3ugGzGyaOdWvHejY/YtHMyPRHDzH8BF0T3+338I5+puxM69lrGxmNQ+\nzin0L3pHVxc+xJxXRaSvqta0c5jGmH2U9WQZAwQwazEVeXOaxVQQwlnSGvdVVY3hvfUea/Ief5tV\n1dVkH21O22mCF6ezNp3v2AzWE8Kd2px2dxci4jlwzdfYr2BLgrXFeOkh/ekQA77YLsEZYwyWZBkD\nQJLs/z7HkmSFbpuTpNTnCRZWJ8j+prXunSB7/T+Yn1igm7e+FqjykpYFcyivBv7anHYzBL99k1WZ\nmbpxm9dXajWPsSCRJPvzFgXe/np7ONG+UpT34DhKCqO4x7RxTMYYs9UeP1xgzK6gqm9ExL3jx7z3\nvVO0X7Q/Rc4qEkxmaXUVmWdpZqLTxHu/LSLn38aMP3fTmFtCTBaw2ckQLE4TnKGqlc1st0xETv4/\nPnmmrxa6QymOr6A6OYtyNyD4lqq+vqufpY0lMwRuVgPqz6UDqCYb+Oz8qkxjjNlVrISDMXWIyPgY\n7nddnGEBWpYg+zvgRW2Dfyi5FXGTgK7AbFX9cBe1GwFOBwYDq4EnmlMSojWJiFBbiPVwIAk8paoL\nd3RdgYQ+vJChYw6THtu8ntGAa3inejPpY1X1vVYJ2hizT2tK3mJJljGmXYlInyjuK0WEex1Mt3g1\nmcy7rFGBR2rwv6Gq/naunRDBffHr7BcfSwmOCOs1yYPM
TS5k86s1+J9viwTZGLPvsSTLGLNbExEn\ngjvvFPr1P4V+bm2HFiQ1y+18lFhO1f+m1P/JDtqYGMP7PeiAGF62kozrIPel8K9V1bwT/40xpqUs\nyTLG7NZE5MQexB+7mUOLtiRYW6zRBD9lWlWGoKQpZRhEpD9QCCxqrer8xhizhVV8N8bs1gSOPIRu\nDRIsgO4SpwPhABjWlLZUdYmqfmYJljFmd2FJljGm3Sgkk2SzeY+pksJ3gVQbh2WMMbuEJVnGmPb0\n9NuszqTzzG2fTTlZgo3A3LYPq/2IiCsip8bFe6RAQk+LyGUijRQDM8bs1mxOljGmXcXEe3IAHU78\nOvvFO0sUVWUum/gdnyaTZC8IVJ9q7xjbioh0iOK+2pnI0GMoLYziMo211XMpT6UJjlbVT9s7RmNM\nLZv4bozZ7YlIKIp7m49+owvRdJKsm8KvSOF/O1B9sr3ja0tx8R4ZQ8mZX2O/iFNnntpUXa0PMmdN\nmqCPquYdXjXGtC1LsowxewwRKQRGUFuM9DNVDdo5pDYlIl1COMtv54hooYQaHP+pvle5nOoLVfXZ\ndgjPGFOPrS40xuwSIhIXkdNF5AIRGdoa91DVKlV9T1U/2dcSrJxh3Yil8iVYAKPoWgCMatuQjDEt\nYXsXGrMbEZHewDkCnRQ+BZ5u74KaIXGuCOHc0pdCv4iwzKXci4v3XhL/bFXd0J6x7WU2bSbtBao4\neUparCeZAja1fVjGmOay4UJjdgMiImGcnwFXH0x3KSEa+YQNlcupSqcJTlDV6e0RlytyUQci917D\nmHgPiQO1+wI+zoL0W6yaW4M/elf0OolIB2AoUAHM3xe3whERieHO+zr7Dx4rJdscK9carmFqWuAv\nDuKnCaYAz6pqpn2iNcbYnCxj9hCOyEVdiN57PePiHSW89fXpupY/MmtzmqC/qrZpL4aISBS37Lsc\n2HuoFG9zTFX5Ie9WriX5BVV9uQX3iEVx7/LRCzsRSVeTCQXoiiT+par6aosfYg8jIhMjuJPPY0j8\nMLrj4fAJG/gTs4IQTnYivb0IrvMuqyvXkNyUwj9KVZe0d9zG7IssyTJmD5DrwVj4LUYOGCGdGxz/\nrX6S+Ij1PwpU72rjuAbE8T77LUfG81Vk/7cu0WdZcnda/Sub2b5Ecf+zH50OvYhhsWKJEKjyMeu5\nj1mJFP6Jqvpmix9kDyMih8bxflODf7gD6uGk9qdT5DIOCLny32m0L+gy/1kWL67BH7ov9vwZ095s\n4rsxe4aCNEGf/emU9+DBdIvH8U5o45jawlFxvHHf4oBYsUQAcEQYIyV8hWHxON4dItJBRC4NiXun\nI3KNiPRq55hbnaq+V62ZowK0QxYd4qPeVxi+TYIFcAJ93EJCPYBj2idSY8yOWJJlTBsRkQ6uONcW\nSGh+TLxVcfGeFZHxQEaBNPmnNiXJEkBVE+/RWUQOFZFhkq/7aecsCdDy+WxucEBVeZNV1RmCZpcT\nCOOcewy9C+onDwDj6EaG4EAPWTmKLrefyYDvjKfHz8I4C8Pi/qC599yTqGoS6NedWKqozhDyFiLC\nWEqiwMFtHpwxpklsdaExbUBEukZw39+fTt0/R59YB8LMZOPnn2bxsWn8q0K4r7/NqkmTKN0mMQpU\nmcLyqiTZv+6g/aIo7r0hnLO6Ek1VkA4F6HIR+YaqvtGcmFVVXZEf3svMe6/VMfHu9Sa+V5BeDExp\nTtsALlIYx8ubCHri4Kp432CEN+a/k8CjZ+hAfsEHPxOR2ao6ubn33oNUVZN1VZV8OfNm0hmamIAb\nY9qeJVnGtIEo7h3j6dHrQhm2tUuiJwUySrvGf8p7tyfJnvUYCw6Pqxc/hO44IlRomn8wP7WR1Fyg\n0YRCRNwo7n9G03XkeQyJFEk4GqjyIeuG3s+s50XkGFWd1py4fdW/hsTp+FOm3dJXC/0OhGUO5R7w\nXk1tCYdmryxM4r8yjbVnHENpYf1ji7QCD4cD6brN610kyrk6uOBh5v2U7bwne5GPkmQr57KpcHi9\n4eRKTTODdQLsM9sOGbOnsYnvxrQyEYl7yIbbOCLaIc+wz4M6J/UWq37loy/F8O4TGFhIKFNOKuIi\nT9TgX6aqjfZWiMgpPYn/4xccWli/vtIbupLHWPBGtWYmtvQZgOOBQuADVW3xps0iEgvjLL2QYV0n\nSM+tgVdphluYHuxHJ+cCGdbguoRm+A5vpbIaRFsaw57AETkjhve3S9g/PoouOCIs1Ur+yKzq9STv\nTal/VXvHaMy+qCl5i/VkGdP6ukZw/XwJFkA/iiLTWDMkodmfASNFZFCCbCdgQboJZRtiuBdMorRB\nggVwGN15iLnjRSSWm+PTLKqaAJ5p7vWNtJkUkaMfZu4rL2tZ4Ri6Fm0ilX6XNYGDlA+luGe+66rJ\n4iCpXRnL7ixQfVpEvvhHZt6h0NdTx0/j1/joTT5tu+LUGLNzLMkypvWtTxG4FZomX6K1jMpUmmD+\nlq9VdeHONO4gBVHcvMdCODigAYSo3RMwL8n9SrYz990VVHWWiPQto+rzy6kao1AJPA4cN4UVd43T\nbgX15yK9xoqsgzzW1rG2J1WdLCLPA73BDwNLVdVv77iMMdtnqwuNaWWqmvCQJ59jSYPq3Os0yTus\nVh99oLntV5N94X3WVuc7NptyPJwV1CYv25BaF8UlNBfwPXGSMfH+KiL9mxtLc6hqVlWfDlRvUNXb\nVHUp8PelVC5/iLnpytyuQin1eUGX8QrLvYDgSxFx7xKR7m0Za3vSWstVdZElWMbsGWxOljFtQERK\nIrjvH0DnbsfnVhd+xgZ9hsXJFP41GQ3ubkHbRWGcpRczvPgw6bH139BmTXEz0xMbqLksUH2o/nVR\n8W7vSPjS8xlaMIJOVJJhCsv9lymrTBMcrKoLmhtTS4hIYQjnetBvgXQAKCTkV5Nxe1PIRQwlisfL\nlKXfZlV5muAgVV3RHrEaY/ZdVvHdmN2IiHR0kMuiuJcEaKEg05Nkf6Wqb7Ww3RhwUBjniR7E46Po\nWriBZOoD1onArWmCG+oPBYrI8Cjuh7cyPlYooW3a+7cuCSaz7KWEZk5qSVzNkdtm590RdB56JgOj\nvaSAqbqah5nL9YyjlxRsc/7juiD7KiueTGr23LaO1Rizb7OJ78bsRlR1M3BL7k+LicjIGN7tLnKM\ngwRAeRlVjy+jqgwoBx5X1ZX5rg3hXDyRXl79BAtgEqXOMyyeJCIdczG3GYFvDKDD4G9xQHTLXKwy\nqphEaYMEC+AYensvseyMfBP7RSQCeKqadyi1WfGJHBTFvdpBDhGoqCZ7H/CX3MIAY4zZhiVZxuyB\nRGR0GOfNMxhQcCQ9JYLLIiq6P8Tcc9aR/EdSszdu73oP6dWNWMMMC4iJR0TdTJZsR8hT7r0VxfAu\nO5l+2+yVuJk0+9WrEbVUK3mKRcyhHBcn4iFLPXFu8tHfAmPjeLc6yFGAxMVbnMT/qao+0pLYXHG+\nFsP97Sn0j46
gs7OZNC+y7DeLqbhCRMa39Qbexpjdn018N2YPFMf73TkMKThe+khUPESEQdKR6xhb\n4CDni8iI7V2fwp8xm/K8vS/rNLlli581rRH79gRo565sW/6qBzEWU7H160Vawe18xGi6chdHcg8T\nuYrRJT2J/9JDngzjvHE2gybdzVHefRztXsYBgzsTuT8s7o+bG5eIlLrI3T/h4PjJ0s/pJ0WMki5c\nxej4QZQMjODe2vynNsbsrSzJMmYPIyJdMgQHT6BHg7kAMfGYSK+Qh1xU53xHRPqJyNa6UwE8+DEb\nWFhvNDBQ5R/MrxH4k6q2eS0qB5k1j207hCbQi2msYUWuHuvjLOAcBnO09CYi7tYE81rGFHg4Z1zC\n/vEtxxwRDpAuXM+4OHC9iPRoTlwu8vXx9JAeua2FthARzmRgxEcvFJF9ojiqMabpLMkyZs/TIYKb\nCUn+2lidiHguTlcREU+cyyO4q+J4syI4i+MSmi0iJ6rqxgzBF3/DjMRfdW7qI13Pm7qSG5hWNZvy\nj9IE17XxMwGQIPvrp1iUqMiVbQDoJBHOYhA3MZ2Hda5fRhWH0rByQwUZwjgylpIGxzpJhLGUKPDF\n5sQVwR3en6JIvmOdJUoIR4EuzWnbGLP3sjlZxux5VqbwdY0m6F6vZwXgEzZUpfDfC+Pc3InIt7/B\niIIBFKHAx6wf/idmPykiF2htJfHhb7Hq8vdZe2SAbkqQvR94TlWzbf5UgKq+EBH3rut59zuf076R\nUgrcMqr8lyhL+QR/eZNVXeN4Z3viNPgFMUGWjkTIV/keoIRoFOpN7mqiNP6iFVSngQbVZCs0TQbf\nBTY2p21jzN7LerKM2cOoakrgnoeZl8zW25/5U93AXDYFwBSF713HQQUDpQMigiPCGCnhCkbGI7j3\niIijqmVp9a+r1PQR1Zo5RVWfaq8EC2oLpKYJpgew7F8s0T8yK/Nvln6WIHtyRoPLMwQXJ8jWrM+z\nQ1B34qwhQZU2qPkK1CafwGfNiSuL/ulNVgbleUZQJ7M04+E82dxti0SkpyfOTwsl9GxUvD+IyLjm\ntGOM2f1YnSxj9kAiEo7i/iuOd/ixlBZ0ICwfsT7xCRv8DMGJwKiD6Xb7ZXJAg64uVeVq3qncSGqS\nqn7QDuE3KirubwoJX/YlBhcMo5hNpHiJstR01q1L4R+kqmuj4t01lOJvXMnImCf//T3xQ13HH5np\nH0Q392vst02P1jRdow8we32aoLdqI1nYDoTFvS6G+5MvMCg+gs5sJs3LlKVmsG59qrYg6hqpXRZ5\nehzvh1mC4Q5SkSG430fvzFcOwxE508P522F0l+F0iq4n6b/C8poMweM1+F9rj62OjDFNY8VIjakn\n90NwUhjnC4J4KfznacfhsZbIPcvEKO5XXKQ4SfaNoLZmU7mIXH0cpTefL0Pzlmm4UadtXkbVGcDr\nwH5AETCnreti1SUio+J4797C4Q0KpP5N52XeYtVfazT7dRGJRHEnx/EOOZbSgkJCMp111XMoj5ZS\nmM0SRDwcjqAnUVymscafTXllmuAYVf2ohTF+Lo53vU8wSpDqLMEDWfQuVd0AW5LE0GVn55LEjaR4\ngaU1n7JxZQr/YFXdWKetfmGcWT/koHg/Kdp6j6Rm+RXTq1dQfVWgem9L4jXGtB5LsoypQ0Q6RnGn\nFBEeNpFeBS4i77C6ci3JdSn8o/amrVlE5JiuRJ/9NYcX1t9guUoz/IC3azIEXwl5sV87jlcSCRdm\nqxPrI47jPpzJ1ny7uUNfLREV93efo+83z5SBDWb0l2uKa3mnJot2UNXMlgQzgnuhhxQnyDKU4pOu\nZkxcUT5hAx+wjgw+C6lIl5P6tqr+oTXjF5Excby3buHweP0k8c86O/0ea+5PqX/5ltfC4v76KHp9\n5wIZ2mBC/Rwt5//4ZFlSs/1aM2ZjTPNZxXdj6ojhPXQQJQdczPDIlqGkE+hb9Kwujr3AsskiMnov\nGp55rZLM6pcoG3ACfbcmLYEqDzO3xkHeCXnRvxw17luxXt1GISIkazbx7sd/uWD1+lkDReS4prwX\nIjIWOASoBv6lquXNDdjDGdCTeN4lk50kgqg4oAXAplxsr+X+EJfQgtMZEK/9vgpjKGFMbpXhdF0b\n/gtzvg60apIVwf3m8fSJ5Kuifyr9w1NZfbGIXKlaO5Eugnv4/nTKu2JxGMXU4PcREW9P7GU1xtSy\nie9mnyAivX2Cz53HkEj91Wefp78XwR1EbbKwR5FaE2Li/blQQv8KifNjEemuqprCP/5pFq/8mb5f\n9ZKW8Zwu1mt4J/MJG2b5brj7+DGXxHp3P5AtPV2xaDETD74iFvJihwLjd3DfHjHxPigi9OYR9Pjf\nkXT5fQhnZVjcn0j9rrMmShN8uoiKdL5jq2t3rakBKvMd9wm6dCOWt90SYih0a05MO2N7SWJXiaG1\nKxO3zpHz0XWbyF+KbDNpXCQN+K0SrDGmTViSZfYVo/tRlIpJw85bR4SRdHGAg3a20VySc4wnzq89\ncW4RkSObm2Q0495uFPeJYsIvnEr/L5/P0FMOpfuPQjiLROQkVV2Swv/WcqpCr7E8O5dNMogOogT7\nKf7wPj0aPq7jeAzpf0zMdULn5O4xWES+KiIXiEjX3GtOBPe1SZSOvoMJ8a/L/rHvyYGFt3B4tJjI\ntS7yzeY8T4bgD2+w0l9TbxvAQJXHWVAj8AdVzZt0eDgLF9WpCl/XIioUmN3UOJr7/cvgz1xCZd5J\n9au0GgdJAFsfLkn2Ty9SVlV/hSjAy5RlPZxH96KeVWP2STZcaHYZEXGBkUAI+Kw95vVsR2UlmUZ/\neG4m7QNVO9OgiHSO4r5SQGjIBHoWKMpbrLo8QXa2iHyutfey85Cre1Nw4tWMiYdzhUkPp0dsovbm\nN8x4XEQmhHEevYoxkcHScetly7TS+6UzE8fJX8w07MUcEbdjXLzJUdxjDqCzn8IP5rApFBXvbuCt\nYsKDv8BAt24+0kkiXKojCn7DjBtF5L7GEqLGqOpiT5zv/Iz37zxJ+0X2o5O7iRTPs6x6FdWz0wQ3\nNnZtguytT7DwgRHauaBuIl2paZ5hcSJB9vbt3VtE+kRxf54lOAeIxsVbWIN/k8JDTU100gT3vsqK\nSydp71BX+W+vWqDKEyysAX6/Zagw54UK0lPv4OMJ5+mQWKkUUqlpXmG5P4Xlm9MEzd4GyBize7CJ\n76ZJRKQ7MBBYq6oL6x/3xLnYw7k1jhcL4QTlpByBO9MEN9T7wdIuRMQL46y9hrGdBkqHbY5t1Bp+\nyLs1GYJeOzOnKCruOz0pOKSQkBvF41C6MYou/I156fdZ95+EZk7a5Q+Sk+tNWnMdY7vWXZm2xf06\nKzmNNdOOpNdhF8mwbeb9BKp8132PiROupUvxgAbXTn7jZ5WbyxctG0e3QRczPLqlsnyFprmdj6pX\nk6g+jf7dTpH+eWP7tr5ZXUVmtKouaOazjYni/sBFxilsSpB9Cvi7qpZt
5xqJ4v4xinfuKfSL9yIu\ny6gK/s3SmgzBb2s022gFexE5OYzz6CRKY8fTx+1AiNmU83fmV5eT+kuNZq9oauwhcb8TxvnlqfSP\nDqPY2UiK51lavYLq2TX4E1W37aYTkXAI53qBKxUKApQwztNJ/GtUdWlT72uMaXu2utC0mIiUxHAf\nyKLHdyVas4l0GHR+Ev9iVZ0B4Inz9UJC/3cFI+ODcj0mazXBvcxMrCbxUFKzl7XrQ+S4IufHCf3x\nEvaPH0BnBFhIBfcxM7GZ9M1p9X/Z1LZE5NIwzr1H0YsRdKaCNK+xkigulzGCq5lak8IfoaqLWuNZ\nRKTYw1lznxzdoAI5wFRdzd+Zt/Hr7N95dO0o3zZe1DJeKKzmxCN/TCRcuPX1uYun6AczH9kU8f3I\nnRwZr1uHCmCtJvkx73IK/ThdBjZoV1W5kjfTCbLDVXVxnrhLgWNzX05R1eWNPF9BFPeuLHp+EaFs\nNZmwizMjSfZSVf2kkWsEODqGd6UDAwJ0bhL/LlWd2sj5vWO4z4IceDyl7hn1nqdaM1zL1GSC7GGN\n3bORdg+J4V4tyBiBjdVkfw88sr29IEXEAToC1aqad16aMWb30iZJlog8AJxCbQ/HyEbO+T/gJGrn\nI2z94bxsrRJbAAAgAElEQVSzwZq2JSKxCO6nR9Gr7xkMCMXEw9eAqazRh5lbnSYYBywK46z9IQcV\n1+9RSWiG7/N2TZpgSGM/TNuaiJwWw709gFIHUUXL0wQ/8TV4YCfa6BLCWXkNY8KD/jsMR6DKH5hJ\nF6KsoLriUzb8j6o+2krPEXGRyjuZECrIs5ptsi4N/sWSFecypM9R0qvB8U1aw9UyzRfHcfr1OkTi\n0U6sWvUhyeRG0n5NZhid5PsyOu90gqv0bQ3hyM0c1mALm9m6kd/yaUUNfnHdYbZc8dQ/BejZI+ic\nBZjJRs9BnswV3UzXOdeL4k4dRZcDzmFItJNEyGjA26zSfzC/Kk1wiKrOafabl4sngjvveEp7v8xy\n7zbGk+99fFIX+i9R9ru0+t9tyf2MMXufpuQtu2Li+5+BE7cTxMnAYFUdAvwPcM8uuKdpG+cNoKjH\nuQwObZnn4orDBOkpp9A/FsO9ETikmIibb8gqLiHGUBIAp7Vp1Nuhqs8m8Yem8AclyQ6vwe+zMwkW\ngANfOZCu1E2woHYC/dkM4i1WUUUa6kxy3tVUNRXG+fd/WNFgKDatPq9QlqzB//1LlFUFeX6RepWV\nvqfBgv38ovRBZasYNP8zLqjqwl3+YfyIcaH5bPYqGulQ8RCJ4fEgc0jWqS6wTCu5j1nU4D9bfx5T\nFPe+gXQ46w4mRK+UUYVXyqjCO5gQHUiHM6O499W7xee7Eh3+P4yIdpLakc6QOBwtveU0BhTEcJvc\n47gdZ/WioPOxlHphnLwJFkA3Ym4Ip2GWaowxTdDiie+q+qaI9N/OKacBD+bOfU9EinNLzNe09N6m\ndRXgXXwMpQX5FlsdRU/3ORafAfwxhtfonKs4ngfkrQXUXnIJwMrmXh/BG7MfnfIO05VIDE+FMqo8\nYEpz79EUSfyr/s2So1HtMIlSp0BCLNEK/sa8RA3+C8BvNlJz6t18OvZcHRItkRhJzfIqK4IXWFoJ\n0ucihkbqTtIG6EcRB2oXnmABVZplOVXE8TicHtRudZMOTqGfM5tNXMM7DNKOVJFhAzVUk8kAP6nb\nnoj0COGc+y1GRupOSo+Jx7f0gPj3ePtcEblOVVcDxPG+ehx9CvNt9DyRXs4/WXhqbt/FZs/1i+Gd\nPoEeRQXUJlerNUGPPJttz2FTMoXfoOfdGGOaoi1WF/YG6k5YXQ6UApZk7eYEiUbJvwItikeAesCM\nFVRHKjRNB9k27whUmc66DPBW60fbOnIrJrsAlVtWS2YIVq4jmQEadH8kNUsVGRR+Vn+S866mqgtF\nZNwLLPvNsyz5vKioi2zOEtzmo7erqi8ix81i469+zHuXhNUhhR8K4f4ng/4yjvtC/QRriywBc9nE\nWQzkXIZQTg0vU8ZzLNEswcMPMe/cvhSGB9CBrkTZj2LeYFV1Ddk/ZjRYUq+5icMoTsfFa5BsxyXE\nMC1Of8bGicCjAA5SVNDIf00xPBRcav9sN8nKzXMaB3QGZqnqsm3ePsATh6O0F0+wkMt0BG6dOWhL\ntIIPWIuP/ml79zHGmMa0VQmH+r+S5p0IJiI31vnyNVV9rbUCMjtWg//idNaNHEmXaP1jM1hHFO/D\nas2Ux8T76/3MuvBKHRnbshJNa5etZ9L4s1X1/TYPvoVyq75+Gsa5QpBIlsCJizc5if994C+vs+LK\nk7VfqH517ymU4eF8mtTsrW0RZ26l51kiEgaNZaGi7lBdLjH8rohcmyHoDmzOaGaziBSm8b2EZojX\ne4blWsVcNnETh9IxN1zXjRhDtZi7+CSYQ/mXRtA5OJweZAl4nZW8zaogTfBLhV/lC9NHG0xNyGqA\nAEHtfwdbY06SfXUG6w87iG4NMsBZbCSKuzCh2e1u8iwiJ0RwHyjAK+pEJFhOdSQuoTeTZC9Q1XVJ\nsv98mbLT5+qm+DIqSZLl53zAsVpKMWE+YYO+xapkluACVV27vXsZY/YNInI0cPROXbMrVhfmhguf\nyzfxXUTupTZh+kfu6znAxPrDhTbxffcjIj3DOPMuZ2ThSOmy9fW1muBmpicqyXxBVV/Ibdj7iCAn\njqeHF8Zx32VNIkl2YQ3+8aq6rh0fY6eJiBPFfWkQHcafx9BYLymgWjNMYbk/maUVaYIxEdxvdyR8\n6fkMLRhBJyqoPf4KZRVpgoPzlbnY3cQl9Mzn6HPK6TJgm+7KB3UOBXicLYO3OV9VuZ73OIE+TJTe\n2xx7WhdlX6bsvYRmJ9S/j4gcGsJ59zbGUyRhputanmcZS3PF20M4QQ3+Car6Su78bmGchVcyqnCE\ndN7aToWmuZkPEuuouVRVH27suURkfAT3lcs5IDaCzogIKfV5ikWZN1i5pAb/AA+5PIJ7+xkMkOF0\nppwanmIRG6jBR4MU/tNZ9AequmSn31hjzD6hzUo47CDJOhm4QlVPFpHDgDtV9bDmBGvanogcEcL5\nVz+K3OF0KlxFdeJjNrigV2U0uLveuftTOwcvRO2ecm/tiRWrReSkbsQeu4lDC+uXMHhSF/pTWP5I\nDf6XgQtieD9Jkh3iISkP59Ea/Bv2lPpGItIvjDN9Ir07Hkep14EwM9nIX5nLeQzhEOm+zflztZyH\nmcfPOWTrVjxbZDTgu7yZTOIfqKrz6x6LifdIXwrPEUT2pzNvsZJzGcKBdCVDwDus4jEWJlL4J6vq\n67nYjgrh/GsoxTKCzoVrSaansjpQ9M40wY+297kqkNCb5zB4wpH1VlWqKjfxQdViKn8Uwbn1Fxwa\nrTtcqqrczyyms25KSv3jmvu+GmP2DW1VwuERYCLQldp5VjeQm6uyZdd7EfkdtSsQq4GvquqHzQnW\ntA8RiQBnAsOAtcBjqrqhfaN
qPXEJPXk2A886RkobHNusKa7inbSPRrf8oJfch7fNA90FRKR3BPfH\nAXqBj8ZjuDKYjk4phXxBBm1z7qu6nKVUcbEMz9vWrfrh5jls+rKqPlv39bC4Vb/ksILnWcqbrOJm\nDqX+XLAZuo4/MmtRDf7gOu9rAXBuGOfALMGaoLYgaYPaW/WeJyyQvIeJzpYq+HW9oSv5O/PmHUWv\n/ufL0AaLFzbUFqZNZAmKVXW7Q5LGmH1bU/KWXbG68LwmnNPkislm95MroviP9o6jrbhIl46NLIjs\nQBgfDVE78ToLW1cr7pFUdQVwGXCZiIiPLh1Flz7PsJhJWsqWEgoABYRYTXVj7bCeGgdYX/9YgHpR\nXCK4jKBTgwQL4EC6EsbpXoM/Evgk12Y1sLOTzh0At8E00FohHEI4HftRlHd1aBeJ4igOUAzsUcPc\nxpjdj20QbUw9Kfx3ZrIxb3XuuWwiirtMtU6BqL2EqmoN/sWPsSAxlGK9mQ+YostZoVV8phv4D8vT\ni6kMlmvDLR5nUU4lmSrg3frHIrjT/sUSprCcbjQskwC1NcaKiWSoXQnYkmeoieHN/oT8Ha3vsrq6\nBv/jMqry9lJt1BqC2oWHm1sShzHGgCVZxjSQIbjnbVb5i7Vim9eTmuXvzKtO4edbQbfHy21LEyi8\nPIuNazeS0sdZwC+ZnrmXmcvms/n7WYJLb+HDxFRdTUYDajTLa7pCf8eniVTtVksNyiokyN74Oiv1\nZPqxoJHcJalZVpOIAnNb+hwJstc/yNzEmjoVNFSV13WlzmVT0kevfp2V2Y1as811qspzLEm7OH+z\nrW2MMbuC7V1oTB4icqaHPDyObs4IOkfXU+NPYXkqQ/BICv8b7TVEmEuERgOdgNmqumoXtRuN4v47\njnfIJEoLfAJ5izXpcsloVjO/AW5V1crcucfH8W5Kkh0HaBTvtSTZ61X1vUbaHteJyFu3cnjkR7zL\nmQzkMOmx9biq8jDzsu+y+vmEZpu1O4CIDAa+HMLpliVY4IArODcMp9jvSsybxcbMZtIbU/hXAmkX\nOTaOd/lZDIztRyfZRJoXWZacRfmqFP7BqrqxOXEYY/YdtkG02euISHfgQKASmKaq/q5uP4p7W5bg\nbBdHAlQcKAvQ/2TQe/Mt2mjmfTo5cHEUb2KAltfgP0RtqZO6+/3FBS6J4X1T0eIsukK9cN+QF43H\nIsX+5qqVEUfclzLZ5MWqWt6SeKLi3T2c4q9ezsjYc04Zr8hK+vcZT0Fhd1au/ZQ16+dU+UHmzC1l\nFnLxOdSOMm73PxER+dxAOjz2YxnXsUyruIOPGEoxYyghjc/rrGQF1WtS+PvvbHIjIvuHcB4H9h9F\nFwbRkWVU+tNZlwnQa320nNohyGQU93shnD4lxLKrSIQVXQpsAIYKUpHC/2OA3qNarwvTGGPysCTL\n7DVEpCiG+0AWPbWUwpoqMk4F6Zo0/rcC1Sd20T26RnA+nkjvkpPoF+ooYdZpkidYWPMpG+bU4B+W\nWwTQ0vscEcJ5fhRd3NF0jVeS0Vcoq06QfacG/1RVTYtIURT3nYF0GHgS/eKVpHnIXcQRB19Or26j\nEBEymSTTZz2aXrT8nfnZbM1o4PAo7jddnN4Z/BlpgrtVdUET4ikI4az9FYfF57OZx2LrOG7iT4lF\nOmw9Z836Obzy7m3Vvp8elpssvzPP2yeCO+9OJkQj4pLQLG+zinlswkOYTXlNBZkzVfWFnWx3qIdM\nj+AWXstYSqXwv/Fqgpv4IFFN9gRgVRhnxpcZVngYPcQRIasBU1gePMWiDWmC/fbm1bLGmNbRVhtE\nG7NTRCQkIj1zS/Sbcr5EcV8aTdfP38GEyE9kXMdfy+FFP2B0SQzvQRH5/K6IK4Rz1cF073KuDAl1\nzG0RVCIxLmVEtDcFQ4AdrqTdzjMUiUhnESkK4Uy+kpFFl8vI+BHSkxOlr9zC4YWD6HBkGOcXAGGc\nn4+iy5AfMDo+QjrzhruesaMuonf3A7fWqAqFYhw66ivhonhJXw+ZUkz4+dMZcN5XGT7xGHp/K4L7\nSUicrzUhvIFFhLKdJcpkbxVjRl24TYIF0L3rcAaWHu46jnfZzj67qpY5yOv/ZFFGVYmLx/HSh8tl\nJPvTWVMEa4GXdrbdKO4v+1JUcAJ9t0mwALpLnDMZGIvhXR/B/eHx9ImNl56yZT9ETxxOkL7OGEoK\nXeR/GruHiHgi0l2kkf2HjDFmOyzJMm1GRKIRcW8N4WyI4i70kI1x8Z4RkQE7uPToAkIHfJ39o/E6\nGwwPlo5cwv7xGO7tkm8X653kIF89nj4Najc4IpxI34ICvG/uqA0RcUTkvAIJTYuJtyYq7ry4eAtc\nZEMIZ5WHLBlGceSAOhX0ofaH/oUMi2ltKYWIwiVnMTAiImzWFPOCDQzofWi++zG0/7FFMSd2xK84\nvOAE6StjpYRzZEj4Bg6OuTi/E2mksNV/VSTIhjLqszxbTq9uDWoKA1DaY2w05EWP3tF7kE+S7IVv\nsnLxTXxQ9YauZKqu5g79uPpvzNuQwj9xZzd7FhEnTXB6Cl9G0iXvOaPoIgF6KHDmBHrmLVdzFD1j\nUdwL8rQfiYh7SwhnQwR3iYtsiov3TxHptzNxGmP2bW21d6HZx4mIG8V9cSjFB5/HkFh3iZOo3arm\nlH+zdIKIjKm3ge9WIZyzJtKrwMmTR42iCwH0BXoCK1sSY4AWdiRv+SRydbOKt3d9bjueRzoROeVM\nBhak8XmE+d3OZQiH0B0P4W4+7bxfI1UKukuciLpOhmCIopFuEmearuFB5gDgOPn/uXpumF5S6Ebq\nFd/sIXGO1dLQFJZ/h9paWHmp6tK4hBZOZ90BIXFJZaob9GQBpNJVNHe+kqquF5GRi6k8Yw0LzheI\nVJN9Fnh4y4T6nVSiqJsgyxMs5GjtzWi6bLPBczVZBBIBWug18vtkCBfNFU/OzTE7K4b7nSjuuMF0\nDJ/HEKenFFBV+1k97XmWHpX7rJblbdAYY+qwnizTVk7pRGTslYyMdZfaWklxCXGqDHCPo0/HKO7P\nG7tQIBzCydtTJYCLBOR+ULZECGfmHPLPH5/FRt8n/+q5Os4qJnLKDRxccBAlvEAZX2M/jpCehMRB\nROhJAeXU5L04rT4p/BCwWpD0DF3P35nHdRxEL7cDK9d+mve6hcvfZpyfP/8bTrHn4Ry0g7hJkv3m\nX5iT6Csdmb94SoPjqgGzF75Ylc5UP7CjtrbIDZFeGhH3HlecnwK9VfWxas2cUaWZk1T1nuYkWCJy\ncAhn3lhKOIfBHEQJz7OUXzODZJ3yZa9QlvXRh1yc16ezNu/k0/dZk84STM79EvDPnsT/fCS9JnQj\nFv0Oo5yeuRHtQglxugxwJ1HaMYJ7487GbIzZN1mSZdpEHO+Sz9Gn0JWGH7njKHUzBOc0NuSXJnhp\nKqvz/jBeRAU+Wgm0uGchQfbmR1lQXVGvRNIqreYFlqVq8P93e9fH8b57Gv0LwuKykmqS
ZDmQrtuc\ncyjdeYfV2yQDW7zNag3hvKeq6wXuf5z5wefoQx8p5PPZXkz/6C9UJf5bUF1VWbD0DV23Yb4OpWPe\nmNZTg7LtZuz5qOrbaYJJy4NNH346/1/MWvgi2WztHP9EciNvTr+3pjKxdh7wzI7aAhCRY0M4K0fR\n5fazGPjNo+l1fQR3ZkTcm5py/XbajYRwXvgmIzpcLiPlEOnO0dKbH3EQPYjzD+aT1CxP6SLeZ+2m\nDMEdSbK/eJrFyfp1zz7VDbzOynSa4E7gy92IHXcjBxduIsUkSmnks+r5BM2em9dcublhZzoivxCR\nq23Y0pg9gw0Xmjbh7Hirmgi1HVP5ehyeXUNy42RdGj+Jvu6WXGyTpriPWYkswY07O6cnH1V9JiLu\nndcx9ftHa+9QLwq8RWxOvc1qDdArVPXj7V4P/UqpnYCdxKcjYeoPcZZKIWO0hFuZwYU6lIF0oAaf\nt1ilT7KwOk1wJUCa4KebSV9+IF0dgEOkOxtTaZ6Zch29Sw4gGu9C2dpPNFmzabkEmTdeZcUXv8zw\nbcY6sxrwIsuqEmTv3fJabkjsSKAHMB+YsaUEQ67O1UEicvDHc/5514xZj40Nh+LpdCbhieP+LZut\n+V5T9vMTkV5hnGe+x4EFw6TTlpfDp+kAbuKD74rITFV9ZEftNOLMvhSGxkjJNi86Ipytg7iad5jK\najycaWmCs1V1HbBORC68hQ8f6q9F2ofCyAI2p1aTyKQJTlfVpQUSuuosBhWEcqsfGxs2LiZCFo22\n5X6VIjIijPNKD+IFYygpKieVepfVP4+Kd08K/wd78rZOxuztLMkybSKF/9ZnbBg3mq4NfnrNppwY\n7qKEZvMmSqqaEZGj/sWSF/7D8j4HatfQZlLZT9noCtzmU7sReXPkes8OASYBPvCkg3SewvKvuoiT\nIXBd5CkffX6HbcHS5VSVllJID+KsJkGVZiiUbUcyL2AoV/F26jY+SgZoPECdMO5/0gRXqeqnuWeu\niEuoLEF266KAE+nDhKAHH65ZRxlrWMC6tVmC/kCXqaw+xlEpOZl+oU5EWEIlj7IgsZn0W8DzuWed\nFMH9W0fCBT2I6xIq3TT+MhE5U1W3VlpX1feB8SJSkkxt7gwsVz+df9PCPDycyw6jh1snwQKgg4S5\nQIcW3MfMG4BmJVkCI0bSpSjfsQ4SppNGUmtJfiGj2X/XPaaqT4lIt/lsPm0+m3sBi4DJW5LGLEGf\nvrkEuQ+FzKa8QS8kwCw2bvmstlWCFQ/jvPZlhnUZLz23ZOyRs3UQt/Dh/6whsRC4uy1iMcbsPEuy\nTJvIEPz+LVZdcYT2DA+Q/06qTmiGvzOvugb/5u1dr6rLRGREDf4Rr7JiHFAFPK2qDTYkbioR6RTD\nnRzCHXkY3SNZAn2XNb+K4urljHQHSAc2acp5nmVnvsGKCSIyWlXXNtZeguwdz7B49BgtKSiUEGO1\nhMdYwMU6fJserams1gTZjWmCPkAHoCahmWT99tL4D77KiuuGUBzd8lqhhDiKXvxJZ6UC9L5cD946\nERnzNqt/8QYrL/LRSBh3fZbgdh/9X1UNROTAMM5zl3NAfASdERECVV5nxbBHWfC2iAyrXytqSy/Q\nzr6vUdxjDqRLNN+xA+hMEn+oiDjN6X1UKF9HsgZo0L6vAZWkA2BW3mtVk8Cj+Y65OGtWkSgqJsLR\n9OYXfMDh2oN+8t98rvazOn+Hn9Vd7JxBdIzWSbCA2s/BV3V4wW189GMRuWdX9OQaY3Y9K0Zq2oyI\nnBbCeeQgSmQ4nWLrSPqvsiLto39O4V/R1sMecfFeO5Tuh1/IsPCWJCijAfcxk0JCfKVO5YMHdU56\nKqt/n1L/e421l1td+LdiIqeewYCCbsR4gNlkUY6hN1FcprG2aj6batIEE1U1bzJQp70uYZyZJ9K3\ny4n09aLiUaNZXqLMn8zSDWmCA3KJUN1rBAjV33svLt4/T2XA6SdK3wYTjf6gM5MfsPbnWQ1uadIb\ntwMFEpp8DoNPOlJ6NThWoWl+wNtpH4025/stIr1DOAtv5fBIR9l2+HmqruZh5n2S0MyBO9uuI3Ll\nEDrecg1j444IH+o6/sxsxlLCYDqylmTwKitqfPTBFP7lbfVZjUvo719k0HlHS+8Gx1SVy3kjWYM/\naFdtr2SMaTqr+G52OyJS4iBfi+KOzRCsyhA8oKqftEMcI+J40+5kQtyrN8G5WjNcy1R+yWF0yBUl\nXa0JbmTappT6nfK1V6ddBzg7jvf9AO0nsCKJ/58obk8HiSXIvgA8oqpNGn4TkdIY3v0+wcSOhNOb\nSYddnDeTZC9prORFPmFxq37N4QXF0nBe3Ge6gT8w86MqzYxpans7iPnsPhT++QYOLqw/J+1ZXey/\nwLInkpo9t7nth8W9oQPhq7/MsIIRdCaF///snXd4HdXRh9/Z3dtVLMvdltwbbtgGFzrGYFoIoQYC\nhFBD+UIgkBBaIECAQCDUAAmhBggtlBB6sMHghgu49ypbslVsSbfv7nx/6NpRubIlS274vs9z7Ue7\ne86ZXenuzp4z8xu+YoO+wfJIAnecqk7fCZu9fszPCsga8UN6BjsTYiHlvM7yRBJnUxJ9K4n7zI7i\n8lobv1hP/4Ael5wo3RvcG211uZIvEjZup5aWVcqQIUPzaYrfklkuzLBbSc283Lez7VMzNd2pCZJf\n3YJlksOG0Y76DhZASDz00hxWUMmBqbicPHzYuFkNDq5Hyp7XUp8Wo6rrgONFpMMmYl2BIlWn0SXL\nRvuBRtVaBan5r7H9In5gApAPzANm7GAm551NRBc/zfxBZ2tff574iKvDF6zX/7A6nMC9pbn21yah\nzh0isvRJ5t0Rw+kloD6sTxK4N+6sE5QqZXTMMrZc9gTzfmHjdrAw1kawHwBe3FPLcXGcVz6n6JwJ\n2jAzdwYb8WLMSaqTcbAyZNhLyThZGfYZTJFz/Jj3GUi+AA66xRLjZlvdZwFE5Ogg1q8VHWwgpWHs\nx6l5QKarNxiPkT7QHiCGg6eWwsliNuPH2mEdwF1FKhas2c7VVjwY/53OxpOOpaCBMzWF4mgM5810\n7UyRcz0YT3YjS9vjNxezmXhNsPzJqrqiEVuTInLUd5Q9NJvSnwTVsiPYXg/GtATuFcBqETkriHWR\ngeTGcSYla+osNlmGQ1VfBl4WEb+CHdFkQ02MZpJaYn0s9dlbmBgmOesvzB91nvbztxEfriqzKeUF\nFkfiONftaQMzZMjQOJnlwgz7BJYYl2XheehyBgX7p4TXl1PJk8yLVJG8zYBsH+YNp9Ir0IdcKSPG\n+6wOr6N6fgznqFTQ8zZEpIMHY/UDHOLPlroJjxs0zL3M4gEOYWtK/118Ey4mcqWqvrD7zrp1SC1h\nDvdhfnEtw4L9pOb6qSpfU6wvsnhLArd//aB+ETk2iPX2rxkeLEwFgLuqfMo69y2Wb0rg9t7Rsmeq\nPmVXoEJVN4lIlh9zYgcC/Y+lICsLD99RFv+
KDU4C9zRV/WiXXIR9GBEJ+DEfcdCf5OFLVpO0FNZF\nsS9X1Yl72r4MGfZXMjFZGfZKRMQC3KYuwYiI34Ox8TYOzu5ar6b0Jo1yC9NiFuL+gbHB3FoOk6vK\nI3wXXUD5H5Pq3l6/X79YD3UgcOmVDA5tVaFfo1U8wne0x28fTTerhKjzCWvjNu6LMZwr9iVNIhE5\nLoh1VxR7JKAWxrcCfToQoDMhYzlbCGOXxnFO2SodUZuQeGacT/+DRkvHBn3/SedUz6f8OlX9a3Ns\n8ov11DDyL7iMQf7a8VrLdAv3MzucxO2qqlt24nS/94hIDtAXqFTVpXvangxNQ0SyDTGvNk3v5apu\nrmGYCxPJyD2q+t6eti1Dy8g4WRn2KkTkrADWHVHs/gY4PqwPo9g3qur8HbQ7oTvZr/5ODm5YUA+4\nV2clc/GYV8iQBgFWa7WaP/BNWUydBqJHImJ4MW5VuL4tPtdBpZKE7aAPGUjQi3Ggja6L4zyd0o7a\nZ7DEuNCP+fhP6BccSQdsXKZQrK+xLJrAvRMoBpYBX6VzHEXEA8QOIM8oJkI2XsbSiSPpgk9Mpmox\n/2DJR9WaPL6pNolI0INReg9jAm2locLDo/pdeDalv1HV/Vr3SUQ6eDF+C/zUQYM+zEUR7LuBN/Yl\nJz8DiEgby/RN69x+UMGgvicFQoG2lJQtZtb818KJZPjxpB37zZ62McPOkwl8z7DX4BPzlrb4fnse\n/YNDySeGbX3BhpPeZsVRInKUqs7cTvOs7O2UJszBY+biS1siqhsh4rj5ImKqqlN7X2om7Q4Rua+E\n6DBqxEi/bYqq+d6MiGRZGI/fyMjg1pk/Dwbj6Cb56g8+xfyrYjiF23tg+zD+FMJjHEZn+pBLKTE+\nYg0zKOF6HY5bI8zf3GDwLgFMJ52DBTCAvNACKpotv/B9QkS6+DBmjqVT22Mp8ObiYxEVw15l6bNV\nJEYBN+xpG/dHRCQfaA+s12YUSTdN7x2FXQ7qfujwy3xbK1X06nYIXdoPCb392a+vFpFXVXX2LjI7\nw15ApnZhhl2OiHRRuOVmDgoeKO0wRAiKh+OlUM6jf1YQa0dLTt8sZYs3UddHAmrS2BdQodLIA389\nETzIZuBQn5iPB8T6u4icK/I/LQNVjanqNFX9ZnsOloiMFJFrROQKESlo4unvCU7pQ65Tf2kVYCj5\nBIn7oaoAACAASURBVLDaAAc31lhExnoxL76DUYyRTrSTAAMkj18wlLb4+ZA1fMmG6jB2czMoy6M4\n3nia3yPARqIJG3e/1nvyYz5wNN3aXSADvJ0lRFAsRkh7buPgkIFxlYgM3tM27k+ISE+vJ/ixaXiK\nAv420w3DU+L1BF4RkbZNaGugevGw/j/a5mBtxe/LZmCv43yW6b9ilxmfYa8g42Rl2B38+GA6kJdG\no2kMHXHRAdsreKuqKw344lWWxt1aky+qylusSLrogimUxKrr+Uc1+5fHTIzKtvjeP5keV5xGr5/1\nIfdJL8YqEenbFONFJD8g1tRsPF8cSZf7RtPxAS/GkoBYT4uI2eSrsPto15FA2qk/EaEdfocaOYa0\n+DGvPoHu/lC9ckAiwg/owWesZRVVm2lEPb0xVLXcgzF5IkUNHOJKTTCZDY6DPt+cPr9PiIjPxj3t\neAobrDBkiYej6erxYFyyJ2zbHxGRrqbpnTG478njzjrhcd+ZEx7JPuO4h/w9u409zTL9U0RSgZyN\n43fV8WeHOqTd2Sanm2kaVu/WtzzD3kRmuTDDLseAdh0IpK0ObYlBjnoTMaL5wOrG+oji/HgqJZ/N\np7zv4dolZCAymQ1Vm4mvi+GM98Fvbmf65adr71ABWayhislsiK6iKjaIth1+ziD/Vp2h8RRkf67r\nQq+x/FMR6bm9AHwRET/mh4fSeeg59N2mDB9Rmz8x5yfrCZcBv23J9dkFLFxERVJV/fXfoJPqsJZq\nL7A4fVOwMHp3JZT2BawLIaqxAcbWz9hsChHsK/7FiulhtbPG0dXMwsM8ynmZJWFFH25MFmI/IcdA\nNEfSF6fuTNDyYBTuZpv2WyzTe2Of7kflDOn3g20vUn5fDqOHXuitrC7pWly64CfA9mbho4ZY4S1V\n63NysxtWPyjbvMp23OTCXWB6hr2IzExWhl2OC/PnU16dbl+VJqgg7qOmYG+jqGpFDGfkJmKnvseq\nR99h5WPFRM6K4QxW1Y1xnOvLiV/+IovLfs83vMYylrIl4KBtzqe/v76Q41F0NfLwtaVGZBOocahE\nZJSIXCIiZ6bkB0b7MAfWdrAAgmJxBYOCLvqL1HGtRsqOoSJyqIhsV2G+ET7bTGLLFIobxFy9xypb\nasREG73eNu7iNVSlXdNbSzU+zE0pkdRmo6pLErjDP2Xty7/m69hlTNS/sWBBKbGL4+rcvDN9NgcR\n6Soihzd1FnM3U6Fgl2gk7c5lbIkncBpkgWbYVci5/Xsc02BGWEQY2Ou4kNcTunR7rWtiHvWJWQte\nj9V/jwtHy1m88pOk7cT36ySP/YHMTFaG3cGbq6h67DstZaj8L8nPVeV1lsdN5K2E6uYddZIK1P4s\n9alP0Id592g65v+IXuSIl3laxussl3QzAyLCcG0X/IA1I4APRKS7H/PfXsyeB5BHOXF3JZWmg/vR\nKDr4DBE2apSNRMjBSwFZtJMA+epPFhMZBny901enrl0neCz/U6bpy/N7s5yq8Eaf1xP4Z9KOXana\nyNO3HqmC0Ce8wOIvZmupbwwdg0lcJrG+ehVVZXGc7Za0ieE8+iFrzjhcuwRz6klivMXyqI37aEvO\nUVVXAhcAF4iIhDW5yzPmRKRLAOt5L8ZhHQjGyon5guJZEsX+6e4uldMYqmr7xHz6dZZdeaUOqSNx\nUaRhvqZYbZonmZFh53HV9fu86Ys8pLbvsAKE4ybv3FA6/9gPJ989YHDfk0OhQD4lZYv0u8VvR111\nb1fVzEzW95yMhEOG3UJNMLXx0QjaWyNpH4hg8xnrqjcSXRHDObw5GTvpMESuHkjew7/iQGPrEtk6\nreYRvuM+xlJ/2Qzgr7ogNoXiG4GnfJhLf0CPzsdTaG59uKVESZOdCIoP01pNFQVkUUIUF+UA8phH\neXgLiaNbQ+JBRI7xWP73jjj46kCX9kMQEWLxSqZ++3xsw6a5M5J27MjmpPCLSJ4BPwvgOQU0GcZ+\nGXi1Kct8PjHv9GNdeyo9g33IlVJivM+qcBHhuTGco1U11qKT3Y2ISLYPc8GxdOt0Ej0sn5g46vI1\nxfoPllQncEeo6h5T86+NiAT9mJ93InjA8RRm5eJlARX2J6xNJHAud1Rf2tM27i/4vKHJo4acf2iv\ngkMb7Ju14LXkohWfPJO0YzsMXE8l2Zzn9YSuAm2rqnOSdvR+Vf1qV9idYfeR0cnKsFchIu1M5BI/\n5gSFcAT7OeCd1pBMCIi1ZDQd++bho4AshpKPgXAr0zmbPgyRunHeVZrgBr6OJXD7Akf1IfeJm2Rk\ndv
1+p2sJz7KQE+jOCXTHIwauKrPYxLMsxEBw0M9jOCc3daapMbze0HeHHHjRkO5dRtXZ7roO//r0\n+upwtOwkVf2iJWM0BxE5JoB1AzBIoDRSU6bohVT5mX0GQ+T/hpB/7y9lWINA5X/pCucT1r4UVfvC\nPWBaWkTEC5wRwrocaGujU+M4D6nqglbq30PNMnkhNXGQH6lqi8sSfd8QkQl+X85bJx5xezAr+L8Z\n+LLNK/lo8h8ithMfrqpL9qCJGfYwGScrw15LqtBzEIjV169qbj9ejLuAm0bRgTb4WEgFVST5P4ZS\nToy/sYDz6c9w2mGKwUqt5G8sCJcRezKuzvUh8bx+Jr3POFK6Nuj/U13Lt5TxKzmwwb4PdDXrqCaJ\nG5tP+b8iap/bgvPoYJreNeec+JTPMBomLH635F2du+TdJ2w7fvXOjrEvknI48oHNOxNoD5AlnhkX\nc8BBB0oDPVpKNcrNTKtKqJNW6Pb7hogc48V4rRNBqzvZnpVU2RuJxBO4p+9OB35fwTQ91wlyd89u\nY2mT3dW/sXxppKjkWxw3ea6qvrOn7cuwZ8mIkWbY6xARy0R+5cP8lY2bB7gBsV6P4dy4M8HUAhe3\nwXfNTYykdvzQV7qBh5jD3YzhEDrxPIv4G65tqZFw0GoH9y4bfQxAUbex9MI5lHIM3dLuO4RO3MQ0\n/shY/3V8dZqIdFTVkuaeQwqvIaabzsEC8Fh+ETF2lDL+vUFEsnyY93gwLjIRsXGNgFhvx3CuU9X1\nzezO5yf9dfVhouh+cR8UkYFejHevYWhwYC2Zp3lalv0Yc/8jIgfuLcumewuOk3xQRP65fO3k803D\nU2A78YXAP1S1Yk/blmHfYL+4uWTYO0jJIbzZjazx59A32FNy2KIJPmbN2Z+xboKIDG+Oo5Xq7/aL\nGBiqH9x+qHRmpm7iM9YynY3EcGIOOiCB6wJFtWUbojivf8H6E47SLtn1Y7eqSeJpJAnXg4GDS0g8\nFGpWbDmVw4EPm3FJarNe1a0qrVgRaJfXq8HOVeumVtl27NOd7HufQkR8fswvh5I/8Ax6+9pJgEpN\n8BFrTv+MdUeKyLD6xay3RwL3o5ls6jeAvAYyIrMpxYM5rXXPYO/Ej3njcRR4B9bT0Rws+YzXAu9n\nrL0e+PmesW7vRVWLgHv3tB0Z9k0yEg4ZdifjQnjG3cCBwZ5SszqTK17OlD7WOLq18WPe1cz+8h20\nfV9y0+48kHa8z2rCJBMuep6qrlbVtWl0sd4pJlL8OsuTyVq7lusWSogkp1KSNgZpJpsYQI3CQhhb\ngJ2OyVJV13XtOyfPeioci1fV3s7ilZ+5FZVrqoE3d7b/fYxzuhLqexmDfO0kAEBO6u9kDJ3aejB+\n3ZzOkriPfsn65Hwtr7N9g4Z5jWWRKPbtrWb53s1xB9Mx7Yv1KDp4DIwTd7dBGTJ838nMZGXYbQQw\nLz2OgpAnjUj6cRRYn7D2xyLys2Zk0CVcVGwUDw2XxSPYuOiGBO6pqjq9sU5UNSkih02i6PWJFI3q\npTmJCuKUEXMSuL/5ho0PDNf23pHSflubdVrNW6zg5wxitVZRTswFpjTR7rS46jweiZZ3f/OTa6/u\n3vlgAv423rXFs8KRaHm57SSOVdV4c/tMxb4d5cM810RCEezPgFdaGqS/KwlhXT6BwpCRJiN0PN28\n0yj5KXB9U/tT1TUictIjfPdOoWaZ/WgTKiIcXUCFoehVqjpJRPoBvYANwHffx0LMAq7TSLlJZ+dq\nUWbIkGEHZJysDDskVTrm+ADWTw3IjmJ/6sKzqvWmBnaAidGhLf60QYK5eHFRD+ABmpS9pqqVIfHM\nnk7JqEPpXGefq8pEiqrjuBduz8Gq1ddG4EgR6buAiiHAZuALVbVFZPZfmf/+2xpo04823lKirKCS\nc+mHIDzInIiDXtfSLMnUg/0GEfnzinVfnQ5kAzOBj7enSt8YIhLwY/4nhOego+kaCmLJN2z8wTK2\n/DFVlHtvFbbMzSW96nkbfDi4zRZ/VdUvRKTTcip/uJzKPkDCi/RT5IGAWH8LYEkXgpGNRCWJu15E\nzlXVb1p6InsTDvrW1xRfXkh2A4HNr9iQsHGbW4syQ4YMOyCTXZhhu4hIyI/5aR6+wcfQLSuIxWxK\nI3MotZO4E1R1alP78op57xF0+eVPpF+D2JhluoUHmbM+qnbDFL/t23eoD/PjyzggOIya4tOVmuAV\nlsa+pfTbGM4hO+OgpBlnhIU8pDAa8GZhJV2wk7ibEzjXO6qvtHSM1iYg1pMDybvgSgYHaiveT9Vi\nfY5FmxK43VpDPqO18Yv1zAQKLjhVejV4CZyuJbzA4m/Cmmy0wPWOEJH+Xoyph9Ap9C1lnmPpxngK\nsFLyHNMp4TkWbdXPWtqys9mhLWZLsmubOVZ3D8bcn9I/awydxBDBVeUrNug/WFKZwB2Uij/KkCFD\nE8hIOGRoMQGx/jqYtuf9nMF1FKi/1VKeYN7mJG6XpqbWi0h3L8bCmxgZKJT/SVLF1eE+ZkXWUH2T\no+7DzbVRRMYFMJ82MTpl47FLiflM5I0YzhWqmracT6pdW6A9sF5Vqxo7zhA5zYf54sn08B1IOzNM\nko9Yk5hLWTiJHgR0DmD9GhguUBHB/gvw/M5KDrQGIpLtwSi5l7GBdIW5f68zqlZRdZGqvrEHzGuU\nlHDjr4NYt2fjMQrIYhzdGCB5VGqCO5gRriB+nqq+vbNjhMQz5VR6jkrgGkWEuVQOaHDMv3SF8zFr\nX4yp/bMWnE5aRKSNF+M2hUuSuNk+zE027p8d9IFdrUEmIgf6MV/zYnbpQtAuImwlcdfGcM5U1Xm7\ncuwMGb5vZJysDC1CRLI8GBsbe1Dfp7OqF7P5KlV9oal9GiJnWBgvHEonYyBtfaVE3U9YG43hvB/D\nOXdn3+pTsUcDgFxg8fZSrEWkRwDrSRv3qCBWIoztsZB/xXCurr8EmnJWim9iZLC71NUq/UBXO++w\ncrUHo9MP6RkYQJ6UE+dDVkdWUbU8NYvWqJO3KxGR4e3xT7xPDkmr//SuruRdVt7jqN60u21rjNTy\n5sSuZA06ie6hdvhZymb+zWraE3DWUJVw0Qfj6tzSgjEK/ZiLHubwwP3M5of0ZFC9bDuAjRrhNqZv\njquzM7Ujtzd+rg/zmxG0LziFHr4OBFhNFa+zPLqSymkxnGOBAuBMgRytWS5+rzXFQlPflQNT46ze\nW8oKZciwr5HRycrQUnpk40nmiS+Qbudg2mYtZ8uBQJOdLFf1DRGZ9hXFP5/BxoMctCSG81dgckuC\njVNtd1gHTEQ6ezFmHE9h3ni6mQGxfJWa4G1WnD6FkoNSWkG1g8LP6E8bt76DBTCEtua7rOp1B6No\nK34AupHFEG0bfJL5/b6l9PfAdTt7Ti2kMoxtuaqkCyCvIJ5wa+LO9hos5Dd9aTPkGoYGttrcjSxG\naHtuZpqmnPCdnsFK0SUPX8IjRkBVtyPPYQKNiGu1AAu5bgh
tu13CQN9WuZAe5HCdDgvcwYyDNhB5\n10SOHkMnIw+fdzabqoqJhEXkmNZSfE99V2anPhkyZNiFZCQc9iFEJEtEClJLKruDzWFsr91ISFMZ\nsYSDlja3U1Vdm1Dn5mpNToiqfYGqflnfwRKREUGx3vaKWeUVszIg1j9FZNBOnsc2vBi/OYzOOT+Q\nHmZAat4xcsTL+fT39iKnK3B+vSaFPclJWwj2a0o4ii7bHKxatnMavXwKl6aSBnY7qrpcYfUsNjXY\nV61JplLiAntVoLOJXH06vQL1ncJc8XEcBeLDPLkVhlldRswXV4f+5DEzzfUB+IaNaiKTWmG8OlgY\nl51ED399PTZTDE6ke1YIa8IDHOq/UAZ4fyg9uV1GZZ9P/45ejIkikvZlJ0OGDHsvGSdrH0BECoNi\nvW0hpUGsRR6Mcr9Yj4tIs7OsmoOqrjORBVMobrCvShN8TbGr8HJrjysiJ/gwvzyVXqfcy9isexiT\nfRLdz/BiTBeRI1rUN3LeOLo1SF0TEY6lIBjCurTerjUrqEy75LeRKD1JX42lowRR1AekddB2B1Hs\ny59hYeQr3UBSXVSV5bqFe5kVBp5S1VV7yrb6iIgRx80vaORyFZJtejAGtnQcVd1gYnz5H1bbR9OV\nqRTzbb33hJVayb9YEY3i3NnS8epj4+bm40+7rx0BcvEaWVI3+e9Q6Sw9yfEDZ7W2PRn2DqSGzMrS\n95DML3UvR0S6eDFmHktB3nEUmEHxUKpR3mD5Rd9RNkZExu7KYNkI9mX/YMnEmDqhw+gsfkwWUcEL\nLA4Dj7f2g1pEPF6Mf1zLsGA/abNt+0n0MLpqVvAp5r8iIgWq6qZiSw4DBgKlwAc7CjZ3UX8WDTLY\nAUhtr/+Uf30Jmx9dpZV0J5u1VJPApQshqknoWqoZRccG63GlGgUkCXVjslI30jOCWFcKtHfQOTGc\nP+0KuQBV/VJExr/C0gefZ9FIE8MBNtu4dzvo4609XktQVdcn5ub1RNp0peG7w3rCro3bKiVfotg/\n+5g104sI551Ej8BzLKKN+uhBNkWEo6upcpK4FzRF+qO5eDGXLWXzkOG0b7BvCRUU0nBZGmAUHbLX\nUDUeeL61bcqw5xCRAsvy32WIebarjtfrCayz7fg9ij7VGlnRGfY8GSdrL8eHeevhdM49VXptW3Zq\nJwEu00H+u/im3yqqzmAXzCZtRVVnisgh77Dyvn+y7FgBPBjr4jh36K654R/XiaBZ28HayjDyycaT\nHccZKyJlfsz3Q3g69KeNUULEWUO1GCKXuqqvNta5B2P2XMoOq6+rBfAtpbZN3SK5qlotIuf9gVmv\nhrB8XgxCeFhPGEXja6lmvBb4c2uV9VFV3mVVwoDnagfyp8rFfNSewMgTKMxqR4AlbO77H1af4hHz\npqQ6zc6s3BGqOgUYKyJtbBw/sHFvvXkrPPUOK665QgfXWU6r1iQfsiYWw3miVcZRLRKRQXPYdPFi\nKi50UX8R1UtXUzUTmEtNoPkuyQyNYN/3OsufGqB5oUCtiYtyjfEf1nANQxtrpw57Jokiw65BRApN\n0zurf49jcgf2nmAFfLlsLFtcMH3uS/dXRTaOEZELv4+iuPsbmezCvRyfmJvvYFRuR2lYG3ialvAS\niydVa/Ko3WGLiHgALxDZVV9+EblkNB3/fLkMSrsU+qDOqZxH+dVejIfOoV/bI+gsWx/Ia7SKPzI7\nEsE+WVU/b6T/47Lx/Os2Dg7m14qlWq1V3MusSBxnpKouqtfmxCDWm1cxxD+ANogIWzTBcyyMLaKi\nyI/V6XR6hwbQhgrifMia6AIqiuI4o2pnOVpi3NqfNr+9lmF1dKtKNcptTI/GcA5qreDmfRERyfZj\nft2X3N4n0j3QjgBL2cybrAhXk/xrTO1r97SNLUVExIf5lB/z3OMpDHQkaKyk0v6UtUkXyn5K/25j\npFOdNra63MiU6nLiJ6rql3vI9AytjNcT+OeAXsedPnzgGXXiNpN2jH99en04Fq8ctytmUzO0Hpns\nwu8BDhrIaUT9uma7pC/ctwtICVe2inhlKiB8AjWp5JXAm6q6AVi2gi2uqlI/ONhVZTVVlsDgQbQN\nHCld6hxQKNmcq32DL7P0TmqWEdOdw8ceMW65mal/GK0d6UrIv4wtkW8pExv3vDQOlgQwH7mUA/wD\n5X/Z/Lni5Sod4r+erzttIXHTqyw93UEHG8iWOM7TLvqEqlbW7stCfnEmfeo4WFAzM3mMdvN8wtqr\ngKuadyW/P6hqlYiMmU/55cuovNJF25rIogj2fcC7e9q+1kBVVUQuj+M8/w4rr7Qwuidx58VxHgPa\nPM/ijzxqBoenhHUrNM5LLI5GsGcAk/e0/RlaBxHxGmKeOrDXhAaJMR7Lz8BexwXmLnnvUiDjZO3j\nZJysvRwf5sIFlA8bSYcG++ZSlrRxvtoDZjUJEfEIXObHujaJ29mDURzDfljhcx/mf/Lx5Q6lXbCC\neGIWm+73iXk/cHslyc3TKMkaQ6c6TtQkilwbd1UQz+ixdGo4tQccRAeeYeFYETEaWxZLqvuQiLz2\nFRvOtzAKE7gLgZca0dYqBLoMIb/BDksMjtDOgQ9Y0zWsySN3dC2A/MJGArt7kWN5MQdvr4/9AVUN\nAw+mPt9LUrPAX6U+dRCRk//OgicNpFtIPckK4l4TeSGG88vM0tH3ipCIgd+XPgYvFGxnGIbVMKYh\nwz5Hxsnay4lg3/Uqy57rq21CObXiftZoFZ+zLhmn+QrpuwMRsfyYH3QlNPY0egW7kcU6wr3eYvk9\na6n2/Jg+1pHSdasTFdiicf7ArOtKia6K45zyHIsmLtQK3xg6+l3gKzZEZ7EpksA9LYT8xSX98ya1\nXVOfRkmVD7m3Cafi82A46bSmAAJYhoGkTxeri+3BCG8iltWBhpn4xURcG3d1E/rJ8D1GVT8XkQFA\nnzB2NrA0sZ1qBBn2WbaISLiicq03L6egwc7iTQtjthOfsQfsytDKZCQc9nJU9Y0qEg/fyJToq7o0\nOVGLeFrnR+9mZjRRkwG1S2urtYCfdCQ45jeMCA6UtmSLl4GSx+F0DvYkx1PLwQJqtJAuYkDIh3kH\n8G0Cd8AUSu5/gnnznmTedzPYeFcCd4CqLg5jv/oF68PpBp1KifqxPm/Ft/4VMRx7XSMxx1MpqUri\npo3/qk1q4f7v77EyXn9fRG0+Yk00hvNkK9ibYR9Ha1iqqrO2V+4pQ3pExCMiXVpT4kZEDBE53jCs\n+wwxfi8iI1rSn6q6rrp/njH3pajj1hXzL9u8ipXrpqjr2n9tkdEZ9goyge/7CCLSz4NxkYVREMf5\nzkWfVdWNe9quxgiJZ9bFDBw+XOqmqv9dF9KLHI6ShnWgVZUrmBRP4BaoanqVSGqKVvswlpxEjw7H\nU2hZYqCqLKSCR5kbieOMU9VpqWMFCALRnc2q84r5m04Eb/s1w4OhlIaRqvIxa523WVkUx+ndlLIn\nIpLnw/xmGPldT6
C7Lz9VNuY1loW3kHgpVWtx7/hCZsiwjyEiAcv03q2qlxqGZTpu0jQNz4dJO3pN\nS6RmRKSLZfknBv1tOvXqdki27STcZasnxRzX/jJpR08FuoKcYxhmW9e1ZwFvqGqsCf16PFbgPZ83\n+9AD+hyfFfTnsWHT/MSyNV/YjpM8X9V9a2dtzrB7yNQuzLDbEZEg0N6HMe02Du7Yud7L5Au6mI4E\nmCCFDdra6nIlk5I22l5Vt+xgnIIA5huCDOlBdrKUmGwhEYnj/FRVPxIRvwfjFoGrHDRLIGliPB/H\nuUVVy5p5ToYP8xFFLz6YDuTg9c5kU3gLiY1xnPHNuYGLSBsL41cWcomNm+vFXBbBvhd4JeNgZciw\nc4iI5bH8kzq2Gzji4MHn+rNDHUkkwyxc/rEzb9n7WxwncaCqrt2JfsVj+b87oPcJA4b2P9Xamozj\nujYTpz8aLS5duAK0d++Cw4xgoK23qOTbqrItq2zHSRzXFO07ETGAYz1W4FLDMNvbdnyK4yb/oqrN\nCh0QkX7UxI+uVdXFzT3PDDtHxsnKsNsQkbZ+zIcc9EwvhhvHCfYmV37OIHJrVQGap2W8yjJ+z6gG\nNfWmaDH/YMmMsCZHNWPcAfxPjPSrlEipx485qS9tDjyL3oGukkWpRnmf1YmplKyP44xoJMh9R2MV\nAqcBIWqyfj7bWzWnMmTYnxCR09tkd3vu5KPvyjLqZe/OnP+qvXjlZy8k7djFO9HvYUF/3oenH/fn\nUP1s51i8kjc/vo4fHH03OVkdt21fs2EmX878yxbHSXTf0ctiSxGRfh4r8ArIwJysTomqcIlH1V2a\ntKPn7s9yMLuLjIRDht2CiGT5MKeNpmPhqfT05oqPak3yPqv4A7O4VQ9ia6mQXuSwhbj7VxY4P9F+\nnizxoKrMpYwXWByN4/yqkTFyDOQKP+alLpotyOwo9r2qOhFYVO/wszoSHFq70HA7CfBTBnjj6nT+\nhk3XAbc29zxVdQ3w5+a221tILZ2OBg4BosC7qQSA5vQRBE4G2gHzgAZ1JzNk2N14PcFLDuhzQgMH\nC2BAr2OthSs+/jHQbCcLGFPY+SBPfQcLwO/LoW1uIZFoWR0nq7DzSDq3H2QVFc85H3hsJ8ZsEiLS\nyTS9Uw4ceEab/j2ONgzDCriuw9LVk4Z+M//lr0VkULrvt4h0oEY+J48a8d2vVbVBrGiG1iHjZGVo\nMQIX9yO3ywX09269GWWJh7PpyxZN8HcWcJL2oIgw77EqbOO+/S2lxiw2ndZBA7EqElYCtzSOc2k6\nsUURaevDnH4AeV0mUBjIxctCKo57ixWHe8W8NaFOnXT/ENYVJ1AYSpcReDyFvjmUXspOOFl7K7XK\nCw2nRnPsnfozdSLSyWMFPrAsX9/CziM98UTYXls860GP5f+b7cSvacqMnCXGhR6Mx3qR47YnYC2k\nwgmT3CgiJ+6JJYpUiaLxQGdgORmHb79FxMgL+NLXEQ34cnFdxy+paYdmdh2JJaqcxnYm7Rim6Wuw\nvXvng0Iby5aMZxc6Wabh+UWvboeEBvY6dptnaRgm/XuOk81V6wLL1nxxHbDtpVVEDMvyP2iI9fOA\nP9eKJapMy/Rh2zHHsnxPOE7ihoyz1fpknKwMLSaIdcmxFATTve0dSwEPMDu2hG/XAisj2H8GPkyJ\nMrYrIjwA2ALMa+wG6MO8fwwdC2o7cR0JyhDND97C1LtF5B1VXV6rSbu2jRThbYsfG00vTrMPohEp\nNgAAIABJREFUIiI9/ZgfBLG6DiHfqiBuL6TiL14xf5dQ54+pY8Rj+T/r33N8v+EDT7dqwkDwxhNh\nPvn63p9tqd5QAty1g3GOD2E9fiMjgl2lRutLVfmC9aFXWDpZRHrXF19tgu0DgYOBKuDjlEZWU9tO\n8GK83IGA1ZmQuZJKrSJZLiI/VNU5zbFjV5FyficEsa4S6OKg38ZwHlbVb/e0bd83bCfx1fqNc4d3\n7TisgXLzhk3z8Vj+ZYlkZGcc8HfWFs/6UzxRjc9bV+NuU/kyknaU/LyeDRrFE2FVdXbpUqFhes7p\nU3hkQw8P6FN4hHfF2q/OppaTZZm+3wf9eZfGE9W+QX1Ook/3I7BML1Xhjea0756/alP50kEicmwm\nBKJ1yUg4ZGgxCtmNqdJn48GFaFiT/cKanKCqH2x1plS1VFUnq+rcxhwsEfE5uOeeQk9vfScuX/wc\nThfDg3FJ7e0OOmsxFWnfPhdTgRdjyc6c555CRPJF5DARGSq1LoKI+HyYX/6Qnn3u55Csn8oA/y9l\nWNYfGOPPxvM7Q+S81KHjfN7swuEDz9jqYAHg84Y4fOSVIeAGEUl7s95KEOvu8+m/zcFKjc+R0lUG\nkhcAzm/G+bQPivVlEGvmCNo/3pfc5zwYGz1i/F8T2x/ow3zrlwxr+3sZnXOFDA7dx9isn9K/wIsx\nUaReXZoWIDUcGxLPByHxLA+J5wsROUMkzbpU3XamH/PN9vhfP4PeJ/+cwSOOp/CCAObXXjH3+fJA\nexuOk3hsyeqJdtnmlXW2xxPVTJ/7UjhpR7f7EtEYqlokyNMff3VPeHPluq3b2LBpAf+d9qDbqd0g\n6i9Ruq7DopWfhJN27MWdPJ2mGmdZZvpi96bpAXTbJIqIhFTda7NDHYKD+57MgF7jscyae3Z2qAPj\nRl9r+LzZY4Bxu9Tm/ZDMTFaGFqPo1PmUFxSS3eDBM58KNZFZLei+jYWhJsL7uooiwgSwGEsnepND\nAVleD0bf2g1iOH/6gDU/HKUdg+3lf8KfYU3yGsvDEex7WmDPbkNEQgHMpzwYZ3QgEKsiaSVwykXk\nMlX9EDi9G6GcCVJYpzRHvvi5WA8IPsp3fxaRNwU5qme3QxoE7gLkZnfG782WcLRsIJB2BkhELIHh\nI2ifbjdj6RRaxpZTgcebcE6GH3PiEXTpezq9PVbqAVWiEe5j9r2mSIWj+tL2+ghg3nYKPf0DapU5\nEhHG0EkWaoVvCiVXArftyJYm2Co+zIeCWJecQo9gIVmyhupeH7FmxGYS54vIaVqrAHidtvDzTgQn\n/JYRQY/U/HoG0dY8RDsFf8f0u0Tkv5kZrdZDVVeKGOd++OVdLxd2OVg6tRsQqKwusZes+jyp6vwV\n2O7f1Pawnfi1ldXFxe9/cftvfJ6Q4bhJw3Xt8qQde2TV+qm3d2o/MNSz62gMwyIcLWf6dy9G44mq\n2cB/W+8MG6KqH69eP+OCvNzCBs/xNeu/cYDPam0aGQq2TxZvWsDhI69o0JdhWAzodWzg20X/+hnw\n6a6zev+jxU6WiBxPTTCwCfxNVe+rt/8o4B1gRWrTm6q6U28V+zqpWYihQBtg4d6sc9Ucojj3/5tV\nPxiu7YOdahWyLtMYb7E8GsG+uwXdVyRwjJuZygjaM5i2bCbB31hAb3IJYCYSOHXigVR1pkeMX9/G\n9PuP1q5WL3I8G4g4n7A2nsR9Bni9BfbsFkRE/JgfDyF/xHn092WJx6eqzKc
89ATz3hKRkwJYJ4yl\nU9qlzwG0QSHfMDxrXTf5iuPEXWq+o3VQVRzXFrZfk1IBdVBJd8NwasT1d6gTlmJCG3yFZ9GnTjBx\nRwlyuR4QfJjv7hGRf2wvdsZGx4+iY9qZpLF08s9k0zU+Ma9RVEGmpH7n7+1EvMm4AOYlt3Nw6GuK\n+QvzCZPEQUMBrJNiODcCaf+2/ZjXn0WfbQ7WVtpJgAla6P2QNb8EftZMezJsB1X3HRHpsapo6kVF\nJXMOdJzkesdNPtPSLLvU8tk9IvKniJPoS813ZWkq5OHLqXOee3HK7Gd6WJbXse2EGob5vO3Er9vV\ny262E7t/wfIPz+7cfpDVsd2Abds3lS9l3tJ/x20nXruihavqiIjgsRpWnADw+3JExGizK23eH2mR\nkyU1RX4foyb4tAiYISLvqurCeodOUtVTWjLWvo6IHOPHfMaL2S4Xr11MxBcU64MozkWqunlP29cS\nVHWmJcbVv2P6E6O1Iz3J8a+jOv4VG9RFb1HVHSqib4c8E8O8hmH0qVUL+xjtxkPMYQWVho3+rX6j\npLqPi8jH/2XdlZMxB9u4q2M4f1HVmS2wJS1SU5cwC6hsbGZjJzg6G8/Qyxjk3xrALyIMJp8LdUDg\nBRY/6KJzbNLfx10UxODgwT9pO2PeS+cuXzM5PnzgmUGz3vLCxvIl2E68Cqj/nd2Gqjoh8Xw5jZIj\nj6BLg/2TWF8dxn61KSflxfjB4XTOSjer1o82COQDBcCa7fWj2ymrlIMnZwyd+dQsxvSGjndEj4/G\ntkQs03uX4ybvbWrwcxDrulPoGXyd5WwkwhUMpqfkEFGbSRSZ77Dy9yLyptYrKg4QxSnYTJwZupF+\ntCG3Vkms3uRaJjK0KTZkaB4pEeP7dnjgzvWdAOZv/VlECizL/6zXCnTu2nGYRuNbdMPGeYaiJcAO\nxUhbgSWu6/z70yn3n902t7u2bdNDyjavpGLLGttxk+eo6txax84IR8sl6G/DxvIldMzv36CzdRtm\nx5J2dNJusHu/oqUzWaOAZVvFGEXkVeCHNLxh79f6VyIy1of57uUMCg4lHxEhojavsfSk6WycJCIj\nm6IYvjdjq/usiHw4heKfzWTjgATuCqdGlb5F9fgs5NKD6WD3kdw6f6s+MTlb+/JHZsVs0ouBak3J\noV0W/yIiXfyYfzSRMwxEFI37xVxlIPkg8STuyzbuIzuasRSRfoZhXWganq62E5+j6r7gxTj7SLqm\nzZAcQXv+xsIDbNx7J7L+jPFakFX/uDmU0ja7K/17jpPla7/0bK5ct3jSjEcHHDL8koA/lYVVtnkl\nk6Y/GnGcxPU7euuOYP/2FZZ+lq/+wAHkISIk1eU9VtmrqSoDXmviZXO1ZmYs7T1B6/yXHgv5YArF\nZ5xEjwazWVMpph1BvghUc/Tom8jLrRG93VK1PjhxxiM3hyNl2cBNTbS1n4UhC6ngLkbjS81KBcXi\nBLqDYrzP6geBE7c2kJqanX/2YJhTKcFAeI5FjNWO/Ji+WGKwiSgurG+iDRn2QkTEskzfl0P6ntxt\ncN+Tza0hepFoBR9OvuuGcLRsPfD0rrTBMn2/zw51OPmoUb+kbPNKCUdLyc/twaqiqclNFcvPB97d\neqyqxi3Ld2fSjt35zdyXfcce+hu8nv+tOqzfOI+1xbNtVfeZXWnz/kiLxEhF5Axggqpemvr5PGC0\nqv5frWOOBN4C1lEz23V9uunb77MYaUg8k39M30MPk7pF1VWV25heVUT4PFV9t5Hm+zVZ4vn3ufQ7\naWwjscyX6edNUohvbUSkkxdjzji65U+g0FpJJX9nIePpxgjaE8dhEutj0ympTuCOVtUVafoQy/Te\ngxi/6Nv9KCs71MFTUroosq54NpbrTjuT3kePk24NxlZVruSLWBynjx/z/VF0HPhj+nj9qcW8JbqZ\nx8xFjBl1FV06DGHukveYs+itRyzTm+W69rm52V3jSTsi0dgW23Hta13XfqGJ53y8D/PZbDyhfPzu\naqq8AjOjOGerapOcBhGZ0JHA639gTHb92ayFWs6jzF0Tw+mxvdkmERnsxZh2JYODQ1IvLW5NpiPv\nspK4CMcfczfZoQ512kVjm3nrk1/FHDfZpSlitCHxfNaLnHF9yOUUaZhBFlWbX/Bl0kHztmZGBsR6\ntCuhi37B0GB2avaqWpM8zXzaE+Bs+nAL06pLiZ2lqh805Zpl2PsQkdPa5nZ/7uSj7mywXL+pfCmf\nfH1/se3Euu6qJUMRyTINz8YfHnNfICvYrs4+247z2odXx2wnfoCqrqzVRgzD81tB7jBNy+pTeCSh\nYD5FJd/ZJaULo46bPFlVv9gV9n5f2R1ipE3x0GYBBaoaEZETgLeBfukOFJHba/04MSU0uU8jIkED\nGT2aDun2MU67Zr/B8p9Q660jw/9w0E2biaed+QhrcusfYHQ3m4UX49bD6Nz2LOljxdXh7yzkGobS\nu9aSZm9y/V005H2PVS8Ch6bp5my/L/fqE4/43bbZpQE9xwdLK5bzwZd3HTJNS8Lj6NagyO1yKhGo\nADbEcI6aQcnrUyke39NsTyUJwoZy0LCL6dJhCACxeKWNuuWJZOQaEbmhfMuq4dRcs+nNmUFV1Q9F\npGscZ0wpsXxq4gqXNeOyAXyyhcSqV1ja/0zt4/WkZgDWa5inWRBJ4Ny4o+U8VZ0nIif+hfmv5ODN\n6qxBXUWVT8BzCj2M/+Y4DRwsgIC/DR3zByTXb5p7AvDyjgyNYD+yisqjDqZD2vivgFhYajgOThYQ\nFpF8D8Yl/8dQf3at5cEs8fBzHcQNfM1SNkerSX4CfLSj8ZuKiIwKYN2ZwDkKwIs5KYp9q6bqd2Zo\nfSzTd1yvboekjYdsl9cHwzBycOjGDpa9W8DY3OwuiaxguwYBVpblo6DzCHfluikTgG1F51Pfqz+I\nyMOOmzhv4YqPjzUNT8J2Yl8A/9BMMfIdkooxP6o5bVrqZBVREz+xlQJqZqy2UfsXp6ofiMgTItJW\nVcvrd6aqt7fQnr0RSwCzEbUMLyYGkl7UKQMxnL9/wrozj9FuIW+9IOL/ss71Yvw7onZid9ulcN5x\nFHgAZrKRXuTUcbC2cgxdjXdZOVxEutdfOvV4greOGnJeyF9PRLFdXm/6Fh7JqtUT+VyL3KPoYmyd\n9anUBM+wIJzAvSP1lryZmtpnizw9D+4/rNNw2uX13pZWnkxGWbp6UlJR2+fNmuH1hHJddabYduyB\nnVmiTo35dXPb1W4vIkd/RfFrk9kwtr+2sStJ6DrChove4Ki+0sR+JolIt01ED91EtDOQ1w7//dl4\nsz3buat5PAED2K5cRS3ei+HYCyj3Hp4mFm2dVqNoAigVETGQh3uS7c+RhnImQfHQW3N1HuX/BC5p\nrRkOETnBh/nG6fQKjKKjCDCdjeNfZ9mhInK2qv67NcbJUBdFbcdNNrLsrbiuY9D0ZBBEZLDH8t/q\nuu4JoBiG9VnSjv5eVWc32mS7UT
iy7Z8G1tXMuj6V+mRoBqmJn4lbfxaR3+2oTUudrG+AviLSg5oY\ng7OBc2ofICIdgY2pTIxR1CxRNnCwvsdUeTFWL6C892DyG+ycRkl1GPv9XW2EiHT1YPzCg/EjQJO4\nbyVxH23qMk8Lxg0ApqpW72QXk2PYnzzAnON+ov2C3SWbsCb5L0Xu+6yuTODe0Jr2NhUHN5SXelaX\nEaOArLTHecSknfoT6wgXAnWcrGQyOqBzarapPgWdR/hWFk1Z9Jq9rM0nrG17oLYLVhBPzGKTIfCI\ni9aJ97Cd2GWLVnzyQZvsrsH8Nj1BDMq3rGHyrKfDQLJj/oDfHtD7+JDfl8uGTfN6zVv2/hmGGNcp\nuoGam/FkbWbh7J0lNc4xItL3W8pGUiNG+l9VbdaMZMpR+RJqpCGqSN5SSSK7ePMakskoHs//XvIT\nyQhLVv6XjetnB4NYt/jEHJbAfbj2ckoaenow7PlUeBdrBf1rSUYk1eEfLMFBZ6mq4xfz7gDW6b6G\nCZzbMJFqaupd7nRyhIgcHsS6PYFzCOB4MTy/ZKi3tm1H01W6aij4IHOeF5GO+3q8596I4yTeWrJq\n4k8H9T25QSmfopLvEDHWABua0peIHGma3v8M7vsDf6+CQwxBWFU07ZQ5i946LiUTkm7Wc+rmqiJv\nJFpOMNC2vm2sLZ4lwCc7eXoZWpEWF4hOLQFulXB4RlXvEZHLAVT1KRG5CriCGq8+AlynqlPT9PO9\njckyRM7Jw/e3mxgZbFtr0mqybtCXWFyewO2xIyckJf/gBRJNzY6q1XaEF+PzQ+nsG0snn6J8TXF8\nCsWxBO4RqvrdTp3Y//ofFcS6M4YzDhQf1hdR7NeCWBfEcEYLqBdzeRT7ZlV9cyf6tyzkBgP5lUK2\ni4oH4/0YzmvUOAgrganNvS4tISieRZczqP9Qyecr3cA3bOQaGdbguKS6XMOX0RjOAVsTRLZimp7I\nj8Y/EAjVu0kCLF8zmRnz/vFxIhk+ATgWGEGNM/Kmqqa9eYvIoV5P8GHHSQ4xTY/tuHYcZGlh55HD\nDhtxua92DNTmqiLen3gbha6/yoOhK6nymsizMZxf7KsP5a1/522NrKxgx/7GIQddgWFYRKLlfPLF\nnfSKmxytnfFhMotNic8pSiZwT1HVtHpGIjIoD9+UixmY/STzGUY+B9CWzcT5gvX4MFlH9YMu3GVh\nrL+Vg/z3MYt7GbutVudWImpzLZPjSdxeO/tiY4qc68f665n0Do6gPfMp5wPWcIekr6d+q06rKiJ8\nZu2HdEqotSdQvAMHM8N2EBHxWP4pBZ1GDBs19AL/1iDyTeVL+Wzqg5FEMtykWUQRMSzTt+6Ig6/u\n3K1j3ftHcekiPpv6QLnjJNI6yh4rcF9uduerjxnzq+DW2fCkHWfyzCejxaULPk4kI6e2yslmaJSm\n+C0tdrJai++zkwXgFfO3wG1DyXfa4ffNpTxaRqwyjnPc9nRcRCTkwbgVuMLGzbIwwsDTyZrloh2u\noacEFVf/jAEFo6RjnX1f6wZ9iSUrYzhjTeTSANaxikbC2M8D/0qlLO+o/xN8mG+cRZ/AaDqIIEyn\nhH+yjMPozJn0xsRgLmU8z6JINfbNSXV2qshySmU7Fxjuw3wpF29WF0Kspkqi2BtjOD9qqcPYDFsu\n6ETwiVsYGTIxuJ6vuI4D6Sl1l/4+1rXuO6ycHtHk2Pp9eDyBZwf0HH/eiAPOqjOjrOry70m3VVds\nWXORqjZb00tE2gMBoMw0PBtPHX9/MJ0jN23mXxlUtJ6T6U6lJniMuZF1VL8UVfvy5o65tyAiXSzk\nl4bpvVIMK9iz2yFasmGOHBoLyWnSq86xi7SCh/i2MonbUVUbpNyLiM+DsekORmWHsJjMBtZSTQCL\n0XTkRRZVFRE5DwgMJO/pG2R4ziu6lHVUcwWDtzlaEU3yGPPiq6h8Nar2hTt5XlkWRsltHBTsllLd\nn6LFfEspP5fBads8pnOrZ7HpSlV9UUQ6BzCfc9Aj8vHHK4h7BVkYxb6wXqp/hiYiItkeK/Cs69on\n5+UWxmLxKiMW3xK3ncSVqm6TvrcicnhWsP37Pxr/QINEEID3Pr+5sqJy7dlaIz5cv+3/s3ee0XWU\n1xp+9pTTJFmWbLnIstx7wd0GbMCmmN4JBJJACoQQEkIvITfk3iQkhBAIoQQIJSGBQMAEiCGAsTG4\ngMEF997kIsnq0qkzs++Pc2zUjLslm/OspeXlmTPz7Tl1z/ft/b6mZQUeUM/9fsd2/eKGabO9dKkt\nYr6ZcCLf2teZ4TT7TjrJamWkfvwuJOl+voikX5ub2hcEAkDlzhkZEQkEMOcMJLf/hfQM5EsG2zXM\nFNbFPqdsTQx3rO7B701EJrQnMPW3HNtEm0hVuYXZkRriOpqOMoK8YBSHaRTVbie8KYr7aCrWdSST\nrkijc5s+jOKfcEy7+urbAGu0ikdYzO84jp2q3qUa4ad8HHXwOut+aoOJyAAfxrwfMDhjpxyGqjKH\n7fpXVlbH8QbsbqbnYJJKXh+xkCtPoas/TMKcxXZOp5CRqe7CmWyNzWZ7Xaq7sEmBuIgUmqZv4dC+\n52X363GK4bOD1IZ38NnSF6JbSxYvSTjR41T1y0RC9xRjgW2HVn79zMdDze1ftWE65pL3+K6XFMyv\n0wQ3MSuaSM6sFu/vuK0FERkGXOrDuOUhJlh+abqUd69+VrOaqu/rbmrB/GL+XwGZN93CsFCgnhTr\nu7rZe5V1RTHcnsDlI2j/yPUyNMtVj3+yhtlspz85CLCYMgRmxPAm782Ny26u5RsDyHn0Vhm+q9h6\ng1bzGEu4l2NpLOHhqXIzs+qqiE8Clvsxl55MQeez6WYFxMJRj4/Ypi+wujaBN6y57tc0e0dqdnAo\nUAt8vC/LwSLytS4dhj558rG3NOtuPfPTR+o2bPn4R6r6zJecoz1JrUoLmKmqh6rYPk0jDkd3YZp9\nQJNCeQ2KDUVkaBDrfhOZKIhnITtMMX7loY8B3+5GVt8fMjiwM0HqJCGu1UH+P7Cox1LKrya5VPtl\n9OxOFs3dJYkIXTUzkE+GfE1679p+nHbKfJrlA1dS+eAYOhprqKpbT/XjInKRqtZf5z+pHQG7cYIF\n0FuyydMgyyhnKMkW4zwJMlBznM8pOx94ds/PGIhIG+DrFsYAB29zAPPY0ykMHCPt6z+G4+gsq7Uq\nMJvt1wM/3ZtzHwipRPg6EXnqbTZdbSEFUdxtU9nYaSobxwjEHfQfCbwHVXX7bs6xSURGL1n95p8W\nrZwyyTYDCceNiYjxrOPGbjuQBCtFhevGzWi8hoCvaSNUTXURPbwvvgIyxKa/tk0spvwU4O8HOPY+\nISJ9Apg/d9HzPdT2Y84L49yjqvtt8aGqC0UkM4/gD/xiNu1KAPrQNmM1VT2b2wcQx7tnK3WFNzHr\nkuO0k5WJbX9KaU0Z0fIY7iRNymjPXkqFHVcXn5hcTl/O0u6soII4Lp+zIxpHv7W/CVaKjvl
kNCjY\n70YWmdjMYAuTaCj18QFbvBhuETBP4Pp+tM29WHrterEtMTiJLlKm0eB7FP0U+O4BxNaqEZEJthX6\nmafOOEFioC86bvy3qlq056P3TOrz3exnfC9YW1a10VT1aGyHqarsqFjnAWv3MP4OYK/EgNMcftJJ\nVgsiIsN9GDMvpGfG8XQSPyZrqc5/jhW/20F0sIlMOJNuocYJkohwhnYLraP6OvacZG3ZQl2znUyq\nynbCcgaFbNIaplHERmoIYDKUdnxGqXUGhWSInbUyubTymogcU29WJq8Dwd1m8XkEqWnk1tKegA9o\nunbVDCJylo3x0kBytC9tM7ZQF/2UksA4Ojb7+GPp5J9HyUUchiRrJ6o6n2TN4f4evxY4Q0SyY15t\nW6C4uaWrvUFEevBF7dYMVa3z2aHXl66eesHIQZc2+KyHo5Ws2/QR39RjGvQgGYhwmMWDRWSYD2Pm\nZApDE+hsBrBYSOnxL7Lm36bIXR7sNPSerfuuh7atnKjPUW/XjGp9iqiN8CUFyqlZiStF5Nfvs+VC\ngQyFOcDbO2csVHVtSKz3n2PFpO/ogIApBtniY6Tm8QRLoybydgKd5BNzpINXqvCP1Ou+L6xeRWUM\nvnBiFxGu0UHcxwKWajnj6YwgzGJbeDFlkTjeeaqqmWJ/6yS6NJECATiBfOtdNl/MUZpkGYb1HZ8d\nenjEwEuDBR2HSdwJZ63eMP37qzbOuEJExmpSsLglme+6sc2rN37Qr2/3iQ0+d+u3zNForKaCVHNH\nmiOTdJLVgoSwHv0avTNPkC/aw3uTzZ06InQLs69StCaX5tUd2hHAQ/fGZ2p6OdHoEi1rM1gadjcu\noowoLjuI8jJrOZUCJtKFahJMpwgTYTO19CeHfpLDyVpgT6PoJhH5MXAaMHQd1T5PtdnlinVUMYku\nu7apKksojwN7rJsSkd4+jJduZXionjRCYNmXNKZKg3+OLFLJw34JqopIbhDzBT/mCb3JjlcTp5gw\nlhg3uOhPVq5/b0IsXps7oOepvkCgLdtKlrB46Uuc4XWhQz2vyYg6LKfCAg7EBmmfCWI9cxm9MydI\n/q7Xbpx2Yi1VoQ/Z/mBXQtVRPHMHEcsv5mNxvJv3VgIhmQDZyz9i2/CT6NLgvbFFa1lGhQH8ay/O\nsxLYrbF4BPeyBex4/UZmjRmrHW2AuWxPJPAWK0wcQM4pg8nNLCeWmMW2OwNi/jmGd9M+NGtMLSYc\nW6g7sobVm8XtKCGO1Y7Ou2zetIrKkmQszssePKNfCK4Ggrvpegxg4qE2gIi0BSakdn24v0v6rQUR\naWca9iNnnvDzQJvMpBB0iBxGD/mGnRFqn71oxZRngPEtGWOq6/7CeUv+PqukbFWwZ9fjA4YYrCua\nHV2/ZW7MdePnHs6GnjQHn3SS1UKISEcfxvBjaapkHhKbCZpvf8jW2tVU5nUho0nisIpKDGSPxqep\n5YxL/8Ti/5yp3fzj6GgqMJdi9y02uj1ow4us9t3JSPLli5vdIZrLM6xgJlvpT3I5cAR59vsUnePD\nuDiPYKAnbawF7LDfZTOTKWww7gdswY9JT74oNZjJNq0iXkYz7vQi4gcuCmKdA+qaSM5JdLEba08N\nph0fU8w5NFXgnsP2WALv1T09J0cTImIEMN8fR6cBl9Lb5xMzALBJa/g9Cx+pJfFNx40NXV80++YN\nWz7+tqobMg2rroNjtJ1Evn9nShpRh8dYEjGRV1V1y2GMv1cQq99xdGrwHn+FtWyilglSwBwpbpOb\nmU+eeuyo2/oTS82xInL83v74RHCufJHVs0o1EppAvhXAZAE79BXWRjy8a1W1+kCvI9WEMlFERkyj\n6FSSQs2zbIypP2Bwm3qJkX2e9rDv5bOrtxNewV5qFamqIyJnP86Sd8dpJ3s0HQIJPD5gS3gZ5WET\nYxlIgYeu85IC0LsSpATeO/Mp7duPnCYCXgvZgY35cVCsP9gY13YjKw7oRmr8AbEei+HeeiCSEy3M\nZQWdhnk7E6z69Os+yVi4/JWRItLlcL7fm0NVl4tIvw1bPr62qHjhJYA4bnyK5yUe3V2pQZojh3SS\n1XJkB7HithjNCiO2w28l8Ja8xrpOw7V9KFu+eFi1xnmVtXVhnL0yQlXVGSIy+m023fUWm84AEHgz\njvfUWqreP45ODRIsSC5FXKg9uZuPiamLX0xKiKCQfy2DjZ0/GmdpmN8wn+VawQnkI8AstkeWUOZr\nRyD6AVszbDWYS3HtaiojcbzTG89CiEg3P8asAjLbHEfnLA9lBlu8JZQZNdqN+urZk+lKug0JAAAg\nAElEQVTKb1lAD23Dzpk5VeVjinV2Msl6eG+ek6OIU7Lx9fwGfX31l5ULJYurdWDoMZb8LoLb23Hj\ndwB3QNLQugzziZv46LKBmusIsJRyy0SmRHAP97JRxxz8cVOMXaJWtZpgBlsZZXRmRRsfZ4/57S4t\noJq6YqbN+f2xdZHye3dez57QpEL8Me+z5a732XKRh/osjNkRnP9V1VkH82JSy8fzAUyRG4fSzqw/\n8wRJBfhvab+Mh/j8bhF5Ym+TRVX9WET6zWb7Dz+j5GwgHserDWCNPYtuZ3Yly9hC7TH/YePkKO5f\nReSHqqpxvD9+wNbvD9M834B69ZPbtI6XWBN2cCu6knXNjxgayBZfAKBKY/yRxd/fRp0J3HCQnp7D\niojRJadN12abPkzTRzCQHaupK+lEUlS7RUnV6/5f6i/NUUS6u7CFEJGgjVH6K8ZmtJcmzgj8ThfU\nLqfiBh9GgYlxx6kU2F3Jsoqodd9lc8zFeyiq7t4a3e6WoFjzL6HX8IlSQExdPqaY5VRgAMfQnhdZ\nzV2MpL0E+anOTQwk17hC+jZYe4iow5Ms09VUlgMrIzj/8uBvwIkhrMsFfHU4b5CsRWmgByYiEsBc\ncg7d+50h3XadV1V5gdWUEeVHMrRBzHN0G8+xUnMJ1HQhw9hADbUkymO45+vuFZKPSvxiPnYuPb5/\npnRr8tnxVPlhyuOwubt1Eckn2ZUkJEUyD0oh8L4gSQ/I9X9gfCCY6t6bpyXMZCtrjTrOm/wgfl/D\nG4Da8A7+Pe32mOsl2jd+P7UmMsV+43L6nt2c76aq8n1mJBy0feOZtJRUyck+jPMAM443FZjaeEZJ\nRE7Pwf+vXzAmo74uV1gd7uGTuh1Ev6mqU1KPnWhjvNaDLOlLTmYRteGllBsu3h0mxm/v57gGVkCQ\nvJm7ldnRBF6BHiah2oOJiHw3v8OQB0859tYmSsHxRISX374+5nqJrqkEJ02afSbdXdiKUdVIQKyn\nnmfVNdfrkGD9otzPdQdrqHKBF2PqhkXktf+y+YcW0tdB18RwHz1YyUQC740i6oYUa9i6n4V0JZOR\n5OHgMY0iorjE8Hhd1ztVxI2xdGxS3BEUi8u1j9zNx2ZM3foefa+k/r6McQHMbpMpbHBeEeEi7cWt\nzGaHRqifiG6iNgE8vZ3wy9sJdyEpMTHrq1i7IC
CpYvXm9iHJZatmPZ00KYq5V+bQhwpV3R4Se/ob\nbDjla/S2ATyUOhJ0aT+oSYIFkBlqT9s2BW5Z5foTgKmHO+a9xUWra2neeiWGi5fcHqu/XUTaBjDf\nz8LX5wQ6Z5gYMottV+wgul1ETqi/fBTCuvV8emQ0Fj4NicWF2jPjeVbdCkwBUNXpItJxFVUXrqKq\nN8luuJeA0waQHc9KzWDVp4346K3Z8eVUTAL2Wa+tFfDS9h0rHiotX0Nebu8GOxavej1hGNY0x423\n2gRLRE732aG7PM8ZKmLWuF7iL57nPFi/Vk5EjL2tT9yL8QySN11DgDKSsj37VSea5gvSSVYLEsO9\nYxWVQ+9i7uiTtSAjE1sWsCO8mDIngXeGqoYBUgKbh0Qg0kWf/oitt62gwjqTbkyULwrVT9B8XmA1\nv+JTT+AjQbq6eL2aO4+DIsj+fNiPGUSu0bhwHsAvJl01k9VU0Z4gMXWZwRZvOltq43i/UtXN+zHe\nUUUM7z+z2Hb5ZO3aRMxwVbIsp4xGfqKtjQjOVdPZ8vEmrcmbSEGGohRRSw/Tbvbx8UQYx40DjBCR\nGTs/J62NKO5fp1F07iTtkmk26mz8iG3qx3w/rIkGSVYQ8+9j6Djwm/Tz7/xMTNauWVNYF5xG0RvA\n6J2PVbR/T5pVpqAX2bhon/rbUl2rDYyxRcQ0MHZ7J24mE/jdewW1YlS1RkQufXf2b17q3/M0X9dO\nI6y4E2bl+mnh7aXLyhw31mo7Ki3L/z9Bf/btIwZdFurUfgCRaGX2srVv3V60feFVIjLRNOxrSTqr\nZNtWoNRT94+e5/xOVWN7PHkziMggy/S/HQrmZufnDfbX1BXHt+9Y/qhp2re6buKRg3x5XynSy4Ut\nTMouZ1IA81smkh3BmeHBs4ezs8cU45Ec/Nfdx7FN9LRi6vITPozF8PpZyLfH0vH278rAJne9r+pa\n512Knouq8719GVtEvj6AnD/XF1msz506x9lB1MvGF60l4TMx5kZwrmkFrdetAhExA5jLT6Ggx3n0\nsHb+mJdohN8yP1xF7Aeu6l9FxAQmW2bgIhGxE07kTeC1A9RuOmiISAZwRQbWVUAggZeF6et9yRl/\nwjSTy1ie57Jg+b9YtWE6WRl5qHo11bXbTUTudd34r1rbTGaqKWHGAHJGXUHfYK4EcNRjDtv5O6vq\n4njHq+qieo/v5sdc8SDjA42FUz1VfsJH4VoS43fOYmeIveB7DBzWuOYLYKmW8xhLVtZpov8eYsz3\nYaytv1y7k3pWQN2P5AJsEelrmf6bDMOaBEQSTuQZVe/pg9HwcCgQkf6WFZh/wcn3BYOBhg3kHy96\nLrF286xwQadjAkP7ne/PzuxMedUm5i97KbKjYs1nCSc6aV/19UQk2zR968YNvSqnZ9fjZedvQE1d\nMW99+H/haKz666r6+sG7wqOH9HLhEUDqh2Fa6q9F8NAVg8iNi0iT7qPkbFJWdA1Vgxz00XmU/Ki7\ntvGdRL5hioGnyjxKeIfN0Tjeb/Zj+P+socoq0XADOQFIqsZXEKty0X7lxDoDparuEa9Evj+IyGk+\nO/SLRCIyGjFcy/RNTTiRn6WKuk+YRtHrM9g6aJDmahUxby3VlqI/SyVYOZYVmJERyOnRp/vELENM\n1m7+6Pyqmq2/E5EJe6sQLSIjgDtMwzfOMMy4qvuW48Yf3A/NpyZo0rngidQfIuL3wfqP5j/RefyI\n72OaNp8u+QcVNUWcd/JvCCV/fLJq6kqYNvf3d9RFygzgfw80joOJqnoiMnkZFfffydxvZ6jlhHFs\nC2NxHO8H9ROsFCN60ibuT3WI1scQYbDmMpfiUcACgDDOw2+y4Y9DtV1G/ZlgT5U32RAO4+yxCURV\ntwbF+tdjLLnoOh0c3KlqH012m4Yt5F/xIzjBAlDVVcC1LR3H3mKavqv7dZ9kNU6wAIb0O89es+nD\n7OOGX4OVmult17Y7Jx97S3DqB/cML6/acCnw/L6NKN/Kzxvs71U4vkGykJXRkXFDrwrNWvjUL4F0\nkrWfpGey0iAi3xhE7qM3y7Ams0mqym3MqSkjOllV54hI3yDmywbSu4DMxHbCZgy3OIp7map+uj/j\n22L8KAP7N99lQGgguSjKAnbwLCvCEZwrPdU96hgdzZiGdbVtBx8cPeQbocJOI3HcOGs3f+gtXPFq\nxHXjp2jKcD1lIzOSpBjpWylZAXx2aGqPgnEnjx16VYMOxMWr3nCXrH5jRcKJDvmyWSARybSt4OvA\nSQWdhomqsqX4c9pkdPAqarZEPS9xvjZ0AjgopMZ9Eziuc94gtpYsti+e/CA+u3EhfCn/nnZH2PUS\nHXQPNlMthYiEgG4kbbN2Z/B9SgEZr/yvjG3WYuV+XVCzjIpdFkAiYgcw3+9Bm5EX0jPYlUy2UMcU\n1kXWULUkinvC3gjbiog/iPm0i144hHYuoIspswzk1Sjud/Z3CSrN/uH3ZU4ZPfjy83sVTmh2/4tT\nr+X8k+9jpyn0TjZuncechU/PjcVrm/ik7mG8d8YOvfLUHgXjmuzzPJfn3/iOBxpsLbPerYn0TFaa\nveX1lVQ+0dxs0goqqCVRB3wMu+4KjxGRQSup7Emy/XnBgSzVJNR72BDZ/hhL7nXQroqKD3NZGOc2\nVX3nAK7riEdEsg3DfuiMCT8L7tT7sSw/g3qfaYSCuRlzFz7znIj01yQLgYWNji+0TN/EkYMub5Bg\nAQzuc5a5Yt273RNOdCwwV0QCwBlAB2AFSR80ta3gv7p0PObE44dfLWbq7jmeiPDBvIeNws6jQpu3\nffqqiHQ82LVRqc7Bk0Sk/6Ztn97dtdOIi312RhPJk8xQHm3bFCTKKtdPAJoY6bYGUs/N8j08bGYJ\nEd2kNRRKw/udHRphNVUW8J9650yIyCkrqbzh9yy8IY7b0YdZksD7k4v+YW8SrNR5YsAVItL1M0pP\nTW1+N13z2DK4bnx5WeWGM3sVTmiyslAbLgUEn91UmSIz1B6Sn919Je56za8wep6DgCoclOL6ryLN\ndh2l+WqRrE3QW3/N/PBC3YGnSkI9Zus2HmZxJIb73cYdLKq6VFXfUNX5B6MWxlN9OYLbJ4GX76Ad\n6jQx7KueYKW4sHPeQLc5QcXu+WMwDKsLMPBLjh+a27ZHzLaayrGJGHTpONQAhosYF5uGr7h9Tq9n\ne3Y9/oHMUPs3LTOwXkTOAk48fvjVhlmvEN1nBxk/4vtsLVlEu7Y9FLjoQC90d6jqCuAD0/TtVhTT\nNGw4wm8aVTXuojf8noXhJVqGqqKqrNJKfsP8MPDzxnVEqhpz1bsvok4XV9WKqJPvqPdrbWTmvpfj\nb1bVp1N/6QSrhXC9xBOrN33g1dQ1rIxQVeYve0k75PbBMJq+1UvKVqmqt0c3jcbEE3X/WLXh/drm\nvsbXFc3GsoIfqKqzr+dNk+SI/lJKc/BIqPeIiGx9kmW/iuP2UZAA5idR3NtV9bB4Z6WStSNKjydV\nJdoZs
IHNB6uduh4d2mR2blawVsQgI5jrxOI1eV9yfHU0WrXb6ey6SLkD5NtW4P7Tjr8j1K5tUklf\nVVm7+aOMOQuffqmg03Axm+n0Cwayyc3uTjCQnUU5zXadHkRmbCleJK4b31UIv5NorIayyvV+kp6C\nRzSOes8ZIrWPsuR3QGcD8Ty0MoF3t6PeMy0dX5pDj6puME37pjc/+Pn9Q/ueG+jUfqARiVWxbM3U\n8I6KdWV+X1ZuPBHOqD+bFY5U8Pmqf0cSTuT+/RjylcrqLfd8uvQf3Yf1v9i2LT/JkoBFfLrkH2HH\njd158K7uq0c6yUqzi5Rw4ZRUp5dXp4l9vhv+KiEiZwUxH/Cg0EQ8F62xxPiFiz5+EDvdlm/fsSxK\nMolrQMKJUV27zQ+saXrYLmaHo+VOc1pBteFSSspWWrYVPHHU4Mt3JVipa6N34QRZse5dy/MSu03S\nPM+htq40ypeYLB8MVHW1zw69P2vBkycfP/yawM6kL+FEmfnpn8KGYT7vuPEjKkHfHZ7qKyLyKtCF\npHzCHpP31Gf2TKAtsBio82PeYCGjFIrDOI8BbzQWNE3TOnHdxGMi8unnK/992+JVr48CqYwn6h4H\n/kpcfv/atNu+Naj3mcHszHyjrHK9s3zdf+Ou5/xS98PBQFVjInL8mo0z/7Z6w/QT27YpiNZFyi3H\niZY7buzbqvrJIbjErwzpwvc0afYDQ+SiINbfvsfA4FDaIcA6qnmSZXWVxB6Iqfs/B2McEbFM07d1\nwsgf5BV2Htlg32dL/5lYtWH6jHii7rQvP4dxic8OPXvc8O+FCjoNRxCKdyzno/lPhGPxmp976t57\n2RmPWbbd1Hlg9caZzFv8V71k8sPSeH9NXQn/+eDnJJxYRNXpol8YEh8SUoXwLwEnde08As9ztWj7\nfEPEeCXhRL+9r63rRwumGN8zkYd60MbNxW8up8KM4frG09kdTp5VTpSpbKytIDY3invmV/V5amlE\nJBsYASSATw6kkFxEjrOt4A8Nw+zhes4yx4k+3Ey36v6ctyvQn+SKwgHV2n4V2Ju8JZ1kpUmzj4iI\n6cfcdhPH5PWRhm3WVRrjNuZEE3iFepDsOkRktGn63uvWebSve5cxAceNs3LD+3VlFetKHTc2TlX3\nKGshImfaVuh3npfoLWJ4Ikax48bu9jz3ecOwYl87/U++5tTVN239lFkLnizJyS5sc+LoHwWCqY6m\nmrpSpn/8B2rDpQnXc77vec5hW8oSkf7AJMAF/quqGw7X2K0NETk3C/uFOxgR6pzyH/VUeZfNTGcL\n/8sYfGLiqMfvWRheQ9UvHfXubeGwv1KIiG1ZgQfUc7/XJqtzzHXjUhcpV1XvLtdNPNrS8aXZf9JJ\nVpoDQkQKSXabmcB0Vd1Td9RXAhEZ157AO/fJcc0KqP5JP6+bz44bVfXJgzhmB0Os79t24ExVYvFE\n7d+AF/a1o09E8ki+nsU771J9dsbUYf0vOL1P94liGnYDQdppcx+o21K88C7bCvTzPPe7bdt0Fc9z\n7OrabSJirE0tJxyWmr3mEJEJPjv0fwknOh5Qywy8n3DCd6vqvJaK6XCSIfaS7zJg0HBpWpb3gC5k\nLB05XpJNExu0mt+yoDSqzv50oKXZT3x26O+52d3OnzDqulBK342K6s1Mm/v7cDRWfYvrJh5r4RDT\n7CfpJCvNfiEidhDzLy56yTDaexaGLGAHoDMjuBfrYTLlTdWZXAR0BTaQ9NJqcQsVETm1B1kv/0xG\nN+tp8ryujL/PljtU9Q+HO7Z9JSnx4H/c85wzECEjkEu/nqfQt/skVqx71/185ZRSx433U9VqEckB\nxgEOSa/IFn0tRIyLbSvw3KjBl4e6549B8VhXNEfnL/1nxHFjF3xZd6qIDAxg3qlwOkl/x9djuL9R\n1S+rb2tViEimiVT8mZOs5mypPtStLKeCa2QQkGxm+B7TVcF/tC8ZikgH4ASSr+0MPUwG16lGmGFA\nR2A1gGUGllxy+sOBxh2+FVWbmPrh/1a6brzD0f56HK2kdbLS7BcBzIcKybroxwzdZbXhqMfTLD9p\nETv+RfKH6ZAiIufbGM/3IVu70ya0jqrwWqofF5HLVPU/ez7DIeXzIur8YU0QamTOq6osZEcc2C9h\n1sOJiBSapm9+/56ntR3QazIBXxY7KtYxb/HfWLRiiquqCx03fvFO2YBUzdVbLRw2kBTQNA37L6ce\nd3uofU7PXdv79zhF2mR0Cs345KG/ikh+cwXjIjLJh/HG6RT6x9LR9FDmsP3Kd9h8qYicoqofH9aL\n2X88JWmobTT1oMZttL2MKCYScTh62/FFxLatwKOmYX+jQ7u+cVWP0vI1PtsOPuk40RsPZeG/iBxr\nW4HnLdPfITOU51bWbPG5XqK4W/5oaU5CJSe7kFAgx6ipKx7FUdAZm6Z50klWmgaISI6N8e0fMLiB\nl5klBt/WAf6f8OEJItJPVVcewhiGBTD/fhvDQ91ll6px5lqt4n4WviQio1V12aEaf0+oanFIrDee\nZ9U539OBgfqzCG+xyQ3jFAEftVR8e4ttBX7bv8epbYcPvGSXUV5ebi9OG38XU967JR6JVt7Yiuud\nTm/bpqvUT7B20jlvEMFAdqimrmQ8MLP+PhGxfBj//BFDQ4Mkd9f2C+llFWpW5tMs/6eI9DgSCn5V\nNZwh9sLPKB01lo6N9/ExxUzkC8P3/7AxbiDPqXqt/tr2F9sK/Dk3u/ulJ435ccDvywwARGPVvP/x\nH75bWbNFgRsOxbgiMtA0fe8eN/x7GYWdRyFi4LoJpn/yYKFp+narR5nSd2veCT3NUUFajDRNY8Z2\nJTPWpqmNIbYYDCNPgYmHMoAA5p3n0D1QL8ECoJdkM5muvgDmbYdy/L0hgvudhexYcDtzat/UDfpf\n3cQ9+knNG2zYEsWd3Np/pEXEct3Ehf17TTYb77NMHwN7nh6wrcA+mX0fZvLaZHZs9vtLRMjK6KhA\nc/php+QR9NVPsHYykjxCWO2AsQc31ENHGOf2v7IyvEardm1LqMfLrKWCGIPIZYvW8RddFptLcXEc\n76B0vbZGRCTf89zLJ479Scjvy9y1PeBvw8SxN4Y8z71GpJkX/iBgW8F7hvY9L9gtfwySMmk3TZth\n/S8yNm/7FK8ZBY66SDk1dcU2KS/KNEcn6ZmsNI1xHbzdrjEncJVkTc6h5KTh5DX7AzqCPOtdik4+\nxOPvEVWtFZHjo7gnvsGGiwR8cbx3gNePkPqKAGAE/c3a5JERaiciZsdmd7YOViQVrrVBoT4k/dZ2\nVKyzSFoDNaaggMwmiSUkk7N8zfDKiXUF5h78kA8+qvq+IfKN+1nwZK4G7Bz8up5qH7AZMG/go24W\nUqPwlwTebw5XbVILMalT3qCEzw41WZsL+tuQl9MzXly28kRgysEe2PUSZ/UqnNDkO6t9Tk+CgbZ8\n8vlfdczQb4mRSsASiQgzP30kLGL+WTVec7DjSdN6SCdZLYiI+E
jeNdvAZ6r1bkdbjlnbCJslGqGD\nNNRFCqvD55SZHOK6HIFEjOZLJ+J4CLQKo9LUbNWM1N+RRp1hWBU7KtblNbfktq10acxxY625rmxW\nLFFXsmbTBxl9up3UIMtatvZtV9VboapLmzlu/QaqvWaTM1U2U2sC6w9h3AcdT3WKiLyxnfD47YRz\ngCWqurql42oBTFSbmvql0Hq/d5KcbppEski9Cnj1gBJQVdNsxuoGYEifc5i98C/hjVvnSbf80abj\nRL1N2z4TEXnJcaMtPiuf5tCSXi5sIXxiXu/DKO1M6M1CMl+1MbYFxfqjiLTo+ryqhgV+/QcW1pXU\nax6r0hgPsihsIP9Q1S2HMgYHfWEmW5tNpD5gSyyO+/yhHP+rgKqq5zn3z1v8fNh1Gz7VZZUbWLd5\ntud5zuMtFN4eUVV1nOjZnyx+vnzGJw+HN237jI1b5zFt7gN1n698rTThRHbnpTi9knhtslu2IXPY\nrjHcrcBnhzT4g4CITM4Qe3pQrJIMsVcZ8GNgnqpOOVoSLBHpKCLjRaTfXh5iFJctN+LNGFVEYzWU\nVawLAjNFpI9l+tdmZXR8tX/P037dtfPIP5iGXWSZvv22jzEt/6xN25q/JykqWRR1vcTvY/GaCas2\nvH/nuqLZtzpurH88Efl22hPw6Cct4dAC2GL8sA2++27kmFAXSdYOVGqMP7M0vJGaKRF1vtGS8YmI\n+DDuUPhpR0KuhbCFOttEnozi3nyovxhEpLMPY+nF9Mo+iS6GJQYJ9ZjGZvc11lfE8QapasmhjOGr\ngIhYthV42bZDpw7seXooFMyVbaVLY+s2z/JcL3GFqnfQl1UONiKSI2Jc5bNDF6uqF0/UvQD8TVV3\nuwQjImN9GO+dSL5vDB19CsxmW2w226NxvAmquvjwXcGumAyStY79gGLgP6oabe6xfjH/N4h104X0\nzOhHW8qJMZWNkTVUrY/iHtvYRHofYsgCjgMEmL2/5zlQRKSdbQWf8TzntKyMjtFwtMJW9TYknMiX\nWrwExXqmi9H2qkRuZ44b8yN8KYeCWLyWj+Y+SHHVetfxEj1M0zdv1KCv5/XtPsnYOZtZFynn7Q//\nL1wXKf++qrfPN3EiMt62gv+dPP6nodzswl3bN26dx0ef/bnG9eJ9VXX7Pj8ZaVo1aZ2sVoiI+HwY\nJXczKrtAMhvsi6rDjcyKxnAHqmqLL1mISJDkl65J0gai8hCP5wMSqqoi0jeI9TdFh+QRjJcS8Qmy\nIILzTVVddyjj+CqR0vU50baCVxuG2cFxYp+4XuJxVd3c0rEdSkSkmw/jBgvjPEATeC8n8B5W1a2H\nMQYbGAr0tUz/fcFA2+wO7fpZ1bXbnPLKDbie811V7+VGxwwNYs39NeOC2fWaU1SVJ1gWW0DpozF1\nb9rHOAw/xi89+EkXMuICFFHnM5HHori3HU6/QxEJWGZgUe9uE7oP63+xz2cH8dRj45aPmb3wL3Wu\nGz92d0lwUKxnz6PHlVvNGPO0hPx2A1A8tpWt5Fg68ZG3KZZAb+3Yrv+vJ4+/K7Px8dtKlzHjkwc3\nJpzofnWXihiXmYb1VPucXl52Vr6vpGxlvDZcGnbc+Nmq2pqX3tPsJ2mdrNbJ6Bz80jjBAgiIxUjN\nYzbbzwYePvyhNURVI8C0QzmGiBgGcp0P4w6BzgbiBMX6J/DTsCbGikiPzdR2BTa1YjmBI5YjvK5s\nv1HVjcBNqb/DioiIhfFjH8Y9mdhWtaEZ4465SnoUHLerTqyscgPvzLr3WRHZpqq75ED8mNedSoGd\n3aj7V0Q4X3v451N6tYjcvC9Jgh/z3g4Ef3gDQ4O5EggCVGiMP7H42m3U2SSXIg8XX8vJLsgfPfgb\nvp3PhSEGPQqOJRytDH6+8rVfAuc1d2AU9425FF/4P+6orPMpZGVp0kpzAKPZTA1zKSrC8o3vUXBs\n0y9foFP7AXie2xnIJendt0+oei+KyBvFZSvOLS5b0YGkGOl/D2eSmqb1ka7JOvzYNuZuvwB9mAZf\nEd0UEZEA5l8LyPjNjRzT5SkmGvdzvG8SBZf7MBaISKGqrlfVmekEK82eEJEOInKOiEwWkd0WQLc0\nNsbtOfh/dTej2p4kBZk9O4+Snl2Pb1CI365td0YNvjzkMwNTReQ7qVlebIzeBWQ2e3PcUUK4aBBo\n6vS9G0SkrYf++EaOyciVwK7tOeLnJwwNueg1KSumw4LPzvhmv+6nZDZuSgDoXTjBcNzYmdLcziSv\nbydc+m/WO22wGSedGCedCJPgLywPR3HvAI25bvPNv566qHpC0sB5v1DVOlV9QVUfUtWp6QQrTTrJ\nOvzMLybsq9BYkx2eKp9S4nCAswoikiEiF4nI7bYYz2SK/apPzHtFpGkb2RfHGCIyOSTWPzPF944l\nxs9EZK9a+EXEEpGTReRiERmwD6GO82OefycjM/pIW0SEbPFxsfQyT6OwbQDzvn04V5qvKCIS8Nmh\n50zD3tght+/fcrO7/9M07BLL8t/6JT/ILYKIZAI/u4VhGQWSyRKzloKuxzb72G75o3G9RFYv2vwx\ngPmhiAQTeKs2UdNsTeQ2rcNEwkDTyu/dM7EHbeJtpakieZb46EfbOHDKPpzvgBARv9WMOjqAZQVQ\nVQOakbcHVDURw53wLpsX3sis8GO6pPZe/az6HuaFa0nc4qn+K+FEX1q14f3aZowA2Lj1E0zLv6il\natHSHJ2klwsPM6paHRDrz4+x5Job9ZjQTlV1Vz3+zuq4g7dQVefv7/ltMa6zMX7XFp9RSyJwPJ3p\nRhYbqEl8yNYbbDHvTKj7UP1jRMQfwJyahW/MqRRktsHH55SNn0fJ7SJyvqq+t+NZP5IAACAASURB\nVLvxDJELfRhPtidg5RJgLdV2SKxFEdyL9lTf4sf87il0DfqlqWzRqRSYb7HxAhGxjxDdqX0mNTvR\nC4gB61u7gGlrxbaCL3Zo1/e08SOuDfh9GQGA6tpi3ptz3z2RaKUDtCYPyYldyXRy8POpllBGlHyv\n+T4Sz3MxMbiTkRkP8/mQpVTcFcN9dBpFV03UAiunXmKkqkxhXRR4fB/fR6bVfM4CgIUhJGsyDwuJ\nROTtTVvnjSrsPLLJbFzR9vnYVvDzeKKuaYaUIvWdM1pEjplHyXCgmuSSXV3qIW/VRcvXzFn49IBR\ngy/3++wQqsq20qXMXfRsxHGitx6aK0vzVSVd+N4CiIgVwHzKQy8dQZ4GMI1PKXVdvAUR3HNSHnH7\njCHytTb4nrmYXqHXWMedjKT+EsAOjfAL5oXrcE5T1Vk7t/vFvK8vba//MUODlnwxublSK3iARXUJ\nvILGRe8i0ga4x4/xo3F0ss6mO+0kgKMeb7LB+S+bt8Rw+++uQwogU+ypV9D3jHHSqdn91+j0hIPm\ntRL9sIOGiBg2xl0CtwSxzASeqei2CO4NqvpmS8d3JCEiA2wrOP9rpz8cMM2GdUqV1Vv4z8yfV6UM\neFuFtpqIXNKH7L/Uk
Mhqg00mPnZ06MJJxzYtDVu25i1YMZPrvH4UaS2/5NOKOF47H8Zdfsy7zqdn\nsB9tpZwoU9kU3kD1miju8boPBu4i0tHG2PAHjg809uGMqsNP+CgWx+tzuBohRCTPNHxrJoz6QZvC\nziN3ba+pK+WtD38RjsaqL1PVNw5wjOxU9+KZbdsURCPRSjPuRKocJ3q1qrYKb840RwbpwvdWSkoC\n4SoRuWcuxWcCPmAWkAmMF5EFqlq0L+cUEQli3vs9Bobep4iz6N4gwQJoL0HO0x7BKay/AzgndZzP\nxrjuCvo2SLAA+kkOQ7SdLKD0SuChemNNtjFeGUBOcAA5RikR7uETTtdCzpLunE9Pa5lWtFtD1cXA\nbtuhY7ifrKBi4jg6BRrv26Q1GIgJ2tTf5wgngPloB4LfuJpBGV0kA0+VJZT3fIKl/zRELvdU/93S\nMR5BnN69y1hpnGABtG3ThVAgR2rqikfSegx4526iNvNcunOGdCOiDj8rm8/SlW/Qv88ZmIaFqrKl\neCHLVrzGbe5gECiQTBLqZQO+mLq/EpHZ/2Lt7aBDBKkI4zwKPJNqVtlrUj6cLz3G0kuu1yG7ZpVj\n6vIEyyIm8ubh7DRV1VIROeXDzx57q21WF1/nvEEZ1bXbo0XFiwzQOw80wUqNUQVcKCKdyirXDyQp\nRjo/1dU8yDL9NxqGdSxQFU/UPQG8oNpMfUeaNHtBOslqQVLF3I9aYlxpIu+0J2hkYusGqv0hsf4b\nwf3WPtQH5LlolwHk8Bwr+Bq9m33QAHJlCutG1Ns02MbI6LibOuHB5IaWUz5q5/9Tre+v3MLwjN6S\nvetxZ2t3fst8OmkGIyWPCXTO3ErdJXxJkuWgT8yh+M6JWkA3ydq1faf3Wj4ZuoW6O4Cb9/I5aPWI\nSA8/5pW3MSIQSi0VGyIMpR0/1MGhh1n8iIi8nl463GtMw7B2eydpGKZyGJe79oIeGVju6RRaAEGx\nuMMdwpOrZ/DamrfIaJNPIlyBlYhxvdufwtTnoljDWBjhBF4cQFWnA9MPRkAR3GvWUeW7gQ/PH6bt\nPUFYyA7DgLcjuFcejDH2BVWdJyKdyyrXn1tWuX4AUAr862BbAqV0q3ZpVxmG+XXL9D81sNfpvs4d\nBlvRWDXL1rw1tKKm6CciMn5fZgjTpNlJOslqYQyRizOxH72JYaGdiUZEHf7BqtM/o/RdERm3lz+4\nux4TwqKKOB1pmjhVEUOQXYmbhXwzgUdUHQLS9O2wg4jnoMU7/29jXD+BfKt+ggXQVvxcrL14h02M\nJA8DQfbQWKGqW/1iLruPBcOO0070py0VxJjBVjoT4mv0Nn/Fp9/mKEqygAvG0EFCzTzX/cnBj5kd\nxR0CfH74QzsimbZhy9xfjB58hc8wGuZSNXWl1NaVWsB+1zgeAkYeQ3tX5Is3QHsJcqc3hG1ax4Pl\nnzOJzpzJ4F3dhqrKv1kfE3jyUCTfqVmar4tI908oOY1kYfl7qrr2YI+1DzElgFcO13gi0sk07L+c\nccLPgzltCnZtL+w8MuOjzx7vv3n7gvuA6w5XPIcSETGB00WMCcmZT31FVZe0dFxHK+nuwhZERMSP\nef81DArVn8kJisW3GeDPwB4InLCXp9thYmxcTBlj6cg0imj8fayqvM2mSATnzzu3+TAv6EU2M2la\nox5WhxlslQTeszu3+TFPPIZ2zbb/DKEd60nmb7PZXluHs0fFcBuDb9GXIBaz2c5Gavg6fbiOwXQg\niINm7ekcRxihLHzNSnSICEEsF5rJjtM0i6ou8Dz3s7mLnom59QrIo/EaPpj3x7CI8aBqPX+olqem\nilizle6dJQNF4++yOfIJJVRojDVaxZ9YHFnAjg1xvF8cysBUdYOqPqGqf27JBKslMMT8TveCcVI/\nwQIQMRg+8Gt+Ve9KkWZaMI8wRKSbZfpXZ2d1eWFov/NvH9hr8t0+O+Njnx16uaUt3Y5W0jNZLUuh\nIB0GktNkhyHCCdo54002ng98sKcTpeoJbv0Ly1+4niGh2WzneVZxtnYnR/yUa5QprIutpmqTwlO7\njkOtUyngOVaSUI8T6UIGFquo5EXW4OJF6t/lKFpbuxsZmVoS+DD5r25y11JVA7y4p7gd9LNthAdf\nKD2bvBdXayV+jBZXvj/IfPIZJXUXac+sxuoCFRqjjKgPWNYyoX05ItLDNH3XW4ZvgqIV8UTdU8CU\nQ22ztCcSTuScjVvnvbJx67zjCjoNU8eJeVtLFltimE85bux/DlccqRmCq4NYNydwCyyMsgTeIy76\nUL1E79+LKX+4UmM0lk1YpZXUkKiM4V77N1be7uD1N5HKGN7jHvrIl1kFpTkwTNM/JC+nd5PaUIDM\nUHsM08b1Eu2BvfZtFZFcQ8xrLStwBeBzvcR/XTf+h5ZKYEXEsEz/+8f0v6BwUO8zd077WsMGXGxN\nm/v7M8sq1t0H3NgSsR3NpJOslsU06i3zNdmZbJ/e69dIVV83RX7wAAsfbk9QllAW+JCttqmG5+LF\nTYy/xZI2GbtqCxTe20jNFXcywnqd9dzCLDwgjwAdCHrbqXu1/hh1OE+/S9GYsdoxo3GSMJ0tgDr/\nZv2qON7ZezODEMN96B02X36sdrLq14VF1OFF1tRFcO/d2+s/QnivknjJO2wOTaZw1/pWQj2eZXnE\nRJ6Nt0KdHhE5zzR9/+jbbaKV32GILxqrZtnat4+rDZfcLCKTWnK2KFXIfIqI9F9fNOcEIA68pW68\neA+HHjRExAhgTulE6OSL6BXqThbbCXd5nfU/W03VJamanrCqlvrFvO9e5t98tQ7M6EUbFPicMp5i\nWSSOe52qTgGmiEgvoADYmE6wDi2el9hYXbstQTNC0NF4Da4bN4EmtmKpLutLRYyeqt5m4EVVLReR\n7qbp+7hrpxFZfbqdFLRMHxu3fdpt5fr3rhKRc1I1dQdMavZpEtAOWKKqX1ZmcFookJM3sNcZDdbV\nLdPH8cOvDr027fZrROTuenIXaQ4CaQmHFiTpGWZuu4VhHXo1qnFSVe7m45pthC/f17b+lP7SRCCL\n5KzIDqCyuTZ2ERnox5h3I8NCfaUtrnq4KBup4QEWhWO4Y1R1ab3H+wOY80aQ1/cSevmzxU9MXWaw\nRV9lXTSB93Vgnwq3LTGusTAePIl8qzdt7RLC3jtsjsRwX47ifudoKwIXke5+zBntCeSOo2NWBMed\nybaYgzc9mtQXa1WdTMm2envD5PE/DbXP+ULP1lOPmfMejm4tWfLnhBP9SQuG2OKIyEWdCT17D2My\n7XpduqrKH1gUWUbFL1z1fpt6rBjI1T6MewRp6+EZBsb6CM7NqjpVRPoFsZ4HHZRHMF6S9O1cFMG5\nQtO+nYcEEeljWYFFF5z8u2Aw0PC7eP6yl52V6999M+FErwbKNaVkKiLnmob9Qqe8gZqX0zujvGpT\neEvxQvHU+4Fl+q4e3OfscUP6ntMgodlWuoz35/6+yvUSHQ/0cy5iXGia9lNtMj
qaGaH2Ulq+2vQ8\nd0XCiVygqpuaucZ7hvQ993+GD7i42d/ZKe/dWl1TV3yypn0W95q9yVvSSVYLY4lxbS7++29nxC5b\nC0+V11jnvEvRupRZ9CG1ZkhJMrzUjSzpTlZgAzXRjdSQwLu0Od0YEWkTwPyji34tA8upw/FZGHMi\nONeq6sr9jKGPD+M6H+YwB68oivs4MPtoS7B2kip8PstCTvQg4qGvqupnLR1XcxiGdWv3/DG/mDDq\nB00EImvqSnn9/TtqXS/Rvv6Phoj0T7XCTwCq44m6J4G/f5lu2pFMhtjTr6DvScc2o/m2Wit5kEWb\nw+oU1t8uIgbJmaqEqm5Lbcv3YSy5hN7ZJ5JvWGKQUI9pFHmvsa48jjdQVUv3N87U+87d18+ViHQA\nLgDakGwkeP9o+2xalv8Xfjvj5pGDv56RnzeYSLSK5eveSawrmq2emxDDtDxBqlzPuVfVfdcy/Z+c\ndvydDW48qmq28p+Z90QA49LTH/GbZtMyp6kzf1Gzo2Ltd1X15SY79xIRmWhbwTdPOfa2UF5uLyAp\nXrt0zVR38arXix031qfx7LKI3N63+6T/G3fMVU2CUlVefvtHddF49dj6N9Vpvpy0TtYRgIv+uYp4\n+zuZ+9N+2jaRjc/6nDJ18JbHcM8+1AkWgKr+V0Q6rKHq3DVUdQM2kZyNavYHMSUrcZWIXF9JvAAo\ni6u731/8qXOu5itUD5CqY/p36q9VY5m+4R3b92/WDy8rIw/TtA3XS+QBRQAixsWW6X9uQK/Jvi4d\nhlrReA3L1r49uLxq440icvzRJi6bokuH3VgGdiSEg+Y23p6aEWkw42Bj3DyezhknS8Gu6TBbDE6n\n0NiitZkfU3w98PN9DU5ELgti3QP0NcANifVmBPcuVV2+h+PENH33mIZ9W0GnYW4okOPbvH1hLBqr\nLhGR046mAnnHif1cROZ/vOjZux03PsgQM67qBYf2O9/u3/M0sUwfZZXrOsxZ+PSvaupKrh3Y+wxf\n/QQLIDsrn14F4wMl5atoLsECyMvtE9hRsXa3Fmd7g88O/XrMkG/uSrAADMNkSN9zzG2lS9ps37H8\nMuDpRodNWVc0+56Rgy6zbath+dm20qW4XryCVloPeiST7i5sYVRVY+r+MoGXv4TyH85i+001JE4I\nqzMqpeNyuOKIpe6s/kpyeXFI6k77y46pVdUV+3pnLSIhEbk6U+yPMsX+1BbzlyKSfwDhpwFEZLTP\nzphqmnbYNO2IbQdX+azQUr8vc2bKZHivjYPr43pOUU1dcbPdDvFEGMeNWyQFHRGR9qZh/fX0CT8L\nDR9wsdWhXV8KO49k8vF3ZXTrPLq3ZQUeOIBLbLUorNhATbMzOxuoxsZosnzTHBZy2QnkNyvAewL5\nAR/mFfsam1/MX7Qj8JfvM7DfU0yUh5hgnU33c30Yn4jIMV92rIjxvZC/7c0XnPr7wImjf5Qxesg3\n7AtO+V3m8IEXd7dM/4dHQ8ddfVT137F43WjXTYTEMFeMGfot35C+54ht+RER2uf0YvL4n4ZEzL5d\nOw1vdpKiMH+U1EXKxPOavz+uqNoUZR8K6BsjIlY8ERnTLX9Ms/t7F56Q6bMzLmnm2lYBL703+75w\ndW3xzm1sLVnMzE//FEk4seuPttnJ1kB6JquVoEkrnb+11PgikhXEfMrGODefjFgNcTOMUyMi16rq\n6wdxnA5+zLm9aNNhIl0y/Jh8RumgOWy/QURO13p2P2n2HhE5y7ICLw0fcFGwW5exgirri2b3+XzV\n6/TrfjIl5atGlFVuuCulu7ZjX87teYmnV22Yfv2gPmfZAV9DRY0V6951TcP+r+smapJxGFcWdh5F\nbnaDlTFEhOEDL/Fv2DL36yLyo1Ymq3DARHAeeIP1J4/RDqEs+SJHiqvLv1hbF8b53d6cR8H27UY7\n1Y+JJt0h9hoRKfRh3HY3owLZqbgysDmDbkZIrYyXWfsEMHY3xxqWFbjnuBFXZ4QCbetvZ0DP04wN\nRXMzSyvWXAT8Y19iOhIQkQ6m6RvWs+C4Jvt8dgi/L0PiiaS4vuPEiCfC+H2ZmKaN35cJwNrNH9Kn\n20kNji2r3EBp+WoD2KO8zZeFtxe7m32M40S/U169afMb0+/6ccDfBseJmZ66JQkneoOqd9C+59N8\nQTrJSoOISADz/9k77+iqyuzvf/dpt6WREJJAElJJQg29iiBdsWN3xjLq/OxlxrGXGR1HZ/S1zjg6\nMyo6drGDICAgSG8BQkkjCQmppN922n7/yA2G5AZIIBCc+1kri5VTnrOfw809++xn7+9eloneQ6/B\nAKudZCszYy/qgl7Djg99TaKXnoxr2SDNPxt9Yy+nlMOx9MGIsI7gSLyGnd8QUUxPS/zu6RCRLIrK\ne9PG/c4eFZF2ePvAlDkIC4nD+qy3cOG0vzm2Zn8Yn1e8+i0AF3RmfGbeK0vWfyxa9cf/GzPkGkdM\nnyHweOuxt2Cpvnf/sgbDUO9uOVYSlcGR4al+I2Z2axgkyWoaqhYF4BclzcHMKy0kvvwoNtw9lxNs\nCQimcrjwLQqdjdAWAZh/POMQsGIbqubNQf92UeStqNJNdNysvYPxrh6HKCGU2vtmExBDHyFvKBH1\nY2Z/kZUoMML7hA/wO3Zi7ITgusbS2fgFOlkAQmTJqomi7DdSFxYSi+zchXrBgTVScdlmiKIFpqkj\nKW4iDENDXPQIbNuzAA1NFUiJPwuiqKC4bDNn7f3cbZj69cdbwUfNJdzTADoLYBXNaRw7LYpjS1HZ\nptH+nMC84h+bVM35mb/xfOknjxLR0073oTQAbgC5gQhW9xFwsgIAwLRgKAN/g4FWwSfLQETIQC/c\nyBn2t7D3/wEYcqIXIaIYBcKUC5DQLllhCEUgjoOkAjRcCOCTE73W/xjTQxxRYmsHq4W+fQZDkmyo\nqsnBsPRL5JzCFTOIqA8zV3bmArrhvb/JVbl19ZY3HtV0d5ogiB6BxA8NQ/0jt+ptZxhacX3TQRV+\nIi6q5oKmuyUANV2YY5chonQ0J21b0dwjdFlLhdjJxMvGw0S06Evsv1cA0hkocUF/BcCi432IuWH8\n5RsUzk3nXvZECjm8PY/rsQQHVC+M44qItSCC+kTC5jf6JZOAIJZVL4xw+F++0pgNgdkEUfvomm54\nmdn8RRYyACjRNDcanZUIdvRpt9PrbXTWNhywpyWeg4unPw+rJQRNrmps3/MZDpRvw9wpT2FY+kXY\nnb8Ey9Y9D93wwjSNek33zGTmTcdjABHFSJJ1uVUJjkuMHe/Qda+Rf2DNw4psX6bp7sc37nh3QWhQ\njD0iLBFAc7XvnvzFRnVtfhOAD482ti/fNqvztyVAZwk4WQGgQLhsCvo6BGofYc5Eb+gwBxBR5AlW\nNfUDcF0QZEPoINqdgV6OAjT4f20OcDSiQoJiOsyfs1t7oaR8O6J7ZyDI3ttb31SWBKASOFwFeIco\nykNN0yjRdPfrANa0dQp8v3+AY0QtT
Dbezi9e/YchqefDbjtSZHdP/hJDFOTvDUM7JYnvRCTJku1d\nWbJdlBw3UZJlu1RYusHp9tSX+7S9TnrjY2ZeA2DNCZy/nYiueRZb/5vCodwfwbYCNLj2o4E0mJcf\nK1G9LTp4x27UNp2HhKC2++pZRQNUBUBRB7ZUWxTHvgPlW4a0zf8x2URO4Qqnbng/7ow9ZwrM7JEl\n65sbdsy/derYe22i8POjsqR8Gw7VF4mJ/cZpIwddddiBDbL3xsQRv8WydX9FefVupPafgjFDrsWY\nIdcit2gltmR/tK4TDhbJknVJetLM1Mz0SyWfJqE0YuBl0oqNL8+oqsktUzX3jYvX/PnN0KC+CLJH\nUmXNPsEwtALd8F4Q0LrqOQScrAAQQBYFol/PRwBBBBlaJ3NBWiCiIBuk+QqEc5MRqjuhOX6PtbiA\nEzCd4o449iCcbvge/gE6RW5VbR6Y+XC/uxZMNlHfWIqa+iJkZlwKl6dOge8ei6J8qyRaX0hPmi5F\n9kqRG52VnJ236ALN8HxGRDd0ZQmBmQslyfLUwh+feHjkoCsdffsMgdfbiL37l6l5xT/WG4Z6x8mZ\n8rGRRMszvULjL5w+7vc2SWpe9clMvzRoV+63iTtzvvmBiNK6EtEioigAVxIQycBeAAuY2X2y7Gbm\nL4koag9qL9mD2jgAhWhW1u/KNT7JQ/3Lu/gQBlPE4Y0mMz5GrlcEfXw08VtVc923dtt/vlZkhy26\n90AQEbyqE5t2vufxqg3ZAH7ogk1nBLrhfbiyJnfIl8vuH5+WON1hVYLpQPlW58HKXYZAop6RPKtd\nxSgRIS1xOvYWLD2cj2WYOrJzFzWpmuv1Tlx+gizZE1s5WAAAsVk41Pb50t9dB/ADhqFG1tQXzqip\nL2wRI+1JfToDIOBkBQDggbF4HcqvmIbYdm0l8tEABpwAyroytg3i10MQPv56pFtbGlBXsAsvYwcU\nFjHZV1RYwS7sRI0IoMvaMT0BnwLzRXZI1xBgdUFfxMD8bpYtWKuqTmde8Y9Bqf3PPmJHbuEKWC2h\nYNPA9j0LGMA+Zi4goqGyZHth7pSnbK2WQyg1YYpj0Y9/nFffWLoSwDu+OQ0BMBpAE4DFfAxFel33\nPkNEWRt3vPuYbqhDBRLdDH7PMNTnWvSguhsisouCfPukEb+1tzhYvu0YnDpXzCteFd3o9EwH8H1n\nxlVIvE+G8OeRiORo2G27Udu4Hw1/96l4/3iy7PdFIk64EIaZXUQ091XsXDSK+0gjEGl1QsNylDRV\nwZ3rgXHnMc5fRkRXrNj48j8VyR5itQQb9Y0HLYIgfaXpnpt/ybk8zOwlolm67jl7x76vfiUIYpiq\nuVYBPF8U5BKbJdTveTZLGLxqE5gZNfWF2LzrQ5fbW7cOwMJjXZOIYgSSbpYl25VWS7C9urYAvXsl\nHfHyZLOGISQo2lvbcCCTmVcdz7gBTh8BMdIAIKLeCoTKS5FM0xF7+A+6jr34G7ahGu4qDRzVBQHD\nUSGQV76AiQ6xjRpEETfiFezAnzAGWajGR8hzuaHfp7P5RgfD9XiIKNwKcXUU7PFT0S9IgYBNqHRm\no0ZVYU7ho7e8ONFrPyFLtifjY0Yhod9YAIz9JetRfmgPZk54AJt2vo+DVdkeZmMCM2+TZdvbg1LO\n/dWwtIvaJduUVmThx82v79N012RZsn1FJAztGzmY3d56s6o2XyLgcd1Qn++uuZwMiGhUkL3P8ktm\nPB/ib/+2PZ/yzpxv/sTMT3ZizPNDoHz0GEbZI+jn95FsrsGr2NGkwhxwqpzIzkJEfSTQLRZI0xnc\n6IL+DpqTqP1KcxBRGIAUNHeKyPPJuQyHr4tEZ3P6fmlYFMfaMUN/Pd5f4nnW3i95V963pmFoJInK\nIdPU/5/Jxgsd3esWiIQLREH6MDF2vBDde6C1yV2NvKJViO49EOMzb0BrRZ3Pl97X2OSqns7MG0/+\n7AIcLwEx0gDHy9wEhLhWo8zxIw5iEIejASqycAgzEYflOGDXoA8B0Fkn4dwJiLG2dbAAoD8Fg5lx\nF1brNkgbXdCf7EoFIxFZBOAOBeLdGswoGUKlCvMVE/zKqa5StEF6exyiUq7FAKXFUR2HaMd6Lre/\ng71LiCi2G8Vll0iicn9IULQjO28hAEK/qGEYPeQaSJIVlTW5BrNxMTNvAwCBxBFREWl+tQL6RKRB\n0z1JkmRdOSBhasrwjHmy4MtJaXRWYclPf/6jIEiHTFN/u5vmcjLw6IZX8LeECgCq6tIBdCpp2w7p\nyWsx4AgHCwAGUTjGcbS0FuW3AXjsBGzuNnxO0dO+nw4hoiArxNdkCFeEw+JthKbYSS4EcPMvUV6l\nJfIsS7ZLASJNd32J5uXfdi3IWqNqrj9vyf7o4+jeA4+Qt6hrLMXu/EVkGCokybZb190PMvPxRLBi\nRVH5cNbEh49QkM9Imoll6/6GffuXIz1pBgCgqiYfHm+jF0CP7BAR4EgCTlYAAIiMR5ByFVKxF7Uo\nQhOiYceVSEUIKcjiar0Jjb27MC4J8POE8yGCGhmY7GRte1eMJiLFCnFZAoJHXIJkeywcKIEz9gsU\nPLkfDRf6kpuP+mV5siCiaAXCzHlIVto+1MdRNC3kIkcpnHMAdKoPZSfYoBveEkmypM6c+NARXm3W\n3i90AJuZeXGrzRVOl3+5LKerGoIgSHZrWOKIgVfIrecT7IjE5JG32Zetf/5pIprfHVV6J4lsw/DW\nV1TvCYqOHHjEDk33oqDkJwOd1CpyQ88chgi/+0Yg0roVVTPRQ52s44GIRCvE5cPQe+hVSLWGkGI1\nmbEZlRlvYc/3RDTleBO3zwSIKFoSLWtCg/v2SYmfHExEyCtePaeuoeRZX2eCko7OZeaFkmh57stl\n9z+UmjBFCgvuJ1ceykVx2WaMGnwtkuMmiiXl2wavy3rrE0GQ7jjWC4koyLcmx00S2yrIy5IVIwZe\njnXb/oO0xOmorMnByo2vuAxTu+dUdAMJcOIEFN8DAMCePaj1EBEyKByzKR5TqB9CSIGXDRyE0wIg\ntwvjLl2LcrfpZ5WxlJvQCM0AcCJ9sn7dF47hv8dwewqFwkoSUigUv0OmvR8cmQCuO4GxO0taNOxe\nG/l/bxmMCBuAwd11cWZmTffM3bb7k6qla//qLDiwFvnFa7B49dNN2XmLDmq6e17r41XN+fqu3IVN\npqm3Gys7fzFCHNGUEj/Z6i8KFBmeCoGEMADx1Ix/9czTSPP98N61ctOrrvKq3WhZ6W50VmHZ2r+C\nGbIsWZcKwrEV1ImoFxGNEkC6C/6fa27oYJzxFV2zwmAZeDMGWkN8uloCEcZQFK5Cqt0OqVPyET0d\nWbJ9lp40s/+5k58MTkuchgEJ5+DcyU8ED049r68s2Y7Z7ko3vE/phnfoquHwaQAAIABJREFUvv3L\nX9y86wNVEERceM6zSO0/GYIgIr7vKMya+LBdIOFVIrIfbSxJsoyLiRzkV5OrT/gANLoq8cniO5
uW\nr3uhTNWcvzFN4/2uzjvAqSXgZAUAgOwquJVNbdIsmBlfYb8mgtZ0sdx9nQf67v8iR9VbBTxq2Yu/\nY5eTgaePladwNOyQ7jofCe2kJwQinI8Ehx3SXV0duwvU1MErdZS2VgW3F92sD8XMebqhppRV7bp3\n4873lmza9f6iypqc23XDm+5HbPJrl6dm/dK1f3VX1xaAmdHorML6rHdQXZOHPhFpMM2Og1TMLEii\n5VUiwQtAU2R7LhH9ivx5ZR1ARPFE9IAoyi8Q0d2CID9jUYIWy7LtHSKa1HYsIhKI6CqLErRRke1l\nFiVoPRFd3lH7J2bzc1Vz/eqHjS+Vfrr4Tu/nS3+HhaseQ1TvdFwx5x/i1LH3xlktIW+Kouw3+ZuI\nQmwkvS9DONgHtmUyBHGVHzkpZsZylDQ50aOXT4+JDeI156BfkD8pl3GIhgfGxK62ZuppEFE6EQ0f\nln6x1PYjO3jABZIgSOlElHmscZg5zzT1JXZruGd85o3tZEvCQmIRHpZgAJh9tHFM06hyeWr9fnl4\n1EYApHvVhvG64Yk1TeOjY9kVoOdwwsuFRDQbwEsARAD/Zubn/BzzCoA5AFwArm/JCwlw+iGiWAXC\nxjHoI72PHOzgaoxAJLwwsAKlRjGaSrwwru3K2MzMRDRrA8oXbED5uMEcYbqgmzmokwl4UYN5Qn3s\nTHCfozXlNcGRJzJ+J9mlwazYjuqk4TjystXsxk4cEgEs6G4jmLkJwL98P0c7ziCicytrct5Ztvav\nV2m6m2TZjuT4SZh11iOoqSvC5uwPMWTAXLT1Ycqr94DBloEp585JT5wmKkoQyqp2pWzc8e7rbk/9\nUAD3H+3aRESiqDwticp9ibETKcje21JauQO19cXISJ4FUZDNPQVL5um6ZyERXe2zVZAl6wKHLWL6\nsPRLgsKCY1HbcCA6a++Ct1ye2nlEdKW/pUtm83MiWmiSfmj00GstcVHD0VJtGN07A7MmPmL/ZsXD\nzxLR275712KjZIW4agQiM65AiiWYFGspN+FZbEUIK5iEGEgkwMkaFqBALUFTKc7wylgBFGTr4JGg\nQIAAsAlY0KwSfqYzLDI81Witf9WCQAKie2eYRQc3ZgI4nlSGXg67/2VkAAiy9RarkNtO7qE1mu7+\n9+78xecN6D81qG1j6b0F3xuSKH+savqu47AlQA/jhJws3zLBawCmo1kxeBMRfd1aMI+IzgWQwsyp\nRDQWwOsAxp3IdQO0h4j6SqDbLBDPB6C7ob9vAm8xc93RzrNAfGoq+oVdTiliI6tYgzKswkEIIJTD\npXlh3MvMFV21y9eT8RwiytiEyvFo/oJe7Nt+Qgig3P1oiIqBo92+AjRAAOWd6DWOByKyArC7Yfzm\nDWQvvJJT7eMRDRkCduAQ3sVeF4MfZ+ZDp8Ke44WZNSL6Pjpy0AWTR90aRCQeThKPiRwIRbJhfdb8\n5uR5sXn5qLahBKu3/MOMjx5JmekXH14m7NdnKOac9YTj82W/u52I3mDmo9x7utZmCb17zuQnrDZL\nc/HfkAHno6QiC2u3/QsXnPMXIT1xumPJT8/MrakvuhPNL3FXOey9Z5w3+UmH6LMlNDgGsdGZjm9X\nPnau1lQ2Dx13CpjqsEegomwHivevhCO4L1ISpyEspB9CgqIQEZaoV9bkzAbQuhXJ+b1hS7kRGZaW\nyE4/CsL9PBxvIBufIM8MYaWxFl6LBPrWA+O3PhXtMxYX9O83o3LaRMS0+4PaizpIEA7qME6JkOwp\noM7l9h85AgCXu8YEcNTvzlbsqqrJlw1TR1unjdlEefVu4NhFQ8tVtenHZev+NmX0kGvt4aHx8KiN\n2Fuw1Nydv7jeMNRHjtOWAD2ME5JwIKLxAJ5g5tm+3x8EAGZ+ttUx/wSwgpk/9v2+F8DZbR/cAQmH\nrkNEYxQIy8YjWhmDPhYNjNU46NqJQ40qzHHMXNjBeSSCXH/FBGsvap8OsIJL8BkKvnCxdkl3z6Er\nENEFEbB+8EeMdtjp57c/F+t4Ehud1fBcw8zHzK04geunyJLted1QzxVIYCJqMkzjIysjwwNjMgCy\nQtrnhv4YM3d7FKsrEFG6LNm3Xj7nNVvbB4TLXYuvfnjAZGYtMjzV61UbUd94UGBm65Xnvi611p9q\nYdPO/2r79i9/zjD1DhPAFdmef/boO5L69mnfqWnN1jfQKyQOg1LORVVNLpau/Wu5pntiLErQ1gnD\nbxoeHzOy3Tn7S9djQ9b89V61abyf+ZECYakN8rRZFIcotiGfmrCKyjF40DykJc3Aqk2vNRUd3Hgn\nM7/Tcp6D5C8uQ/JFZ1O/dtdzs4478KPBwHgABT3Nee4qRBSiQCi6ERlhYyjq8PYGVvEMtjir4b7T\nYD6jl0RbICJFFJXK2ZMeDY0ISzhiX11DKRauerzJMLU+xysAq8j2NelJM8YOz5h3xB9Rdt53xo59\nX+zVdM+QY0ngEJEsCNIDAon3MptBzCZEUfla0933d/QdHuD0ciokHPoBaJ2rU4L2Hd39HRMLoMvR\nkQA/Q0SiAuGbWzAoeAT9vEw1FBH2RVxkWYjCj9H+/6QFwQBbQzsQcw+DBSKoK1WFp4pvmqDNfwQb\nrpvLCfZYOKgUTv4WhS43jHcBdFtXeSJKFkVl0+DU80LSEqcJiuxATX1R+MYd711f21C8lHXDCkB0\nsdajm10z815Ftm/cvOuD8WOG/OpwZSSzie17F3gBWq4bnjvKqnaNQLMY6QGrErJBkizt2rQAQJC9\njywIcpS/fUCz5AZBSIiJHOR3f2xUJgpLNwAAevdKgWFofYjIKknWuF4hcX7P6RUSB2azfweXvDYc\n1gmPYRRskAAChiMSU8xo/Gn3AkT0SkJZVbaANuXwBDg6WjqzQPQdgpJfioMFAMzcQERT38Ke7xdz\nsXUYegdXw+3dhEoG8IrpE6ftLnxyCrEAmvgEWngdD8ysCoJ429K1z/1rQuZv7LHRwwEilFZkYe22\nf7tMNu/ujMK+prsv25O/5Keyqt2Rqf3PDiISkF+8uulQ3f5G3fCefzwag7781KeJ6BkAIQBchuY6\nJdXRAbqPE3WyjjcM1tbT83seET3Z6teVzLyyCzb9rzErEjZbawerhZmIExeiaAgRpTJzu+pAZjZs\nJBXnoi4+Db3anb8btV4vjA3dY/aJ48v5usML49PPUXCPACSbQL4b+sto/vx0m9KuLNmeH5RybsiQ\nARccTlgKD+2PGRP+YP9i2f3TNd0z4WQqgHcnmu6+pODAmuUl5dtTUuLPcjCbyCte7dR0925Nd1/t\nU6vfDwBEZNN0t+B0H4LD1j4Ppaw626kbnqPlseggmJruERS5fcGVqjkPL016vA0AkQ6GKpBYUttw\noLe/Zr11DaUgEvwWZtghPXg5Umxtqz57kw0zzRis3vqWyWzuZOadrfe7oS/ZgqqJYxDVzshdqIEV\nMqkCChXFsU7TXLcx8+6jzPmMgZt7J/YrROOFRWgcy
s1LZp8cTc7gRCEiSRTkR0VRuUcSFUnXvZJF\ncexQNdcd3Sm2aZrGB0RUu3b7v5/TDTUdAERBydN014PM3KkXNGYuI6KM6tq8SxuaDl4GkKhqzs8B\nfNwZZ803VmeWKgOcQohoCoApnTnnRJ2sUgCtXy/j0BypOtoxsfDf8R2dUV8OcJjUVITJ/nZIJKAf\nO9Q81KegAwkGL4y/vI/c5x/iEY7WD6JibsRqHDQ1mH/vHrNPDj5HaqXv55RARAqReF5a4vR2VW2i\nqCAjaZY9K+fLGwGcEU4WM9cQ0QhN95y9I+frOc131fwWwE9tHVVmdsuSdf6mnR/cMHn07VahVVJ8\nRfVelFXuAoAOy8uZ2VBk+/d5RT/OGZgym47cZyK36EcMHXABAGBPwRJDFOVPNE03iejFrL1f/KNf\nn6GO1onBhqEia9/nTlVzvujvel4YyQPgv/3JAA7F4qadugbtorb7TODtLFQ/vo2rMLzVC0wde/G+\nWIBRmTdRXPQIJbdo5eStez5dR0SjmTmno3mfSfgiKp/hyBy1bkOWrB+EhcSdNz7zRntYcD8Ypo7C\nknWj1++Yv8KndddtL3rM/B2A74goFAAZhtZl58YnfnzMJurHAxHFiYJ8uyRaZjHYrWrOdwD8l5ld\nJzp2gK7jC/ysbPmdiJ441jkn6mRtBpBKRAkADgK4AsBVbY75GsAdAD4ionFobtMQWCo8eZQfhFMD\n0K7voMmMKrhFAOUdnczAm9Vwj3sQ6y6bznHWSFiFvajzrEM56zB/FcgF8IuViGBR2ifcA4DNFkYC\niaeysvGE6Yyzqhve35dV7Rzx7YpHB2UkzwqyKEEoKd/u2V+6zjBM7SI+Rp9GTXc/sG3vZ5Nl2RaU\nFDcRoiDB7anDluyPIIkKInolYdueT409Bd/XtUr4fb/JVXnZoh//OHVY+sWOXiGxqK0/gO17P3c6\n3TXfo4OkdwlCQzU8kfFo/x5yCB4AvJaZ2/19+BzP6f9E9uJ4DpKHICK4HG5sF2oxMOU8JMY2p39l\nJM8i3fAG7cpd+AyAeW3H+aVBREkAzgcgo/klYtOJRIyJaKRFCTpvxvg/HO4xKQoSkuPPAgP2Tbve\nfwUdpzucNI71mW2LT/dqHoBENAcNPu3sGEcZ+2xRVBamxJ0lxcWMtOi6B3v3LxtaXZv/ABGN6+6l\n1AAnlxPuXUhEc/CzhMN/mPkvRPRbAGDmN3zHvIZmnRAngBvYT6fwQOJ71yAimwyh8mGMDOpPwUfs\n28yVeAt7CjwwUo4j6XK0BeLNEqivF8ZmHfwmMx/sVuPPUIiIJNFSMXPiQ5FtFZoBYM3WN92Fpeuf\nMQztqC1MTje+B8X1iuy4BeBQZt6g6e6/MfMx23UQkQRgriLbbyQSwjTds8I09TeO9zNDRCNk2f4G\nm8ZgixKkur0NdoEE3TA0kYhYFOXPNd3zADMXtzpHBHC1ItvvM9nsJ5BY7ItgfehPvgEAFBKfGYnI\n+26hI4UedTbxR2xqKoXzBmbuMGJDRBYAFwskPhEbPSJ9zJBr22khedUmfLL4Dp3ZtHRkx5kOEclW\niG8zcOko9CErRGETKlUVxm4PjDldzU0TRfn5QSnn3jM8Y147QVvD1PHRwt96DVOL60mOBRHNEQX5\nk8jwVESGpwTV1hc7D1btEpjNm0zTOKEoFhFZRUEuP2fcfaGt8xaZGVuyP9Ryi1YtUTXX+Sc8iQAn\nhePxWwINon8BCETzLBDnz0OybRT6kA4TP6HcWIhCjwpzOjOvP902/tIQBen34aH9/zhz4kP21lV2\nlTW5WPrTs07D1JJ7csSWiMIkybo2sldy/4zk2XabJRRlVdnmzpyvPYah3m2Y+r9PkR2xAHoBKERz\nYn0QADczt5ei79r4vSwQt4xGn35zkaBEwop8NOA/2K3WSlBBVM3Mm3zOZYctY6yW4OXjhl1/Tv++\nY9rtY2a89/X1JsA2PkVtnE41NpJejkfwTfdgqN3qSyswmfERctWfULbdxXqXok2yZPv38IHzfpOR\nNNPv/o+/u83pVZsG95SIOhGlSaKydcaEB+yR4amHt9c2lGDx6qdcmu6ediLft0R0TZ+ItH/OnvRI\nu8ISVXPj0yV3egxD7c//4w26ewqnorowQA/AZP6MiEoXoOCJj5B3FgGGCPpWhflUa82yACcPk40X\n65sOjv5i2e/PS0+aabPbwoWyyp3uooObTMPU5vVkBwsAJNHyt/4xo5MmDL/J0lJRGBGWIMTHjLR/\ns+LRV4locXcmO7fgu0br6zSe5PFriWjURlQ8uR4VN+ow7bJoMcPDEmhSynlBNmtYUFlVdvzO3G/O\nE0X5d4ah/dPfOJrmWVVSnjWhf98x7Zbly6t3Q5asReoZWglGRJEAhqFZLHpjWweXiEJkCDf/FoNs\n1lZ5mwIRruRUZSMqB/ly0jb5jj9blmx3E4mpgLlf1VyvAFjuL5quG56fDpRtvSIjaWY7p6KuoRS6\noeroIIf3dCCJlt9lJM9SWjtYANArJBaZGZfatu9Z8Aial1M7BRGNkCTrvYpsn6WqzqD9pevRP2Y0\nBOHnAJ8i2xBki/DWN5UlAjgjnCxf9HmeIjvuBBDDzLs13fX/mHnF6bbtVBFoq/MLgZnXuVibrbHh\nUNkIcbN+dcDB6j6Y2dB0z5Vub/30nTlfv7Vp53+/Lizd8JRhakl8ZCPmHgcRWUw2rsnMuPSwg9VC\nSFA0kuImkEDiTZ0Yb5pFcfwoirJLkix1smT9ly9Ps0fAzDVeNu7SYQaLovLP2JiR+syJD8ux0ZmI\nCEvA4NTzhLln/9FOJLxIRH6lIEzW3ygs3WCUVx/5J+VVm7Bhx3ynpnueOiWTOYkQkV2R7f8VBflA\nRFjiZ8GOPosl0VIhCGLbDg+ZUbCp/rT0BCKMQqQM4GwiIlmyvmSzhC4cnjHvoqlj7x48POOyuXZr\n+JdS82fC3xv/x1W1eVrRwSODiLruxbqst1xgfplPoPXWyUYQxGlx0SP8BifiokcQszmx9TYiGiBL\n1letluCNFsWxiIgupDa9PkVRvk2WbKuHpM69aurYeyMHD5iLPflL8MOGF2EYP0/dNHW4PHUKzhwH\nS5Yl63dhwbH/GTvsuonTx9+fNHzgvPOslpBvZcn6zOm271QRiGQFCNBFfG/m630/ZxIRoiDDYfPf\n6aN3ryRL4cGN6cczkChI/2dRgl4YOegqe2xUJjTdZcspXHn93v1LLyOi8T3M0ZfAfN3wjHntnMtg\nRxRS4icLuUUrbwbwaNsTmbmCiC5Yvu75r2IiB1FMn8GOJle1lle0UmfmfwM/C5meCTQ7RLZvYiIH\nTxifeYPFogRZAKC6tgDL17/wBpHgZTZb2gSpKswOl0S8MEwAKoDZiuy46fypf3a0FIVERaRRUuwE\nx5c/PHA1mFOtlmAyDG23bnhfY+ZdzOwioulrtr6xbN/+ZXJczMggj7feyClc4TVM/VvD1E6K80rN\nPRcvBZCE
nxPVG7owkkfT/Qv7a7oHIDrsFQmCeI0kWv6VljhdiokcKDvdNcjOW3SW21O7iYhmM7Pa\nvPxofX7ulKdtwY6fa2US+o7Fio0vITvvOwxNa662zS9ewwDtYeb9nbf7dEC3hoXETZw18SG74BM6\n7t0riRL6jrF/9cNDdxPRt8y89jQb2e0EnKwAAf73qDVMjdzeBrS0tTliZ0OJahhqwbEGIaLeoiC/\neO7kJ6zBjmb9UaslGCMHXSE5bOEh2/YseAvNqug9hV5EghRk96+v2zssUdlfsm5gRycz8w9E1K+k\nYvs15Yf2jjRNvcI09XeZeV+3Wdx9jBZFZezkUbdahVZK/717JWHyqNvtKze+9AIRfeZ7kdhSB69Z\nzI2Ib1Nc42Edm1EFAN8osv3fw9IvdrSuumVmbN/7OQiCbUjahZPDgvqiuq5g/N6Cpb+WROUx3VBf\nYOatRBRbXr3niuq6/ZPYNGoNU/sczbIzJ1xIQEQzRUH+rHd4Cvr0SgmqaSh2lVftflUQxJtN0+hQ\nbsQfmu5+L6fwh8djIge2a5qaW7RCZTY/9F0zXhSVf507+QlbWEjs4WOS4yYGLV//wrjKQzkPA3hS\nEi23pyVOk1o7WAAgCCKGZ8zDD+tfRHL8ROQWrTKy8xa5DEO9oUs34TQgS9b7Rgy83C606SRhtYRg\nSOpca9a+L+8B8It3sgLLhQEC9ECIyEJEV4mi8pJAwhNElHayxmZmtyhIn+/K+abdMozLXYO8olWm\naer/OY6hroyLHsEtDlZrUvtPIWYj05fY3lPQTTYUt8e/FFJtwwHDMLSjOpfM3MDMr2ua+ybD0B45\nQx0sEAkXpcZPtrV9AALNjbOJxAgAyUCzbpYBfuAl7HAV888pc7XsxUvIclGzWOl+Zk6L7JV8xFiF\nBzegrHo3LjjnGQxJnYu4mBEYnjFPuuCcZ2yiqPzJ188WzOxi5rd13fOiIMhDBRJXSqKlWBItlaIo\nP9B2ie3450kpoqh8MX3CH4JnTXwoePjAy2jauN85zj37jzZJtLzpaw133DCbb5RWZNVn7f1C1/Xm\nZg6GoWFPwfdmbtEqp2GozwOAKMi/TYk7S2jtYAGAIEgYNfhqGxHdRUSCKMhDInsl+9U5DA/tD7en\nDp8vvd+5J3/xB4ahjmDmY/VA7DHohrdveKj/RgzhYYkCkXBc0fIznUAkK0CAHgYRDRNFZVl4aH9L\nXPSIYLenTs8r/vEBWbZ9oOueW06GTICme+7LLVo52aM2RmYkzbTYrGEoq8rG1t0fu5jNp5g5/5h2\ngvqGhfRr90YPAKIow2YN8zY6K6LQXqD4dDHPbu2l78r9Vho95Mi0I5enDjmFK0XDVP/VejsROQDY\nARz6JckzEEgRRcXvSzYRQRQkE/hZXExn802JBDyDLc+GsUWUIXAl3IoAetML4/7m84SKBmdFv9aO\nxb79y5GZdjHaqvs7bBEYMuB86459X/0OwOW+6w4WRWVdZvrFjpT+Z5MsWeVDdYXWDVnvPF7fdHAo\ngGuOe35EMkC3ypLtadPU7eu2v4WU+MnISJoBUVTQKyQWwzPm2bbt+exRAOcd77i+QorR2XmL52fn\nLZzgsPX2Oj01CkHIMgz1+pZiEVFURvSJSGufxIbmNlAmm0EAgkzWixqc5Sb8BDwanVUgQXCZph6s\nalrPkAHoBKKoHKpvPBjtT+amoekgA1x0Gsw65QQiWQEC9CCIyC6Kyg8Tht8UMeesx4IHp56H0UOu\nkS6d+ZItxBF1pSBIvzsZ12HmCt3wDis6uPG5pWufK/lq+QN1m3e9v8rjbbhIN9Rnjz0CwOCcikM5\nTf72qZoLLneNBUCxv/2dhYjsRHQ1ET3o+9evc3c0FNk+JS1xunSwcifWbHkD1bUFcLprkF+8GotX\nPw1mUwdQ77veUDvJy0RQrQKh2AKxXCLh90T0i/jONNlYVlDyU5M/CZ/a+mJoulsDkNd6u87mmyrM\nqEq4Z5XCeYEGM8rD+r0t1Yiq5nxlZ85XTtM0Dp9T11CCPhED/NoQFZEmEAmZLb/Lkv3lEQMvd2Qk\nzyJZai7ijAhLwIyJD9kFQb6IiNp3BwdARELrSBcRSbJkXRQZnvKXqWPvDb5s9quYOPxmVB7ah2Xr\nnodhNBeBxsWMIGZjor8xO7jOAFmyfQ5QgW64pxKJ5fVNB1/RdU+GqjnHt45qmqZe0uSq9OuUuz11\nAMME4NJ0zz935y92q1p7IfedOV+rAonvnIjY6+nENPR/7Nj3pbut+Zruxc6cb1yq5nr1NJl2SvlF\nfGEE6NlQM5c4SN5gIbHWTnKeSHSfTwwzwJFc2Sc8VUnsN+6IRGNFtmHC8JscAoldXjppCzPXGIb2\nhKq54jTd08urOqcw89JODPFp5aF9qDh05IoZM2PHvi81QZC+PxkikkQ0VxTkiqiI9H8OTJ7zVJ/w\ntH+JglxHJHxMRBM7qFprh2kaDaap8+yzHkdIUBR+2voGFq16EoWlGzBq8FVgmCYANxFlKhB+ugRJ\n57yGyfI/aYr1DxgeGYugJ60Q3zrR+ZxOfH+LEwGMcrlr3Vl7P9dbB+g83gb8uOV1JzP/2V9VHzNr\nzLyemX9k5rZyGx80NJVv+n7ts66KQ/ugaW7Isg1Nrmq/tjS5DgFoTugiohDDVCelxJ/d7v9SlixI\nS5xmEUXlujZzGa7I9sUEUgHSFNmxg4guAHBFkL3P+FkTH7JH906HItsRGZ6CqWPvgSgqyClaCQDQ\nNA8A4bhkN4joHEm0ZjPMi+3WMFmR7RTdOyMhyB55nyRa7mh7vG5439yTv8Sjau3bFmbnfacLovQJ\nM+vMvF7XvR8sXPWE90DZVqiaEzX1xVi9+XVvYen6g7rhffx47OuJmKy/UHFo777l6/7mrji0D25P\nHQ6UbcWiVU84fX0dO/Ndc8YSECMN0K0QEVkgvhYM+bp5SHakIBRVcGMhitx5qM/1wJjAzM7TbWdP\nQZHt744YePmv0hKn+d3/4cLfujXdPeBUaFgdD0Q0QxSVLwckTJXio0cqqu7GvoKlzsqa3Erd8I47\nUdFEIhoiiZb1MyY8aI8M/znfp6omF8vWvwCRJJdhqlma7pl9rGoxIjrLbu313cUzXnCIbXKRcotW\nYUv2hz95VeckB8mrL0XyxKnU74jvIw/ruB9rXU7o43tibgw197u7UxSVuQA0TXe/z2z+i5lrffsj\nrRCX2CCljkWUrQGqsUmslSTJyrHRI3Sv2qSXVmwXiYTXdMP7h65EUIhIIRJul0TLPYahRhOJnn5R\nQ+1nj75Tau0Lm6aBRT8+6aypL7qFmT8gor6yZMu76rw3/EYocwpXYEv2Rx+qmutq33UmiaKyZOTA\nK2zJcZNIFBWUlG/D+h3zXYahVU0Y/pv+/fuObjdOWdVubMn+CHOn/Ambdn2g5RaueFPTPe2cpDZz\nipJEy4GMpFnywJQ5sCgOON01yNr7OWrqi1HfWOYxTO/gtkvssmz7t
"<base64-encoded PNG plot output omitted: scatter plot of the two-moons dataset>\n",
 "text/plain": [
 ""
 ]
 },
 "metadata": {},
 "output_type":
"display_data" + } + ], + "source": [ + "# Generate a dataset and plot it\n", + "np.random.seed(0)\n", + "X, y = sklearn.datasets.make_moons(400, noise=0.25)\n", + "plt.scatter(X[:,0], X[:, 1], s=60, c=y, cmap=plt.cm.Spectral)" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XecleWZ//HPdeoUqkAEpaiggiJYEFExFiyoiUaNmmyM\ndU3bZDdr1jWbTTHZJJtfiomJ2SRqookxtmjsvaAoFlRARRSlqfQmZcqp1++P+wxTOGcozsMM8H2/\nXrxg5jznPPc5M8z5zn1fz3WbuyMiIiIiHSvW2QMQERER2REpZImIiIhEQCFLREREJAIKWSIiIiIR\nUMgSERERiYBCloiIiEgEPnLIMrM/mdlSM3u9xed2MbPHzGy2mT1qZr0+6nlEREREticdMZN1AzCx\nzee+CTzm7vsAT5Q+FhEREdlpWEc0IzWzPYD73P2A0sdvAUe7+1Iz6w9McvfhH/lEIiIiItuJqGqy\ndnX3paV/LwV2jeg8IiIiIl1SIuoTuLub2UbTZeU+JyIiItJVubttyfFRhaylZtbf3ZeY2QBgWbmD\ntnSwsvMysyvd/crOHod0ffpekS2h7xfZXFszORTVcuG9wAWlf18A3B3ReURERES6pI5o4XALMAXY\n18zeN7OLgJ8AJ5jZbOC40sciIiIiO42PvFzo7p+tcNPxH/WxRVqY1NkDkO3GpM4egGxXJnX2AGTH\n1SEtHLbqxGaumiwRERHZHmxNbtG2OiIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIB\nhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQioJAlIiIi\nEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERERCKgkCUi\nIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQ\nJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERE\nRCKgkCUiIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJE\nREREIqCQJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQU\nskREREQioJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEUhE+eBmNh9YCxSAnLuP\njfJ8IiIiIl1FpCELcOAYd18V8XlEREREupRtsVxo2+AcIiIiIl1K1CHLgcfN7GUzuzTic4mIiIh0\nGVEvFx7p7ovNrB/wmJm95e6Tm240sytbHDvJ3SdFPB4RERGRTTKzY4BjPtJjuHuHDGaTJzL7HrDe\n3X9R+tjdXUuJIrJTMrODIP0tyJ8KxRSk5kHmZ8AN7p7r7PGJSGtbk1siC1lmVgPE3X2dmdUCjwLf\nd/dHS7crZInITsks9mlI/hmOroJRMagC3gOeqoPlr0DmRHfPdPY4RaRZVwtZewL/KH2YAG529/9t\ncbtClojsdMxsACTmwD9XQ/82txaAvzXAgqvcc9/ujPGJSHldKmRt8sQKWSKyEzJLXAmjroDTq8of\nsRy4dg3kPubu2W04NBFpx9bkFnV8FxHZplITYUSFgAXQD6iKAcO21YhEJBoKWSIi25aH7jbtHmJs\n+iAR6eIUskREtqnsAzCzofLtS4FMDnh3W41IRKKhkCUisk0VroU3HT4oc1seeKge/Gq1cRDZ/qnw\nXURkGzOzT0DyNjg8BQcmQguHBcCkOvjwOch8QiFLpGvR1YUiItsJMxsBqf8APzM0I02+A40/A251\n90Jnj09EWlPIEhEREYmAWjiIiIiIdBEKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIi\nIhIBhSwR6XRmFjOzeGePQ0SkIylkiUinMbOTzaqfA3JAzqxqtpldYmb62SQi2z01IxWRTmGW/DZU\n/RecUAP7AXFgDvB4HXz4OGTOUudzEekq1PFdRLYLZjYWqp+Cr9RA9za35oA/1cHib7j7HzpjfCIi\nbanju4hsJ9KXw5FVGwcsgCRwQi2krzAz/SImItsthSwR6QxHwj7t/PzZE8gNAmq21YBERDqaQpaI\ndIYCFNu5uQi40f5BIiJdmkKWiHQCfxjezFe+fTaQnuXuDdtsSCIiHUwhS0Q6QfYqeCEHy8vc1gA8\nWgeN/7OtRyUi0pF0daGIdAqz+Och8Xs4IgUjE5AA3nF4uh6y10H2Mu+sH1AiIm2ohYOIbFfM7ABI\nfQM4BTwO8anQ+FN3f7KzxyYi0pJCloiIiEgE1CdLREREpItQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIJDp7ACIi25KZ7QPsA6wHprh7tpOHJCI7KIUskR2cmSWBT0H8MPAcFB8B\nnt7Zuqmb2ShI/xHS+0P/LNQbrMYs+WPI/3Rnez1EJHpqRiqyAzOzoyB5N/RLwojukHeYXgcNiyB7\nkrvP7+wxbgtmNhKSU2BiNxhtzb9fLgPuqIM1f3TP/FtnjlFEujZ1fBeRDcxsf0i+COfWwrAWtzjw\nfAGeWg65fd19bWeNcWuZWS1wKtAHeB94xN1zlY+vfgaOGw9jy/zMqQeuboDMge4+O6Ihi8h2Th3f\nRaSF9JXw8erWAQvAgCPisFd3sAu3/bi2nplZ0uy/E7BsCFx/IPyiP9ychKUxs89VuM8gKBwKB1X4\n4VgDHJKA5JcjHLqI7IRUkyWyAzKzBMROh0Pa+UVqbC0s+BLw6202sI8oCT/oAf9+HtT0bv509ULg\nr3B
tzKxYdL+lzd32hF0ykKyq/Mi7J2Ha/lGMWUR2XprJEtkx1YJ5mKUpJwcsBhhqVvOCWfr/zGx4\n26PMLG5mp5lVP2pW86ZZ9RNmdlapmH6bMrN+DpdfCLW929y2O/A5qEnAb8ws3ubmLNTVhGXSStY5\nFFd26IBFZKenmSyRHdN6IA+rU9A2kqwE/gr0Ak5JQfVhsOBgmHqhWepqyH3L3d3MukPqceixHxzZ\nLZQ/rR4BU8bC6rlmdoy7r96Gz+mzw6HYvcKNA4FekFoOE4BHAcysByRvgGICFgB7lLmnAy/VQeaG\nSEYtIjstzWSJ7IDcvQCxG+H5NsXgOULAOhK4ABgF7A0cn4R/rYZuX4PYxeHY9E0wfBR8pRscBAwG\nRgNf6gajhkP6jm31fABiMHhXqG7vmF3Dz7SBzZ9J/QRG7AkTDe4BPmxzDwceycH6ecDjHTxkEdnJ\nKWSJ7LByP4Rpa+C5AuRLn3uTMIM1pszxtcCnaiHxfTMbDMWT4NSqjX9MGDAxBRxhZvtGN34ws6Fm\ndoSZ7VGEJasg097xK6FA6MuAmdVA8QI4Lh3C5GHA7wlh6xVgMnA18OpqyE4Ajqwyu7vKbGG12fyU\n2VVmtmeUz09EdmxaLhT5iMwsBcTcvbGzx9KSuy82s7HwzC3wzCjYMw9LauDYtjVLLQwGkj0hexHs\nU4R0heMSwMgYvHIq8HZHj93MJkDVryA9FHpkYU0abPZrZOxEyk9nLQOWh38+VvrUUKjNh1AJMA4Y\nCUwHPgBSwKHA5HlJ+FYKLj0KavYCywGvw7+8Al80s3Pc/YGOfo4isuNTyBLZSmZ2BlR9B2x0+Lhq\nAWR+AvwxLNd1PnefB4wLRe1vjYGqb0NVO7NPBqQLUFcLVe2EMYB0Amjnir2tY2anQ/pvcFoN7AvE\nq8NM3MxRBe7P/5Fc4yVQ1TJorQZuDg2v/tvdm2a7cpCPhyXBpu4N3YDxLe75JkCvbnDppVDb8jKB\n3SE1ElJ/htvMbIS7v9/Rz1VEdmwKWSJbwSz1Y+jxr3BSLQwnvInP2xMevwpWnmpmZxIqxfciFKHP\n7MxtW9z9LeAts9QYmL8X7Fvh6sA6YG0aeBre/TJ4ujmgtPVOPWFaqMOEWcHEn+G8GhjU4pYEoR6s\nJrGC27M/J9e4H/guUL0Y6uaGNc3v5dyvaXGn2ZCrg4W1rcq0WpleF6dxl5PbBKwmA8NZE9PhK8B/\ndciTFJGdhmqyRLZQaauaf4Mv1sL+QJzwX2kocEkt7HICxKdBYgH0fRi6PQ+p983iF3bmuIPcb+GV\nAqyrcPuUPMTvAx6EhpWlmZ4y3gE+bAAe6eABnga7xloHrJaGAT0KBTj/dfjm0/CD2fD1PPTPuf+8\n5ZHuXoT8j+G++vKlXLOBuQWgZ9t2rS2NClN2Z27d0xGRnZlmskS2WPpyOLo6FIq3lQBOqIZ7D4Av\nA+uqQv3Pqm4w/f/MkkPcc9+PcnRmNgA4AMgCL7p7Q9Nt7v6OWeqncP1/wOk1sCdhpqqOELBeWgm5\nr5daOJwDdz8Ba2tDt/QqQliZ4fBYA+TOjmBZdDjsWe6FbXp2wJAkrOzv7q2aqJqZAXF3zzd/tvhr\nWD0arjkHxtfAEINGYFoDzMxD/jNxuKu93zYTrf4SEdl8+sEhssV8HAxrZ/+qPYG1wG2EUuy9gCKQ\nr4bYd83sHuA1YAIkz4JYLWRfAr/J3dv2GNhsZrYbpK+DxHGwawZyBqtiZqnflXpf5QHcs98zi82B\n2/4HYn0hnYf1KYjfA7nL3H1ROM5fMrPDYdJP4PHjoSoHmSQknobcN9192taOtR11UJejcsU9UJcn\nLME2Pe9Rafh2DE4vQjJttiIPvy7C1e6+zswugexf4MnLwEYBDZD7GxT+AKxwWL8QqnavcLZ3oFCE\n5zrwOYrITkIbRItsIbOqxXBJf/hYmVszwIuE9gDjCf2omn6XyRJaMU1fA74cuveHg2shbTCnHt41\nKFzqXrx5y8dku0JyOozrC0cmmuvRVwH31sOiJyB7esu6sNLMzzBCW/j57r6mncfvCfQDVkbZgNTM\n9oLkTLi8Klz911YD8ItGyA9x92VmNjEJdx4NVQdBrIbQx/4ZaJgL72dh3KbGGzf71hD49uehuu2M\n1jrgt1DfCEe6e4fWn4nI9mVrcotCluwUzGwkpP4FEiOhuBoabwDua720VPG+NcDJhEL29yF5Dhz+\neTiuzdV364A/E65mGwKcVubRHPgLkC7CubHWReVLgRsaoPGT7v7Elj2/1O/gwEvg1DIF7Tngt+vh\nw7Pc/dEtedzOYFZ1Nww7Cc6sCvVuTfLArQ3w/m3ujReZWY8ELDofage3eQwH7ofsG3BXo/tn2z+f\npdLwxAA4eALUDCQ023oLeATqG+DHOfcfdeiTFJHtjkKWSBthtib1W4hdCGOTsHsirDRNXQerlkH2\n403LY+Xvm7wC/NswoAi947CkAKsdij3gi7Sezfozoc/UK4Ru6v0qjGoO8ATwhTK3vQ488LJ7w6Fb\n8BxTkFgNX61p7gnV1ssOjz/i3nDy5j5uZzGzWkg/AOlDYFwt9DZYUYQXGyD7FGQ/7e4ZM/vK3vDT\nz5UvjqMO+CU05mGgu7e7L6GZpWLwrwm4rAB9HWIpmNYIP3D3+yJ5oiKyXdma3KKaLNnBJS6HXufD\nRdWtW1ge3B2eroYpT5rZeRA7E+LdIfcacKu7r4PkldD9G/C52jCJ1eQD4GaMPwCH4xxA6NS0DPgn\n4Gmgbztj6keo2SpnBHDvAWa2q7sv3cwn2S/M+FQKWAC7GfhhZjbU3eds5uN2CnevM7NjITMenv4i\nxAZCYR5kfwdMbVryrILj9qsQsCDc0A8yi0Pvhyc3cc4s8HMz+wXQHcg3uNd33LMSkZ2RQpbssMws\nCcn/grNqN+4RbsDYBLw4DGqehYOTUBODuXUw/1dmdhkkroAL09CjzX0HAv9Emr8wnCm8y0tkKZBl\nNJAk1EOtoXLoWUMogyonAaSzkOtJWD/cHOsgnwzLghXaX1FHNfQqwBtVZvdl4PMtmnZ2OaUgNbn0\np5JCcROPU2z112aft1ICFhHZIuqTJdsFM0uZ2blm1f8wq3nYLHFlqVVB2+NiZnayWdXfoWoqJGtC\nhU5bReAWYGQc/iMNx8fgCOC8Wri0BhK/hn3YOGA1GUQjPXkTYyR5DqNAbEP90AGEJcNKXiLspVdO\nHdCQJNRvY2a7mdk4M9u3VKi+EXdfC8kX4I2KZ0zyIseQsf+AqsHwiTRscXF9V9MI98+o3PCLNcDK\nUD3/8rYbVWtmNtos9j2z+E/N7Dwz6/AO+SLSdSlkSZdnZvtAcj7sfh2c8Ck49SQYfQUk5prFv9Li\nuL6QmgF97oLjz4JPjYYxqZAn7qf1hMYcQiH1KWz832BXYO8UDGinjQDA7mQ5hBcZyBQM421CoDsc\neBWY2eZ4J1x5+DaVQ9ZLBUjcAwwxq54EyTmhoWn1q5B+xyz2
qfL3a/w2PFwPS8rc9jJJ5jOakDjO\nCdN6p0S9ufM2cMciyM0qc0OB0E01Bje6+/oyh0TKzHYxq5oMNVPg8O/AMZfDkN9BYpmZVfgaisiO\nRoXv0qWFIujkXJjYDw5p8/2yCri+HurPBR6A1Cw4cN9wIWDLQzOEoLUXcEzpc3cRuopXqi9/FlgB\ntPd+eG1pDPuVPn6DcO6DgEXAHYQ8sw/hbf81IOeQew8G9IMzaqB36b45YGoRnloDuX+CxB1wQqkJ\naIoQEOcA/6iHxq+7F64r81qdC/EbYG+DfasgS4pXSLOaC8i1qhJ7GPIvwf8W3L/bzhPs8sxsTBKe\nGA3pgyFdS3jln4G6lTA9C8dv6427zSwOqWnhe/GkVOsrJD8AbqqHzKnuPmlbjktEPhpdXSg7HDO7\nFIZeBZ/vVv6IWcA9r0HjVdDtRriM8hO0qwmh6DJC3dJfCQGr0mTOGuAa4BuU3wN5OXA98O8tbl9C\nuMLwIOAwQv30DMLS4RKgRwbWzYDcBEh+F/wr0Dcf7r8oDbGXofEiSP8dThoFB5f5/7EC+F0WCruV\nu2LOzHqD/bKWxD/thSf3I88+tH6bB3gBeBKuz7hfWuEF2CJmFgNOr4LLC7CfQdbhrhxc5e6zO+Ic\n7Zx7twR8NQ7nF6FbHOY2wi+A2909F+W5K4znk/Cxm+HL3cvv+7jlV5CKSOfT1YWyA6q+GMZUCFhQ\nmiXaF5I/gEOovALeG+gJ/A6YUDpuCZVDVjVAAW7Owj+1uTLxQ+BvpcdpGcD6E9o63EpYFnSa32QT\nHu5XfBU4BHJXAFfCkvGlB3nD3eea2f4Q2xtGV/iP3BcYlorx9vyE2b/l3f/U8lZ3X21mf0iQO+tM\nSFb6abAEGnPwboWbt4iZxVNwWzeYeAzU7kFp8x24+EX4vJmd4+4PdMS5yim14PhW6U8XUPVFGFch\nYEHpCtKRZra7uy/cliMTkW1LIUu2ubCcQn/CGtgSb386tXvlK/EgzNEkc5Ab2P5xEK726ws8BDSs\ngQ9q4PBk+c7i0x1iz8LSt+Gq82FEEvrEQ5uGt4EDKb/U2As4G/gDYdYsTenN1uDddIzbvlRD7vws\nLMvCZwmFYbnSH4DhsFt+47mnlvZiGHO6LSH/m6RZn5z7z9oc8EIDrJ4D3cptfLweeAPMQ1fUjywG\nX+8LJ18ENS2vbTweksMheSPcXmodUa5gbAdk/UOgryQB1Gbhwz6AQpbIDkyF7xI5M+tjZqea2elm\n8R9Bcimk34HUXEi9Zxb7Smm5qen4YWap35ulVkN2BNxNWHIr15x9LZBJQLEA77czigLh/WwUcCkQ\nq42RjcW4idZX7BcJS3yP1UHmX90zX4TcMHjjfXiGUMeVAo6m8kzFLoQtdFJtjhlGkQn0J1lzKuyR\ngOf7wD2D4JYkvFNl9jhQA/XtruEbdfSmyMUhVf4gFPw389D06Qt3QP0cWl9buRK4EeoMfunui9s7\nz+Yws1gC/vOUNgGryUBgJMTiYYpvJ1GcC8vb+RpmgPVpSleQisiOSzNZskml1gEnAv8Kqd2B9yD7\nV+DuUhPHSvfrAenfQ+IM+FgGVnWHXWNwErAb4e1/wUB46Kew8iSz1DLgXEh0h24OYy2EokWE/Xlf\nJ0z+zCPUYmWAdUWwh6E4Ed5Nhpqlco1AXyPMMjV1aD8w0Z1XGc4iXuHXxOgPVOMspEi+WCB3lbu/\nBuDuC82qDXYn1Gp1JxS8d6/wzFcT8k+532EOYh6PcXbpEdZBt9MIkex5OGYyjMqzPBlu3aXM/YvA\nq4ygSC9gOPjM0F7+Fy2PcveHzezs2+C6Wug+AOxDKC6DmMGPcvD/Kgx+Sw2JQ7dKmysDjISqt8Me\nQ9/voHN2cZlrYMopMKa2/I/YaQ6JZ9xzy7f50ERkm1LIknaZ2emQuAWoDvvxjQCKo2HaqbBitZld\nAPEjILEXFBZC/s/u/rqZVUPqWRixDxydhj9XhfBzPmEprEiYmRoCHFMLd50G4wowOh5KlN6zcIXf\ne8CngaGEvlbXEHpXHUgIMgtisHwi2LswaD/4Sww+WTo+Rogv0whd2M9v8cyGsYa3yVHP5eSZywfk\nCCNsgNjt8Dkzu7J5KdNWwughYUxG6HU1pMKr9hKhyXg5aWIkaaDAGOA3wKlsmBuLN0LvF/CZzh0H\nwAWx1jVfToyHSZJhJrAHMAiqZ4cvykbc/UEzG5SF41bDp9NwQAJWZ8KL0puQ5D6qmIEbzdc/Li89\nn6YuY6WouTPNmk+GxqfhlmPhzOrmpvRFwhWoT9RB7rJOHJ+IbCMKWVKWmR0E8R9C8pTw/vg5wr58\nTcbG4PU+cO/9cEAWuqVgYREWfN0s+QEwCXYbCqenQw1UHhhHaOA9hzA7BaGgPAOcCwxrUYi0H7A3\n4SrAx4DjCIXjh9B6qW5/4ONVcP0wmJeBA6vgUQuVR70Js0qDCJM9u7YYfxboz2vM5Thap5RS/BtC\nmKoqrSU2/B6m/xIuqYXnCX+eAcbTnB+cEOhmAF+q8MrWUyRHDc2LiVmay+oPh8QLFPaNs6xo/CpW\nYCzObsBaUrxAL9ZxNjn+WHpF1kOhEFYBKzkgCbf0h/SB0D0BvA3HzoYfxszOL7r/vZ37bo73cpB7\nmfCV7UZ4teuBxwmvaxwyufBF3Cm4u5vZGfDBNfDLz8PgHFQbLDDILYLcZ929cudYEdlhqIWDtFLa\nnPcuiI+HQ2pgPjAcOLLCPW4nXKWXJ8weFQiF4RBq2wEW0LzdS4wQkg4iRItlhLCyFLiQ5t/6XyMs\nEa4jRJECIZZ8mY23yIFQj3VTBpgNsaHg1XCihaBWrmv7zcB+JHmbk3iLMYSwM5MwE/M8FB3GufvU\n5tcl+RYcPQCOjIfs9XfC38NLjzmLUL9eDXyVcpM3xtMMZzLnkmcdYSbrClqXuf8QcmMhPgpiU4mz\nggQ1FDmY3Ib5uVsI8fIhqG+AI919+kbnMuubhHdOh54j2xSQLQZugIYsTHD358u8QJstZnZjCi44\nl9CJrEkjoZpuLhSzMNTd53+U82yPzGwX4ATCN8VM4OVNXOghIl2UWjjIZild3TeBMFuzEnjYN2yG\nm74dhn4czqoK78s/Jmx6XM5awnLekcBYmqNCjjBxMa3076b5jfmEmqqWM2IfIywHPgI8DJwFPEV4\nPzoZ2JMQK1YDk4AbgYvYuHfVQKAqDXU1KbLzc9hQZ0q6uVFoS68RQt3ZFFhCA/AoMJVQtdUd6A+x\n5fBcyuzlHEx097VmdgQ88yhMHQQH1sJw4sygwFTC4t2ZpXH8FbiH0E2+qWl8EXiNJJOZUCrgf5mw\nAU/LgJUDHGLVEOsPfJICIWC2lgZeglwRXigXsAiv2qXDId02YAEMAE6AqifgSkKR3FZLw6hTaB2w\nIHyFPg38CnLZcMr5H+U82yN3XwXc1tnjEJHOoZC1kzCzgcAZwOFx+ERvYADEPoTCYoinzH6Ug3ug\n6lg4oyq8secJfz9
FeJ8eQugr1RQLnifMpxze5mxJQkBaRghhtYS3YKd1wGrp48CvCEXtrxCW21q2\nx+pN6L5+L6G+qm0usNLxdUP7AUtwBvIhC/hNaXyDgQbCUt5CwvJnihjv8SzhgvtRhLL2BYSy/EGQ\nfA87PIYvMbMj3X2ame1nZK/vxzMX7IHFZ2Gs43Ra12B9FngA+CUwGCNNgnl0J8M55OlTGsXLwCVt\nnsWb4ZnMfRcGHNX6BdigqfYpC6/l2mlJn4KLDik/7Qfh+dpDcJyZpbd2s2gz2yMNw/evcHsCGAfJ\nyeEL+pFmzEREtjeRhSwzm0h414wD17t7R13NJJsQlrb4OOENdjakr4Dkp2F4DHqkjAWsYglFoDcx\njiPLy/Dfq+AiqKoOF541NdLsVnqYNOE98hHgM4SlwOmEdghlR0GoV7qTMNM1l7B0V0k1IdpMIfSf\nKpcvDDgKuI5QkdSyaUAGWIExgu68wWogRqH0+YXhZSBJWNo7nTDPspA8yzmPMA/VZCrwIAmM3Sky\nFKirhhkvm1U9BZyZhtrD8PhgnGlUE+ajWkoTZrXWYtxDFW/gOHsQ5vbeIdQsXUjr6weXEvbby8NX\nF8LfPyDMi7X1GpCH+Vn3MeVeySYO3WrbuT0NxKBYbC6M2xoDekA23k6Y6wOxWOWrBEREdliRhKzS\nctQ1wPGEd7ipZnavu5fby1U6iJklIPljSPwLfKy0XcvCWtjd4JwNV6qFxarFrOJmVrEf8/kAZ3Et\n2N6hfukEwjLbh4QNWF4lRIIjCFdH3QxcTHhfLtdmoElfwkxYX8JK0cbLXq0VCAXr49s5ZhdCPFjX\n5txTAcPZld7M5BM4vwcOpcB05pPjbMJsmhEC5BzgDsaTbxWw3gEeIY1zEb6hpgzgxBjcdxTMeiRP\n5uX1UFwLsRi9qHzhXA+cUQzifU4gyxzCLNQngZuAO2DdGOieCuetfwesABe7+6Nmdt5f4NZToXp/\nwn/URuBV8KdgfXszWE0M3l4YNkksq9Q/oJHWjcK21NK1kCpQuX3qqhDk2mtiJiKyQ4pqJmss8G5T\noauZ3UqYPlDIioiZJSH+NFSNDdu39SLMzywlLF+1bRU5gLB6+AAFcoTAMrJ029OlP+cQlv16E2qM\nLi4d8x5DR7T+AAAgAElEQVRhVitGeH8uV1gOoY4qSViE24uwQDa2wrFrCAX0u9L+pEqxdHvTW3qG\nELBeAJwEC+mN041QdDYTOItGHuI2Gkhh9MZZTYosBbJMaPPoD5MizxnQKmBROt9pKZg/Mk/mpqnQ\nsDfUOutovX1Oa8ZqulOgH9AUdhyIQXYlXPoUTIxBdQaed/izu38I4O73mtnEh+Cn98OB1ZCth1Qc\nnsjBf2zOLyyNcNVkGDMSupX7jz45vHi/d/fiph6rEnefW202exaMHlnm9gLwAjRkQgt8EZGdSlQh\na3da/+b6AWHHXCkj7FfHSELR0FPuvm4L798XUlNhlz3CbFMvNlwjRxWte363tBeh1HoMYXWxKSgc\nQ5i9+gvwBcLy3XOEENQfOBj4EyFkvUSYsCznBULh+suEoPcYoaFo2+W1AqHovRehtcMrhC5L5cwl\nBK3rCZVUKwhhciyhnuvdDVFxBKEy6jxgX7IsIst61tONEA1fonU0WgmsIdbOuWPAuFp4anyOzPSX\nYEw12XSWuYQrK9sqkGAqB7eZwXsvPNIy4PaMe8WiaHd/BhhnZv1zIQUvyYVC6s31wHp49mb4+Ceh\npmnerx54GrKzYEkefrIFj1dWI/zbvfBgN6jZo8XnM8DdIWC9SPgGEhHZqUQVsjbrEmUzu7LFh5Pc\nfVIko+miQrhK3wTV+8LAfFi5WZw0S/8Bspe7e7l9ZMpI3wsHDoaJNMeGwYSGnXcS+lSdXm4EhPfu\nAbSOG0boR7WQEJCOIQSZDwghq2nm6gvADaXP7d/iMYqE2qp5hGXGuwltGs4lbKw8j9DCoZbQTGAK\nIQxeSghmzwJv0dwaocl6Qhg7kTBHlQO+Qvg2vpY4OY6iuGEHw7bPqGVXcid04mw5B1UPxKkl327f\nzJ4GsQEZOHUWPOBkD4nx96oiF9HcTR4gR5w7GUi21XnzwCPgOfjp5l7KX9rzb4v3/XP3opmd9gH8\nz//Bv/SCYgJ8BaTjcF8OvuLuq7f0ccuc52kzO/tmuKkPJPeE6jrIzQq1WA9k4QK1LRCR7Y2ZHUN4\nA9xqUYWshYRr9psMIrxDt+LuV0Z0/i7PzPaB5BQ4sTscaM3LX2uBu74Ai4aY2Vkt35ys1KSjxceH\nJ+AXUH14CB5tl6zihAqgqwmLZ22LyYs0bxNTzhhCL6hjCLNNTeFjCaFnVQ/CVXo3EdorjCgdN4sQ\noIYTQtN5hLA3o/S5lYROT3mgD2H2bb/SfacRZoXuLP19cOlc8wmBbwwhAI4GfkeYj5pBjAaOp8C4\nFvn+bUJ8LKeKMG04nzDXRulVKLCuNK5K/zWWFaAwx93XmNlRwKFGww/h2uNgUAH2SMVYC7yO4exH\njmzp0eYCTwIr4H2H/6twgg7l7jngm2b2/RXhhUsAr+fc22tgujXnedDMdl0Cpy4JX+Q64F53f68j\nzyMisq2UJn4mNX1sZt/b0seIKmS9DOxtZnsQ1n/OJawXyQbpq+CobnBIm2TUA/hcDVx9ImQPN7OX\ngG9A6gvAHmbxPCSeKG1APDFP1kLgqFR2XE2IEXPYeKuXdwlF5G3rj5r0JESRbOn+JxDmfp4t3X4X\n4UrDLwC/JSyEDSb0utqdEMb+SghyRxNC2luEGahs6bimXuvLCO0ZwlWCoR7sdsLsXqE0xs/T3LU9\nARxCjEn0IrthPqtJI6HxRNOjtdzNcAVhIXQI8A9CF7D+hMXKXYGFvEYId21lgRczkL0OQmdvQso7\nMVzROe9smH9+d3z8eZBcS4iYD5VetX7ASsgW4GR339RVAB3K3Rto/sJFdY48oXjvnijPIyKyvYgk\nZLl73sy+SrjePw78UVcWbthouRfQGxLHw5gK61JJ4IgaeOp2sN3DjFOBEGAOTsGUk8PszgTC/Eil\ngNUkTlgMa2kBYbZoApWKtkNdVw9Cq86BhPmfBwiTFAVCfl5EaL1wJiEkLSaErRzh6sRehOL7h0of\nQ1+K9MB4jzsp0psiudJjxgjLkqeXju1DWG6spC/9MZYTSt8PKX32beAJQiQaTKge6xv+5FZA4yKo\n2Qdi54DNILQ37Vc6toos8GDpuY5o8dqsA+7AyFYZTK4yeyQD33X3aQDuXgfcaGZ/bYQH/gFHHge1\nny2N43XwJ0Nivdzd32znSYmIyA5C2+psA6VwdTGk/xsKu4f9dGuT0N4esXMItUznEmaFCoRZoAcJ\n8y6fI4SnmYSIcWGFxykSmmLWE/pU1RCCUT0hViQo3w3AgVsJM0wNhCC1iNCM9OTSY44pHXdi6T5N\ns1GL2HANHf2BEcRZjfEaB1LkFArECLHqjtIze58EYbJzBqGn1a6EmbDLqfy7
wIuM4HEayGWXQSID\nMS+N9IDS6OKE2axfQCYbBj3D4KYrIFFVGvENpWOThHm9LPA8KRpJU2SgQz3GB3YAcHrpWszp4E9A\nQw7OcPdHW44qtNLg4jT8Zwb2Miim4KkM/NDdn67wZEREpAvTtjpdUAhYqVuh5+kwMR2W7tYR6ona\n6y60ljB71NSOMkGoW3qUsGzXdL99CbNE82ndUrPJa4TYMJEQIz4k1EQtJ8wwrSEsC46nuc1DljAX\nNI9Q1XRs6bZBhLqupiW8pqsBm/QjzPwcCTzPSeR5nEUMZyl9KHAQTu+Wr02Lex9BgancS45/JoS2\nhYQFvzcJvdjbKpLieQ4lx1OQaQS+CqnebDwv93b43Bx3/y8z6xkDb9qU5zlCRdhRbe4zniwfkOUW\nZrEHcBrNG/nEgXFgA6DmJvi7me1aWo5rHhzcnAldU83BG1X4LSKy01HIipCZ1SbggTw1R8M/07yP\nXU/ClWhvEa7Ka8sJbQyOaPP5dYT375bl3AlCv6vbCV3QRxGWEhsIpXHPEArPmxpuFwj9q6oIdVKZ\n0jE/IwS2AmEWrWkz5paxqMkMQpuD5aXnQmlcjwPLSbOIz1BgT8L2NjnyfJzW32w5wixWVemeh+Ek\nWcezXA2MpsBgQqi7v3SOIa3uHecePkYd/YDFIQF+70/wnbOhZjAhaOUJDSMehLpc2PAQYJ1BwwpI\n9iHMA/57mWfY1Ovewc5g4y5jlEY0EJgfCsj+bGbD0/DdWCg2i8UgY/DHHPwvW3F1oIiIbN8UsrZA\naRnoEMKa22x3X9jOsckUPBknNSbPcTQHrCZHE8qu+9C68LwprOQJoafVo5Zub9v8ciihfPsZwkxX\nihBj+hLmXRYQlgdXE/pf9STkgnTpPElC/VHTdXZHEa7+m8fGIesDQqPSswj73o4tjXcatWSZQK5V\nGf4nCJVfvyoduQuh8PzV0iMXCF24rgfGUeQ8irzOqyzgdVZTpEj+Hrjpk3H6xAoMI856jFnsiXMW\nOe6FhjjcmnP/Sczsg5vh/6WhR3corISUwcwcfNndX4bQ1iBpdu2z8LVTIV0kXAdZzmJCzCsXsJoM\nh+6L4EgzeycJjxwB1YdAvBuwChLPw5emw2fNbGxTc14REdk5qCarHWZ2AKT+HeLjwHtCrhfUFqC2\nCCvSEHsWMl9y9zll7vuZAXDdCuLdcvw75ffie51QSL4boV6qgdDCIAv8Cxt3UnfCVXyfoPzSIIQL\nuxKEgvbrCcuDSXpTZB1Onp6EgNeTMLnyHCGAjSUs81WXzvMGcF/puINLj/kuoah9AgleYDCrWAn0\nx3kf51zKb1C3mFBcPpKwbc1AwuLkboTZrMGEqrOXCB1sjTDPNxc+yLgPMrP94zBlF6geAsn9Sq/U\nZFi/Ct7NhieaKz2ZWYQmXL2ABe7+btvxmFmfJEw/FHadAckLaN3hqslbhC6aF1R4pQGeBZ8Evzc4\n+2zoW66N6bNQmAxTG93b7qTdLjPra3BeAoYXYEURbnH3mVvyGCIi0jFUk9WBzFLfCxsrj0vC0MSG\na8R4BziJEBFeOg6enmpmh7j7vJb3r4Kvj4du925YuCrnAELkuIYwnzKbMEP0BKFeqm3IMkLj/IcI\nW9y0nR1bSIgGXyLEAwP+kxS/41OsZhowm5XU8zRhtqsnIbD1KZ3z6tI4MoQ5ppGE+qtJQAyjO3G6\nE+NhxlPkKIr8jbDguYQw93UmMIxQ8u6EubC7gVMIc2VvEqrDmp7ZAYRwdRitN0N+BgrzwpQZ7j7T\nzPZYCZesgy+8Dj0MFuahJgb7joSfxsKrZw6vZ+DT7v5KhRcdd19pZmNegevycMqzED+zzHGDCF3C\n1lM+IjvwKqzPw/IBkK7UJ34cxCfDaDMb7u5vVRpXS0mzyxPwg+FQ3B1q1kH+VbisymxSBs4uXc0o\nIiJdmEJWGWZ2FvT4T7i0unWjzmGEeqXbCZ3Gx8eg0AOmXE2ojd7AYciuhIW8WbyOb1Ra3eQdwrJg\nFXA4odapgbAFze6woX95k10Jy37/R5iR2osQAGcQ6rhGEuaHcoS+UsuJs543CQHnDIr8g9Vk+SQh\n4jR1kfgkMe6hN29zKgUWAjOYyWpgAhn6A3U0kCYsKiZLo3yfsAS4B2FG6s7SiPuURmmEULUfoWfU\nYFpHxyo2jqDrgSmQycBVG17P0Jn858DPzaxnEl47HHY7GhJN38QF4Dk4eHLYkHyku6+o8KLj7kuB\n08xsxCx45jHo9XFINMXWOuAhaIzB+geg9tNQ3fYShalQXA8rDapHVO7oSiK8ZoW3wsroJkNWwuzS\nWrjyYqjq1eLTx0HiLjj2XXjWzH5DyLBPf5S9B0VEJDoKWWVVfR9OqSn/vjmUEIpeJez3NzYOk08w\ns74t39QNPlwL/ceT5x0mk2MErVtiQoghTxH6Qr1I8zYyowjNBa4FxtG8x+B0YDpDyLM/WSbzEPVA\nASPEnv6EWajxhOXH9cBt5MiTJcxb9QIuIs/t3M9qHifOXsTI4MxhKHAmBdKlM84hw+rS/fZqM3In\nzH3tQ1hYvJBwbeEqQorYmzAztRshRL0ITGbjRhNzaH6VC4S5vIegrgC/cPdXy3wBiMEXh0K/CW2+\nf+PAxyGxEnq9Af8GfKfc/Vs9D/dZZnbAy3DjS3D07pApgC0OmzHfnoXL5sA9f4DRR0G3/oTrPqdC\n/RxYn4MTDC4utLdLdHhuXnqK7TKzeBL+9zNQ06vNbQngLKj6BRw4CK5ZDfn1UGdmF7v7Q5t6bBER\n2bZUk9WGmfWBxCL4VoqKe9jNI0SMfy59/Os1sOr4puJqgJjZ1/eFH30GaqYBD5CkyKEUN+zx9ybG\nSyTJUyTeolaq5ebJCwg9sBYDcYzVXECOPQhB5k7CDFJfYCFJ3iOOcTBODxK8j/M2B1NgAk6MMC30\nVTZcNcf/g2wSbDwkh9N8nSCExg+PEDpiPUCYHxtDmIVaQghM6wn93q8Drijdz2neDbiWMKvVdFnd\nhbSufVoP/AZyBVhvUF2EeBJmNcL33f2uCi8+VWYLPgeDB1e4fQnwJ1iRce9X6THKMbOBpadZAKZ4\naeuZ0gUPp1fBZQ57GHyYgWsdbixtr3N0T7j/69Ct3Dd0I/BzyORhL3dfVOaQlmM4qg/c/7WN14o3\neLI0wOMJ34m3QkM29Ot6ZEuer4iIbD7VZHWMNMQLlQMWhHqmpkkJBxoShP4KGzjcMAe++SJUjYXY\nIHI8z4vM4RVyQAN5n0jB3gNWUcRYySJewluFrCE0l5K/SzW3s4RQqLSCEHzeJixiziCXS5JjDM8n\nM8ToS4HRNF859xJhqa6ptqgRyEGxAHc9A6cXoXoQxEqNH+rnQDEG7/+9dInjdFg6PVT8JxOwJAtv\nxeGwX0OvGMRyhLk0I8y9HUqoEMuyYfHTU6WZniKhT/39UF+EX+bhu4RVx1zefc2mvkB56Nd2TrCl\nvuG8u2zqcdpy9w8ov8dmnpBp76x
<remainder of base64-encoded PNG omitted: matplotlib scatter plot of the 2-D PCA projection of train_vecs, colored by train_tags>\n",
+      "text/plain": [
+       ""
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# Take a look here: http://stackoverflow.com/questions/28160335/plot-a-document-tfidf-2d-graph\n",
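+    "# (Added note) PCA here is only for visualization: it projects the high-dimensional\n",
+    "# bag-of-words vectors down to 2 components so the two classes can be plotted.\n",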
import PCA\n", + "import matplotlib.pyplot as plt\n", + " \n", + "pca = PCA(n_components=2).fit(train_vecs)\n", + "data2D = pca.transform(train_vecs)\n", + "plt.scatter(data2D[:,0], data2D[:,1], s=80, c=train_tags)" + ] + }, + { + "cell_type": "code", + "execution_count": 119, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "LogisticRegressionCV(Cs=10, class_weight=None, cv=None, dual=False,\n", + " fit_intercept=True, intercept_scaling=1.0, max_iter=100,\n", + " multi_class='ovr', n_jobs=1, penalty='l2', refit=True,\n", + " scoring=None, solver='lbfgs', tol=0.0001, verbose=0)" + ] + }, + "execution_count": 119, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Fit a logistic rgeression classifier\n", + "clf = sklearn.linear_model.LogisticRegressionCV()\n", + "clf.fit(train_vecs, train_tags)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loss after iteration 0: 0.333610\n", + "Loss after iteration 1000: 0.000334\n" + ] + } + ], + "source": [ + "X= train_vecs\n", + "y=train_tags\n", + "y=y.astype(int)\n", + "num_examples = len(X) # training set size\n", + "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", + "nn_output_dim = 2 # output layer dimensionality\n", + "\n", + "# Gradient descent parameters (I picked these by hand)\n", + "epsilon = 0.01 # learning rate for gradient descent\n", + "reg_lambda = 0.01 # regularization strength \n", + "\n", + "\n", + "def forward(W1, b1, W2, b2, x):\n", + " z1 = x.dot(W1) + b1\n", + " a1 = np.tanh(z1)\n", + " z2 = a1.dot(W2) + b2\n", + " exp_scores = np.exp(z2)\n", + " # softmax\n", + " y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", + " return y_hat, z1, a1, z2\n", + "\n", + "def predict(model, x):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", + " return np.argmax(y_hat, axis=1)\n", + "\n", + "def calculate_loss(model):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", + " correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", + " data_loss = np.sum(correct_logprobs)\n", + " return 1./num_examples * data_loss\n", + "\n", + "\n", + "# This function learns parameters for the neural network and returns the model.\n", + "# - nn_hdim: Number of nodes in the hidden layer\n", + "# - num_passes: Number of passes through the training data for gradient descent\n", + "# - print_loss: If True, print the loss every 1000 iterations\n", + "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", + " \n", + " # Initialize the parameters to random values. We need to learn these.\n", + " np.random.seed(0)\n", + " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", + " b1 = np.zeros((1, nn_hdim))\n", + " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", + " b2 = np.zeros((1, nn_output_dim))\n", + "\n", + " # This is what we return at the end\n", + " model = {}\n", + " \n", + " # Gradient descent. 
For each batch...\n", + " for i in range(0, num_passes):\n", + " # feedforward\n", + " y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", + " \n", + " # Backpropagation\n", + " delta3 = y_hat\n", + " delta3[range(num_examples), y] -= 1\n", + " #print [range(num_examples), y]\n", + " dW2 = (a1.T).dot(delta3)\n", + " db2 = np.sum(delta3, axis=0, keepdims=True)\n", + " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", + " dW1 = np.dot(X.T, delta2)\n", + " db1 = np.sum(delta2, axis=0)\n", + "\n", + " # Gradient descent parameter update\n", + " W1 += -epsilon * dW1\n", + " b1 += -epsilon * db1\n", + " W2 += -epsilon * dW2\n", + " b2 += -epsilon * db2\n", + " \n", + " # Assign new parameters to the model\n", + " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", + " \n", + " # Optionally print the loss.\n", + " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", + " if print_loss and i % 1000 == 0:\n", + " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", + " #print y_hat[:2]\n", + " \n", + " \n", + " return model\n", + "\n", + "# Build a model with a 3-dimensional hidden layer\n", + "model = build_model(3, print_loss=True)\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + 
"collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/.ipynb_checkpoints/theano_tutorial-checkpoint.ipynb b/.ipynb_checkpoints/theano_tutorial-checkpoint.ipynb new file mode 100644 index 0000000..d3c1ce6 --- /dev/null +++ b/.ipynb_checkpoints/theano_tutorial-checkpoint.ipynb @@ -0,0 +1,2650 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#Theano Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# The code is from Theano Tutorial: http://deeplearning.net/software/theano/tutorial/\n", + "# See this tutorial too" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from theano import *\n", + "import theano.tensor as T\n", + "from theano import function" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5.0\n", + "28.4\n" + ] + } + ], + "source": [ + "# A function to add to scalars\n", + "x = T.dscalar('x')\n", + "y = T.dscalar('y')\n", + "z = x + y\n", + "f = function([x, y], z)\n", + "print f(2, 3)\n", + "print f(16.3, 12.1)\n", + "# T.dscalar is the type we assign to “0-dimensional arrays (scalar) of doubles (d)”" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# x and y are instances of TensorVariable. \n", + "print type(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, scalar)\n" + ] + } + ], + "source": [ + "# x and y are are assigned the theano Type dscalar in their type field:\n", + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, scalar)\n" + ] + } + ], + "source": [ + "print z.type" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Adding two matrices:\n", + "x = T.dmatrix('x')\n", + "y = T.dmatrix('y')\n", + "z = x + y\n", + "f = function([x, y], z)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# Again x and y are instances of TensorVariable, but \n", + "# dmatrix is the Type for matrices of doubles. 
\n", + "print type(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print z.type" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 11. 22.]\n", + " [ 33. 44.]]\n" + ] + } + ], + "source": [ + "# Then we can use our new function on 2D arrays:. \n", + "print f([[1, 2], [3, 4]], [[10, 20], [30, 40]])\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 11., 22.],\n", + " [ 33., 44.]])" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# The variable is a NumPy array. We can also use NumPy arrays directly as inputs:\n", + "import numpy\n", + "f(numpy.array([[1, 2], [3, 4]]), numpy.array([[10, 20], [30, 40]]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Plural Constructors\n", + "from theano.tensor import *\n", + "x, y, z = dmatrices(3) # creates three matrix Variables with no names\n", + "x, y, z = dmatrices('x', 'y', 'z') # creates three matrix Variables named 'x', 'y' and 'z'" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Random numbers, etc.: http://deeplearning.net/software/theano/tutorial/examples.html" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "gpu\n", + "float32\n" + ] + } + ], + "source": [ + "print(theano.config.device)\n", + "print(theano.config.floatX)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Logistic Regression Example" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + "Final model:\n", + "target values for D:\n", + "[0 1 1 1 0 0 1 1 1 0 1 1 0 1 1 1 0 0 1 1 1 0 0 1 0 1 1 1 0 0 1 0 1 1 0 0 1\n", + " 1 0 1 1 1 1 0 1 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 1 1 1 0 0 0 0 1 0 0 1 0 0\n", + " 0 0 1 0 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 0 1 0 1 0 0 1 0 0 0 0 0\n", + " 1 1 1 1 1 0 0 0 1 1 0 1 0 1 0 1 0 0 1 0 1 1 0 0 0 1 0 0 0 0 0 0 1 0 1 1 0\n", + " 0 0 0 1 0 1 0 1 1 1 0 1 1 1 1 1 1 
1 1 1 0 1 0 1 0 0 1 1 1 1 0 0 0 1 0 0 0\n", + " 1 1 1 1 1 1 0 1 0 1 1 1 0 1 1 0 1 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 1 0 0 1 0\n", + " 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 1 0 0 1 1 0 1 0 1 1 1 0 1 1 0 0 1 1 0 1 0 1\n", + " 0 0 1 1 0 0 0 1 1 1 1 1 0 0 1 1 0 1 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0\n", + " 0 1 0 1 0 1 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 0 1 1 1 0 0 1 0 0 0 1 1\n", + " 1 0 1 1 0 0 0 0 0 1 1 1 1 0 1 0 0 1 0 1 1 0 0 1 0 1 1 0 1 1 1 1 0 0 1 1 1\n", + " 1 1 1 1 1 1 0 0 0 0 1 1 1 0 1 1 0 0 1 1 1 0 0 1 1 1 0 0 1 0]\n", + "prediction on D:\n", + "[1 1 0 1 1 1 1 1 0 1 0 1 0 1 1 0 0 0 1 1 0 1 0 1 0 1 1 1 0 1 0 0 0 1 1 0 1\n", + " 1 1 0 0 0 1 0 1 0 0 1 1 1 1 1 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 1 1 0 0 0 1\n", + " 1 1 1 0 1 0 1 0 0 0 1 1 0 1 0 0 1 1 0 0 1 0 0 0 0 0 1 0 1 1 1 0 1 0 1 1 0\n", + " 1 0 1 1 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 1\n", + " 0 1 0 0 0 0 1 1 1 0 0 1 0 0 1 0 0 1 0 1 1 1 0 0 1 1 0 0 0 0 1 0 0 1 1 0 1\n", + " 0 1 1 1 1 1 1 0 1 1 0 1 1 1 1 0 1 1 1 1 1 0 1 0 1 0 0 0 1 1 0 0 0 0 1 0 0\n", + " 1 1 1 1 1 0 1 0 1 0 1 1 0 0 1 0 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 1 0 1 0 1\n", + " 0 0 1 1 1 0 1 0 1 1 1 0 1 0 1 0 1 1 1 0 1 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0\n", + " 0 1 0 0 0 1 0 1 0 1 1 0 1 1 0 1 1 0 0 0 1 0 0 1 0 1 0 1 0 0 1 0 1 0 0 1 1\n", + " 0 1 0 1 0 0 0 0 0 0 0 1 1 1 1 0 1 0 1 1 1 1 0 0 0 1 1 0 1 1 0 1 0 0 1 0 0\n", + " 0 0 0 1 1 1 0 1 1 0 1 1 0 1 1 1 0 1 0 0 0 1 1 1 0 1 1 1 1 0]\n" + ] + } + ], + "source": [ + "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "\n", + "N = 400 # training sample size\n", + "feats = 784 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 100\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final 
model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25000\n", + "200\n", + "200\n", + "7142\n", + "6994\n", + "0\n", + "200\n", + "200\n", + "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + "200\n", + "200\n", + "(200, 7142)\n", + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + } + ], + "source": [ + "# Get text data\n", + "#----------------\n", + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", 
+ " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)\n", + "#-------------------------\n", + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. \n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)\n", + "#--------------------\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape\n", + "#--------------------------------\n", + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + 
"\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(200, 7142)\n", + "(200,)\n" + ] + } + ], + "source": [ + "# This creates an artficial dataset (code from the Theano tutorial):\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "#print D\n", + "print D[0].shape\n", + "print D[1].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(array([[ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " ..., \n", + " [ 0., 0., 0., ..., 1., 0., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.]], dtype=float32), array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n", + "(200, 7142)\n", + "(200,)\n" + ] + } + ], + "source": [ + "#But let's use our data to construct D:\n", + "# Let's ensure our x is float32, for use with Theano:\n", + "x= train_vecs\n", + "x=x.astype(theano.config.floatX)\n", + "y=train_tags\n", + "y=y.astype(int)\n", + "# Now create the dataset, and check dimensions, etc.\n", + "D=(x,y)\n", + "print D\n", + "print D[0].shape\n", + "print D[1].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + 
"Final model:\n", + "target values for D:\n", + "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", + "prediction on D:\n", + "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n" + ] + } + ], + "source": [ + "# Use with logistic regression:\n", + "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "#theano.config.optimizer='fast_compile'\n", + "\n", + "#N = 400 # training sample size\n", + "#feats = 784# \n", + "feats = 7142 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "#D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 1000\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + 
"outputs": [], + "source": [ + "# Now try the code with different values of \"training_steps\" and see what you get.\n", + "# For example, you can try:\n", + "# training_steps= 100, training_steps=500, training_steps=10000" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We need to create some functions to load the IBDB data:\n", + "from collections import namedtuple\n", + "from collections import defaultdict\n", + "\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + "def get_data():\n", + " '''\n", + " \n", + " '''\n", + " all_data = [] \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " all_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + " all_tags+=all_tags\n", + " return all_data, all_tags\n", + " #--------------------------------------------------\n", + " \n", + "all_data, all_tags= get_data()\n", + "\n", + "#train_data=train_data[:100]+train_data[12500:12600]\n", + "#test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "\n", + "\n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]+ [ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "\n", + "\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "\n", + "\n", + "\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "\n", + "\n", + "\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as 
alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)\n", + "#-------------------------\n", + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. 
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)\n", + "#--------------------\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape\n", + "#--------------------------------\n", + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print 
\"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "space_len: 975\n", + "train_vecs.shape: 25000, 975\n", + "dev_vecs.shape: 5000, 975\n", + "test_vecs.shape: 20000, 975\n", + "Subtensor{int64}.0\n", + "... building the model\n", + "... training the model\n", + "epoch 1, minibatch 41/41, validation error 52.083333 %\n", + " epoch 1, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 2, minibatch 41/41, validation error 52.083333 %\n", + "epoch 3, minibatch 41/41, validation error 52.083333 %\n", + "epoch 4, minibatch 41/41, validation error 52.083333 %\n", + "epoch 5, minibatch 41/41, validation error 52.083333 %\n", + "epoch 6, minibatch 41/41, validation error 52.062500 %\n", + " epoch 6, minibatch 41/41, test error of best model 50.484848 %\n", + "epoch 7, minibatch 41/41, validation error 51.895833 %\n", + " epoch 7, minibatch 41/41, test error of best model 50.328283 %\n", + "epoch 8, minibatch 41/41, validation error 51.687500 %\n", + " epoch 8, minibatch 41/41, test error of best model 50.111111 %\n", + "epoch 9, minibatch 41/41, validation error 51.354167 %\n", + " epoch 9, minibatch 41/41, test error of best model 49.944444 %\n", + "epoch 10, minibatch 41/41, validation error 51.020833 %\n", + " epoch 10, minibatch 41/41, test error of best model 49.641414 %\n", + "epoch 11, minibatch 41/41, validation error 50.500000 %\n", + " epoch 11, minibatch 41/41, test error of best model 49.363636 %\n", + "epoch 12, minibatch 41/41, validation error 50.125000 %\n", + " epoch 12, minibatch 41/41, test error of best model 49.000000 %\n", + "epoch 13, minibatch 41/41, validation error 49.687500 %\n", + " epoch 13, minibatch 41/41, test error of best model 48.661616 %\n", + "epoch 14, minibatch 41/41, validation error 49.125000 %\n", + " epoch 14, minibatch 41/41, test error of best model 48.257576 %\n", + "epoch 15, minibatch 41/41, validation error 48.645833 %\n", + " epoch 15, minibatch 41/41, test error of best model 47.853535 %\n", + "epoch 16, minibatch 41/41, validation error 48.291667 %\n", + " epoch 16, minibatch 41/41, test error of best model 47.585859 %\n", + "epoch 17, minibatch 41/41, validation error 47.979167 %\n", + " epoch 17, minibatch 41/41, test error of best model 47.217172 %\n", + "epoch 18, minibatch 41/41, validation error 47.812500 %\n", + " epoch 18, minibatch 41/41, test error of best model 47.000000 %\n", + "epoch 19, minibatch 41/41, validation error 47.604167 %\n", + " epoch 19, minibatch 41/41, test error of best model 46.737374 %\n", + "epoch 20, minibatch 41/41, validation error 47.395833 %\n", + " epoch 20, minibatch 41/41, test error of best model 46.520202 %\n", + "epoch 21, minibatch 41/41, validation error 47.041667 %\n", + " epoch 
21, minibatch 41/41, test error of best model 46.292929 %\n", + "epoch 22, minibatch 41/41, validation error 46.770833 %\n", + " epoch 22, minibatch 41/41, test error of best model 46.030303 %\n", + "epoch 23, minibatch 41/41, validation error 46.645833 %\n", + " epoch 23, minibatch 41/41, test error of best model 45.813131 %\n", + "epoch 24, minibatch 41/41, validation error 46.125000 %\n", + " epoch 24, minibatch 41/41, test error of best model 45.570707 %\n", + "epoch 25, minibatch 41/41, validation error 45.895833 %\n", + " epoch 25, minibatch 41/41, test error of best model 45.348485 %\n", + "epoch 26, minibatch 41/41, validation error 45.812500 %\n", + " epoch 26, minibatch 41/41, test error of best model 45.121212 %\n", + "epoch 27, minibatch 41/41, validation error 45.666667 %\n", + " epoch 27, minibatch 41/41, test error of best model 44.994949 %\n", + "epoch 28, minibatch 41/41, validation error 45.458333 %\n", + " epoch 28, minibatch 41/41, test error of best model 44.848485 %\n", + "epoch 29, minibatch 41/41, validation error 45.312500 %\n", + " epoch 29, minibatch 41/41, test error of best model 44.676768 %\n", + "epoch 30, minibatch 41/41, validation error 45.229167 %\n", + " epoch 30, minibatch 41/41, test error of best model 44.606061 %\n", + "epoch 31, minibatch 41/41, validation error 45.125000 %\n", + " epoch 31, minibatch 41/41, test error of best model 44.515152 %\n", + "epoch 32, minibatch 41/41, validation error 44.895833 %\n", + " epoch 32, minibatch 41/41, test error of best model 44.378788 %\n", + "epoch 33, minibatch 41/41, validation error 44.833333 %\n", + " epoch 33, minibatch 41/41, test error of best model 44.303030 %\n", + "epoch 34, minibatch 41/41, validation error 44.750000 %\n", + " epoch 34, minibatch 41/41, test error of best model 44.176768 %\n", + "epoch 35, minibatch 41/41, validation error 44.500000 %\n", + " epoch 35, minibatch 41/41, test error of best model 44.080808 %\n", + "epoch 36, minibatch 41/41, validation error 44.458333 %\n", + " epoch 36, minibatch 41/41, test error of best model 43.959596 %\n", + "epoch 37, minibatch 41/41, validation error 44.416667 %\n", + " epoch 37, minibatch 41/41, test error of best model 43.878788 %\n", + "epoch 38, minibatch 41/41, validation error 44.375000 %\n", + " epoch 38, minibatch 41/41, test error of best model 43.813131 %\n", + "epoch 39, minibatch 41/41, validation error 44.354167 %\n", + " epoch 39, minibatch 41/41, test error of best model 43.757576 %\n", + "epoch 40, minibatch 41/41, validation error 44.291667 %\n", + " epoch 40, minibatch 41/41, test error of best model 43.656566 %\n", + "epoch 41, minibatch 41/41, validation error 44.250000 %\n", + " epoch 41, minibatch 41/41, test error of best model 43.530303 %\n", + "epoch 42, minibatch 41/41, validation error 44.166667 %\n", + " epoch 42, minibatch 41/41, test error of best model 43.500000 %\n", + "epoch 43, minibatch 41/41, validation error 44.166667 %\n", + "epoch 44, minibatch 41/41, validation error 44.041667 %\n", + " epoch 44, minibatch 41/41, test error of best model 43.368687 %\n", + "epoch 45, minibatch 41/41, validation error 44.104167 %\n", + "epoch 46, minibatch 41/41, validation error 44.041667 %\n", + "epoch 47, minibatch 41/41, validation error 44.041667 %\n", + "epoch 48, minibatch 41/41, validation error 43.979167 %\n", + " epoch 48, minibatch 41/41, test error of best model 43.126263 %\n", + "epoch 49, minibatch 41/41, validation error 43.979167 %\n", + "epoch 50, minibatch 41/41, validation error 43.937500 %\n", + " epoch 
50, minibatch 41/41, test error of best model 43.035354 %\n", + "epoch 51, minibatch 41/41, validation error 43.875000 %\n", + " epoch 51, minibatch 41/41, test error of best model 43.015152 %\n", + "epoch 52, minibatch 41/41, validation error 43.833333 %\n", + " epoch 52, minibatch 41/41, test error of best model 42.984848 %\n", + "epoch 53, minibatch 41/41, validation error 43.708333 %\n", + " epoch 53, minibatch 41/41, test error of best model 42.868687 %\n", + "epoch 54, minibatch 41/41, validation error 43.729167 %\n", + "epoch 55, minibatch 41/41, validation error 43.708333 %\n", + "epoch 56, minibatch 41/41, validation error 43.645833 %\n", + " epoch 56, minibatch 41/41, test error of best model 42.772727 %\n", + "epoch 57, minibatch 41/41, validation error 43.541667 %\n", + " epoch 57, minibatch 41/41, test error of best model 42.727273 %\n", + "epoch 58, minibatch 41/41, validation error 43.541667 %\n", + "epoch 59, minibatch 41/41, validation error 43.520833 %\n", + " epoch 59, minibatch 41/41, test error of best model 42.691919 %\n", + "epoch 60, minibatch 41/41, validation error 43.520833 %\n", + "epoch 61, minibatch 41/41, validation error 43.500000 %\n", + " epoch 61, minibatch 41/41, test error of best model 42.616162 %\n", + "epoch 62, minibatch 41/41, validation error 43.520833 %\n", + "epoch 63, minibatch 41/41, validation error 43.500000 %\n", + "epoch 64, minibatch 41/41, validation error 43.520833 %\n", + "epoch 65, minibatch 41/41, validation error 43.437500 %\n", + " epoch 65, minibatch 41/41, test error of best model 42.515152 %\n", + "epoch 66, minibatch 41/41, validation error 43.437500 %\n", + "epoch 67, minibatch 41/41, validation error 43.416667 %\n", + " epoch 67, minibatch 41/41, test error of best model 42.474747 %\n", + "epoch 68, minibatch 41/41, validation error 43.395833 %\n", + " epoch 68, minibatch 41/41, test error of best model 42.429293 %\n", + "epoch 69, minibatch 41/41, validation error 43.395833 %\n", + "epoch 70, minibatch 41/41, validation error 43.375000 %\n", + " epoch 70, minibatch 41/41, test error of best model 42.373737 %\n", + "epoch 71, minibatch 41/41, validation error 43.354167 %\n", + " epoch 71, minibatch 41/41, test error of best model 42.368687 %\n", + "epoch 72, minibatch 41/41, validation error 43.312500 %\n", + " epoch 72, minibatch 41/41, test error of best model 42.328283 %\n", + "epoch 73, minibatch 41/41, validation error 43.270833 %\n", + " epoch 73, minibatch 41/41, test error of best model 42.313131 %\n", + "epoch 74, minibatch 41/41, validation error 43.229167 %\n", + " epoch 74, minibatch 41/41, test error of best model 42.282828 %\n", + "epoch 75, minibatch 41/41, validation error 43.229167 %\n", + "epoch 76, minibatch 41/41, validation error 43.229167 %\n", + "epoch 77, minibatch 41/41, validation error 43.208333 %\n", + " epoch 77, minibatch 41/41, test error of best model 42.202020 %\n", + "epoch 78, minibatch 41/41, validation error 43.187500 %\n", + " epoch 78, minibatch 41/41, test error of best model 42.186869 %\n", + "epoch 79, minibatch 41/41, validation error 43.166667 %\n", + " epoch 79, minibatch 41/41, test error of best model 42.166667 %\n", + "epoch 80, minibatch 41/41, validation error 43.166667 %\n", + "epoch 81, minibatch 41/41, validation error 43.145833 %\n", + " epoch 81, minibatch 41/41, test error of best model 42.151515 %\n", + "epoch 82, minibatch 41/41, validation error 43.125000 %\n", + " epoch 82, minibatch 41/41, test error of best model 42.146465 %\n", + "epoch 83, minibatch 41/41, 
validation error 43.083333 %\n", + " epoch 83, minibatch 41/41, test error of best model 42.126263 %\n", + "epoch 84, minibatch 41/41, validation error 43.020833 %\n", + " epoch 84, minibatch 41/41, test error of best model 42.116162 %\n", + "epoch 85, minibatch 41/41, validation error 43.020833 %\n", + "epoch 86, minibatch 41/41, validation error 43.020833 %\n", + "epoch 87, minibatch 41/41, validation error 42.979167 %\n", + " epoch 87, minibatch 41/41, test error of best model 42.070707 %\n", + "epoch 88, minibatch 41/41, validation error 42.958333 %\n", + " epoch 88, minibatch 41/41, test error of best model 42.045455 %\n", + "epoch 89, minibatch 41/41, validation error 42.937500 %\n", + " epoch 89, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 90, minibatch 41/41, validation error 42.916667 %\n", + " epoch 90, minibatch 41/41, test error of best model 42.035354 %\n", + "epoch 91, minibatch 41/41, validation error 42.895833 %\n", + " epoch 91, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 92, minibatch 41/41, validation error 42.875000 %\n", + " epoch 92, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 93, minibatch 41/41, validation error 42.875000 %\n", + "epoch 94, minibatch 41/41, validation error 42.854167 %\n", + " epoch 94, minibatch 41/41, test error of best model 42.010101 %\n", + "epoch 95, minibatch 41/41, validation error 42.854167 %\n", + "epoch 96, minibatch 41/41, validation error 42.833333 %\n", + " epoch 96, minibatch 41/41, test error of best model 42.005051 %\n", + "epoch 97, minibatch 41/41, validation error 42.791667 %\n", + " epoch 97, minibatch 41/41, test error of best model 41.984848 %\n", + "epoch 98, minibatch 41/41, validation error 42.770833 %\n", + " epoch 98, minibatch 41/41, test error of best model 41.979798 %\n", + "epoch 99, minibatch 41/41, validation error 42.770833 %\n", + "epoch 100, minibatch 41/41, validation error 42.770833 %\n", + "epoch 101, minibatch 41/41, validation error 42.770833 %\n", + "epoch 102, minibatch 41/41, validation error 42.770833 %\n", + "epoch 103, minibatch 41/41, validation error 42.770833 %\n", + "epoch 104, minibatch 41/41, validation error 42.770833 %\n", + "epoch 105, minibatch 41/41, validation error 42.791667 %\n", + "epoch 106, minibatch 41/41, validation error 42.791667 %\n", + "epoch 107, minibatch 41/41, validation error 42.791667 %\n", + "epoch 108, minibatch 41/41, validation error 42.791667 %\n", + "epoch 109, minibatch 41/41, validation error 42.770833 %\n", + "epoch 110, minibatch 41/41, validation error 42.770833 %\n", + "epoch 111, minibatch 41/41, validation error 42.770833 %\n", + "epoch 112, minibatch 41/41, validation error 42.770833 %\n", + "epoch 113, minibatch 41/41, validation error 42.770833 %\n", + "epoch 114, minibatch 41/41, validation error 42.750000 %\n", + " epoch 114, minibatch 41/41, test error of best model 41.868687 %\n", + "epoch 115, minibatch 41/41, validation error 42.750000 %\n", + "epoch 116, minibatch 41/41, validation error 42.729167 %\n", + " epoch 116, minibatch 41/41, test error of best model 41.863636 %\n", + "epoch 117, minibatch 41/41, validation error 42.750000 %\n", + "epoch 118, minibatch 41/41, validation error 42.750000 %\n", + "epoch 119, minibatch 41/41, validation error 42.750000 %\n", + "epoch 120, minibatch 41/41, validation error 42.729167 %\n", + " epoch 120, minibatch 41/41, test error of best model 41.853535 %\n", + "epoch 121, minibatch 41/41, validation error 42.729167 %\n", + "epoch 122, 
minibatch 41/41, validation error 42.729167 %\n", + "epoch 123, minibatch 41/41, validation error 42.708333 %\n", + " epoch 123, minibatch 41/41, test error of best model 41.833333 %\n", + "epoch 124, minibatch 41/41, validation error 42.687500 %\n", + " epoch 124, minibatch 41/41, test error of best model 41.828283 %\n", + "epoch 125, minibatch 41/41, validation error 42.666667 %\n", + " epoch 125, minibatch 41/41, test error of best model 41.818182 %\n", + "epoch 126, minibatch 41/41, validation error 42.645833 %\n", + " epoch 126, minibatch 41/41, test error of best model 41.808081 %\n", + "epoch 127, minibatch 41/41, validation error 42.645833 %\n", + "epoch 128, minibatch 41/41, validation error 42.645833 %\n", + "epoch 129, minibatch 41/41, validation error 42.645833 %\n", + "epoch 130, minibatch 41/41, validation error 42.645833 %\n", + "epoch 131, minibatch 41/41, validation error 42.645833 %\n", + "epoch 132, minibatch 41/41, validation error 42.645833 %\n", + "epoch 133, minibatch 41/41, validation error 42.645833 %\n", + "epoch 134, minibatch 41/41, validation error 42.645833 %\n", + "epoch 135, minibatch 41/41, validation error 42.645833 %\n", + "epoch 136, minibatch 41/41, validation error 42.645833 %\n", + "epoch 137, minibatch 41/41, validation error 42.645833 %\n", + "epoch 138, minibatch 41/41, validation error 42.625000 %\n", + " epoch 138, minibatch 41/41, test error of best model 41.792929 %\n", + "epoch 139, minibatch 41/41, validation error 42.604167 %\n", + " epoch 139, minibatch 41/41, test error of best model 41.792929 %\n", + "epoch 140, minibatch 41/41, validation error 42.604167 %\n", + "epoch 141, minibatch 41/41, validation error 42.604167 %\n", + "epoch 142, minibatch 41/41, validation error 42.583333 %\n", + " epoch 142, minibatch 41/41, test error of best model 41.792929 %\n", + "epoch 143, minibatch 41/41, validation error 42.583333 %\n", + "epoch 144, minibatch 41/41, validation error 42.583333 %\n", + "epoch 145, minibatch 41/41, validation error 42.583333 %\n", + "epoch 146, minibatch 41/41, validation error 42.583333 %\n", + "epoch 147, minibatch 41/41, validation error 42.583333 %\n", + "epoch 148, minibatch 41/41, validation error 42.583333 %\n", + "epoch 149, minibatch 41/41, validation error 42.583333 %\n", + "epoch 150, minibatch 41/41, validation error 42.583333 %\n", + "epoch 151, minibatch 41/41, validation error 42.562500 %\n", + " epoch 151, minibatch 41/41, test error of best model 41.762626 %\n", + "epoch 152, minibatch 41/41, validation error 42.562500 %\n", + "epoch 153, minibatch 41/41, validation error 42.541667 %\n", + " epoch 153, minibatch 41/41, test error of best model 41.762626 %\n", + "epoch 154, minibatch 41/41, validation error 42.541667 %\n", + "epoch 155, minibatch 41/41, validation error 42.541667 %\n", + "epoch 156, minibatch 41/41, validation error 42.541667 %\n", + "epoch 157, minibatch 41/41, validation error 42.541667 %\n", + "epoch 158, minibatch 41/41, validation error 42.541667 %\n", + "epoch 159, minibatch 41/41, validation error 42.541667 %\n", + "epoch 160, minibatch 41/41, validation error 42.541667 %\n", + "epoch 161, minibatch 41/41, validation error 42.541667 %\n", + "epoch 162, minibatch 41/41, validation error 42.520833 %\n", + " epoch 162, minibatch 41/41, test error of best model 41.747475 %\n", + "epoch 163, minibatch 41/41, validation error 42.520833 %\n", + "epoch 164, minibatch 41/41, validation error 42.520833 %\n", + "epoch 165, minibatch 41/41, validation error 42.520833 %\n", + "epoch 166, 
minibatch 41/41, validation error 42.520833 %\n",
+    "[... epochs 167-252: validation error unchanged at 42.520833 % ...]\n",
+    "epoch 253, minibatch 41/41, validation error 42.500000 %\n",
+    "     epoch 253, minibatch 41/41, test error of best model 41.712121 %\n",
+    "[... epochs 254-608: validation error unchanged at 42.500000 % ...]\n",
+    "epoch 609, minibatch 41/41, validation error 42.500000 %\n",
+    "Optimization complete with best validation score of 42.500000 %, with test performance 41.712121 %\n",
+    "The code ran for 610 epochs, with 11.447371 epochs/sec\n",
+    "The code for file best_model.pkl ran for 53.3s\n",
+    "Now predicting...\n",
+    "Predicted values for the first 10 examples in test set:\n",
+    "[0 0 0 0 0 0 1 0 0 0]\n"
+     ]
+    }
+   ],
+   "source": [
+    "\"\"\"\n",
+    "This tutorial introduces logistic regression using Theano and stochastic\n",
+    "gradient descent.\n",
+    "\n",
+    "Logistic regression is a probabilistic, linear classifier. It is parametrized\n",
+    "by a weight matrix :math:`W` and a bias vector :math:`b`. Classification is\n",
+    "done by projecting data points onto a set of hyperplanes, the distance to\n",
+    "which is used to determine a class membership probability.\n",
+    "\n",
+    "Mathematically, this can be written as:\n",
+    "\n",
+    ".. math::\n",
+    "  P(Y=i|x, W,b) &= softmax_i(W x + b) \\\\\n",
+    "                &= \\frac {e^{W_i x + b_i}} {\\sum_j e^{W_j x + b_j}}\n",
+    "\n",
+    "\n",
+    "The model's prediction is then made by taking the argmax of\n",
+    "the vector whose i'th element is P(Y=i|x).\n",
+    "\n",
+    ".. math::\n",
+    "\n",
+    "  y_{pred} = argmax_i P(Y=i|x,W,b)\n",
+    "\n",
+    "\n",
+    "This tutorial presents a stochastic gradient descent optimization method\n",
+    "suitable for large datasets.\n",
+    "\n",
+    "\n",
+    "References:\n",
+    "\n",
+    "    - textbooks: \"Pattern Recognition and Machine Learning\" -\n",
+    "      Christopher M. Bishop, section 4.3.2\n",
+    "\n",
+    "\"\"\"\n",
+    "from collections import namedtuple, defaultdict\n",
+    "from random import shuffle, randint\n",
+    "#----------------------------------------------------\n",
+    "__docformat__ = 'restructuredtext en'\n",
+    "\n",
+    "import cPickle\n",
+    "import gzip\n",
+    "import os\n",
+    "import sys\n",
+    "import timeit\n",
+    "\n",
+    "import numpy\n",
+    "import numpy as np\n",
+    "import theano\n",
+    "import theano.tensor as T\n",
+    "#----------------------------------------------------\n",
+    "\n",
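+    "# Aside (added illustration): the softmax + argmax rule from the docstring\n",
+    "# above on made-up toy numbers; W_demo, b_demo, x_demo are not part of the\n",
+    "# pipeline, so the example is left commented out.\n",
+    "#W_demo = np.array([[1.0, -1.0], [0.5, 0.5]])   # (n_in=2, n_out=2): one column per class\n",
+    "#b_demo = np.array([0.0, 0.1])\n",
+    "#x_demo = np.array([2.0, 1.0])\n",
+    "#scores = np.exp(np.dot(x_demo, W_demo) + b_demo)   # e^{W_i x + b_i}\n",
+    "#p = scores / scores.sum()                          # softmax_i -> P(Y=i|x)\n",
+    "#print p, p.argmax()                                # probabilities and y_pred\n",
+    "\n",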
+    "def get_data():\n",
+    "    '''\n",
+    "    Read the IMDB data (one document per line: a tag followed by the\n",
+    "    tokenized text) and split it into 25,000 train / 5,000 dev /\n",
+    "    20,000 test documents, plus their sentiment labels\n",
+    "    (1.0 = positive, 0.0 = negative).\n",
+    "    '''\n",
+    "    all_data = [] \n",
+    "    DataDoc= namedtuple('DataDoc', 'tag words')\n",
+    "    with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n",
+    "        for line_no, line in enumerate(alldata):\n",
+    "            label=line.split()[0]\n",
+    "            word_list=line.lower().split()[1:]\n",
+    "            all_data.append(DataDoc(label, word_list))\n",
+    "    train_data = all_data[:25000]\n",
+    "    dev_data = all_data[25000:27500]+all_data[47500:50000]\n",
+    "    test_data=all_data[27500:47500]\n",
+    "    # labels\n",
+    "    train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n",
+    "    dev_tags= [ 1.0 for i in range(2500)] + [ 0.0 for i in range(2500)]\n",
+    "    test_tags= [ 1.0 for i in range(10000)] + [ 0.0 for i in range(10000)]\n",
+    "    return train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n",
+    "    #--------------------------------------------------\n",
+    "#train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n",
+    "########################\n",
+    "\n",
+    "\n",
+    "# Let's get a dictionary of all the words in the training data.\n",
+    "# These will be our bag-of-words features.\n",
+    "# (gensim ships a built-in \"Dictionary\" class in its corpora module\n",
+    "# --> corpora.Dictionary that does the same job; we write the function\n",
+    "# ourselves here so that one way of doing this is clear.)\n",
+    "def get_space(train_data):\n",
+    "    \"\"\"\n",
+    "    input is a list of namedtuples\n",
+    "    returns a dict over the training vocabulary:\n",
+    "    key=word\n",
+    "    value=frequency of the word in the training data\n",
+    "    (in __main__ below we filter this dict by frequency and then re-index\n",
+    "    it so that every surviving word gets a unique vector position)\n",
+    "    \"\"\"\n",
+    "    word_space=defaultdict(int)\n",
+    "    for doc in train_data:\n",
+    "        for w in doc.words:\n",
+    "            # count how often each word occurs; words are seen in no\n",
+    "            # particular order here, but that doesn't matter.\n",
+    "            word_space[w]+=1\n",
+    "    return word_space\n",
+    "\n",
+    "# train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n",
+    "# word_space=get_space(train_data)\n",
+    "# word_space={w: word_space[w] for w in word_space if word_space[w] > 500}\n",
+    "# space_len=len(word_space)\n",
+    "# print \"space_len: \", space_len\n",
+    "def get_sparse_vec(data_point, space):\n",
+    "    # create empty vector\n",
+    "    sparse_vec = np.zeros((len(space)))\n",
+    "    for w in set(data_point.words):\n",
+    "        # catch KeyError so that this function can also be used to vectorize\n",
+    "        # data with words not in train (i.e., test and dev data)\n",
+    "        try:\n",
+    "            sparse_vec[space[w]]=1\n",
+    "        except KeyError:\n",
+    "            continue\n",
+    "    return sparse_vec\n",
+    "\n",
+    "    \n",
+    "# train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n",
+    "# test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n",
+    "# dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n",
+    "# #---------------------------\n",
+    "# train_vecs=np.array(train_vecs)\n",
+    "# train_tags=np.array(train_tags)\n",
+    "# dev_vecs=np.array(dev_vecs)\n",
+    "# dev_tags=np.array(dev_tags)\n",
+    "# test_vecs=np.array(test_vecs)\n",
+    "# test_tags=np.array(test_tags)\n",
+    "# print train_vecs.shape\n",
+    "# print dev_vecs.shape\n",
+    "# print test_vecs.shape\n",
+    "\n",
+    "\n",
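+    "# Aside (added illustration): the two functions above on a toy corpus.\n",
+    "# ToyDoc and toy_docs are made up for illustration only, hence commented out:\n",
+    "#ToyDoc = namedtuple('ToyDoc', 'tag words')\n",
+    "#toy_docs = [ToyDoc('d1', ['good', 'movie']), ToyDoc('d2', ['bad', 'movie'])]\n",
+    "#toy_counts = get_space(toy_docs)   # counts: {'good': 1, 'movie': 2, 'bad': 1}\n",
+    "#toy_space = {w: i for i, w in enumerate(toy_counts)}   # word -> unique column\n",
+    "#print get_sparse_vec(toy_docs[0], toy_space)   # length-3 vector, 1s at 'good' and 'movie'\n",
+    "\n",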
+    "def load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags):\n",
+    "    #------------------------------\n",
+    "    # Modified from the Theano tutorial:\n",
+    "    # I basically pass data_x, data_y instead of data_xy\n",
+    "    def shared_dataset(data_x, data_y, borrow=True):\n",
+    "        \"\"\" Function that loads the dataset into shared variables\n",
+    "\n",
+    "        The reason we store our dataset in shared variables is to allow\n",
+    "        Theano to copy it into the GPU memory (when code is run on GPU).\n",
+    "        Since copying data into the GPU is slow, copying a minibatch every time\n",
+    "        one is needed (the default behaviour if the data is not in a shared\n",
+    "        variable) would lead to a large decrease in performance.\n",
+    "        \"\"\"\n",
+    "        shared_x = theano.shared(numpy.asarray(data_x,\n",
+    "                                               dtype=theano.config.floatX), borrow=borrow)\n",
+    "        shared_y = theano.shared(numpy.asarray(data_y,\n",
+    "                                               dtype=theano.config.floatX),\n",
+    "                                 borrow=borrow)\n",
+    "        # When storing data on the GPU it has to be stored as floats,\n",
+    "        # therefore we will store the labels as ``floatX`` as well\n",
+    "        # (``shared_y`` does exactly that). But during our computations\n",
+    "        # we need them as ints (we use labels as indices, and if they are\n",
+    "        # floats it doesn't make sense), therefore instead of returning\n",
+    "        # ``shared_y`` we will have to cast it to int. This little hack\n",
+    "        # lets us get around this issue.\n",
+    "        return shared_x, T.cast(shared_y, 'int32')\n",
+    "    #-----------------------------------------------------------------\n",
+    "    train_set_x, train_set_y = shared_dataset(train_vecs, train_tags)\n",
+    "    valid_set_x, valid_set_y = shared_dataset(dev_vecs, dev_tags)\n",
+    "    test_set_x, test_set_y = shared_dataset(test_vecs, test_tags)\n",
+    "\n",
+    "    rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),\n",
+    "            (test_set_x, test_set_y)]\n",
+    "    return rval\n",
+    "\n",
+    "#rval=load_data(train_vecs, train_tags)\n",
+    "#print rval\n",
+    "\n",
+    "\n",
+    "class LogisticRegression(object):\n",
+    "    \"\"\"Multi-class Logistic Regression Class\n",
+    "\n",
+    "    The logistic regression is fully described by a weight matrix :math:`W`\n",
+    "    and bias vector :math:`b`. Classification is done by projecting data\n",
+    "    points onto a set of hyperplanes, the distance to which is used to\n",
+    "    determine a class membership probability.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    def __init__(self, input, n_in, n_out):\n",
+    "        \"\"\" Initialize the parameters of the logistic regression\n",
+    "\n",
+    "        :type input: theano.tensor.TensorType\n",
+    "        :param input: symbolic variable that describes the input of the\n",
+    "                      architecture (one minibatch)\n",
+    "\n",
+    "        :type n_in: int\n",
+    "        :param n_in: number of input units, the dimension of the space in\n",
+    "                     which the datapoints lie\n",
+    "\n",
+    "        :type n_out: int\n",
+    "        :param n_out: number of output units, the dimension of the space in\n",
+    "                      which the labels lie\n",
+    "\n",
+    "        \"\"\"\n",
+    "        # start-snippet-1\n",
+    "        # initialize with 0 the weights W as a matrix of shape (n_in, n_out)\n",
+    "        self.W = theano.shared(\n",
+    "            value=numpy.zeros(\n",
+    "                (n_in, n_out),\n",
+    "                dtype=theano.config.floatX\n",
+    "            ),\n",
+    "            name='W',\n",
+    "            borrow=True\n",
+    "        )\n",
+    "        # initialize the biases b as a vector of n_out 0s\n",
+    "        self.b = theano.shared(\n",
+    "            value=numpy.zeros(\n",
+    "                (n_out,),\n",
+    "                dtype=theano.config.floatX\n",
+    "            ),\n",
+    "            name='b',\n",
+    "            borrow=True\n",
+    "        )\n",
+    "\n",
+    "        # symbolic expression for computing the matrix of class-membership\n",
+    "        # probabilities\n",
+    "        # Where:\n",
+    "        # W is a matrix where column-k represents the separation hyperplane for\n",
+    "        # class-k\n",
+    "        # x is a matrix where row-j represents input training sample-j\n",
+    "        # b is a vector where element-k represents the free parameter of\n",
+    "        # hyperplane-k\n",
+    "        self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)\n",
+    "\n",
+    "        # symbolic description of how to compute the prediction as the class\n",
+    "        # whose probability is maximal\n",
+    "        self.y_pred = T.argmax(self.p_y_given_x, axis=1)\n",
+    "        # end-snippet-1\n",
+    "\n",
+    "        # parameters of the model\n",
+    "        self.params = [self.W, self.b]\n",
+    "\n",
+    "        # keep track of model input\n",
+    "        self.input = input\n",
+    "\n",
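+    "    # Aside (added note): if `input` is a (batch_size, n_in) matrix, then\n",
+    "    # T.dot(input, self.W) + self.b is (batch_size, n_out); softmax is applied\n",
+    "    # row-wise, so p_y_given_x holds one probability distribution per example,\n",
+    "    # and argmax(..., axis=1) picks the most probable class for each example.\n",
+    "\n",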
+    "    def negative_log_likelihood(self, y):\n",
+    "        \"\"\"Return the mean of the negative log-likelihood of the prediction\n",
+    "        of this model under a given target distribution.\n",
+    "\n",
+    "        .. math::\n",
+    "\n",
+    "            \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D}) =\n",
+    "            \\frac{1}{|\\mathcal{D}|} \\sum_{i=0}^{|\\mathcal{D}|}\n",
+    "                \\log(P(Y=y^{(i)}|x^{(i)}, W,b)) \\\\\n",
+    "            \\ell (\\theta=\\{W,b\\}, \\mathcal{D}) = - \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D})\n",
+    "\n",
+    "        :type y: theano.tensor.TensorType\n",
+    "        :param y: corresponds to a vector that gives for each example the\n",
+    "                  correct label\n",
+    "\n",
+    "        Note: we use the mean instead of the sum so that\n",
+    "        the learning rate is less dependent on the batch size\n",
+    "        \"\"\"\n",
+    "        # start-snippet-2\n",
+    "        # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n",
+    "        # the number of examples (call it n) in the minibatch.\n",
+    "        # T.arange(y.shape[0]) is a symbolic vector which will contain\n",
+    "        # [0,1,2,... n-1]. T.log(self.p_y_given_x) is a matrix of\n",
+    "        # Log-Probabilities (call it LP) with one row per example and\n",
+    "        # one column per class. LP[T.arange(y.shape[0]),y] is a vector\n",
+    "        # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n",
+    "        # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n",
+    "        # the mean (across minibatch examples) of the elements in v,\n",
+    "        # i.e., the mean log-likelihood across the minibatch.\n",
+    "        return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])\n",
+    "        # end-snippet-2\n",
+    "\n",
+    "    def errors(self, y):\n",
+    "        \"\"\"Return a float representing the number of errors in the minibatch\n",
+    "        over the total number of examples of the minibatch; zero-one\n",
+    "        loss over the size of the minibatch\n",
+    "\n",
+    "        :type y: theano.tensor.TensorType\n",
+    "        :param y: corresponds to a vector that gives for each example the\n",
+    "                  correct label\n",
+    "        \"\"\"\n",
+    "\n",
+    "        # check if y has the same dimension as y_pred\n",
+    "        if y.ndim != self.y_pred.ndim:\n",
+    "            raise TypeError(\n",
+    "                'y should have the same shape as self.y_pred',\n",
+    "                ('y', y.type, 'y_pred', self.y_pred.type)\n",
+    "            )\n",
+    "        # check if y is of the correct datatype\n",
+    "        if y.dtype.startswith('int'):\n",
+    "            # the T.neq operator returns a vector of 0s and 1s, where 1\n",
+    "            # represents a mistake in prediction\n",
+    "            return T.mean(T.neq(self.y_pred, y))\n",
+    "        else:\n",
+    "            raise NotImplementedError()\n",
+    "\n",
+    "\n",
+    "def sgd_optimization(learning_rate=0.13, n_epochs=1000,\n",
+    "                     batch_size=600):\n",
+    "    \"\"\"\n",
+    "    Demonstrate stochastic gradient descent optimization of a log-linear\n",
+    "    model.\n",
+    "\n",
+    "    The original Theano tutorial demonstrates this on MNIST; here we run it\n",
+    "    on the IMDB bag-of-words vectors built above.\n",
+    "\n",
+    "    :type learning_rate: float\n",
+    "    :param learning_rate: learning rate used (factor for the stochastic\n",
+    "                          gradient)\n",
+    "\n",
+    "    :type n_epochs: int\n",
+    "    :param n_epochs: maximal number of epochs to run the optimizer\n",
+    "\n",
+    "    \"\"\"\n",
+    "    datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n",
+    "    train_set_x, train_set_y = datasets[0]\n",
+    "    valid_set_x, valid_set_y = datasets[1]\n",
+    "    test_set_x, test_set_y = datasets[2]\n",
+    "    # number of training examples (fetch the value: the shared variable's\n",
+    "    # .shape is symbolic, so printing train_set_x.shape[0] directly would\n",
+    "    # print a symbolic expression rather than a number)\n",
+    "    print train_set_x.get_value(borrow=True).shape[0]\n",
+    "    # compute the number of minibatches for training, validation and testing\n",
+    "    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size\n",
+    "    n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size\n",
+    "    n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size\n",
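+    "    # Aside (added note): with the splits above -- 25000 train, 5000 dev,\n",
+    "    # 20000 test examples -- and batch_size=600, integer division gives\n",
+    "    # n_train_batches=41, n_valid_batches=8, n_test_batches=33,\n",
+    "    # which is why the log above reads 'minibatch 41/41'.\n",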
"\n", + " ######################\n", + " # BUILD ACTUAL MODEL #\n", + " ######################\n", + " print '... building the model'\n", + "\n", + " # allocate symbolic variables for the data\n", + " index = T.lscalar() # index to a [mini]batch\n", + "\n", + " # generate symbolic variables for input (x and y represent a\n", + " # minibatch)\n", + " x = T.matrix('x') # data, presented as rasterized images\n", + " y = T.ivector('y') # labels, presented as 1D vector of [int] labels\n", + "\n", + " # construct the logistic regression class\n", + " # Each MNIST image has size 28*28\n", + " # MAM: We change size: n_in=space_len\n", + " classifier = LogisticRegression(input=x, n_in=space_len, n_out=2)\n", + "\n", + " # the cost we minimize during training is the negative log likelihood of\n", + " # the model in symbolic format\n", + " cost = classifier.negative_log_likelihood(y)\n", + "\n", + " # compiling a Theano function that computes the mistakes that are made by\n", + " # the model on a minibatch\n", + " test_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: test_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: test_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " validate_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: valid_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: valid_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " # compute the gradient of cost with respect to theta = (W,b)\n", + " g_W = T.grad(cost=cost, wrt=classifier.W)\n", + " g_b = T.grad(cost=cost, wrt=classifier.b)\n", + "\n", + " # start-snippet-3\n", + " # specify how to update the parameters of the model as a list of\n", + " # (variable, update expression) pairs.\n", + " updates = [(classifier.W, classifier.W - learning_rate * g_W),\n", + " (classifier.b, classifier.b - learning_rate * g_b)]\n", + "\n", + " # compiling a Theano function `train_model` that returns the cost, but in\n", + " # the same time updates the parameter of the model based on the rules\n", + " # defined in `updates`\n", + " train_model = theano.function(\n", + " inputs=[index],\n", + " outputs=cost,\n", + " updates=updates,\n", + " givens={\n", + " x: train_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: train_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + " # end-snippet-3\n", + "\n", + " ###############\n", + " # TRAIN MODEL #\n", + " ###############\n", + " print '... 
+    "    ###############\n",
+    "    # TRAIN MODEL #\n",
+    "    ###############\n",
+    "    print '... training the model'\n",
+    "    # early-stopping parameters\n",
+    "    patience = 5000  # look at this many examples regardless\n",
+    "    patience_increase = 2  # wait this much longer when a new best is\n",
+    "                           # found\n",
+    "    improvement_threshold = 0.995  # a relative improvement of this much is\n",
+    "                                   # considered significant\n",
+    "    validation_frequency = min(n_train_batches, patience / 2)\n",
+    "                                  # go through this many\n",
+    "                                  # minibatches before checking the network\n",
+    "                                  # on the validation set; in this case we\n",
+    "                                  # check every epoch\n",
+    "\n",
+    "    best_validation_loss = numpy.inf\n",
+    "    test_score = 0.\n",
+    "    start_time = timeit.default_timer()\n",
+    "\n",
+    "    done_looping = False\n",
+    "    epoch = 0\n",
+    "    while (epoch < n_epochs) and (not done_looping):\n",
+    "        epoch = epoch + 1\n",
+    "        for minibatch_index in xrange(n_train_batches):\n",
+    "\n",
+    "            minibatch_avg_cost = train_model(minibatch_index)\n",
+    "            # iteration number\n",
+    "            iter = (epoch - 1) * n_train_batches + minibatch_index\n",
+    "\n",
+    "            if (iter + 1) % validation_frequency == 0:\n",
+    "                # compute zero-one loss on validation set\n",
+    "                validation_losses = [validate_model(i)\n",
+    "                                     for i in xrange(n_valid_batches)]\n",
+    "                this_validation_loss = numpy.mean(validation_losses)\n",
+    "\n",
+    "                print(\n",
+    "                    'epoch %i, minibatch %i/%i, validation error %f %%' %\n",
+    "                    (\n",
+    "                        epoch,\n",
+    "                        minibatch_index + 1,\n",
+    "                        n_train_batches,\n",
+    "                        this_validation_loss * 100.\n",
+    "                    )\n",
+    "                )\n",
+    "\n",
+    "                # if we got the best validation score until now\n",
+    "                if this_validation_loss < best_validation_loss:\n",
+    "                    # improve patience if the loss improvement is good enough\n",
+    "                    if this_validation_loss < best_validation_loss * \\\n",
+    "                            improvement_threshold:\n",
+    "                        patience = max(patience, iter * patience_increase)\n",
+    "\n",
+    "                    best_validation_loss = this_validation_loss\n",
+    "                    # test it on the test set\n",
+    "\n",
+    "                    test_losses = [test_model(i)\n",
+    "                                   for i in xrange(n_test_batches)]\n",
+    "                    test_score = numpy.mean(test_losses)\n",
+    "\n",
+    "                    print(\n",
+    "                        (\n",
+    "                            '     epoch %i, minibatch %i/%i, test error of'\n",
+    "                            ' best model %f %%'\n",
+    "                        ) %\n",
+    "                        (\n",
+    "                            epoch,\n",
+    "                            minibatch_index + 1,\n",
+    "                            n_train_batches,\n",
+    "                            test_score * 100.\n",
+    "                        )\n",
+    "                    )\n",
+    "\n",
+    "                    # save the best model (binary mode for cPickle)\n",
+    "                    with open('best_model.pkl', 'wb') as f:\n",
+    "                        cPickle.dump(classifier, f)\n",
+    "\n",
+    "            if patience <= iter:\n",
+    "                done_looping = True\n",
+    "                break\n",
+    "\n",
+    "    end_time = timeit.default_timer()\n",
+    "    print(\n",
+    "        (\n",
+    "            'Optimization complete with best validation score of %f %%, '\n",
+    "            'with test performance %f %%'\n",
+    "        )\n",
+    "        % (best_validation_loss * 100., test_score * 100.)\n",
+    "    )\n",
+    "    print 'The code ran for %d epochs, with %f epochs/sec' % (\n",
+    "        epoch, 1. * epoch / (end_time - start_time))\n",
+    "    print ('The code for file ' +\n",
+    "           'best_model.pkl' +\n",
+    "           ' ran for %.1fs' % ((end_time - start_time)))\n",
+    "\n",
+    "\n",
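+    "# Aside (added note): `patience` above is measured in minibatch iterations,\n",
+    "# not epochs. Whenever the validation error improves by more than the\n",
+    "# relative improvement_threshold, the deadline is pushed out to twice the\n",
+    "# current iteration (max(patience, iter * patience_increase)), so training\n",
+    "# continues as long as real progress is still being made.\n",
+    "\n",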
+    "def predict():\n",
+    "    \"\"\"\n",
+    "    An example of how to load a trained model and use it\n",
+    "    to predict labels.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    # load the saved model (binary mode for cPickle)\n",
+    "    classifier = cPickle.load(open('best_model.pkl', 'rb'))\n",
+    "\n",
+    "    # compile a predictor function\n",
+    "    predict_model = theano.function(\n",
+    "        inputs=[classifier.input],\n",
+    "        outputs=classifier.y_pred)\n",
+    "\n",
+    "    # We can test it on some examples from the test set\n",
+    "    datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n",
+    "    #train_set_x, train_set_y = datasets[0]\n",
+    "    #valid_set_x, valid_set_y = datasets[1]\n",
+    "    test_set_x, test_set_y = datasets[2]\n",
+    "    test_set_x = test_set_x.get_value()\n",
+    "    predicted_values = predict_model(test_set_x[:10])\n",
+    "    print (\"Predicted values for the first 10 examples in test set:\")\n",
+    "    print predicted_values\n",
+    "\n",
+    "\n",
+    "if __name__ == '__main__':\n",
+    "    train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n",
+    "    word_space=get_space(train_data)\n",
+    "    # keep only words that occur more than 600 times in the training data\n",
+    "    word_space={w: word_space[w] for w in word_space if word_space[w] > 600}\n",
+    "    # re-index: get_space returns frequencies, but get_sparse_vec needs a\n",
+    "    # unique vector position per word, so map each surviving word to one\n",
+    "    word_space={w: i for i, w in enumerate(word_space)}\n",
+    "    space_len=len(word_space)\n",
+    "    print(\"space_len: %d\" % space_len)\n",
+    "    train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n",
+    "    test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n",
+    "    dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n",
+    "    #del word_space\n",
+    "    #---------------------------\n",
+    "    train_vecs=np.array(train_vecs)\n",
+    "    train_tags=np.array(train_tags)\n",
+    "    dev_vecs=np.array(dev_vecs)\n",
+    "    dev_tags=np.array(dev_tags)\n",
+    "    test_vecs=np.array(test_vecs)\n",
+    "    test_tags=np.array(test_tags)\n",
+    "    #del train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n",
+    "    print('train_vecs.shape: %d, %d' % train_vecs.shape)\n",
+    "    print('dev_vecs.shape: %d, %d' % dev_vecs.shape)\n",
+    "    print('test_vecs.shape: %d, %d' % test_vecs.shape)\n",
+    "    sgd_optimization()\n",
+    "    #------------------------------------------------------\n",
+    "    print('Now predicting...')\n",
+    "    predict()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.10"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\xa8\x82;\xd2\xa7\x82\xbb\xf4\xb9\xa7\xbc\xe6\xb9\xa7<\x1f;\x1e=\x0b;\x1e\xbd\x80\r[;\r\r[\xbbyq\x8b\xbc\x94q\x8b<\x1a\xb5\x16\xbb\x19\xb6\x16;\xbf\xdd\xdb<\x91\xdd\xdb\xbcW\x86\x0c\xbeP\x86\x0c>\'\xcc@\xbe)\xcc@>8n?\xbc\xf3n?<\x01c#<\xb6b#\xbcs/\xbc\xb9\x976\xbc9\xf7[K<\xb2[K\xbc\xdff\xf4=\xd5f\xf4\xbd\xd0t\x19\xbd\xebt\x19=6\xaa\xf0\xbch\xaa\xf0<\x99\xa5\xb8;/\xa5\xb8\xbb\x05\xc9\xbd;\x94\xc8\xbd\xbbh\xbeq\xbdz\xbeq=\xf8\x80\xe8\xbd\x08\x81\xe8=\x93b\xe5=\x84b\xe5\xbd\x12\xf9\x86>\x14\xf9\x86\xbe\x1c\x8b5\xbd:\x8b5=6?\x86=$?\x86\xbd\xff\x9f\xdf\xbd\x05\xa0\xdf=\x15:\x96=\x1d:\x96\xbd\x1f@r=\x0e@r\xbd\xbb\x93\xfc\xbbE\x94\xfc;\xa3A#\xbe\xa4A#>\xd7v8\xbb\xa1x8;\xf5\xfc\xbe;\xaa\xfc\xbe\xbb\n\x11\x16=\x01\x11\x16\xbd\x0e\x96\xd7=\n\x96\xd7\xbd8\'K\xbd;\'K=^\xb25>^\xb25\xbe\xe8\xaas\xbb\xc9\xaas;\x81\x9b/<\x1e\x9b/\xbcQ\x88A\xbdc\x88A=e\x1c\xe8:\xb4\x1a\xe8\xba\x94\xd4\x93\xbd\x9e\xd4\x93=\x87\xce\t=o\xce\t\xbd\x86\xbd[={\xbd[\xbd\xf7\x8eG;\xa6\x8dG\xbb\x94/\xd2=\x8e/\xd2\xbd\xaf,\xaa=\xa9,\xaa\xbd\xb9\x0b\x1e<\xb7\x0b\x1e\xbc(\x84A=)\x84A\xbd\x84\xea\xc7=l\xea\xc7\xbd\x8e~\x0c\xbe\x95~\x0c>F\x15\x12=B\x15\x12\xbd\x80\xc2\xa2\xbd\x8e\xc2\xa2=\xd7`3\xbc\xf8`3<\xbbC\x98=\xb1C\x98\xbd\xbd\x121\xbe\xc0\x121>\xb3\xbbG\xbd\xcc\xbbG=\xe3\xaf"\xbd\xf3\xaf"=\xfb\xfc#>\xf8\xfc#\xbe*\xca\xbf\xbd7\xca\xbf=\x90;\xd2=\x8a;\xd2\xbd\xe4\x0f\x1a<\xd6\x0f\x1a\xbcTA\xe3<\x1fA\xe3\xbc\nR\x8f\xbd\x12R\x8f=b{\xe7xu\x16\xbe\x84\x19\xdc\xbd}\x19\xdc=\xa77\xd2\xde\x94\x1f\xbe\xa9&w\xbd\xb3&w=\xdbz5=\xdaz5\xbd`\x8bP=V\x8bP\xbd\xe5\xeb\xbe\xbe\xe1\xeb\xbe>\x15\x02B\xbe\x1c\x02B>J\xc7~=E\xc7~\xbd\xb5"\x94\xbd\xb7"\x94=E\xe3\x12=A\xe3\x12\xbd\xa0\xd8[=\xa0\xd8[\xbd\xc5\n)=\xb3\n)\xbd\xbeY\xe2=\xa6Y\xe2\xbdZ\xda\x07\xbb\xa3\xdb\x07;Kj\x12=Cj\x12\xbd\x94\x9c\r=\x9d\x9c\r\xbd\xdf\'\xc8=\xe2\'\xc8\xbd\x97\xfae=\x8a\xfae\xbd^X\xaa\xbdlX\xaa=-d\x19\xbdAd\x19=\x90\xdf\x0f\xbe\x98\xdf\x0f>7\xf7\x94>;\xf7\x94\xbe\xabM\x91=\xa9M\x91\xbd\x06\x89<\xb7\x16\xcb<7\xcf\xe8\xd0=\xc6\xe8\xd0\xbd^\xea\x8e\xbcv\xea\x8e<\x16fw;Qew\xbb\xd0\xb3\x7f\xbc\xb8\xb3\x7f<\xd2j\xb1\xbd\xd8j\xb1=\x00\x7f\xab\xba\x12\x82\xab:\x9c;R=\x8f;R\xbd\x17\xe7N=\x13\xe7N\xbd\x1d\xf5\xab;l\xf4\xab\xbb\x8auu<"uu\xbc\xda@)<\xae@)\xbc\xcf\xa4\xcb\xbd\xde\xa4\xcb=\x1a\xaf\x01>\x15\xaf\x01\xbe\t\xfd\x17\xbe\t\
xfd\x17>\x0c@\xaa=\xfd?\xaa\xbd\xe1\xb2\'>\xda\xb2\'\xbe\xc2\xdc\x90=\xbc\xdc\x90\xbdn#n\xbcx#n\xbdW\xcd>=\x81L\x8f:\xc8O\xbe\xcf?\xc0=\xc9?\xc0\xbd\x08\xd52\xbc\x1b\xd52<\xdf^\xe6=\xe2^\xe6\xbd\x14\xba\xd7\xbd"\xba\xd7=b$\x98=A$\x98\xbd\xa7f\x0b>\xa7f\x0b\xbeU\x90\n\xbbR\x91\n;s\xb7\x02=^\xb7\x02\xbd\xa8;t=\xaa;t\xbd\x94\xbc\\\xbd\x95\xbc\\=f\x19\x15\xbdr\x19\x15=(|\xa6\xbd.|\xa6=\xfd\xda\xf0\xbd\x0f\xdb\xf0=\xb0\x9bj>\xae\x9bj\xbe) \xa0\xbd2 \xa0=\x95\x04\xdc\xbc\xb5\x04\xdcp\xf4\x07\xbe\x96\xb6\x1e\xbd\x96\xb6\x1e=\xd2*\xda\xbd\xd2*\xda=Y\x07\x10>W\x07\x10\xbe\x10[\x17\xbd\x1c[\x17=\x00\xe5;\xbb\xd8\xe5;;\xcc\xce\x86=\xce\xce\x86\xbd\xe6\xed\xb98p\xe1\xb9\xb8*\xe4\xf49\xf7\xdd\xf4\xb9[\xce\x05\xbcC\xce\x05c\xe7\xa6;H\xe7\xa6\xbb\xbb\x1b\xd9=\xbe\x1b\xd9\xbdng\x15\xbdwg\x15=\x15\xaa\x86\xbd#\xaa\x86=\x03\x0f\x9b\xbb\x15\x0f\x9b;n\x81\xa8\x83\x9b\x88m\x9c\x1d\xbe\xb7\x11[\xbd\xd7\x11[=$8E=&8E\xbd\t9N\xbdC9N=u\x7f~=a\x7f~\xbd\x01\xa0\xde<\xd3\x9f\xde\xbcg\xcd\xb9;%\xcd\xb9\xbb&;\x01>\';\x01\xbe\xa4\xd0\xaa=\x9a\xd0\xaa\xbd\xe8y\x89=\xdey\x89\xbd\xd0p\x08\xbe\xd0p\x08>\x1f\xa3E\xbe\'\xa3E>T\xd1+=R\xd1+\xbd\xb9\x98\x02=\xa9\x98\x02\xbd\x94\xa1\x95\xbd\x97\xa1\x95=\xbaY\xc9\xbd\xb6Y\xc9=\xa7\x17\t\xbe\xa7\x17\t>\x8f\xd0\x14=\x85\xd0\x14\xbd6o\x91>3o\x91\xbeT\xc8\xdb<=\xc8\xdb\xbc\xed\x80\xa7<\xdb\x80\xa7\xbct\x0bk\xbd\x9e\x0bk=Z\xaa\xc3\xbdf\xaa\xc3=\xdd|@\xbd\xe4|@=\xd6\x8a(=\xd3\x8a(\xbd\x0fM\xf2\xbd\x15M\xf2=[7\xc7\xbda7\xc7=K\x7f\xf0\xbdJ\x7f\xf0=\x86S\xdd=\x84S\xdd\xbd\xf3\xf4\x1c\xbc\x05\xf5\x1c\xcf0\xbd\xe9\xfe\x82=\xe6\xfe\x82\xbd\x03\x042=\xff\x032\xbd\x1a\xcfV=\x1e\xcfV\xbd:\x96\x17>=\x96\x17\xbe\xe4|\xbf\xbd\xea|\xbf=\xc2\xc7n=\xc6\xc7n\xbd\x056\x9e\xbd\x1a6\x9e=8\x8d\x0b\xbdL\x8d\x0b=\xd0]\x16>\xcf]\x16\xbe`\xe1\x14;\x11\xe1\x14\xbb\xf7\x03|:\xf3\xff{\xbaF\x02\x89\xbdN\x02\x89=mY3\x01\xe3;}\x00\xe3\xbb|\xc9N=~\xc9N\xbd\x0e\xe8X<\xb8\xe7X\xbc\xd3\xab\x15\xbe\xd0\xab\x15>%\xa5\xac\xbd0\xa5\xac=\xa4\xed\x07;;\xed\x07\xbb99\xb4\xbd;9\xb4=[\x8cr;J\x8br\xbb\xaeX7\xbd\xb0X7=\x7f\x10\x9d>\x83\x10\x9d\xbevs\xf2\xbd~s\xf2=U\x98f=M\x98f\xbd\x92\xe4\x88\xbe\x92\xe4\x88>z\x0fn\xbc\xa6\x0fn<*\xd2k;\xce\xd1k\xbb\xbf\x97^\xbb\x1b\x98^;\x00\x00\x00\x00\x00\x00\x00\x00\xfa\xd5\xc0<\xc9\xd5\xc0\xbc\x0c\xb4\x92=\x11\xb4\x92\xbdd\x81\xfe\xbb\xff\x81\xfe;\xdc\x15\x8f:\\\x14\x8f\xba\x00\xe3-\xbd\x08\xe3-=3\x8c\xef\xbd7\x8c\xef=c\xd0\xb7\xbdc\xd0\xb7=\xb6:\x83\xbd\xb1:\x83=\x9a\x8dK<\x8b\x8dK\xbc\xcfT\x1e\xbe\xd2T\x1e>G\x1a\x0b<\n\x1a\x0b\xbc!?!>\x1b?!\xbe\xd1M\x08>\xccM\x08\xbe\x93R\x1b\xbe\x8eR\x1b>j\x05\x91=h\x05\x91\xbd\x13\xfc\x1c\xbe\x17\xfc\x1c>n\xa7\x94=k\xa7\x94\xbd:\x15\x8e\xbc9\x15\x8e<\xa7\x130\xf2V\x9d\xbd\xf2V\x9d=\x88\xea\x95>\x87\xea\x95\xbe\xfdEN>\xffEN\xbeD%B\xbdL%B=\xfe7\x07=\xea7\x07\xbdc\xd3{\xbd]\xd3{=0Z\xf0=1Z\xf0\xbd\x8cX"=\x91X"\xbd\x1b\x05\xd1\xbd)\x05\xd1=gsn\xbdysn=3=V\xbd1=V=\xb6A,>\xb0A,\xbe\xa9\x99\xa3=\xa3\x99\xa3\xbd\xbbN\xfc\xbc\xadN\xfc<\x9d\x0b<>\x9b\x0b<\xbeZ\xfc\x82=Q\xfc\x82\xbd\x1eHF\xbd8HF=\xff\xaa;\xbd\xfd\xaa;=&\xd7\x85>)\xd7\x85\xbe[D\xb0>_D\xb0\xbe\x13\x03P\xbe\x10\x03P>*\xa5G\xbe+\xa5G>\xab\xac\x83=\xab\xac\x83\xbd&\x931>\x1e\x931\xbe\x8f\x10\xb3=\x89\x10\xb3\xbd/\xb5F\xbe0\xb5F>JGd\xbeLGd>\xda\\Z\xbd\xe2\\Z=\xbd\xf5\x03\xbe\xbd\xf5\x03>\x00\x00\x00\x00\x00\x00\x00\x00U#\xc1<7#\xc1\xbc\x83\xb6\x82\xbe\x8b\xb6\x82>G\x19\x88=C\x19\x88\xbd\xccD\n\xbe\xcdD\n>\xefo\xa8=\xe6o\xa8\xbd\xdb\xae\xc4<\xda\xae\xc4\xbcZ\x13\x90\xbeW\x13\x90>\xb3\xcew\xbd\xb5\xcew=\xe0\xd2\xbc\xbc\xe4\xd2\xbc<\x00\x00\x00\x00\x00\x00\x00\x00\xc0\xaa\x17>\xbf\xaa\x17\xbe)\xb5\xa1\xbe,\xb5\xa
1>_\xf3O\xbc[\xf3O<\xd8K\xe8=\xdbK\xe8\xbd;\xea8=7\xea8\xbd\x7f^{\xbe\x85^{>\x8e\xc9\x05\xbd\x9e\xc9\x05=\x9d\x88\x1f>\x99\x88\x1f\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xe1\x1e\xbb\xbd\xe2\x1e\xbb=P2\x8a\xbdR2\x8a=W\xb5\xc9=Y\xb5\xc9\xbd@\xb8z=B\xb8z\xbd\xec\xcf\xdb\xbd\xf0\xcf\xdb=\xc6\'\xbe=\xaf\'\xbe\xbd\xe0\xae\xb0>\xdf\xae\xb0\xbe\xaa9\x15=\xa29\x15\xbd\x00\x00\x00\x00\x00\x00\x00\x00D3\x1b\xbeI3\x1b>\xe2b">\xe0b"\xbe\\\x0f\x8a;,\x0f\x8a\xbb\xac\xf6\xad;\x93\xf6\xad\xbbPW\xad\xbctW\xad<~T\xb2\xbd\x86T\xb2=-\x18:<\xfb\x17:\xbc\x171J>\x111J\xbe\xfae^\xbe\xf3e^>\xff\xb3\x00>\x00\xb4\x00\xbe\x01\xe5\n\xbe\x03\xe5\n>\xa0\x06c\xbe\x9f\x06c>\x1cE\xda<#E\xda\xbc\xbf\xddZ\xbd\xb3\xddZ=\x024\xc3<\xfb3\xc3\xbc|\x08\x7f>\x80\x08\x7f\xbe\xc8\xa5J\xbc\xf8\xa5J<\xfb\xc1->\xf8\xc1-\xbe(\x1dI=#\x1dI\xbd\xccqd\xbe\xcfqd>H\xff)>I\xff)\xbe\x13\xee\x1f>\x0c\xee\x1f\xbe\x1c\xa9<>\x1d\xa9<\xbeF(F=<(F\xbdE\xab\xb51zz<\x11zz\xbc\x12-\xf2\xbc4-\xf2<\x00\x00\x00\x00\x00\x00\x00\x00\xd8\xc1\xd0\xbd\xda\xc1\xd0=\xe0\xb5t\xbd\xf4\xb5t=t\xb6|=i\xb6|\xbd\x00\x00\x00\x00\x00\x00\x00\x001\xa5\xe2\xbcQ\xa5\xe2+\xdf,\xbe\xb9\x18\xea\xbd\xbd\x18\xea=FI\\\xbeHI\\>\xf0\xa6n>\xef\xa6n\xbet\x9c\r>s\x9c\r\xbeX\xe8\xbe\xbd[\xe8\xbe=U\x95=>P\x95=\xbeT\xf9\x10?R\xf9\x10\xbf\xad\xd1\x18\xbd\xb7\xd1\x18=\x00\xf7\xb2=\xfb\xf6\xb2\xbd\xd9\x99:=\xdd\x99:\xbd\x81\x84\'\xbc\x93\x84\'<\xbe\x1d\x14\xbd\xc8\x1d\x14=\xc2;h<\x94;h\xbc\xa4\xa1\x0e\xbe\xa7\xa1\x0e>\x86"*\xbd\x9c"*=@\t\xd4=.\t\xd4\xbd\xd3\xa5\x84\xbd\xd0\xa5\x84=\x8dq+\xbd\x94q+=\xf0\x82<>\xee\x82<\xbe\xc1\x90%>\xc2\x90%\xbe\xc9o\x08>\xc9o\x08\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xcd\xc8\xc2\xbd\xcb\xc8\xc2=\xfbg\xb6=\xefg\xb6\xbd\x86L\xa1\xbc\x98L\xa1<\x00\x00\x00\x00\x00\x00\x00\x00\x9f\x06)=\x93\x06)\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcb\xeb\xdf\xbd\xc9\xeb\xdf=\x82\xcd!\xbe\x87\xcd!>\xf6\xe2\x1b>\xf0\xe2\x1b\xbe\xab\xd8\\\xbd\xc3\xd8\\=\x00\x00\x00\x00\x00\x00\x00\x00\x84\x0bn=\x84\x0bn\xbdX`\x8d\xbeY`\x8d>\xe7\x94\x8a\xbb\xfe\x94\x8a;\rD\x10>\x08D\x10\xbe&8\xbb\xbd\'8\xbb=\xec\t\xc0>\xec\t\xc0\xbe\xdeC\xe9<\xc8C\xe9\xbcx6\x84\xbew6\x84>\x9d\xdan\xbd\xa9\xdan=\xd6\x1ad<\xc5\x1ad\xbc\xdc\xa2\xc3\xbd\xdc\xa2\xc3=\xc5\x01\x17\xbe\xc9\x01\x17>\xb9Q\xb6=\xb7Q\xb6\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x19\xfd\xde\xbd\x11\xfd\xde={WE=iWE\xbd\x00\x00\x00\x00\x00\x00\x00\x00_)\x17=S)\x17\xbdq9*>l9*\xbe\xf3t\xa7<\xe5t\xa7\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x9dG\xf2=\x94G\xf2\xbd\rH\x89\xbd\x11H\x89=dz\x00>ez\x00\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\xf2\xa3=k\xf2\xa3\xbd\x96z\x1b\xbe\x95z\x1b>\x00\x00\x00\x00\x00\x00\x00\x00F~\x0b?D~\x0b\xbf\x00\x00\x00\x00\x00\x00\x00\x00-\x03\xd7\xbb`\x03\xd7;\xe7\xcd\x96=\xe1\xcd\x96\xbdw\x91o\xbe}\x91o>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1ap\x1f=\x12p\x1f\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00/\xbc\x83=)\xbc\x83\xbd\x00\x00\x00\x00\x00\x00\x00\x00%\xf1\xdf=\x1f\xf1\xdf\xbdA1o\xbdW1o=l\xe1-=n\xe1-\xbdf\x83\x8f>h\x83\x8f\xbe\x90k~\xbd\xa0k~=(\xe1\x1a\xbf(\xe1\x1a?\xfe\xc0\x1f=\xef\xc0\x1f\xbdU\xd24\xbdc\xd24=\xa5\xde\x1c\xbd\xb3\xde\x1c=\x94\xa9X<\x9b\xa9X\xbc\xc8\x91\xba\xbb\xfc\x91\xba;9SY>6SY\xbe\x1b\xb0\xfb\xbd\x1d\xb0\xfb=\xf7\xfc\x05\xbe\xf7\xfc\x05>\xf6h\xe4\xbc\x1ai\xe4<\'}R=&}R\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x84L\xbd+\x84L=9\xe9\x9d>7\xe9\x9d\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00sG\x00>vG\x00\xbeTK@\xbd^K@=\xff;Z\xbd\x0f\xd7\xfdH\xbd\xe1\xfdH=\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x8c\
'\xbe\x98\x8c\'>_\x08t>[\x08t\xbe\x00\x00\x00\x00\x00\x00\x00\x00v\t\xe3\xbcr\t\xe3<\x00\x00\x00\x00\x00\x00\x00\x00i\xd5\x95>i\xd5\x95\xbeW0L\xbeX0L>\x00\x00\x00\x00\x00\x00\x00\x00>T\x92\xbe?T\x92>LZ5>DZ5\xbe\rG\xfc=\x06G\xfc\xbd\n\x14\x14>\x0e\x14\x14\xbe|\xe6\xd8\xbd\x82\xe6\xd8=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\x82\xf3\xba/\x85\xf3:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\xea\x9a=T\xea\x9a\xbd\x0c\xe8\x9d=\t\xe8\x9d\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xbc\x8f\x83=\xb7\x8f\x83\xbd\xd8&n=\xd3&n\xbd\xceb\xa9<\xa2b\xa9\xbc\xbf\xee\xf3\xb9^\xf4\xf39\xca\xd4\xa7\xbc\xde\xd4\xa7<\x93\xb1\xca\xbd\x97\xb1\xca=O\xce8>I\xce8\xbeOr\x84$ \x97=% \x97\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xf2\xc3>\xfe\xf2\xc3\xbeA\x81G=<\x81G\xbd0\x0e\xab\xbd2\x0e\xab=\x81\xe2\x10\xbd\x8e\xe2\x10=\xe9@}>\xe2@}\xbeh\x03t=h\x03t\xbdq2\xa6\xbcx2\xa6<`\x81q=c\x81q\xbd\x93\xca\x94\xbe\x90\xca\x94>\xb6\xe0\xbe\xbd\xbc\xe0\xbe=\xab&\xd2<\x9a&\xd2\xbc#\xf9\xdb>"\xf9\xdb\xbe/\xb5\xee=.\xb5\xee\xbd#"a=\x1c"a\xbd\xecg%<\xabg%\xbc\x00\x00\x00\x00\x00\x00\x00\x00\xaa\x11\xda\xbd\xac\x11\xda=~/G\xbc\x9e/G<\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xba\xfc\xbc\xf0\xba\xfc\x7f\x0b\x16\xbe\x0f\x99|\xbd\x19\x99|=\x9e\xe7\xb4=\x85\xe7\xb4\xbd\x94\x97\x0f\xbc\xbc\x97\x0f<\x00\x00\x00\x00\x00\x00\x00\x00Y[{\xbeW[{>\x00\x00\x00\x00\x00\x00\x00\x00|\xaa\xc4\xbd\x8f\xaa\xc4=\x00\x00\x00\x00\x00\x00\x00\x00$:\x13\xbe/:\x13>\xc2\xaa\x85>\xc3\xaa\x85\xbeI\x96\x11\xbb\x81\x96\x11;\x00\x00\x00\x00\x00\x00\x00\x00\xafI\xeb>\xaeI\xeb\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9c\xf0\xff=\x90\xf0\xff\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x917\xfe;\xaa7\xfe\xbbi\xd6\x82\xbe\xe5\x06\xbe\x00\x00\x00\x00\x00\x00\x00\x00o\x97\xc7=h\x97\xc7\xbd%\nI\xbc@\nI<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00,0\xf8=)0\xf8\xbd\xa7\x0b\xd1=\xa8\x0b\xd1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00n\x9c)>_\x9c)\xbe\xb3\x9c\xa4=\xb3\x9c\xa4\xbd\xdbP\x92=\xd9P\x92\xbd\xb1j\xaa=\xacj\xaa\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xc1\xc2\xed<\xb0\xc2\xed\xbc\x17/\xbc=\x18/\xbc\xbd\x88\xd4\xc9\xbb\x86\xd4\xc9;\x00\x00\x00\x00\x00\x00\x00\x00\x0c-\x14\xbe\x0e-\x14>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9c\xa8\xfb=\xa2\xa8\xfb\xbd\xb8\x0es\xbe\xbb\x0es>\x17\xa4"\xbd\x1a\xa4"=\x04\x9f\x8f=\x01\x9f\x8f\xbd\xc7o/>\xb8o/\xbe\xf7h\xc9\xbd\xf5h\xc9=8\x9d{<,\x9d{\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5j\x02<\xccj\x02\xbc\x00\x00\x00\x00\x00\x00\x00\x00j\xde\xa9\xbc\xa4\xde\xa9<\xe1q\x91\xbb\xecq\x91;\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8b\t@\xbe\x88\t@>\'w\xf9\xbd/w\xf9=\xa5y\xf2;\x13y\xf2\xbb\xe9\xa1%\xbe\xea\xa1%>\x00\x00\x00\x00\x00\x00\x00\x00\xa8]\x0f?\xa7]\x0f\xbf\xd1\x90\x03>\xd3\x90\x03\xbe\x05v\xa9\xbd\x01v\xa9=\x00\x00\x00\x00\x00\x00\x00\x00\x7f\x1f@\xbd\x84\x1f@=M\xf2\x15\xbdK\xf2\x15=\x00\x00\x00\x00\x00\x00\x00\x00\x88\xb6\xcb\xbe\x88\xb6\xcb>\xca\xf8.=\xcb\xf8.\xbd\xbeP\x969nO\x96\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa3R\x8b\xbd\x9bR\x8b=\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd9Y\xbe\xd9\xd9Y>\xa3G"\xbe\xa5G">\x00\x00\x00\x00\x00\x00\x00\x00\x04\xba\xf9\xbc 
\xba\xf9<\x00\x00\x00\x00\x00\x00\x00\x00\xa9c\x86=\x96c\x86\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xab\xb3\xc8<\x8b\xb3\xc8\xbc\xe2\x8d\xc9\xbe\xe2\x8d\xc9>\xe4\x1e\xba=\xe1\x1e\xba\xbd-\x05\xff=\x1b\x05\xff\xbd_\xe6u=P\xe6u\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xa6\x02U=\xa3\x02U\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xeb\x17\x8f=\xeb\x17\x8f\xbd\x88r\x92=wr\x92\xbd\x00\x00\x00\x00\x00\x00\x00\x005\x93=\xbe9\x93=>H\x01\x8c\xbcS\x01\x8c<\xd8\x925>\xdb\x925\xbe\xa1\xef@\xbe\x9f\xef@>f\xb4S\xbdt\xb4S=nSW\xbemSW>Z-T>W-T\xbe9\xfb\xae><\xfb\xae\xbe\x00\x00\x00\x00\x00\x00\x00\x00v\x81(\xbev\x81(>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe1}\x8a>\xde}\x8a\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa3\x90#=\x9e\x90#\xbd\x14\x9b;\xbd\x1a\x9b;=\x00\x00\x00\x00\x00\x00\x00\x00l\xcc\x82\xbc\x90\xcc\x82<\x00SK\xbe\x01SK>\x00\x00\x00\x00\x00\x00\x00\x00\xb2\x0eq>\xaf\x0eq\xbe1\x1f\x17>4\x1f\x17\xbe\xa7z\x8f\xbe\xa9z\x8f>\xcf\x9fH\xbb\xbf\xa0H;\x1b#\xca\xbd\x19#\xca=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x87\xb0n\xbe\x8e\xb0n>TE\x1d>QE\x1d\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9bWy>\x9bWy\xbe\x95(!=\x8f(!\xbd\xbc"\r=\xb1"\r\xbd\xfa\xa5\t\xbf\xf9\xa5\t?\x00\x00\x00\x00\x00\x00\x00\x00{\x15\x14>\x80\x15\x14\xbe\x1e#1=\x00#1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xd1g=\x07\xd2g\xbd\x9e9D>\x989D\xbe\xec\xad\xb8\xbd\xf1\xad\xb8=\xde:F\xbd\x08;F=\xd3em>\xd4em\xbe\xbfi\xb0\xbe\xbei\xb0>&\xd9z\xbe.\xd9z>\xa1+\x10\xbd\x9d+\x10=\xbc\xcc.=\xae\xcc.\xbdd\x11\'?d\x11\'\xbf\x16C\x9b=\x19C\x9b\xbd\xa8\x1e\x18>\xa7\x1e\x18\xbe\x00\x00\x00\x00\x00\x00\x00\x00KH\xe9=5H\xe9\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x957>\xef\x957\xbe\x00\x00\x00\x00\x00\x00\x00\x00B2k=42k\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xcf\x1a\xbe\xe0\xcf\x1a>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nI\x9d\xbd\x0eI\x9d=\x00\x00\x00\x00\x00\x00\x00\x00\x7f\x00\x87;d\x00\x87\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdb\x0cI\xbd\xe5\x0cI=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x02\xf1=\xe5\x02\xf1\xbdX\xb4\x93\xbdb\xb4\x93=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tF\xc6\xba\x81F\xc6:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00U{/\xbd\x9d{/=O\xdc\xe0\xbcX\xdc\xe0<7\xc1\xc0=4\xc1\xc0\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00{\xee\xed=u\xee\xed\xbd0\xe6\xba\xbd7\xe6\xba=\x00\x00\x00\x00\x00\x00\x00\x00\xb5\xdc\xa9=\xb9\xdc\xa9\xbd\xf1]\x07\xbd\xfe]\x07=\x00\x00\x00\x00\x00\x00\x00\x00\x12\x80\xe7>\x11\x80\xe7\xbe\x00\x00\x00\x00\x00\x00\x00\x00o\x14\x1c\xber\x14\x1c>N\xac\xf0\xbdP\xac\xf0=S\xf5\x0b=J\xf5\x0b\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb7\xa2\x02\xbe\xb3\xa2\x02>\x00\x00\x00\x00\x00\x00\x00\x00.$v="$v\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xd2\xaa=\xeb\xd2\xaa\xbdD\x0e\xbd>E\x0e\xbd\xbeO\xd7z\xbdY\xd7z=\x00\x00\x00\x00\x00\x00\x00\x00\x12\x02\x14\xbb\xec\x01\x14;\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc4\xe5\x05=\xab\xe5\x05\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xd6\xb6\x95\xbd\xdc\xb6\x95=\x00\x00\x00\x00\x00\x00\x00\x00\xcca\xd0
=\xc7a\xd0\xbde\xb8\xce=Y\xb8\xce\xbd\x00\x00\x00\x00\x00\x00\x00\x00%\xc8N\xbd5\xc8N=#\xf8\xe0\xbd5\xf8\xe0=\x18\xff\xe2\xbd\x17\xff\xe2=\xee\xe3=\xbd\xfc\xe3==\x00\x00\x00\x00\x00\x00\x00\x00\xcf,\xb6>\xd6,\xb6\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cA1\xbe\x0cA1>\xccg\x07>\xceg\x07\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xb8\x81=\xfa\xb7\x81\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb6c6\xbd\xa9c6=O\xe3Z\xbdv\xe3Z=\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x89\x8b=\x11\x89\x8b\xbd\xe5\x97\x9e=\xe1\x97\x9e\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x9b\x03\xf1=\x9d\x03\xf1\xbdEw\x93=9w\x93\xbd\xeb\xff\n\xbe\xf8\xff\n>\x00\x00\x00\x00\x00\x00\x00\x00k(\xbe\xbb\x80(\xbe;K\'\xb9=L\'\xb9\xbd=\xf6\x89\xbd8\xf6\x89=\x00\x00\x00\x00\x00\x00\x00\x00\x88\xf9\\>\x81\xf9\\\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbfQ\x99:%O\x99\xba\xe0\xbc\n>\xdf\xbc\n\xbe\xbc\x98U\xbd\xb4\x98U=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00cD\'>`D\'\xbeJ\xa7@>L\xa7@\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00f\xb1w=_\xb1w\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xee}\xbd\xe9\xee}=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]4\x12=U4\x12\xbdh\x06\xbc\xbct\x06\xbc<\x00\x00\x00\x00\x00\x00\x00\x00\xa2\xdf\xbb\xbd\x93\xdf\xbb=\x00\x00\x00\x00\x00\x00\x00\x00\xdf\x979=\xda\x979\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xe3\x06=\xf7\xe2\x06\xbd\xff+\xb4=\xf3+\xb4\xbd\x00\x00\x00\x00\x00\x00\x00\x00H\x98\x18\xbdV\x98\x18=\x00\x00\x00\x00\x00\x00\x00\x00G-6>H-6\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xddd\xe07NE\xe0\xb7\xd4bk\xbd\xe4bk=\x8a\xce\xa5\xbd\x95\xce\xa5=\x00\x00\x00\x00\x00\x00\x00\x00\x04\xab\x0c\xbd\x0b\xab\x0c=\x00\x00\x00\x00\x00\x00\x00\x00K\x88I\xbeN\x88I>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc99M\xbe\xc79M>\xb6K\x93\xbc\xb6K\x93<\ttk>\x02tk\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9f\x81/\xbe\xa7\x81/>\x97\xc4\xe8<\x92\xc4\xe8\xbc\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xd6\xc5\xbd\xb5\xd6\xc5=\xff"\xa2=\xfa"\xa2\xbd\xe1\xf2\x88=\xd6\xf2\x88\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x1d\xfe\x8c=\x1a\xfe\x8c\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x81r\x81\xbe\x81r\x81>\x00\x00\x00\x00\x00\x00\x00\x00(\x15@\xbd6\x15@=1s;=\x1es;\xbdB\xaf\xa1=A\xaf\xa1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xaa\x97=\xdf\xaa\x97\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x11\xcf\xe4<\xf5\xce\xe4\xbcd\x0b\x1d\xbdq\x0b\x1d=\x81\xac\x11=x\xac\x11\xbdx\xdd\xa2\xbd~\xdd\xa2=\x00\x00\x00\x00\x00\x00\x00\x00\xc3\xb6R=\xb0\xb6R\xbd\x05\xe5\xa8\xbcK\xe5\xa8<\x00\x00\x00\x00\x00\x00\x00\x00\xe2\xce\xdd=\xeb\xce\xdd\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5d\r?\xf5d\r\xbf\x00\x00\x00\x00\x00\x00\x00\x00z\xb0?>z\xb0?\xbe\x00\x00\x00\x00\x00\x00\x00\x00p\xdf\x15\xber\xdf\x15>\x00\x00\x00\x00\x00\x00\x00\x00\xf3[\x93<\xeb[\x93\xbc\x16 \\>\x18 
\\\xbe\xa1sb>\x9esb\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xdeN\xa8=\xd7N\xa8\xbdb\x86\xd2>b\x86\xd2\xbe\xba\xfb\xf8;\x8b\xfb\xf8\xbb\xc8\x9d\x89\xbd\xc5\x9d\x89=\x00\x00\x00\x00\x00\x00\x00\x00\xf9\xc6\xc5\xbe\xf2\xc6\xc5>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbf\x04\x9d>\xbf\x04\x9d\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00s\x1e{>s\x1e{\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00{\xcc\xf6>v\xcc\xf6\xbe>\xcb\x01<\x8c\xca\x01\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbb\x9f\'\xbd\xc0\x9f\'=\xd9\xf7%=\xcf\xf7%\xbd4GU>(GU\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbbz7\xbe\xbdz7>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa6s\x1a\xbe\xaas\x1a>\x00\x00\x00\x00\x00\x00\x00\x00\xc44\xd9<\xb34\xd9\xbc\xd6\xc30\xbe\xd7\xc30>z\x91\xd5>{\x91\xd5\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xdd\x82\xb9\x15\xe3\x829\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00b\xf9D\x00\x00\x00\x00\x00\x00\x00\x00\x84\x1e\xcf>\x86\x1e\xcf\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x8b\x0f>\x01\x8c\x0f\xbe\xf4`,>\xe5`,\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xd8\x9ev>\xdc\x9ev\xbe\x11h\x13\xbe\x13h\x13>\x00\x00\x00\x00\x00\x00\x00\x00L\xefG=7\xefG\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01IS<\xe2HS\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfa-\xe0=\xf5-\xe0\xbd.\x1e\xbc="\x1e\xbc\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>\\\x96=<\\\x96\xbd\x00\x00\x00\x00\x00\x00\x00\x00h\xff\x88\xbek\xff\x88>\xcc\'+=\xcb\'+\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde! =\xdc! 
\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8co\xef=\x8eo\xef\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xba\x03\xa3<\xb2\x03\xa3\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00:\n?=?\n?\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa5\xec\xf0=\xa0\xec\xf0\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00?\'\xc3\xbb\x91\'\xc3;\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x11\x1b=J\x11\x1b\xbd\xf4\xdb:<\xc7\xdb:\xbc\x00\x00\x00\x00\x00\x00\x00\x00K\xdf7\xbdJ\xdf7=\xc7\x0c\xce=\xc7\x0c\xce\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xea\xab<\xad\xea\xab\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8fp\xa6=\x88p\xa6\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf2W\xbd\xce2W=\xa4\x0f\xf5\xbb2\x10\xf5;\xaa\x1b\x85>\xa6\x1b\x85\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19\x1d\x15=\x16\x1d\x15\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01h\x96>\x02h\x96\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xfa\xbf\xe0\xbd\x00\xc0\xe0=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00<\xee\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00w\x95\x82\xbcl\x95\x82<\xb6g>\xbe\xb9g>>\x00\x00\x00\x00\x00\x00\x00\x007\xe8T\xbdC\xe8T=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\x85x=\xd1\x85x\xbd8\xda\x1f\xbdC\xda\x1f=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00R\xae\xd3\xbdX\xae\xd3=[+\x0b>X+\x0b\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xc45\xd7\xbe\xc75\xd7>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00WS\x94>^S\x94\xbeIgd\xbdggd=\x00\x00\x00\x00\x00\x00\x00\x00{\xb4\xcd\xbd\x82\xb4\xcd=\x00\x00\x00\x00\x00\x00\x00\x00;\x89\xed\xbdS\x89\xed=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xddK=\xf3\xdcK\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x87\xd78>\x8b\xd78\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe1\xf1\x02\xbe\xea\xf1\x02>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\xa3\xbeO=\x90\xbeO\xbdh\xa4\xfd=o\xa4\xfd\xbd\x00\x00\x00\x00\x00\x00\x00\x005O\x12>2O\x12\xbeR\x92\x0b\xbfS\x92\x0b?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xf6Z=\x7f\xf6Z\xbd\x00\x00\x00\x00\x00\x00\x00\x00>D<\xbeDD<>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1cJ\xf5\xbd"J\xf5=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b(\xd2=\x10(\xd2\xbdt]\xb5\xbcf]\xb5<\x8d0\xbe\xbe\x8c0\xbe>\xee\xed\xc2;\xa1\xed\xc2\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00D\xaf\x15\xbdF\xaf\x15=\x00\x00\x00\x00\x00\x00\x00\x00\xab\x12\x8d\xbd\xae\x12\x8d=\x00\x00\x00\x00\x00\x00\x00\x00m\xa1\x92\xbdn\xa1\x92=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbf\x0f\x03\xbd\xc2\x0f\x03=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\xa6W>_\xa6W\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x89\x13d>\x8f\x13d\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xd5\xd6\xe0\xbb\xeb\xd6\xe0;\x8a\xf7T\xbe\x96\xf7T>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00L\x11A>O\x11A\xbe/_\x14=(_\x14\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95\xfd\xb4<\x88\xfd\xb4\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00?\x99\xa7\xbd<\x99\xa7=c\x84\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00z\x95\xab\xbc\x8a\x95\xab<\x91\xb1z>\x95\xb1z\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00#\x80\x83\xbd+\x80\x83=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa7p\x87<\x9fp\x87\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x1c1\xd2\xbcm1\xd2<\x00\x00\x00\x00\x00\x00\x00\x00\x86\xcf\x0b\xbf\x86\xcf\x0b?roy\xbcJoy<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbek\n>\xbdk\n\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x83\xb0\\>\x83\xb0\\\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\x9e\x9b\xbd\x1f\x9e\x9b=\x00\x00\x00\x
00\x00\x00\x00\x008\xd9\xe2\xbd4\xd9\xe2=&8\x89\xbd98\x89=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a40=\x0640\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00}\xa9\xf9>~\xa9\xf9\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\xd3\x92\xbd\x13\xd3\x92=8\x99\'>:\x99\'\xbe\x00\x00\x00\x00\x00\x00\x00\x00\n\xb8\xc2=\x00\xb8\xc2\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!J\xd3=\x06J\xd3\xbdJ\xc9\xf0\xbcR\xc9\xf0<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00o\xd1r>g\xd1r\xbeJ\x82?=,\x82?\xbd\x00\x00\x00\x00\x00\x00\x00\x00`7\x07>e7\x07\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xe21\xbe\xea\xe21>\x00\x00\x00\x00\x00\x00\x00\x00zY\xb7\xbc\x88Y\xb7<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00e\x11\x19=e\x11\x19\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8f\xb5\x91\xbd\x9e\xb5\x91=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb8\xd0\x8f>\xb4\xd0\x8f\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00C\xa5\x90\xbeB\xa5\x90>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00wR\xbd\xbdvR\xbd=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x11(\xbe\x0f\x11(>\xec\xd8:\xbe\xf0\xd8:>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\xbbg<\xcb\xbag\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00G#l=+#l\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb2JP>\xb0JP\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa963\xbb\x1b73;A\x94{?>\x94{\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca(\xb9=\xca(\xb9\xbd\nn-?\nn-\xbf\x978\x0c\xbd\x9d8\x0c=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x175>\x0c\x175\xbe\x00\x00\x00\x00\x00\x00\x00\x00cX\xfb=mX\xfb\xbdm\x8e/\xbd|\x8e/=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u
\xbc\xbe\x04\xff\xa7>\x03\xff\xa7\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x16\x93\x0c>\x14\x93\x0c\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca\x90J=\xcb\x90J\xbd5HN=$HN\xbd\xfb\xbc\x14>\xf4\xbc\x14\xbe\x00\x00\x00\x00\x00\x00\x00\x00}\xfd\x11=s\xfd\x11\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\xae\xa4=\x06\xae\xa4\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\xb4W=Q\xb4W\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01!i\xbe\x08!i>\x00\x00\x00\x00\x00\x00\x00\x00\x12\xd6d>\x11\xd6d\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\xcc\x02>\xd1\xcc\x02\xbe\x00\x00\x00\x00\x00\x00\x00\x00.c\xcf\xbd*c\xcf=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00f\x00^=Z\x00^\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00#@F\xbd4@F=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00b\xe3-=F\xe3-\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa3Q1<\x95Q1\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbcM\x8f=\xbaM\x8f\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\xf7\x19\xbf\xf4\xf7\x19?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcei\xbd\xbc\xe9i\xbd<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf7\x1e9\xbcW\x1f9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\x88I\xbe\xed\x88I>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff[\xf4=\xf3[\xf4\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xe9\x7f=\xfa\xe9\x7f\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0012\xc7\xbdA2\xc7=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00.\xcb\x8f=2\xcb\x8f\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nT\xc5=\x07T\xc5\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\xebZ<\xe6\xeaZ\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B_\xe2\x00\x00\x00\x00\x00\x00\x00\x00\xb6\xad\xb2=\xa7\xad\xb2\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\xa1\xff\xbe$\xa1\xff>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00^m>\xbalo>:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xeb\xecM?\xeb\xecM\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00o\xdb\x11\xbew\xdb\x11>\x00\x00\x00\x00\x00\x00\x00\x00:T\xea\xbdDT\xea=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc84\xd4\xbc\xc24\xd4<\x00\x00\x00\x00\x00\x00\x00\x00\xca\x1a\xf0>\xc6\x1a\xf0\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x8bS=\xf5\x8aS\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xcd\n\xd9=\xcc\n\xd9\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c_)>\x19_)\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00R\x11\xcd<+\x11\xcd\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xab\xff9\xbe\xb0\xff9>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00H\x19\xc1v\x14 
\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbb}\xce\xbd\xc3}\xce=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaf\x87\x01\xbe\xb4\x87\x01>?\x13\x87\xbd3\x13\x87=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa7\x98\xce\xbd\xaf\x98\xce=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x8c\xf3\xbd&\x8c\xf3=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\xa3\xa8<\xfd\xa2\xa8\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd8\x1b[\xbd\xe4\x1b[=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd77\x9d=\xda7\x9d\xbd\x00\x00\x00\x00\x00\x00\x00\x00\xca\xff\x1d\xbd\xe7\xff\x1d=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\x12w\xbd\xe4\x12w=j \xe0;\x12 
\xe0\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00#\xab\xd3=\'\xab\xd3\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\xe7\'?\x0c\xe7\'\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xc4\x80\xbe\x02\xc5\x80>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x92\x15g\xbe\x94\x15g>(\xb7\xdf<\x06\xb7\xdf\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdc\xeb\xa1=\xd2\xeb\xa1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x02\x83\xbd&\x02\x83=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00i\x1f\x07=\\\x1f\x07\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00A\x9b\xed=6\x9b\xed\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x1e\xfdK=!\xfdK\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc8\x9c\xbc\x0b9\x9c<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x90=>\xe7\x90=\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc3I\x1c<\x85I\x1c\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x10\xf1\xb9=\x11\xf1\xb9\xbd\xb8\x17\x15\xbd\xce\x17\x15=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95\xb3\xa9\xbd\xa7\xb3\xa9=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"v\r?\x1ev\r\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb6r\x1f\xbd\xb8r\x1f=\x00\x00\x00\x00\x00\x00\x00\x00\xe8T\x0e>\xdfT\x0e\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xf9r\xd9<\xe5r\xd9\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002_\x0f\xbd3_\x0f=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q?\xb6=s?\xb6\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaaj\x0c>\xadj\x0c\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc1\x11\x94=\xb5\x11\x94\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa5\xa9\x0b>\x9e\xa9\x0b\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaajV>\xabjV\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00CR\x83<\x11R\x83\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x8f\xa5\x1f\xbc\xc5\xa5\x1f<\x00\x00\x00\x00\x00\x00\x00\x00\xff\x97\x1f\xbe\t\x98\x1f>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00nA\xb4\xbdjA\xb4=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00T@\xe9\xbdV@\xe9=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8e=M>\x8b=M\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00&\x1a\x8d=#\x1a\x8d\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00v> \xbfy> ?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaemK\xbd\xc1mK=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x97g\xcc\xbb\x14h\xcc;\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x1b\xe1=\xe2\x1b\xe1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf3\xd6A\xbe\xfc\xd6A>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\xbd\xee=\x12\xbd\xee\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xe3\xdd\xba\xdc\xe5\xdd:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00<=\xb5=:=\xb5\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x000\r\xdc=,\r\xdc\xbd8\x9c\xba\xbe;\x9c\xba>\x00\x00\x00\x00\x00\x00\x00\x00\xdc\x9cK;\xbf\x9bK\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e O\xbd$ O=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6X\xeb\xbd\xeaX\xeb=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3^\x94\xbd\xf8^\x94=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbf\xd2\xc1=\xc1\xd2\xc1\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00=\xec<\xbeM\xec<>\x00\x00\x00\x00\x00\x00\x00\x00*\x08\xb2>/\x08\xb2\xbe\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xa9\x06=\xa5\xa9\x06\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfbs\xb6=\xe7s\xb6\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xde\xb1\x8f\xbd\xe0\xb1\x8f=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x93\x8dG\xbe\x8c\x8dG>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xca%\xbe\x04\xcb%>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x85f3=\x82f3\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae"\xd2=\xaa"\xd2\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00f\x1bp>^\x1bp\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00=j\xed=4j\xed\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\x12}\xbe*\x12}>\x00\x00\x00\x00\x00\x00\x00\x00\x94\x84\x90=\x8b\x84\x90\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00W\xd8\x06>[\xd8\x06\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xbd!\xbe\x80\xbd!>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!e\x16\xbcse\x16<\x00\x00\x00\x00\x00\x00\x00\x00\x99\xdb]\xbe\xa8\xdb]>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x96\x07=S\x96\x07\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x86\xc6P\xbd\xac\xc6P=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\xf7!\xbe\x15\xf7!>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xec\xbd\xce\xbd\xea\xbd\xce=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbd\xef\xda=\xba\xef\xda\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x003\xd9K>1\xd9K\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe9u\x88>\xe4u\x88\xbe\x00\x00\x00\x00\x00\x00\x00\x005\xac\x8c>1\xac\x8c\xbeU\xf76\xbdc\xf76=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x89\xd96>x\xd96\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc3\x81C=\xb4\x81C\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9e>\xd4\xbc\xd8>\xd4<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a~\xc0;\x19}\xc0\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xa0:\xbe+\xa0:>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a7/={7/\xbd\xbc\xcd\x86>\xba\xcd\x86\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xf4\x99<\xd8\xf3\x99\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\xc3V>j\xc3V\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc5\xdf\xd8;-\xdf\xd8\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd8|\xaa\xbd\xed|\xaa=\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x007\xb1\xd1=5\xb1\xd1\xbd0\xd8\xff\xbd@\xd8\xff=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xced\xbe\xa2\xced>\x8f\x91\x9a\xbd\x9d\x91\x9a=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13<\xba<\xfa;\xba\xbc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00gV\xd9\xbd_V\xd9=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfcR\x98\xbcES\x98<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfch\x04>\xfbh\x04\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\xffH\xbc\x9f\xffH<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\xd0A\xbe\x07\xd0A>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9f7\xfe=\x937\xfe\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x05\x9b=\x05\x05\x9b\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00H\xa1\xb2=H\xa1\xb2\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x068\xaf\xbe\x088\xaf>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x17\xfa\xbc\xed\x17\xfa<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c\x14\x80?\x19\x14\x80\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x006\x97\x8f\xbdH\x97\x8f=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9d\xff\x94\xbc\xd8\xff\x94<\x00\x00\x00\x00\x00\x00\x00\x00\x8e\x84t=p\x84t\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x15P\xe8=\x10P\xe8\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00*[\xd7= [\xd7\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa3H\x96;\xbdG\x96\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0N\xe5\xbc\xfeN\xe5<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$b\xd5=\x1fb\xd5\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbc\xfc\xab=\xaa\xfc\xab\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x004\xd27\xbeB\xd27>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00V\t`>T\t`\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xba\xb6s\xbax\xbes:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00s8\xca\xbdy8\xca=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00nL\x16>eL\x16\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\xd0\xbf>\xd0\xd0\xbf\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0:\x9c>\xee:\x9c\xbe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x0c9\xbd\x14\x0c9=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xdf\xb7\xbc\xea\xdf\xb7<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa2\xf3d\xbe\x9e\xf3d>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +tbtRp123 +asg24 +I00 +sg25 +I00 +sg26 +g1 +(g27 +g3 +NtRp124 +(dp125 +g30 +(I00 +I00 +tp126 +sg14 +Nsbsg32 +Nsbsg14 +S'W' +sg33 +S'auto_8590' +p127 +sg35 +(itheano.gof.utils +scratchpad +p128 +(dp129 +g38 
+(lp130 +(g40 +I209 +g41 +S'utils.add_tag_trace(var)' +tp131 +asbsg43 +I01 +sg44 +Nsg26 +g124 +sbasg35 +(itheano.gof.utils +scratchpad +p132 +(dp133 +bsS'outputs' +p134 +(lp135 +g108 +asS'op' +p136 +g1 +(ctheano.sandbox.cuda.basic_ops +HostFromGpu +p137 +g3 +NtRp138 +(dp139 +S'_op_use_c_code' +p140 +S'/usr/bin/clang++' +p141 +sbsbsg26 +g1 +(g100 +g3 +NtRp142 +(dp143 +g30 +(I00 +I00 +tp144 +sg104 +S'float32' +p145 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbasg35 +(itheano.gof.utils +scratchpad +p146 +(dp147 +bsg134 +(lp148 +g81 +asg136 +g1 +(ctheano.tensor.basic +Dot +p149 +g3 +NtRp150 +(dp151 +g140 +g141 +sbsbsg26 +g1 +(g100 +g3 +NtRp152 +(dp153 +g30 +(I00 +I00 +tp154 +sg104 +S'float32' +p155 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbag1 +(g46 +g3 +NtRp156 +(dp157 +g33 +S'auto_8598' +p158 +sg9 +I0 +sg35 +(itheano.gof.utils +scratchpad +p159 +(dp160 +bsg14 +Nsg44 +g1 +(g54 +g3 +NtRp161 +(dp162 +g57 +(lp163 +g1 +(g46 +g3 +NtRp164 +(dp165 +g33 +S'auto_8594' +p166 +sg9 +I0 +sg35 +(itheano.gof.utils +scratchpad +p167 +(dp168 +g38 +(lp169 +sbsg14 +Nsg44 +g1 +(g54 +g3 +NtRp170 +(dp171 +g57 +(lp172 +g7 +asg35 +(itheano.gof.utils +scratchpad +p173 +(dp174 +bsg134 +(lp175 +g164 +asg136 +g1 +(g137 +g3 +NtRp176 +(dp177 +g140 +g141 +sbsbsg26 +g1 +(g100 +g3 +NtRp178 +(dp179 +g30 +(I00 +tp180 +sg104 +g145 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbasg35 +(itheano.gof.utils +scratchpad +p181 +(dp182 +bsg134 +(lp183 +g156 +asg136 +g1 +(ctheano.tensor.elemwise +DimShuffle +p184 +g3 +NtRp185 +(dp186 +S'drop' +p187 +(lp188 +sS'shuffle' +p189 +(lp190 +I0 +asS'augment' +p191 +(lp192 +I0 +asS'input_broadcastable' +p193 +g180 +sS'inplace' +p194 +I00 +sS'new_order' +p195 +(S'x' +I0 +tp196 +sg140 +g141 +sbsbsg26 +g1 +(g100 +g3 +NtRp197 +(dp198 +g30 +(I01 +I00 +tp199 +sg104 +g145 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbasg35 +(itheano.gof.utils +scratchpad +p200 +(dp201 +bsg134 +(lp202 +g71 +asg136 +g1 +(ctheano.tensor.elemwise +Elemwise +p203 +g3 +NtRp204 +(dp205 +S'__module__' +p206 +S'tensor' +p207 +sS'scalar_op' +p208 +g1 +(ctheano.scalar.basic +Add +p209 +g3 +NtRp210 +(dp211 +S'output_types_preference' +p212 +ctheano.scalar.basic +upcast_out +p213 +sg140 +g141 +sg14 +S'add' +p214 +sbsg14 +S'Elemwise{add,no_inplace}' +p215 +sg140 +g141 +sS'destroy_map' +p216 +(dp217 +sS'nfunc_spec' +p218 +(g214 +I2 +I1 +tp219 +sS'inplace_pattern' +p220 +(dp221 +sS'openmp' +p222 +I00 +sS'__doc__' +p223 +S"elementwise addition\n\n Generalizes a scalar op to tensors.\n\n All the inputs must have the same number of dimensions. When the\n Op is performed, for each dimension, each input's size for that\n dimension must be the same. As a special case, it can also be 1\n but only if the input's broadcastable flag is True for that\n dimension. In that case, the tensor is (virtually) replicated\n along that dimension to match the size of the others.\n\n The dtypes of the outputs mirror those of the scalar Op that is\n being generalized to tensors. 
In particular, if the calculations\n for an output are done inplace on an input, the output type must\n be the same as the corresponding input type (see the doc of\n scalar.ScalarOp to get help about controlling the output type)\n\n Examples:\n Elemwise(add) # represents + on tensors (x + y)\n Elemwise(add, {0 : 0}) # represents the += operation (x += y)\n Elemwise(add, {0 : 1}) # represents += on the second argument (y += x)\n Elemwise(mul)(rand(10, 5), rand(1, 5)) # the second input is completed\n # along the first dimension to match the first input\n Elemwise(true_div)(rand(10, 5), rand(10, 1)) # same but along the\n # second dimension\n Elemwise(int_div)(rand(1, 5), rand(10, 1)) # the output has size (10, 5)\n Elemwise(log)(rand(3, 4, 5))\n " +p224 +sbsbsg26 +g1 +(g100 +g3 +NtRp225 +(dp226 +g30 +(I00 +I00 +tp227 +sg104 +g145 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbasg35 +(itheano.gof.utils +scratchpad +p228 +(dp229 +bsg134 +(lp230 +g59 +asg136 +g1 +(ctheano.tensor.nnet.nnet +Softmax +p231 +g3 +NtRp232 +(dp233 +g140 +g141 +sbsbsg26 +g225 +sbag1 +(ctheano.tensor.var +TensorConstant +p234 +g3 +NtRp235 +(dp236 +g33 +S'auto_300' +p237 +sg9 +Nsg35 +(itheano.gof.utils +scratchpad +p238 +(dp239 +S'unique_value' +p240 +Nsbsg14 +NsS'cached' +p241 +I01 +sg26 +g1 +(g100 +g3 +NtRp242 +(dp243 +g30 +(tsg104 +S'int8' +p244 +sg106 +g21 +(S'i1' +I0 +I1 +tRp245 +(I3 +S'|' +NNNI-1 +I-1 +I0 +tbsg107 +I00 +sg14 +NsbsS'data' +p246 +g18 +(g19 +(I0 +tS'b' +tRp247 +(I1 +(tg245 +I00 +S'\x01' +tbsbasg35 +(itheano.gof.utils +scratchpad +p248 +(dp249 +bsg134 +(lp250 +g1 +(g46 +g3 +NtRp251 +(dp252 +g33 +S'auto_8602' +p253 +sg9 +I0 +sg35 +(itheano.gof.utils +scratchpad +p254 +(dp255 +g38 +(lp256 +sbsg14 +S'max' +p257 +sg44 +g55 +sg26 +g1 +(g100 +g3 +NtRp258 +(dp259 +g30 +(I00 +tp260 +sg104 +g145 +sg106 +g22 +sg107 +I00 +sg14 +Nsbsbag47 +asg136 +g1 +(ctheano.tensor.basic +MaxAndArgmax +p261 +g3 +NtRp262 +(dp263 +g140 +g141 +sbsbsg26 +g1 +(g100 +g3 +NtRp264 +(dp265 +g30 +(I00 +tp266 +sg104 +S'int64' +p267 +sg106 +g21 +(S'i8' +I0 +I1 +tRp268 +(I3 +S'<' +NNNI-1 +I-1 +I0 +tbsg107 +I00 +sg14 +NsbsbsS'params' +p269 +(lp270 +g117 +ag7 +asS'W' +g117 +sS'input' +p271 +g91 +sS'p_y_given_x' +p272 +g59 +sb. 
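A minimal sketch, not part of the patch itself, of how a pickled classifier like the one above could be reopened for inspection. The filename 'model.pkl' is hypothetical (the real path is not visible in this hunk), and unpickling assumes Theano is importable, since the stream references theano.sandbox.cuda and theano.tensor classes:

    import pickle

    # 'model.pkl' is a hypothetical name; the actual path is not
    # recoverable from this hunk of the patch.
    with open('model.pkl', 'rb') as f:
        classifier = pickle.load(f)  # needs Theano available to rebuild the objects

    # The readable tail of the pickle suggests these attributes exist:
    print(classifier.params)       # list of trainable parameters
    print(classifier.p_y_given_x)  # symbolic softmax output variable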
\ No newline at end of file diff --git a/neural_net_demystified_part_2_forward_propagation.ipynb b/neural_net_demystified_part_2_forward_propagation.ipynb new file mode 100644 index 0000000..62a67e1 --- /dev/null +++ b/neural_net_demystified_part_2_forward_propagation.ipynb @@ -0,0 +1,982 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Populating the interactive namespace from numpy and matplotlib\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%pylab inline\n", + "# Neural Networks Demystified\n", + "# Part 1: Data + Architecture\n", + "\n", + "from IPython.display import YouTubeVideo\n", + "YouTubeVideo('bxe2T-V8XRs')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#Import code from last time\n", + "\n", + "#\n", + "from IPython.display import YouTubeVideo\n", + "YouTubeVideo('bxe2T-V8XRs')\n", + "# Supporting code for short YouTube series on artificial neural networks.\n", + "#\n", + "# Stephen Welch\n", + "# @stephencwelch\n", + "\n", + "import numpy as np\n", + "\n", + "# X = (hours sleeping, hours studying), y = Score on test\n", + "X = np.array(([3,5], [5,1], [10,2]), dtype=float)\n", + "y = np.array(([75], [82], [93]), dtype=float)\n", + "\n", + "# Normalize (by dividing by the maximum value in each array)\n", + "X = X/np.amax(X, axis=0)\n", + "y = y/100 #Max test score is 100" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(3, 2) (3, 1)\n" + ] + } + ], + "source": [ + "print X.shape, y.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.3 1. ]\n", + " [ 0.5 0.2]\n", + " [ 1. 
0.4]]\n", + "[[ 0.75]\n", + " [ 0.82]\n", + " [ 0.93]]\n" + ] + } + ], + "source": [ + "print X\n", + "print y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "class Neural_Network(object):\n", + " def __init__(self): \n", + " #Define Hyperparameters\n", + " self.inputLayerSize = 2\n", + " self.outputLayerSize = 1\n", + " self.hiddenLayerSize = 3\n", + " \n", + " def forward(self, X):\n", + " #Propagate inputs through network\n", + " pass # placeholder so this partial cell parses; implemented in the full class below" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def sigmoid(z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
I41J0/Vza6jlm/fj0TJkzgne98J3vttRerVq1i0qRJwLYTjwce\neCA//OEPOf300+nbty/veMc76N27N127dt26XdOeeWvjW2+9laVLl9K3b19OPfVULr300q3XhTd+\nrQMPPJDrrruOT37yk/Tt25eePXsyYMAAykETgkRqSHI/y28SCndnwtreZ7qzoPXvi2dC0OrVq9lj\njz1YsmQJe++9d97hbKUJQVWg1vusRRZzbrAtPzN6Ab8DriIU8WuBYaWKeAxmz57N2rVrWbNmDeee\ney5Dhw6tqiLeUSrkIjXAjMOBvwIfJtzPcpQ757izLt/IKmPWrFn069ePfv368cwzzzBjxoy8Q8qU\nWisikTPjU8BUwm3UHgJGu9OmmSkxtVaqlVorIrKDZIr9VYTLCbsB04Bj21rEpfqpkGekVvqsMYox\nNzO6Ab8EzoV7NwFfBr7ozvp8I5NyUCEXiYwZPYG7gY8Db8B/f8udn7ij3kik1CMXiYgZewO/J0zw\nWQ6c5M5jHX9d0y+BCmhvj1zrDItEwox3A3OAgcBjhCKeST9cB2HVTa2VjMTYZ20s5vxiyM2MfYE/\nEor4AzQ6qRlDfq2JPb80VMhFCs6MA4H/BfoSivmJ7ryWb1RSSeqRixSYGfsTincv4A/AKe6szTcq\nyZKuIxeJmBmDCMW7F3AncLKKeG1SIc9I7H26mPMrYm5m7EUo4lvaKae2tHZ4EfNri9jzS0OFXKRg\nzNiTcJ34PsB8dCRe89QjFykQM7oTLjE8inB3+w+4syrfqKSc1CMXiUiylvhNhCL+HPBhFXEBFfLM\nxN6nizm/AuV2JTAaeIMw2SfV7dgKlF+7xJ5fGirkIgVgxljgPGATYRnaJ3IOSapIyR65mY0EpgB1\nwFR3n9zk+d0Iy2QOIEz5/567T2/mddQjF2kHM44nrJ9SB3zenRtyDkkqKE3tbLWQm1kd4c7axwMr\ngHnAGHdf1GibC4B3uPsEM+uVbN/H3Te1NRgR2Z4Z9YQ7+/QErnRnQr4RSaVlcbJzGLDE3Ze6+0Zg\nBnBKk202Az2Sz3sArzQt4rUg9j5dzPlVa25m7Az8llDE7wAuat/rVGd+WYk9vzRKFfJ+wLJG4+XJ\n1xq7FjjAzFYCjwBfyy48kdpkhgHXA4cCzwBnuPN2vlFJtSq1jG2ai8xHAn9z9w+a2buBu83sEHd/\ns+mGZjYdWJoMXwMWuPvc5LkRAEUdb/latcSj/NKP3X1uNcUDAFOuhkPPgBFrgH8FO8Qsnvxi338d\nGSefn0WwlBRK9ciPAhrcfWQyngBsbnzC08xuBya5+33J+B7gfHef3+S11CMXScGMI4E/Ew60/s2d\n/8o5JMlRFj3y+cAQM6s3sy7AacCsJts8RzgZipn1AfYF/t6+kIsr9j5dzPlVU25m7E44F9UJmJJF\nEa+m/Moh9vzSaLW14u6bzGwcYWW1OmCauy8ys7HJ89cDlwHTzexRwIBvufs/yxy3SHSSvvhPYeuV\nKuNzDUgKQ2utiFSJZNLPT4A3gcPdWZJzSFIFtNaKSEGYcTBh4h3AWBVxaQsV8ozE3qeLOb+8czOj\nK3AL0A2Y5s6t2b5+vPsO4s8vDRVykfw1AAcDS9A8DGkH9chFcmTGcOBPyfD97tyfZzxSfdQjF6li\nZuwC/ILwc3iViri0lwp5RmLv08WcX465fRcYDDwGXFyuN4l530H8+aWhQi6Sg2Rp2rOBjcCZ7qzP\nOSQpMPXIRSosaak8BgwCLnLnOzmHJFVMPXKR6nQJoYg/QmiviHSICnlGYu/TxZxfJXMz473ANwjr\n+H/BnY3lf8949x3En18aKuQiFWJGZ2Aq4efuanfml/gWkVTUIxepEDO+BUwmrDF9kDtr8o1IiiBN\n7VQhF6kAM94NPE6Yhj/SnTtzDkkKQic7Kyj2Pl3M+ZU7t2R52h8RivgtlS7iMe87iD+/NFTIRcrv\nY8CHCbc3/GbOsUiE1FoRKSMzdgYWAQOBce5cl3NIUjBqrYjkbzyhiC8g3DRCJHMq5BmJvU8Xc37l\nyi05wfmtZDjOnbfL8T6l44h330H8+aWhQi5SPlOArsCN7tyXdzASL/XIRcrAjI8Cs4E3gH3deSHn\nkKSg1CMXyYEZ3YCrk+HFKuJSbirkGYm9TxdzfmXI7avAPsATkP9VKjHvO4g/vzRUyEUyZEZv4MJk\n+I1KLIoloh65SIbM+BHwZeB37pyUdzxSfFprRaSCzDgAeDQZDnVnYZ7xSBx0srOCYu/TxZxfhrld\nBdQBP62mIh7zvoP480tDhVwkA2Z8GDgJeBNoyDcaqTVqrYh0kBl1wMPAwcB4dybnHJJERK0Vkcr4\nLKGIP8u268dFKkaFPCOx9+lizq8juZmxC3BZMhzvzluZBJWhmPcdxJ9fGirkIh3zNeBdwDzglznH\nIjVKPXKRdjJjT+DvQA/gQ+7MyTkkiZB65CLlNZ5QxO9WEZc8qZBnJPY+Xcz5tSc3M/oD5yTDCZkG\nlLGY9x3En18aKuQi7XMxYa3xX7nz17yDkdpWskduZiMJC+TXAVPdfYdrZJPfiD8AOgOr3H1EM9uo\nRy5RMGM/wsqGDhzgztM5hyQRS1M7O5V4gTrgWuB4YAUwz8xmufuiRtvsTliq80R3X25mvToeukhV\nu5zw1+xPVcSlGpRqrQwDlrj7UnffCMwATmmyzSeBX7v7cgB3X5V9mNUv9j5dzPm1JTczjgBGA28B\nl5YrpizFvO8g/vzSKFXI+wHLGo2XJ19rbAjQ08zuNbP5ZnZmlgGKVAszDLgyGV7tzoo84xHZotXW\nCqEHWEpn4HDgQ8DOwANm9hd3X9zR4IrE3efmHUM5xZxfG3I7HjgOeA2Ks55KzPsO4s8vjVKFfAUw\noNF4AOGovLFlhBOc64B1ZvZH4BBgh0JuZtOBpcnwNWDBlp2w5c8jjTWuznHdCLjtajgZ4Ltgh5hV\nU3waxzJOPj+LYCkptHrVipl1Ap4iHG2vBB4CxjQ52bkf4YToiYTLsR4ETnP3hU1eK+qrVsxsRMxH\nBjHnlyY3Mz4C3AGsAga5s7oSsWUh5n0HNZFfx65acfdNZjYOuJNw+eE0d19kZmOT56939yfN7PeE\nO6NsBn7WtIiLFFnSG99yYnNykYq41AattSJSghkfBWYDLwH7uLMm55CkhmitFZEOSo7GL0mGV6qI\nSzVSIc9I7NeyxpxfidxGEa7KegH4SUUCyljM+w7izy8NFXKRFiRH4w3J8Ep31uUYjkiL1CMXaYEZ\nHwN+Q7hia7AKueRBPXKRdjJjJ7b1xiepiEs1UyHPSOx9upjzayG3Uwk3VF4BTK1oQBmLed9B/Pml\noUIu0kRyNN6QDL9TjTdUFmlMPXKRJsw4jbDS5zJgiDvrcw5Japh65CJtZEYd4e4/AJeriEsRqJBn\nJPY+Xcz5NcntNGB/4Flgeh7xZC3mfQfx55eGCrlIIjkan5gML3dnQ57xiKSlHrlIwowzgJu
AfwD7\nurMx55BE1CMXScuMTmzrjV+mIi5FokKekdj7dDHnl+T2KWAw8AzhqDwaMe87iD+/NFTIRehRB3w7\nGVzqzqY8oxFpK/XIpeaZ8TlgGuH2hAeokEs1UY9cpAQzOgMXJcNLVMSliFTIMxJ7ny7i/D4DcwcB\nTxJmc0Yn4n0HxJ9fGirkUrPM6ML2vfG384xHpL3UI5eaZcZYwl1/FgJDVcilGqlHLtICM7oCFybD\nS1TEpchUyDMSe58uwvw+DwwAHocuL+cdTDlFuO+2E3t+aaiQS80xoxtwQTJsgI2V6S+KlIl65FJz\nzDgHuAZ4FDjMnc05hyTSojS1U4VcaooZ3QnT8PcCTnXntzmHJNIqneysoNj7dBHlN5ZQxB8GboOo\ncmuW8oufCrnUDDN2ASYkw4nuqDcuUVBrRWqGGecCVwHzgCNVyKUI1CMXSZixK+GGEb2Aj7jz+5xD\nEklFPfIKir1PF0F+4whF/C/AnY2fiCC3Vim/+KmQS/TM6AGclwzVG5foqLUi0TPjIuAy4M/AsSrk\nUiTqkUvNM2M3YCmwO3CcO/fmG5FI26hHXkGx9+kKnN/XCUV8bktFvMC5paL84qdCLtEyYw/gm8nw\n4jxjESkntVYkWmZcRriN2x/cOSHveETaI5PWipmNNLMnzWyxmZ3fynZHmNkmMzu1PcGKZMmMPQlt\nFdDRuESu1UJuZnXAtcBI4ABgjJnt38J2k4HfAzV51B17n66A+Z0H7Arc6c79rW1YwNzaRPnFr9QR\n+TBgibsvdfeNhJvTntLMducAM4GoF+iXYjDjXcBXk+HEPGMRqYRShbwfsKzReHnyta3MrB+huP84\n+VJNXqPr7nPzjqGcCpbfRUB34DZ3Hiq1ccFyazPlF79ShTxNUZ4CjPdw1tSo0daKVAczBgFfIvzf\n/XbO4YhURKcSz68g3NdwiwGEo/LG3gvMMDNIFiQys43uPqvpi5nZdMLkDIDXgAVbfptu6XMVePz1\nyPIpZH7gnwM6w4y7YEyvLccirX1/4x5r3vGXY6z8ijVOPj8rSWkpKbR6+aGZdQKeAj4ErAQeAsa4\n+6IWtv85MNvdf9PMc1FffmhmI2L+E68I+ZlxEOH2bZuAfd35R7rvq/7cOkL5FVua2tnqEbm7bzKz\ncYTV4uqAae6+yMzGJs9fn1m0BRfzfyQoTH6XE1p7P01bxKEwubWb8oufJgRJFMw4krBE7TpgH3de\nyDkkkUxorZUKiv1a1gLkd0Xy8eq2FvEC5NYhyi9+KuRSeGYcDxxHOIH+3ZzDEak4tVak0Mww4EHg\nCOACdyblHJJIptLUThVyKTQzTiPMOH4BGOzOmpxDEsmUeuQVFHufrhrzM6MrcGUynNjeIl6NuWVJ\n+cVPhVyK7CtAPbAQ+Hm+oYjkR60VKSQzegJLgD2Aj7rzPzmHJFIWaq1IzC4kFPE5wB05xyKSKxXy\njMTep6um/MzYBxiXDM9z79iKm9WUWzkov/ipkEsRXQF0AW5y5295ByOSN/XIpVAaTcVfD7zHnedy\nDkmkrNQjl6gkk3++nwx/oCIuEqiQZyT2Pl2V5DcGOBp4iW3Xj3dYleRWNsovfirkUghm7ApclQwn\nuPN6nvGIVBP1yKUQzPgOcAEwHzjSnc05hyRSEVprRaJgxrsJsze7AMPdeSDnkEQqRic7Kyj2Pl3O\n+X2fbZcbZl7Ete+KLfb80lAhl6pmxgnAKcAaYHzO4YhUJbVWpGqZ0QVYAOwPjHdncs4hiVScWitS\ndOcSivhiYErOsYhULRXyjMTep6t0fsl6Kt9Ohme7s75876V9V2Sx55eGCrlUnWQG53VAN+AWd/6Q\nc0giVU09cqk6ZnwC+BXhZsr7ufNiziGJ5EY9cikcM3oAVyfD8SriIqWpkGck9j5dBfO7HNiLsMLh\nzyrxhtp3xRZ7fmmokEvVMONowg0j3gb+XdPwRdJRj1yqghndCdeMvwf4jjsX5RySSFVQj1yK5FJC\nEX8CuCznWEQKRYU8I7H36cqZX3LXn28Cm4HPlvOa8ebfX/uuyGLPLw0VcsmVGd2AnxP+L37PnXk5\nhyRSOOqRS67MmERYDOsp4FB33so5JJGqovXIpaqZcSwwF3Dg/e7cn29EItVHJzsrKPY+Xdb5mbE7\ncBNgwKQ8i7j2XbHFnl8aKuSSl+uAgcA84JKcYxEpNLVWpOLM+BRwM+FmEYe5szjnkESqllorUnXM\nqAd+lAy/riIu0nGpCrmZjTSzJ81ssZmd38zznzKzR8zsUTO7z8yGZh9qdYu9T5dFfmZ0Bf4L6AH8\nFpjW0dfMgvZdscWeXxolC7mZ1QHXAiOBA4AxZrZ/k83+Dhzr7kMJs/J+mnWgEoX/AN4HLAU+705l\n+noikSvZIzez/wdc7O4jk/F4AHe/soXt9wAec/f+Tb6uHnkNM+N04FZgA3C0O/NzDkmkELLqkfcD\nljUaL0++1pLPA3ekeF2pEWbsD0xNhl9XERfJVqcU26T+89fMPgh8Dji6heenE/6shnD3lwXuPjd5\nbgRAgcdfjyyfTPID/xswE+buAi/eA6f9pEry2Tpu3GOthniUX23nl3x+VpLSUlJI01o5Cmho1FqZ\nAGx298lNthsK/AYY6e5LmnmdqFsrZjZiy06JUXvyM6OOcFLzZGARMMyd1WUIr0O074qtBvLr+BR9\nM+tEWAfjQ8BK4CFgjLsvarTNQGAOcIa7/6W9wUhcGq2j8iqhiO/wC15EWpemdpZsrbj7JjMbB9wJ\n1AHT3H2RmY1Nnr8emAjsAfzYzAA2uvuwjiYgxZVM+hlPuNvPx1XERcpHMzszUgN/3qXOz4xhwB+B\nrsA4d64rZ2wdpX1XbDWQn2Z2SmWZMQS4nVDEr2fbLE4RKRMdkUtmzOgDPAAMIrTiTnZnY75RiRSb\njsilYsx4B2H+wCBgPqEvriIuUgEq5BmJfb2H1vJL1lD5NXA48AzwL9V4mWFLannfxSD2/NJQIZcO\nMaMz8EvgBOAl4ER3Xso3KpHaoh65tFtSxG8FRhNm6h7nzsP5RiUSF/XIpWzM6ES4Vdto4HXgBBVx\nkXyokGck9j5d4/ySI/FfAKcBbxLaKYVdCKuW9l2MYs8vjTSLZolsZUZ34FfAR4HVwEh3Hsw3KpHa\nph65pGbGbsAs4Fjgn8BH3Hko36hE4pbJWisiAGb0Bn5HuMRwJaEnvjDfqEQE1CPPTMx9OjMOgjsf\nYdt14kfHVMRj3neg/GqBCrm0yoyRwP3Q9V2EJYyPcU+32L2IVIZ65NIsMww4B/gB4Rf+r4Cz3FmX\na2AiNUbXkUu7mLErcDNwNeH/yKXAGBVxkeqkQp6RWPp0ZhxAaKF8ElhDKOAXgx2bb2TlE8u+a4ny\ni58KuQChlWLGZ4B5wP7AQuAId2bkG5mIlKIeuWDGOwk3gfhY8qWbgX93Z01+UYkIqEcuKZhxCvA4\noYi/CXwW+LSKuEhxqJBnpGh9OjP6mvFL4DagN3
AvcLA7093Z4c+0ouXXFjHnBsqvFqiQ1xgz6swY\nBywC/g1YC3wDON6dZ3MNTkTaRT3yGmLGCOD7hBmaALOBc1TARaqX1loRYOslhZMJKxYCLCdM9vnv\n5tooIlIsaq1kpBr7dGbsY8ZU4DG2LTv7bWA/d25rSxGvxvyyEnNuoPxqgY7II2TGe4ALgDOAOuBt\n4CdAgzsv5hmbiGRPPfJIJGujDAe+Rrj92k6EAn4zcIU7T+cYnoi0k3rkNcCMboRbrn2VbScxNwLT\ngCvd+XtesYlIZahHnpFK9umS6fSHm3EN4cTldEIRXwV8BxjkzpeyLOIx9yFjzg2UXy3QEXmBmDGA\ncO33Z4CDGz31MHANMMOdt/KITUTyox55lTNjMKHnfSowrNFTq4BbgOnuLMgjNhEpP/XICyi5S/0x\nwIeBkcBBjZ5eS7hv5s3AHe5sqHyEIlJtVMgzYmYj3H1u27+PLoT+9jHA8cAHgG6NNnmDMAPz18Cd\n7qzteLRt1978iiDm3ED51QIV8gpLlowdBhxNKN5HsH3hBlgA3AXcCdznzvqKBikihaIeeZmYsRMw\nCDgMODR5HAb0bWbzJ4H7gLnA3Zq0IyJbqEdeAWbsBrwH2Dd57Jd8HMKOR9oQpsk/DNxPKN73u/NK\nZaIVkRiVLORmNhKYQpjqPdXdJzezzTXARwgn485y94ezDjQPSf+6D+Eoeu/kMbDJ57uHrecCI5q+\nxPOENskCQvFeADzjzuYyh565mPuQMecGyq8WtFrIzawOuJZwEm4FMM/MZrn7okbbnAQMdvchZnYk\n8GPgqDLG3C5Jq6MHofDukTx2B3oSinVzjz1SvPQ6YDH8z2YYcTvwVPJ42p3Xs84jR4cSflvFKObc\nQPlFr9QR+TBgibsvBTCzGcAphJsSbDEK+AWAuz9oZrubWR93b3Of14w6oGvy6NLo866ENsWuwC6N\nHq2Nd2X7gr0bbZ/J+jbwEuHI+tnk8VyTz1e542bfa3C/qqGtORfI7nkHUEYx5wbKL3qlCnk/YFmj\n8XLgyBTb9IcdT9iZMZ/ti3PTgl3Xhtjb403gNeDV5LHl8xdbeLxSxDaIiNSWUoU87SUtTc+otvR9\n703xfuuBDcnHxo+3gDWEk4VrGj1Wt/J546L9ujubUubTHvVlfO1qUJ93AGVUn3cAZVafdwBlVp93\nAHkrVchXAAMajQcQjrhb26Z/8rVmlLz60AgtlOau9ugwK/PFj2b2mfK+Q75izi/m3ED5xa5UIZ8P\nDDGzemAlYbnUMU22mQWMA2aY2VHAa831x2vpGnIRkUpqtZC7+yYzG0eYYVgHTHP3RWY2Nnn+ene/\nw8xOMrMlhJbGZ8setYiIbFWxmZ0iIlIeFb2xhJmdY2aLzOxxM9thYlEMzOz/m9lmM+uZdyxZMrOr\nkn33iJn9xsx2yzumLJjZSDN70swWm9n5eceTJTMbYGb3mtkTyc/cV/OOKWtmVmdmD5vZ7LxjyVpy\nKffM5OduYdK6blbFCrmZfZBwzflQdz8I+F6l3rtSzGwAcALhGvPY3AUc6O6HAE8DE3KOp8MaTXgb\nCRwAjDGz/fONKlMbgW+4+4GESXpfiSw/CPeoXUj6K+yK5GrgDnffHxjK9vN3tlPJI/IvA5PcfSOA\nu79cwfeulP8AvpV3EOXg7ne7+5Zr6h8kXJ1UdFsnvCX/L7dMeIuCu7/g7guSz1cTCkFzi7YVkpn1\nB04CppLikrgiSf7ifb+73wDhfKW7tzhTvJKFfAhwrJn9xczmmtn7KvjeZWdmpwDL3f3RvGOpgM8B\nd+QdRAaxyTzQAAAB80lEQVSam8zWL6dYyiq58uwwwi/hWPwAOA+inLQ3CHjZzH5uZn8zs5+Z2c4t\nbZzp6odmdjfwrmaeujB5rz3c/SgzOwL4FbBPlu9fbiXym0C4q8/WzSsSVIZaye8Cd5+dbHMhsMHd\n/7OiwZVHjH+O78DMdgVmAl9LjswLz8w+Crzk7g9HevPlToQbzoxz93lmNgUYD0xsaePMuPsJLT1n\nZl8GfpNsNy85IbinuxdmCdeW8jOzgwi/QR+xMOuoP/BXMxvm7i9VMMQOaW3/AZjZWYQ/ZT9UkYDK\nL82Et0Izs86Eu0vd7O635R1PhoYDo5JF+7oBPczsRnf/dM5xZWU54S/8ecl4JqGQN6uSrZXbgOMA\nzOw9QJciFfHWuPvj7t7H3Qe5+yDCTji8SEW8lGQ54/OAU9z9rbzjycjWCW9m1oUw4W1WzjFlxsJR\nxTRgobtPyTueLLn7Be4+IPl5Ox2YE1ERx91fAJYltRLCCrRPtLR9JW8scQNwg5k9RlhLJZp/9GbE\n+Cf7DwkLnN2d/NXxgLufnW9IHdPShLecw8rS0cAZwKNmtuUeARPc/fc5xlQuMf7MnQPckhxkPEMr\nky01IUhEpOAqOiFIRESyp0IuIlJwKuQiIgWnQi4iUnAq5CIiBadCLiJScCrkIiIFp0IuIlJw/wfV\nrrHyXC+Q1AAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "testInput = np.arange(-6,6,0.01)\n", + "plot(testInput, sigmoid(testInput), linewidth= 2)\n", + "grid(1)\n", + "legend(['sigmoid'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.7310585786300049" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(1)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.26894142, 0.5 , 
0.73105858])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(np.array([-1,0,1]))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.46600501, 0.37731874, 0.5415919 ],\n", + " [ 0.23157348, 0.41235015, 0.55084673],\n", + " [ 0.37693986, 0.10342644, 0.711002 ]])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sigmoid(np.random.randn(3,3))" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Whole Class with additions:\n", + "class Neural_Network(object):\n", + " def __init__(self): \n", + " #Define Hyperparameters\n", + " self.inputLayerSize = 2\n", + " self.outputLayerSize = 1\n", + " self.hiddenLayerSize = 3\n", + " \n", + " #Weights (parameters)\n", + " self.W1 = np.random.randn(self.inputLayerSize,self.hiddenLayerSize)\n", + " self.W2 = np.random.randn(self.hiddenLayerSize,self.outputLayerSize)\n", + " \n", + " def forward(self, X):\n", + " #Propagate inputs through network\n", + " self.z2 = np.dot(X, self.W1)\n", + " self.a2 = self.sigmoid(self.z2)\n", + " self.z3 = np.dot(self.a2, self.W2)\n", + " yHat = self.sigmoid(self.z3) \n", + " return yHat\n", + " \n", + " def sigmoid(self, z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))\n", + " \n", + " def sigmoidPrime(self,z):\n", + " #Gradient of sigmoid\n", + " return np.exp(-z)/((1+np.exp(-z))**2)\n", + " \n", + " def costFunction(self, X, y):\n", + " #Compute cost for given X,y, use weights already stored in class.\n", + " self.yHat = self.forward(X)\n", + " J = 0.5*sum((y-self.yHat)**2)\n", + " return J\n", + " \n", + " def costFunctionPrime(self, X, y):\n", + " #Compute derivatives with respect to W1 and W2 for a given X and y:\n", + " self.yHat = self.forward(X)\n", + " \n", + " delta3 = np.multiply(-(y-self.yHat), self.sigmoidPrime(self.z3))\n", + " dJdW2 = np.dot(self.a2.T, delta3)\n", + " \n", + " delta2 = np.dot(delta3, self.W2.T)*self.sigmoidPrime(self.z2)\n", + " dJdW1 = np.dot(X.T, delta2) \n", + " \n", + " return dJdW1, dJdW2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.48736193]\n", + " [ 0.54813314]\n", + " [ 0.54630022]]\n" + ] + } + ], + "source": [ + "NN = Neural_Network()\n", + "yHat = NN.forward(X)\n", + "print yHat" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.75]\n", + " [ 0.82]\n", + " [ 0.93]]\n" + ] + } + ], + "source": [ + "print y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Third part" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAF4hJREFUeJzt3X+MVeWdx/H3V/yBjoBULLMCdax1V9iklsVFss1SmnZb\nJFqSxeiincWupmazLNv+o9vNZqvpH5umm6axboyuFpqQFNMf21AEdLMpWdO0tliVWtBIGlaQQJFS\nQKTALN/9496h43Xm3jvPPHfOfb7zeSXEe+595vA8fJzvnPnec88xd0dERGI5r+oJiIhIfiruIiIB\nqbiLiASk4i4iEpCKu4hIQCruIiIBtSzuZvYNMztoZr9oMuYhM3vNzF4ys/l5pygiIqPVzpH7WmDp\nSC+a2TLgA+5+LfBZ4JFMcxMRkUQti7u7PwscaTLkU8A362OfAy4zs5l5piciIily9NxnAXuHbO8D\nZmfYr4iIJMr1hqo1bOuaBiIiFTo/wz7eAOYM2Z5df+4dzEwFX0Qkgbs3HkC3lKO4bwRWAxvMbBHw\nW3c/ONzAlAmWwswecPcHqp5Hp0ReX+S1gdZXutQD45bF3cy+BXwEmGFme4EvAhcAuPuj7r7ZzJaZ\n2W7gBPCZlIkE0Ff1BDqsr+oJdFBf1RPosL6qJ9BhfVVPoBu1LO7uvrKNMavzTEdERHLQJ1TzWVf1\nBDpsXdUT6KB1VU+gw9ZVPYEOW1f1BLqRjdfNOszMI/fcRUQ6IbV25nhDVQAzW+Lu26qeR6dEXl/k\ntUF3rU9nzTWX8wBYxV1ExpVu7Tk8s7yNDbVlRGTc1OtA1dPoSmY27JF7au3UG6oiIgGpuGdiZkuq\nnkMnRV5f5LVB/PXJ8FTcRUQCUs9dRMbNcD33efNu4MSJzv2dPT2wc+f2zv0FmeTuuetsGRGp1IkT\ncMUVnSu+hw7d0LF9dzO1ZTKJ3teMvL7Ia4P468vhK1/5Crfeeus7nluzZg2f+9znKprR2OnIXSS8\nyevMequeRFfr7+/nwQcf5OjRo0ybNo2BgQGefPJJtm7dWvXUkqm4Z9ItnwDslMjri7y2mmnAgTer\nnkWNXVX1DIbT29vL4sWL+fa3v80999zD1q1bueKKK5g/f37VU0umtoyICLBq1SrWr18PwPr16+nv\n7694RmOj4p5J9L5m5PVFXlvNqclVz6AEy5cvZ8eOHbz88ss89dRT3HnnnVVPaUxU3EVEgMmTJ3Pr\nrbdyxx13cOONNzJ79uyqpzQm6rlnEr1vG3l9kddWc9Hvqp5BMz09nT1dsaen/bGrVq3i8ccfZ+3a\ntR2bz3hRcReRSnXTB4zmzJnDxRdfzIoVK6qeypipLZNJ9L5t5PVFXluNeu7tOHv2LF/96ldZuXIl\nl156adXTGTMduYvIhHfixAlmzpzJ1VdfXfS57UPp2jIiwZn17umi89wX6Hruw9P13EVEpCUV90yi\n920jry/y2mrUc5+IVNxFRAJSz10kOPXcy6Ceu4iItKTinkn0vm3k9UVeW4167hORznMXkUrdMG8e\nnb7P3vadO5O/fMmSJfT393P33Xefe27btm309/ezd+/ell8/mrE5qbhnEv36JJHXF3ltNd19bRlO\nnGD7FVd0bPc3HDo0pq83M8zKe7tQbRkRmfDGepu9tWvXMm/ePKZOnco111zDY489BtQ++XrTTTex\nf/9+pkyZwtSpUzlw4ED2+Q9HxT2T6H3byOuLvLYa9dxb6e/vZ+vWrRw9ehTg3G32Vq1aBUCrM3xm\nzpzJU089xbFjx1i7di2f//zneeGFF+jp6WHr1q1ceeWVHD9+nGPHjtHbOz63PFRxF5EJb+ht9oB3\n3GbP3VmzZg3Tp08/9+eWW255R6tm2bJlXH311QAsXryYT3ziEzz77LNA6x8MnaLinkn0vm3k9UVe\nW02X99y7xEi32TMzvv71r3PkyJFzfzZt2vSOor1lyxYWLVrE5ZdfzvTp09m8eTOHDx+uZB2DVNxF\nRBjdbfaGFvZTp06xYsUK7rvvPn79619z5MgRli1bdm5MVW/GqrhnEr1vG3l9kddWo557O5rdZq9Z\na+X06dOcPn2aGTNmcN5557FlyxaeeeaZc6/PnDmTw4cPc+zYsY7Ov5FOhRSRavX0jPl0xVb7b9dI\nt9kb7uh78LkpU6bw0EMPcdttt3Hq1CluueUWli9ffm7cddddx8qVK3n/+9/P2bNn2blz57i8qdry\n2jJmthT4GjAJeNzdv9zw+jRgPTCH2g+Lf3P3dcPsR9eWKZjZlJ3Qc0nV8xjeibfdj8+rehbdSteW\nad/rr7/O3LlzOXjw4LjfjSn3tWWaHrmb2STgYeDjwBvAz8xso7vvGjLs74CX3f0WM5sBvGpm6919\nYLSTkW7Wc0n3FIhGvTOqnoGUb6LdZm8hsNvd9wCY2QZgOTC0uJ8FptYfTwUOT8TCbmZLYp91Ebdv\nq+wk4m32WhX3WcDQCyLsA25sGPMw8AMz2w9MAW7LNz0Rkc7r6enhrbfeqnoaWbUq7u00x5YCP3f3\nj5rZNcB/mdn17n68caCZrQP21Dd/C7w4eMQ0eMZCqduDz3XLfDqxPtg0BW4+/vvH0C3bY1mfu2+r\n+t+3s9sX/a7qfH6/La3Us7urvrkneT/N3twws0XAA+6+tL79BeDs0DdVzWwT8K/u/qP69n8D97v7\n9oZ96Q3VgnXXm3KNeme4H+irehbdqruy6+43VKs03jfr2A5ca2Z9ZnYhcDuwsWHM69TecMXMZgJ/\nBPxqtBMpnc6VLpeyk4iatmXcfcDMVgNPUzsV8gl332Vm99ZffxT4ErDOzHYABtzn7r/p8LxFpFAl\nXj63RLqHqrSlu361b6S2TDPKrmy6h6qIiJyj4p6J+rblUnZli59fGhV3EZGAVNwzif0JR4h8TXBl\nV7b4+aVRcRcRCUjFPZP4fb+4fVtlV7b4+aUZ1+u5107J6ka6ZKyIxDLON+vo3nNtx7qH+H2/uH1b\nZVe2+PmlUVtGRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM\n4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVd\nRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z\n4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf9\n4vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCSg\nlsXdzJaa2Stm9pqZ3T/CmCVm9oKZvWxm27LPsgDx+35x+7bKrmzx80tzfrMXzWwS8DDwceAN4Gdm\nttHddw0Zcxnw78An3X2fmc3o5IRFRKS1VkfuC4Hd7r7H
3c8AG4DlDWPuAL7r7vsA3P3N/NPsfvH7\nfnH7tsqubPHzS9OquM8C9g7Z3ld/bqhrgfeY2Q/NbLuZ9eecoIiIjF7TtgzgbezjAuBPgI8BlwA/\nNrOfuPtrY51cSeL3/eL2bZVd2eLnl6ZVcX8DmDNkew61o/eh9gJvuvtJ4KSZ/Q9wPTBMcV/RB1ed\nrj2eNgALTsLNx2vbm6bU/lvN9uCvdoP/o2j7ndu1X+03TemWvJTfaPPrrrx+v11T9b9PN23XH99V\n/6fZQyJzH/ng3MzOB16ldlS+H/gpsLLhDdXrqL3p+kngIuA54HZ339mwLwd/PnWindU7w/1A31j2\nYGZLIh9BmE0/AEcaf7B3ibHlp+yqpO+9VszM3d1G+3VNj9zdfcDMVgNPA5OAJ9x9l5ndW3/9UXd/\nxcy2AjuAs8B/NBZ2EREZX02P3LP+RcGP3KMz690DB7r0TCjl14yyK1vqkbs+oSoiEpCKeybxz7WN\ne660sitb/PzSqLiLiASk4p5J5Hfra+KeK63syhY/vzQq7iIiAam4ZxK/7xe3b6vsyhY/vzQq7iIi\nAbW6/IC0KX7fr3v7tpfw5pW9ZntSv34m0GujPo24LSfg7ePu8zqy87Z1b3Y5xP/eS6PiLsW7BLcD\n0JUf0ukF3d+gibH+YO6k7vjBnE7FPZPo17eI3LfdBFNuhuNVz6Nzuje7HD+YO5Vf6T+Y1XMXEQlI\nxT2T2EftELlvG/uoHSJnBxMhvzQq7iIiAam4ZxL/XNvu7duO1SaY0npUyeJmBxMhvzQq7iIiAam4\nZ6Kee7ni92zjZgcTIb80Ku4iIgGpuGeinnu54vds42YHEyG/NCruIiIBqbhnop57ueL3bONmBxMh\nvzQq7iIiAenaMuS5eNEpmHwRZD9C6p6LF8Xt2+raMmWLn18aFXd08SIRiUdtmUziHznE7dsqu7LF\nzy+NiruISEAq7pnEP9c2bt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8v\nbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJS\ncc8kft8vbt9W2ZUtfn5pWhZ3M1tqZq+Y2Wtmdn+TcX9qZgNm9pd5pygiIqPVtLib2STgYWApMA9Y\naWZzRxj3ZWArYB2YZ9eL3/eL27dVdmWLn1+aVkfuC4Hd7r7H3c8AG4Dlw4z7e+A7wKHM8xMRkQSt\nivssYO+Q7X31584xs1nUCv4j9ac82+wKEr/vF7dvq+zKFj+/NK2KezuF+mvAP7q7U2vJTMi2jIhI\nN2l1m703gDlDtudQO3ofagGwwcygdku4m8zsjLtvfPfuVvTBVadrj6cNwIKTcHP9p+6met+smu3B\nvt3gUcBotx+E9y6Ak6lfP9L2IDNbAuDu26rYhremwqZT3ZJXzvyG/lvHzO/U5KrzGXm7vtWF+Z2C\nc+9VjGde9cd31f/qPSSy2gH3CC+anQ+8CnwM2A/8FFjp7rtGGL8W+IG7f2+Y1xz8+dSJdtIMJl1/\niLMvjWUfnbyH6gH3vtz7HS2z6QfgSOMP9q4w1vw6eYPlbsgvcnYwEb73zN191B2Rpkfu7j5gZquB\np4FJwBPuvsvM7q2//mjSbAOK3/eL27dVdmWLn1+aVm0Z3H0LsKXhuWGLurt/JtO8RERkDPQJ1Uzi\nn2sb91xpZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEX\nEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2\n+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/\nuH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlI\nxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+Pml\nUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1b\nZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQmoreJuZkvN7BUze83M7h/m9TvN7CUz22Fm\nPzKzD+afaneL3/eL27dVdmWLn1+alsXdzCYBDwNLgXnASjOb2zDsV8Bid/8g8CXgsdwTFRGR9rVz\n5L4Q2O3ue9z9DLABWD50gLv/2N2P1jefA2bnnWb3i9/3i9u3VXZli59fmnaK+yxg75DtffXnRnI3\nsHkskxIRkbE5v40x3u7OzOyjwN8AHx5+xIo+uOp07fG0AVhwEm6u/9TdVO+bVbM92LcbPAoY7faD\n8N4FcDL160faHmRmSwDcfVsV2/DWVNh0qlvyypnf0H/rmPmdmlx1PiNv17e6ML9TcO69ivHMq/74\nrvpfvYdE5t68dpvZIuABd19a3/4CcNbdv9ww7oPA94Cl7r57mP04+POpE+2kGUy6/hBnXxrLPjbB\nlE78etgLMw649+Xe72iZTT8AR/ZVPY/hjDW/TmUH3ZFf5OxgInzvmbu7jfbr2mnLbAeuNbM+M7sQ\nuB3Y2PCXv49aYf/0cIV9Iojf94vbt1V2ZYufX5qWbRl3HzCz1cDTwCTgCXffZWb31l9/FPgXYDrw\niJkBnHH3hZ2btoiINNNOzx133wJsaXju0SGP7wHuyTu1snTyV/vuEPdcaWVXtvj5pdEnVEVEAlJx\nzyT+kUPcvq2yK1v8/NKouIuIBKTinkn861vE7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+N\niruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsq\nu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE\n7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruI\nSEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LF\nzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF\n7dsqu7LFzy+NiruISEAti7uZLTWzV8zsNTO7f4QxD9Vff8nM5uefZveL3/eL27dVdmWLn1+apsXd\nzCYBDwNLgXnASjOb2zBmGfABd78W+CzwSIfm2tWeh4urnkNnnbmw6hl0irIrW/z80rQ6cl8I7Hb3\nPe5+BtgALG8
Y8yngmwDu/hxwmZnNzD7TLncUzq96Dp3lYVt4yq5s8fNL0yr0WcDeIdv76s+1GjN7\n7FMTEZFUrYq7t7kfS/y6MP4XQv/qC/8X9uhI2ZUtfn5pWoX+BjBnyPYcakfmzcbMrj83DFswuumN\njzcBgzHPzeDyDNN5937NuuSHZdz8OpUddEt+cbODifC9N3qtivt24Foz6wP2A7cDKxvGbARWAxvM\nbBHwW3c/2Lgjd288uhcRkQ5pWtzdfcDMVgNPA5OAJ9x9l5ndW3/9UXffbGbLzGw3cAL4TMdnLSIi\nTZl7sb91iIjICLKfIhX5Q0+t1mZmS8zsqJm9UP/zz1XMM4WZfcPMDprZL5qMKTI3aL2+krMDMLM5\nZvZDM/ulmb1sZmtGGFdkhu2sr9QMzWyymT1nZi/W1/bACONGl527Z/tDrXWzG+gDLgBeBOY2jFkG\nbK4/vhH4Sc45dOpPm2tbAmyseq6J6/tzYD7wixFeLzK3Uayv2Ozq8+8FPlR/fCnwapTvvVGsr9gM\ngUvq/z0f+Alw41izy33kHvlDT+2sDd59WmgR3P1Z4EiTIaXmBrS1Pig0OwB3P+DuL9YfvwXsAq5s\nGFZshm2uDwrN0N3frj+8kNrB49mGIaPOLndxj/yhp3bW5sCf1X9t2mxm88Ztdp1Xam7tCpNd/ey2\n+cBzDS+FyLDJ+orN0MzOM7MXgYPAM+7+s4Yho84u94cbIn/oqZ05/hyY4+5vm9lNwPeBP+zstMZV\nibm1K0R2ZnYp8B3gH+pHuO8a0rBdVIYt1ldshu5+FviQmU0D/tPM/tjdf9kwbFTZ5T5yz/yhp67S\ncm3ufnzw1yt33wJcYGbvGb8pdlSpubUlQnZmdgHwXWC9u39/mCFFZ9hqfREydPejwA+pXaxxqFFn\nl7u4n/vQk5ldSO1DTxsbxmwE/hqg2YeeulDLtZnZTDOz+uOF1E41/c34T7UjSs2tLaVnV5/7E8BO\nd//aCMOKzbCd9ZWaoZnNMLPL6o8vBv6C2nsKQ406u6xtGQ/8oad21gbcCvytmQ0AbwN/VdmER8nM\nvgV8BJhhZnuBL1J7Y6fo3Aa1Wh8FZ1f3YeDTwA4ze6H+3D8B74MQGbZcH+Vm+AfAN612ifXzgCfr\nWY2pbupDTCIiAYW+zrOIyESl4i4iEpCKu4hIQCruIiIBqbiLiASk4i4iEpCKu4hIQCruIiIB/T96\nyfLdDqRLRwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#Compare estimate, yHat, to actually score\n", + "bar([0,1,2], y, width = 0.35, alpha=0.8)\n", + "bar([0.35,1.35,2.35],yHat, width = 0.35, color='r', alpha=0.8)\n", + "grid(1)\n", + "legend(['y', 'yHat'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Fourth part: https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%204%20Backpropagation.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def sigmoid(z):\n", + " #Apply sigmoid activation function to scalar, vector, or matrix\n", + " return 1/(1+np.exp(-z))\n", + "\n", + "def sigmoidPrime(z):\n", + " #Derivative of sigmoid function\n", + " return np.exp(-z)/((1+np.exp(-z))**2)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXIAAAEACAYAAACuzv3DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYFNXV+PHvYTaGTTZZhl1ABQy+bohoDK6gUYxbEKO+\naFRcUGN+JgJGetoNiTFRI0YElbhE44tR0dc1Iq/7QkRRBAWVHRUQZR+28/vj1gzDMMz0DNVdXXfO\n53nmgeouus+hZs5Un6p7r6gqxhhj4qte1AEYY4zZPVbIjTEm5qyQG2NMzFkhN8aYmLNCbowxMWeF\n3BhjYq7aQi4iD4jItyLySRX73CUic0XkYxE5INwQjTHGVCWVM/IHgYG7elJETgS6qWp34GLgbyHF\nZowxJgXVFnJVfQNYVcUug4C/B/u+BzQVkdbhhGeMMaY6YfTI2wGLym0vBtqH8LrGGGNSENbFTqmw\nbeP+jTEmQ3JDeI0lQIdy2+2Dx3YgIlbcjTGmFlS14snyDsIo5FOA4cDjItIX+EFVv61NMHEmIsWq\nWhx1HOnic34+5wZ1Lz8R6gGtcG3f9hX+bBc8tyfQktrVwPGqXLKbYacslZPgapMQkceAnwEtRWQR\nkADyAFR1vKo+LyInisg8YB1w/u6FHVudow4gzTpHHUAadY46gDTrHHUAYROhIbAX0BUO+bkIrYBu\nbpsOBDUqBWuAFcDycl8rcTd4/Aj8EHyV//v34WUSjmoLuaoOSWGf4eGEY4wx24lQH9gX6AXsV+6r\n8/a9egIcXOGfrsS1eBdX+HMJ8C2uYK9QZWMaw8+YMForxpkUdQBpNinqANJoUtQBpNmkqANIhQh5\nuCLdBzgk+LMnkFPJ7puBr4Ev4eiNwOvu73wJzFdlfUaCzhKSqYUlRER97pEbY2omaI/0A47CtW8P\nBOpX2G0bMBf4tMLXPFW2ZC7a6KRSO62Qh0RE+qvqtKjjSBef88vW3OxOr7qnshqZSu201ooxWcyW\nYqw7RGp/nmtn5MZkqeBnJuowTIaISK3PyG0aW2OMiTkr5CERkf5Rx5BOPufnc26mbrBCbowJzZgx\nY7jooouy7n07d+7Mq6++msGIMst65MZkKeuRh6dLly7cf//9HH300VGHskvWIzfGmDrMCnlIfO+z\n+pyfz7ml09ixY2nfvj1NmjRh3333ZerUqRQXF3PuueeW7fPQQw/RqVMnWrZsyU033UTnzp2ZOnUq\nAMXFxZx55pmce+65NGnShN69ezN37lzGjBlD69at6dixI6+88krZay1dupRBgwbRokULunfvzsSJ\nE8ueq/i+Dz/8cNn73nLLLRn434iWFXJjYkoknK/a+Pzzzxk3bhzTp09n9erVvPzyy3Tu3HmHe6E/\n++wzLr/8ch577DGWLVvGjz/+yNKlS3d4neeee47zzjuPVatWccABBzBgwADAFe3Ro0czbNiwsn3P\nOussOnbsyLJly5g8eTKjRo3itddeC/4vdnzfyy67jEcffZSlS5eycuVKFi9eXLtEY8IKeUiycWRg\nmHzOz+fc0iUnJ4eSkhJmzZrF5s2b6dixI3vttdcOA5gmT57MoEGD6NevH3l5edxwww07DXo58sgj\nOe6448jJyeGMM85g+fLljBgxgpycHAYPHsz8+fNZvXo1ixYt4u2332bs2LHk5+ez//77c+GFF/LQ\nQw8B7PS+J598MkcccQT5+fnceOON1Kvnd6nzOztjPKYazldtdOvWjTvuuIPi4mJat27NkCFDWLZs\n2Q77LF26lPbtt6/6WFhYSIsWLXbYp1WrVjs837Jly7JiX1hYCMDatWtZunQpzZs3p2HDhmX7d+zY\nkSVLdlrDZqf3bdCgwU7v6xsr5CHxvc/qc34+55ZOQ4YM4Y033mDBggWICNdee+0OZ9xFRUU7tDQ2\nbNjAypUra/VeRUVFfP/996xdu7bssYULF+5QsMvvu2jR9mWE169fX+v3jQsr5MaYGvviiy+YOnUq\nJSUlFBQUUL9+fXJydpxt9vTTT+fZZ5/lnXfeYdOmTRQXF9d67pgOHTrQr18/Ro4cSUlJCTNnzuSB\nBx7gnHPO2Wnf008/neeee4633nqLTZs2MXr0aLZt21ar940LK+Qh8b3P6nN+PueWLiUlJYwcOZI9\n99yTtm3bsmLFCsaMGQNsv/DYq1cv/vrXv3LWWWdRVFRE48aNadWqFQUFBWX7VeyZV7X92GOPMX/+\nfIqKijjttNO44YYbyu4LL/9avXr1Yty4cZx99tkUFRXRvHlzOnTogM9sQJAxWcq3AUFr166lWbNm\nzJs3j06dOkUdTtaxAUFZwPc+q8/5+Zxb1J599lnWr1/PunXruOaaa+jdu7cV8TSwQm6MSZspU6bQ\nrl072rVrx5dffsnjjz8edUhestaKMVnKt9aKqZq1Vowxpg6zQh4S3/usPufnc26mbrBCbowxMWc9\ncmOylPXI6xbrkRtjTB1mhTwkvvdZfc7P59wyzcel3i699FJuuumm2oaWEdZaCYmI9Pd5qLfP+WVr\nbtZaCU/5pd6Ki4u5+eabqV+/Prm5ufTs2ZPbb7+dvn37RhqjtVayQDYWgjD5nJ/PuZmdiQhDhgxh\nzZo1LF++nCOOOILTTjut0n3jMtmWFXJjTK3Edak3VS2bhTE3N5fzzjuPb775hpUrVzJ06FAuvfRS\nTjzxRBo1asRrr73G0KFDuf766wGYNm0a7du357bbbqN169YUFRXxzDPP8Pzzz7PPPvvQokWLssnD\nSt/r1ltvpVu3brRs2ZLBgwezatWqEI+Ckxv6K9ZR2frxPCw+5xfX3CQZTqdSEzVv35Rf6q1NmzYs\nXLiQLVu28MYbb5TtU7rU20svvcQhhxzCqFGjKl3qbcqUKUyaNIkLLriAAQMGcNFFF7F06VIefPBB\nhg0bxldffQW4pd569+7N5MmTmT17Nscddxxdu3blqKOOqnSptxdeeIE+ffowcuTIXS71VlJSwqRJ\nk+jYsWPZ4hOPPfYYL7zwAocddhglJSU88sgjO7z+t99+S0lJSVmMF154IQMGDODDDz9kwYIFHHzw\nwZx99tl06tSJu+66iylTpvD666+z5557csUVV3D55Zfzj3/8o8b/51WxM3JjTI3Ffam3J554gmbN\nmtGxY0dmzJjBU089VfbcL37xCw477DCAsil3y79+Xl4e1113XVmMK1eu5KqrrqJhw4b07NmTnj17\n8vHHHwNw7733ctNNN1FUVEReXh6JRILJkyeH3rKxM/KQxPGMriZ8zi+uudXmTDos5Zd6mzVrFgMG\nDODPf/7zDvtkYqm36dOn7xRbKku9DR48uOyXQHkiUumqQ+W1aNFipxhbt269Qx6lKxktWLCAU089\ndYdfJLm5uXz77be0bdu2yvepCTsjN8bUSpyXeqvp3UAVP0mkqmPHjrz44ousWrWq7Gv9+vWhFnGw\nQh4a3+9F9jk/n3NLF1+XeqssvvIXR2vqkksuYdSoUSxc
uBCA5cuXM2XKlFq9VlWskBtjaizOS71V\n9r5VPVfxsepiLu+qq65i0KBBHH/88TRp0oTDDjuM999/f5f711a1A4JEZCBwB5ADTFTVsRWe3wN4\nBOiA67n/SVUnVfI6Xg8IMiZsvg0IsqXeqpa2AUEikgPcDQwEegJDRKRHhd0uBz5V1f8C+gO3i4hd\nRDXG2FJvGVJda6UPME9V56vqZuBx4JQK+2wDmgR/bwKsVNUt4YaZ/Xzvs/qcn8+5Rc2WesuM6s6c\n2wGLym0vBg6tsM/dwLMishRoDPwyvPCMqZtE8KINOWHCBCZMmBB1GN6rrpCn0qAbCHyoqkeJSFfg\nFRHZX1XXVNxRRCYB84PNH4CPSu/hLT0riut26WPZEo/ll/q2qk7LpngA4I47MXVS8L0wNNicn9K/\nqepiioj0BYpVdWCwPRLYVv6Cp4g8B4xR1beC7VeBa1V1eoXXsoudxqRAhEOBN0FyfbrYaaqWztkP\npwPdRaSziOQDg4GKN0EuBI4N3rA1sA/wVarB+8L3PqvP+WVTbiI0xV2LshsGTMqq/GZR1S0iMhx4\nCXf74f2qOltEhgXPjwduBCaJyExAgN+r6vdpjtsY7wR98fuAzsB/gINqO6LQ1C22sIQxWUKEYcC9\nwBrgQFXmRRySyQK2sIQxMSHCT3AD7wCGWRE3NWGFPCTZ1GdNB5/zizo3EQqAR4H6wP2qPBbu6/t7\n7MD//FJhhdyY6BUDPwHmAVdFG4qJI+uRGxMhEfoBpcvq/FSVt6OMx2Qf65Ebk8VEaAj8HfdzeJsV\ncVNbVshD4nufzuf8Isztj0A34BMgka438fnYgf/5pcIKuTEREOFY4DJgM3CuKiURh2RizHrkxmRY\n0FL5BOgC/EGVmyMOyWQx65Ebk52SuCL+Ma69YsxusUIeEt/7dD7nl8ncRDgIuBo3j/+FqmxO/3v6\ne+zA//xSYYXcmAwRIQ+YiPu5u1OV6dX8E2NSYj1yYzJEhN8DY3FzTO+nyrpoIzJxkErttEJuTAaI\n0BX4FDcMf6AqL0UckokJu9iZQb736XzOL925BdPT3oMr4o9muoj7fOzA//xSYYXcmPQ7FTget7zh\nbyOOxXjIWivGpJEIDYDZQEdguCrjIg7JxIy1VoyJ3ghcEf8It2iEMaGzQh4S3/t0PueXrtyCC5y/\nDzaHq7I1He9TfRz+HjvwP79UWCE3Jn3uAAqAh1R5K+pgjL+sR25MGohwEvAssBrYR5VvIg7JxJT1\nyI2JgAj1gTuDzYQVcZNuVshD4nufzuf80pDblcBewCyI/i4Vn48d+J9fKqyQGxMiEVoB1wWbV2di\nUixjrEduTIhEuAe4FHhBlROjjsfEn821YkwGidATmBls9lblsyjjMX6wi50Z5Hufzuf8QsztNiAH\nuC+birjPxw78zy8VVsiNCYEIxwMnAmuA4mijMXWNtVaM2U0i5AAzgJ8AI1QZG3FIxiPWWjEmM87H\nFfEFbL9/3JiMsUIeEt/7dD7ntzu5idAQuDHYHKHKxlCCCpHPxw78zy8VVsiN2T1XAW2AD4B/RhyL\nqaOsR25MLYnQAvgKaAIco8rUiEMyHrIeuTHpNQJXxF+xIm6iZIU8JL736XzOrza5idAeuCLYHBlq\nQCHz+diB//mlwgq5MbWTwM01/oQq/4k6GFO3VdsjF5GBuAnyc4CJqrrTPbLBb8S/AHnAClXtX8k+\n1iM3XhBhX9zMhgr0VOWLiEMyHkulduZW8wI5wN3AscAS4AMRmaKqs8vt0xQ3VecAVV0sIi13P3Rj\nstpNuE+z91kRN9mgutZKH2Ceqs5X1c3A48ApFfY5G3hSVRcDqOqK8MPMfr736XzOrya5iXAIcDqw\nEbghXTGFyedjB/7nl4rqCnk7YFG57cXBY+V1B5qLyGsiMl1Ezg0zQGOyhQgC3Bps3qnKkijjMaZU\nla0VXA+wOnnAgcAxQAPgHRF5V1Xn7m5wcaKq06KOIZ18zq8GuR0LHA38APGZT8XnYwf+55eK6gr5\nEqBDue0OuLPy8hbhLnBuADaIyOvA/sBOhVxEJgHzg80fgI9KD0LpxyPbtu3s3M7pD0/fCScD/BFk\nf5Fsis+2fdkO/j4UZz4pqPKuFRHJBT7HnW0vBd4HhlS42Lkv7oLoANztWO8Bg1X1swqv5fVdKyLS\n3+czA5/zSyU3EU4AngdWAF1UWZuJ2MLg87GDOpHf7t21oqpbRGQ48BLu9sP7VXW2iAwLnh+vqnNE\n5EXcyijbgAkVi7gxcRb0xksvbI6NUxE3dYPNtWJMNUQ4CXgW+A7YS5V1EYdk6hCba8WY3RScjSeD\nzVutiJtsZIU8JL7fy+pzftXkNgh3V9Y3wL0ZCShkPh878D+/VFghN2YXgrPx4mDzVlU2RBiOMbtk\nPXJjdkGEU4F/4e7Y6maF3ETBeuTG1JII9djeGx9jRdxkMyvkIfG9T+dzfrvI7TTcgspLgIkZDShk\nPh878D+/VFghN6aC4Gy8ONi8ORsXVDamPOuRG1OBCINxM30uArqrUhJxSKYOsx65MTUkQg5u9R+A\nm6yImziwQh4S3/t0PudXIbfBQA9gATApinjC5vOxA//zS4UVcmMCwdn46GDzJlU2RRmPMamyHrkx\nARHOAR4Gvgb2UWVzxCEZYz1yY1IlQi7be+M3WhE3cWKFPCS+9+l8zi/I7VdAN+BL3Fm5N3w+duB/\nfqmwQm4MTXKA64ONG1TZEmU0xtSU9chNnSfCBcD9uOUJe1ohN9nEeuTGVEOEPOAPwWbSiriJIyvk\nIfG9T+dxfv8N07oAc3CjOb3j8bED/M8vFVbITZ0lQj479sa3RhmPMbVlPXJTZ4kwDLfqz2dAbyvk\nJhtZj9yYXRChALgu2ExaETdxZoU8JL736TzM79dAB+BTyF8edTDp5OGx24Hv+aXCCrmpc0SoD4wK\nNothc2b6i8akifXITZ0jwhXAXcBM4ABVtkUckjG7lErttEJu6hQRCnHD8NsCp6nyVMQhGVMlu9iZ\nQb736TzKbxiuiM8AngavcquU5ec/K+SmzhChITAy2BytivXGjRestWLqDBGuAW4DPgAOtUJu4sB6\n5MYERGiEWzCiJXCCKi9GHJIxKbEeeQb53qfzIL/huCL+LvBS+Sc8yK1Klp//rJAb74nQBPhdsGm9\nceMda60Y74nwB+BG4E3gSCvkJk6sR27qPBH2AOYDTYGjVXkt2oiMqRnrkWeQ7326GOf3G1wRn7ar\nIh7j3FJi+fnPCrnxlgjNgN8Gm4koYzEmnay1Yrwlwo24Zdz+rcpxUcdjTG2E0loRkYEiMkdE5orI\ntVXsd4iIbBGR02oTrDFhEqEFrq0CdjZuPFdlIReRHOBuYCDQExgiIj12sd9Y4EWgTp51+96ni2F+\nvwMaAS+p8nZVO8Ywtxqx/PxX3Rl5H2Ceqs5X1c24xWlPqWS/K4DJgNcT9Jt4EKENcGWwOTrKWIzJ\nhOoKeTtgUbn
txcFjZUSkHa64/y14qE7eo6uq06KOIZ1ilt8fgELgaVXer27nmOVWY5af/6or5KkU\n5TuAEequmgp1tLVisoMIXYCLcd+710ccjjEZkVvN80tw6xqW6oA7Ky/vIOBxEYFgQiIR2ayqUyq+\nmIhMwg3OAPgB+Kj0t2lpnyvG27/xLJ9Y5gd6AZAHj78MQ1qWnotU9e/L91ijjj8d25ZfvLaDvw8N\nUppPCqq8/VBEcoHPgWOApcD7wBBVnb2L/R8EnlXVf1XynNe3H4pIf58/4sUhPxH2wy3ftgXYR5Wv\nU/t32Z/b7rD84i2V2lnlGbmqbhGR4bjZ4nKA+1V1togMC54fH1q0MefzNxLEJr+bcK29+1It4hCb\n3GrN8vOfDQgyXhDhUNwUtRuAvVT5JuKQjAmFzbWSQb7fyxqD/G4J/ryzpkU8BrntFsvPf1bITeyJ\ncCxwNO4C+h8jDseYjLPWiok1EQR4DzgEGKXKmIhDMiZUqdROK+Qm1kQYjBtx/A3QTZV1EYdkTKis\nR55BvvfpsjE/EQqAW4PN0bUt4tmYW5gsP/9ZITdxdjnQGfgMeDDaUIyJjrVWTCyJ0ByYBzQDTlLl\nfyMOyZi0sNaK8dl1uCI+FXg+4liMiZQV8pD43qfLpvxE2AsYHmz+TnX3ZtzMptzSwfLznxVyE0e3\nAPnAw6p8GHUwxkTNeuQmVsoNxS8B9lZlYcQhGZNW1iM3XgkG/9webP7FirgxjhXykPjep8uS/IYA\nhwPfsf3+8d2WJbmljeXnPyvkJhZEaATcFmyOVOXHKOMxJptYj9zEggg3A6OA6cChqmyLOCRjMsLm\nWjFeEKErbvRmPtBPlXciDsmYjLGLnRnke58u4vxuZ/vthqEXcTt28eZ7fqmwQm6ymgjHAacA64AR\nEYdjTFay1orJWiLkAx8BPYARqoyNOCRjMs5aKybursEV8bnAHRHHYkzWskIeEt/7dJnOL5hP5fpg\n8zJVStL3Xnbs4sz3/FJhhdxknWAE5zigPvCoKv+OOCRjspr1yE3WEeFM4AncYsr7qvJtxCEZExnr\nkZvYEaEJcGewOcKKuDHVs0IeEt/7dBnM7yagLW6GwwmZeEM7dvHme36psEJusoYIh+MWjNgKXGLD\n8I1JjfXITVYQoRB3z/jewM2q/CHikIzJCtYjN3FyA66IzwJujDgWY2LFzshDIiL9VXVa1HGkSzrz\nC1b9eTvY7KvKB6G+flJygP2Ag4GDgK5AR6ANUMDXFNCFEtxdMiuBecAc3CeENzShS8OMJ9PsezPe\nUqmduZkKxpjKiFAfeBD36fCPYRVxSUpD4KTgayDQspp/Uhh8tcUV/fKvNQ+YAjwJvKsJtd69ySp2\nRm4iJcIY3GRYnwP/pcrGWr9WUgToC/wa+CXQuNzTC3B3wkwHZgMLgaXABmAzUAA0BVrjWjz7Aofi\nViRqVO51FuPuppmgCV1W21iNSZXNR26ymghHAtMABX6qWtZeqdnrJKUecDJwLXBYuafeBSYDzwNz\nNFHzb3ZJSi6uoJ8OnAF0CJ7aErz2LZrQT2oTtzGpsEKeQXWgTxdqfiI0BT7G9aprdZdKcAb+C+Bm\n3ORaAKuAicCDmtDZqcWSWm7B+x0NXIabWjcneOpJ4AZN6MwaJZAh9r0Zb9YjN9lsHK6IfwAka/qP\nJSn9cGt49gseWgT8GZioCV0bVpDlBWf0rwKvSlI6AL8DLsadrZ8mSXkQuE4T+k063t+YXbEzcpNx\nIvwKeAS3WMQBqsxN+d8mpQ2uYA8JHlqO+0UwQRO6KexYU4inLa6lcxmQB6zFfUL4iyY0bTM2mrrD\nWism64jQGddSaQJcpMrElP6d64NfBNyKuyi5AfgT8CdN6Or0RJs6SUr3IJ5BwUOzgQs1obXq+xtT\nKrQBQSIyUETmiMhcEbm2kud/JSIfi8hMEXlLRHrXNui48n2+hzDyE6EA+B9cEX8KuD+lf5eUHsCb\nwL24Iv4C0EsTOjqMIh5GbprQuZrQU4DjgS9wPfs3JSl/laQ0rvpfp5d9b/qv2kIuIjnA3bh7cXsC\nQ0SkR4XdvgKOVNXeuFF594UdqPHCn3GDcuYDv1alyo+DkpR6kpQrgQ9xd6N8g7ut8Oea0K/THGut\naEJfAfYHbsHNGTMc+FSS8rNIAzNeq7a1IiKHAQlVHRhsjwBQ1Vt3sX8z4BNVbV/hcWut1GEinAU8\nBmwCDldlepX7J6U9MAk4JnhoEnC1JvSHNIYZKknK/rhPHQfhbrH8IzA6il6+ia+wWivtcHcElFoc\nPLYrv8bdt2sMACL0gLJe+G9SKOJDgE9wRXwFcJom9Pw4FXEATejHuE8SN+IK+bXAu0GryJjQpHL7\nYcpXQ0XkKOAC3Gi4yp6fhPtYDW5ei49K7/8s7XPFePs3nuUTSn6gHwKTYVpD+PZVGHzvLvdvTAH/\nj8HA+XwNbOAdenKaJvSbdOZXvseapv+/0XKwfEsPrqMbBwAfygC5h1d4Vrel//hlIL9It33LL/j7\n0CCl+aQgldZKX6C4XGtlJLBNVcdW2K838C9goKrOq+R1vG6t1IFBCTXOT4Qc3EXNk3F3cfRRpdJ7\nvCUp++IuhO4HbASuwt1SmPbbqjJ17CQppasfDQ0eegwYpgldk9b3te/NWAvl9kMRycXNg3EMbm6K\n94EhqttHzYlIR2AqcI6qvlvbYIxfys2jsgpXxHf6BQ8gSfkVMB5oiPteO9PnYe8V8v0Cl29Wjgo1\n0QvtPnIROQG4Azck+X5VHSMiwwBUdbyITAROxU1EBLBZVfvUNBjjj3KDfrYCx6sydad9klKIO0O9\nKHjoH8Al6T5DzQaSlH1wn0B+gvsEciVuVGpmBnaY2LABQRlUBz7epZyfCH2A13EzCg5XZdxO+yRl\nb+AJ3K16JbhClpFWyk6xRHTsKvlF9ijuF1moUwzY92a8hTYgyJhUidAdeA5XxMcD9+y0T1IGA//B\nFfF5QF9N6H117WxUE7pBE3oxcA5uuoJfAdMlKT+JNjITN3ZGbkIjQmvgHaAL8BJwsiqby55PSgFu\nUNBlwUNPABdlwxD7qFW42LsBuEwTOinSoExWsNaKyRgRGuPmFj8Qt3jDUeXvUJGkdMEVqoNwg4Ku\nBv5W187CqyJJaYAbRX1+8NAk4HJN6PrIgjKRs9ZKBvk+30NV+QVzqDyJK+JfAj+vUMQH4YbZH4S7\nL/ZwTeg92VLEs+XYaULXa0IvwBXyDbjbFN8LztZrLVvySxff80uFFXKzW0TIA/4JHAd8BwxQ5TsA\nSUqeJOWPwDO4ya6mAAdqQqsc2VnXBS2VPrhbMffD9c3PjjQok9WstWJqLSjij+EWVvgBOFqVGQCS\nlHbA48ARuFsQR+KmnM2Ks/A4kKQ0wl0wLi3i44HfaEJrva6piR/rkZu0ESEXd5/4YOBH4NjSOVQk\nKcfi7gnfEzeI7CxN6BtRxRpnwfJyF+NuUywAPsINIKp0cJXxj/XIM8j3
Pl35/IIz8b/jivgaXDtl\nuiQlR5IyGngZV8RfBQ7I9iKezcdOE6qa0PG4ybe+BP4L+I8k5fRUXyOb8wuD7/mlwgq5qRERCnFz\n6pyNW9ZsoCrvBWtYvsr29TeTwABN6HfRROoXTegM3MXiJ3ELc0yWpNwpScmPNjKTDay1YlImwh64\nC5ZHAt8DJ6jyviTlVNy8281wiz+cFyywYEIWtFqGA7fj1gj9ANe6+irSwEzaWI/chEaEVrgl1g7E\n9b2Po1jm4wb4DAt2+1/gfE3o8kiCrEMkKX1wA6o64T4ZXQlMsovJ/rEeeQb53KcTYT946WO23yd+\nOMVSejY4DDfA5yrg5DgW8TgeO03o+7jjMRloBDwAPClJaVlx3zjmVxO+55cKK+SmSiIMBN6GgjbA\n+7T6pD/Fci5u9GZPYA7QRxN6l50NZpYm9HvcGqb/jbvofCrwiSTlhEgDMxlnrRVTKREEuAL4C+4X\n/hNcsfdYWsy9D3fRDWAccK0mdF1EYZqAJKUz8BDw0+Chv+GOjfdTAvvOeuSmVkTYcSBKvU038ofC\nddTbdgOQDywALtCE7jTHuImOJCUHuAa3Rmgebn2AYZrQFyMNzOwW65FnkC99OhF64laBOhtYx8H3\njGR0wQlIFF/BAAAL4UlEQVQs2HYrrohPAHr7VMR9OXaa0K2a0LHAIbhpgjsCL8gZ8pIkpUW00aWP\nL8dvd1ghN4BrpYjw37gLmD0o+GEOV3d4kpMuvxk4mG0sB07QhF5s085mN03ox0Bf4PfARhpxPDBb\nkjIkuH3ReMZaKwYR9sS1Uk4FhUPG/R8nXtkN0XbANtwyf4mwV64x6SdJ6Yb7FNU/eOg14EpN6KeR\nBWVqxHrkploinALcB7Si9cx1DBk0n6YLegVPf4Drsc6ILkKzuyQp9YALgFuBFrhJzMbhfjn/EGVs\npnrWI8+guPXpRCgS4Z/A0zT8rhVn/HIpl+xfGBTxH3B3rBxWWsTjll9N+JwbAMUcqQmdCOyNK+CC\nG0D0hSTlYklKbqTx7Sbvj18KrJDXMSLkiDAcmE3e+l9yxC2bubrDRvb7nyIExa1Q000TercmdGvE\n4ZoQaUK/14QOxw0kegPKWmqzJClnWP88vqy1UoeI0B+4ndyNB3LQeOifLKFwVUHw9PPANZrQ2dFF\naDIlKNpnAjcD3YKHpwMjNKGvRhaY2Yn1yA1QdkvhWHJKTuKAB+FnN2yl8bKc4OnpwHWa0JcjDNFE\nRJKSB/waSABtgodfB24BXrbRutGzQp5BItJfVadFHUd5IuwFjKL+qvM56L569L1DafxN6TGYCVwP\nPJvKD2s25hcWn3OD1PKTpDTE9c1/j1uWD9y96LcAT2tCt6U1yN1QB45ftbUz1hc5TOVE2BsYRZOF\n59D3zhwOug8K1oK7yPUJbuTfk9n8w2kyK5hmYYwkZRxwKfBbts9/PkeSchfwsN2Cmp3sjNwTwdwo\n/ZCtV9Hl1dM5eHw99n0G6pVdr5wK3Aa8ZB+XTXUkKYW4WxZ/jxshCrAaeBAYpwmdG1VsdY21VuoA\nEeoDg2m09Lfs/0hvDhoPzYM1BpStCP8D3KYJ/TDKOE08BT3003C3ox5e7qmXcUX9GU3ohihiqyus\nkGdQJvt0wdn3ARSuvJC9nzuXXk80ouvLkLPF7bAtZwn1tv4NeEATuiyc9/S3D+lzbhBefpKUA3Cr\nE50N1A8e/hH4JzAJeDeKT3t14PhZIc+UTHwzidCBwhVn0/Xfl9D9fzvT4ynID2aQVdmG1nuRelvv\nAV4M+x5wn39YfM4Nws8vmIDrLGAocHC5p77Grec6GXg/U9dg6sDxs0IedyJ0o/kX59LtxXPoMm0v\nur4E+eu371DSaCYFaycAT9hCxybTJCm9cAtbnAO0LffUElxRfwZ4UxNaEkF4XrBCHkMiFNJgeX+6\nvXAubT4+hvbvtqL9u1Cv3MnN+uZfkr/2YXI3PaIJ/TK6aI1xgrnQDwPOwPXUO5R7ej0wDXgReAmY\naxfcU2eFPINq+/FOhHyaft2Xvf59Js3nHUfrmd3p9Ea9spYJwLacbaxtM5OC1ZMoWPOkJnRxiKGn\nGKe/H199zg0yn18wavQQXEE/AehdYZeFuCkC3gDeBGbvThumDhw/K+SZkuo3k+Sv3ZOuL/+cVrMG\n0fzLPrScXUTbGULO5h13XNN2BRua/R9NljxK/R9fjXoOcJ9/WHzODaLPT5LSFjgeGAgch5uBsbzv\ngbeA94APgf/UpE0YdX7pZoU8QiLUo+iDHnR4+yT2WPRTGi/pRdMFRbScnU9hhZlDVWB1+1Wsb/kR\nBasn0/zLp8K628SYbBK0YPYDjsCtL/pToKiSXZfgRpZ+CHwKfAbM04RurmRfr1khzwBpNasZnf7v\nZzRe1o8GK3rTcHlXGn7Thj0WNWSPRYJU8v+7odlmVrdbxMam71L4/ZO0+uxVTeiPmY/emGgFbZjO\nuMJ+EG5mxgOARpXsvgWYC8zGFfYvcHfKfA0s83WkciiFXEQG4laIyQEmqurYSva5C9cLWw8MVd15\nIYI4FnLp8loBLed0p8GK3hSs/gkFq/em/o+dqP9DawpXNqPhd4U0XlqPnC3uW6lLhRfYmgtr2q1l\nTdsFlDSeQb1tU2n74Yt66/exO9v2+eOrz7lB/PILFsLozvai3jP46oybZmJH7mevBLco+FfBIwuB\npRW+fozjRdbdLuQikgN8DhyL+6jzATBEdftUpyJyIjBcVU8UkUOBO1W1b22CSSfp9UQ9Gi9rTcGa\njuStb0+9TUXkbmpN7sa25G4oIm9da/LXNadgdRMKVjeg8Pt8GqyQskE2VVnbagtv5Gxgnx5z2dRo\nDlvzp1O46nX2enWmLx8FReQ3qnpH1HGkg8+5gT/5SVIaAPvginoP3PS7e/EmPTmChim8xAa2F/Xv\ngJXAiir+XJ0NZ/lhTJrVB5inqvODF3wcOAX30abUIODvAKr6nog0FZHWqvptrSPfDXLsqMNp/84/\nydtQSN76+uSvyadgTQ6n/5haUa5oYxNlQ4sS1rf4kY1Nl7Op0SI2F85jW/4scjbOoNObM/X2xRtF\npFjfXVocekLZo2n1u8SWz7mBJ/lpQtcDM4KvMiJSzBHcjjtj7xJ8tcf13ku/2gENga7BV0pvKUlZ\ngxu9Wv5riiZ0/O7mE6bqCnk7YFG57cXAoSns0x6IpJBTb1N9ukxrV+lzmwthY5OtbGq8hc0NStjc\nYANbCteyqeFKNjf4lq0FS9hSsIBtuV8h2+bQ5uMvdMK7GzOcgTGmhjSha3Aze36yq30kKY3ZXthb\nBl8tqvizSbmv8vfFZ92EYdUV8lT7SRVP+6PrQ2nODD789V/YmvcdmrOMrXmLqbd1EYXfL9YnH1lf\n/QvUWuc0vnY26Bx1AGnUOeoA0qxz1AGkWedUdgqK/efBV7WCO2waA3uU+2qK68Vnlep65H2BYlUd\nGGyPBLaVv+ApIvcC01T18WB
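+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# A minimal sanity check (a sketch, assuming np, sigmoid, and sigmoidPrime are\n",
+    "# defined as in the cells above): sigmoidPrime should agree with a centered\n",
+    "# numerical derivative of sigmoid, and with the identity sigmoid(z)*(1-sigmoid(z)).\n",
+    "z = np.linspace(-5, 5, 11)\n",
+    "eps = 1e-5\n",
+    "numerical = (sigmoid(z + eps) - sigmoid(z - eps)) / (2 * eps)\n",
+    "print np.allclose(sigmoidPrime(z), numerical)\n",
+    "print np.allclose(sigmoidPrime(z), sigmoid(z) * (1 - sigmoid(z)))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# The two cells above take one step along the gradient (cost goes up) and one\n",
+    "# step against it (cost goes down). Repeating the second step in a loop is plain\n",
+    "# batch gradient descent. A sketch, assuming NN, X, and y are defined as above;\n",
+    "# the step size 3 and the 10 iterations are arbitrary choices for illustration.\n",
+    "for step in range(10):\n",
+    "    dJdW1, dJdW2 = NN.costFunctionPrime(X, y)\n",
+    "    NN.W1 = NN.W1 - 3*dJdW1\n",
+    "    NN.W2 = NN.W2 - 3*dJdW2\n",
+    "    print NN.costFunction(X, y)"
+   ]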
7DvCziq0Vkcpu3zDGGFOd3e2RTwe6i0hn3AWCwcCQCvtMwc2I9nhQ\n+H+orD8etztWjDEmLqos5Kq6RUSG4+ZHyAHuV9XZIjIseH68qj4vIieKyDxgHXB+2qM2xhhTJmMD\ngowxxqRHvUy+mYhcISKzReRTEdlpYJEPROT/icg2EWkedSxhEpHbgmP3sYj8S0T2iDqmMIjIQBGZ\nIyJzReTaqOMJk4h0EJHXRGRW8DN3ZdQxhU1EckRkhog8G3UsYQtu5Z4c/Nx9FrSuK5WxQi4iR+Hu\nOe+tqvsBf8rUe2eKiHTATQqUdVe1Q/Ay0EtV98cNjR4ZcTy7LRjwdjduMqeewBAR6RFtVKHaDFyt\nqr2AvsDlnuUHcBVuuL6PrYU7gedVtQduBsnZu9oxk2fklwJjVN1IR1VdnsH3zpQ/4xar9Y6qvqJa\nNsrtPdxYgbgrG/AWfF+WDnjzgqp+o6ofBX9fiysElU1QFUsi0h44EZhIZUP3Yyz4xPtTVX0A3PVK\n1V3Px5TJQt4dOFJE3hWRaSJycLX/IkZE5BRgsarOjDqWDLgAeD7qIEJQ2WC2ygeTxVxw59kBuF/C\nvvgL8Dsg8mH0adAFWC4iD4rIhyIyQUQa7Grn6m4/rBEReQVoU8lT1wXv1UxV+4rIIcATwF5hvn+6\nVZPfSNycy2W7ZySoEFWR3yhVfTbY5zpgk6r+I6PBpYePH8d3IiKNcOtoXhWcmceeiJwEfKeqM0Sk\nf9TxpEEubtKw4ar6gYjcAYwARu9q59Co6nG7ek5ELsWt4UcQ2DYRaaGqK8OMIZ12lZ+I7If7Dfqx\niIBrO/xHRPqoxmcdzaqOH4CIDMV9lD0mIwGl3xJ2HHrdAXdW7g0RyQOeBB5R1aejjidE/YBBwaR9\n9YEmIvKQqp4XcVxhWYz7hP9BsD0ZV8grlcnWytPA0QAisjeQH6ciXhVV/VRVW6tqF1XtgjsIB8ap\niFcnmM74d8ApqurL/DNlA95EJB834G1KxDGFRtxZxf3AZz7Mflieqo5S1Q7Bz9tZwFSPijiq+g2w\nKKiV4GagnbWr/UM9I6/GA8ADIvIJsAnw5j+9Ej5+ZP8rkA+8EnzqeEdVL4s2pN2zqwFvEYcVpsNx\nq9vPFJHSGQNHquqLEcaULj7+zF0BPBqcZHxJFYMtbUCQMcbEXEYHBBljjAmfFXJjjIk5K+TGGBNz\nVsiNMSbmrJAbY0zMWSE3xpiYs0JujDExZ4XcGGNi7v8DzCg4W/oGfTYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "testValues = np.arange(-5,5,0.01)\n", + "plot(testValues, sigmoid(testValues), linewidth=2)\n", + "plot(testValues, sigmoidPrime(testValues), linewidth=2)\n", + "grid(1)\n", + "legend(['sigmoid', 'sigmoidPrime'])\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.00916908, -0.00390022, -0.00285075],\n", + " [-0.00483125, -0.00184321, -0.0016088 ]])" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN = Neural_Network()\n", + "cost1 = NN.costFunction(X,y)\n", + "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", + "dJdW1" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.02903 ],\n", + " [-0.02442499],\n", + " [-0.04018761]])" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dJdW2" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0175075086056 0.0293176938671\n" + ] + } + ], + "source": [ + "scalar = 3\n", + "NN.W1 = NN.W1 + scalar*dJdW1\n", + "NN.W2 = NN.W2 + scalar*dJdW2\n", + "cost2 = NN.costFunction(X,y)\n", + "print cost1, cost2" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0293176938671 0.0133981591651\n" + ] + } + ], + "source": [ + "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", + "NN.W1 = NN.W1 - scalar*dJdW1\n", + "NN.W2 = NN.W2 - scalar*dJdW2\n", + "cost3 = NN.costFunction(X, y)\n", + "print cost2, cost3" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + 
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Neural Networks Demystified\n",
+    "# Part 2: Forward Propagation "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "\n",
+       "        \n",
+       "        "
+      ],
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%202%20Forward%20Propagation.ipynb\n",
+    "from IPython.display import YouTubeVideo\n",
+    "YouTubeVideo('UJwK6jAStmg')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "# Visualizations:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 15,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": "<base64-encoded PNG omitted: tanh plotted over [-5, 5]>\n",
+      "text/plain": [
+       ""
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "testValues = np.arange(-5,5,0.01)\n",
+    "\n",
+    "plot(testValues, np.tanh(testValues), linewidth=2)\n",
+    "grid(1)\n",
+    "legend(['tanh'])\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "def ReLU(x):\n",
+    "    return x * (x > 0)\n",
+    "# See here: http://stackoverflow.com/questions/32109319/how-to-implement-the-relu-function-in-numpy"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 20,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWgAAAEACAYAAACeQuziAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFKlJREFUeJzt3X2MXPV1xvHnxDZJKTgxDXFNsLtEclQMpW5KCHUKbBpC\nLcsEJJAwUh2ZKm9uQoAGKC8qdaUqBhUVV6mqVMH2UjlNWiBAzFvsOCyltARIvCHYDi2tjWw3Di81\n4JBYLPj0j7nr7LJ3996duW+/3/1+pBW+O7Mz5+yVD7Nnn7k2dxcAoHneVncBAIB0DGgAaCgGNAA0\nFAMaABqKAQ0ADcWABoCGmp7nTma2S9Krkt6UNOzup5VZFAAg54CW5JL63f3/yiwGAPBLU1lxWGlV\nAADGyTugXdJ3zOxJM/tUmQUBADryrjg+7O4/MbNjJW02sx+7+yNlFgYAbZdrQLv7T5L/vmBmd0k6\nTdIjkmRmXMwDALrg7pOujjMHtJkdKWmaux8ws1+VdI6kv5zKk4TMzFa5+6q66ygL/YUt5v6q6M1M\nsyTtkDRb0ifdtbbM5xv73NkvbvO8gp4t6S4zG7n/19x9U4+1haSv7gJK1ld3ASXrq7uAkvXVXUCJ\n+ip4jtXqzLhHJK2v4PmmJHNAu/tOSQsrqAUAKmOmRZI+I2lY0mfddajmksbhnYTZBuouoGQDdRdQ\nsoG6CyjZQN0FlGigrAc20wxJ/5Ac3uSu7WU9Vy+s1wv2m5nHvIMGEB8zXaPOeuNZSae46xfV15A9\nOxnQGcys390H666jLPQXtqb0R5prcmkzMs/szJuDBoBJ8c/npUsCFt19La+gAfQqmQN1l9FIZtb1\nK2h+SQgADcWAzmBm/XXXUCb6C1vs/bUdAxoAGooBnaEJvyEvE/2FLfb+itDX16cjjzxSRx99tObM\nmaNLLrlEr732WubX9ff3a+3a8e/87uvr05YtW8Z8bmBgQGeccUZhNY9gQAOImpnp3nvv1YEDBzQ0\nNKStW7dq9erVub4uLYEx0efLwIDOEPuOj/7CFnt/RZs9e7bOOeccDQ0NSZIee+wxLVq0SLNmzdLC\nhQv18MMP11zhWAxoAKUzK+6jGyMRwD179ujBBx/U/PnztXfvXi1dulQ33HCD9u/fr5tvvlkXXHCB\nXnrppQI77w0DOkPsOz76C1vs/RXB3XX++edr5syZmjdvnmbPnq1Vq1Zpw4YNWrJkiRYvXixJOvvs\ns3Xqqafqvvvuq7niX2JAAyide3EfU2Vmuueee/Tqq69qcHBQO3bs0IsvvqjnnntOt99+u2bNmnX4\n49FHH9W+ffsmfbzp06dreHh4zOeGh4c1Y8aMqReXgQGdIfYdH/2FLfb+inbmmWdqxYoVuvLKKzVv\n3jwtX75c+/fvP/xx4MABXX311ZM+xrx587Rz584xn9u5c6f6+voKr5cBDaBVLr/8cm3evFmLFi3S\nxo0btWnTJr355ps6ePCgBgcHtXfv3sP3HR4e1sGDBw9/DA8P66KLLtKaNWv0zDPPyN315JNPav36\n9Vq2bFnxxbp7Tx+dh+jtMfjgg4+wP5I50Eh9fX2+ZcuWMZ9buXKlX3jhhf7444/7WWed5cccc4wf\ne+yxvnTpUt+9e7e7u/f397uZjflYvny5Hzp0yG+88UafP3++z5w50xcsWODr1q2b8PknmpF5ZicX\nSwLQMy6WNDEullSi2Hd89Be22PtrOwY0ADQUKw4APWPFMTFWHAAQIQZ0hth3fPQXttj7azsGNAA0\nFDtoAD3jX/WeXLc7aP5VbwA9q+NFmpmukbRa0rOSTnHXL6quoWysODLEvuOjv7DF3N9kvZnpfZL+\nIjlcGeNwlhjQAAJjJpP095LeIWmDu75Tc0mlYQcNIChmWibp65L2S/pNdz1fc0ldIQcNICpmmiVp\nTXJ4VajDOS8GdIaYd3wS/YUu5v4m6G21pNmSHpG0vtKCasCABhAEMy2S9BlJw5I+665DNZdUOnbQ\nABrPTDMk/UDSyZL+yl1/XnNJPWMHDSAWX1RnOD8r6Us111IZBnSGmHd8Ev2FLub+RnprS+Y5DQMa\nQGO1KfOcJtcO2symSXpS0h53P/ctt7GDBlCKWDLPaYrcQV8mabskLogCoBJtyzynyRzQZna8pCWS\nbpXUulfKMe/4JPoLXdz9bbhNLco8p8nzCvoWSVdJ8WcOATRDJ/N8/LlqUeY5zaSXGzWzpZKed/et\nk19ZygYk7UoOX5Y05O6DyW39khTq8cjnmlIP/dFf/P3NnCa9skbql/TVb0iffo/k25tTX3fHyZ9X\nqGOXcpj0l4Rm9iVJyyW9oc5vUWdKutPdPzHqPvySEEBh2nCdZ6mAXxK6+3XuPtfdT5C0TNJ3Rw/n\nNoh7x0d/oYutv7GZ5z/9SqzDOa+p5qBJcQAoxVszz9It36+5pNpxLQ4AjRBz5jkN1+IAEAQyz+kY\n0Bli2/G9Ff2FLaL+xl3nOaLeusaABlCrNl7nOS920ABqE+N1nvNiBw2g6Vp5nee8GNAZYt+D0V/Y\nQu4v6zrPIfdWFAY0gMq1/TrPebGDBlC5tmWe07CDBtA4ZJ7zY0BniH0PRn9hC7S/cZnnNIH2VigG\nNIDKkHmeGnbQACrR5sxzGnbQAJqEzPMUMaAzxL4Ho7+whdJfVuY5/WvC6K1MDGgApSLz3D120ABK\nReY5HTtoALUi89wbBnSG2Pdg9Be2APrLlXlOE0BvpWNAAygFmefesYMGUDgyz9nYQQOoC5nnAjCg\nM8S+B6O/sDWxv24yz+mP07zeqsaABlAYMs/FYgcNoDBknvNjBw2gMmSei8eAzhD7Hoz+wtaw/rrO\nPKdpWG+1YEAD6BmZ53KwgwbQEzLP3WEHDaAKZJ5LwoDOEPsejP7CVnd/RWWe0x877nOXBwMaQFfI\nPJePHTSArpB57g07aAClIPNcDQZ0htj3YPQXthr7KzTznCb2c5cHAxrAlJB5rk7mDtrM3iHpYUlv\nlzRd0h3uvmrU7eyggZYg81ycPLNzetaDuPtBM/uIu//czKZL+jcze8Ddv1dYpQBCQea5QrlWHO7+\n8+SPR0iaIbXnR5rY92D0F7Yq+ysz85z+fHGfuzxyDWgze5uZDUn6qaRN7v5EuWUBaBIyz/WYUg7a\nzN4p6S5Jl7r7tuRz7KCByJF5Ll4hO+jR3P0VM3tI0mJJ20Y90YCkXcnhy5KG3H0wua0/+VqOOeY4\nyOMFR0nbkszzTbdK1yyQ/Pnm1BfGcfLnFZ3v4+F5Oak8KY53S3rD3V82s1+R9G1JN7r7/cntUb+C\nNrP+kW92jOgvbFX0Z6avqBOre0RSf1Wxuhacu0JeQc+RdJuZTVNnZ/3PI8MZQNzIPNeLa3EASEXm\nuVxciwNAL8g814wBnSH2LCb9ha2s/qrOPKfXEPe5y4MBDWAMMs/NwQ4awBhknqvBDhrAlHCd52Zh\nQGeIfQ9Gf2Erob/Sr/OcV+znLg8GNABJZJ6biB00ADLP
NWAHDSAvMs8NxIDOEPsejP7CVkR/Tcg8\np4n93OXBgAZajMxzs7GDBlqMzHN92EEDmBCZ5+ZjQGeIfQ9Gf2Hrsb/GZJ7TxH7u8mBAAy1E5jkM\n7KCBliHz3AzsoAGkIfMcCAZ0htj3YPQXtqn219TMc5rYz10eDGigJcg8h4cdNNASZJ6bhR00AElk\nnkPFgM4Q+x6M/sI2hf4anXlOE/u5y4MBDUSOzHO42EEDESPz3FzsoAGQeQ4YAzpD7Hsw+gvbZP2F\nlHlOE/u5y4MBDUSIzHMc2EEDESLz3HzsoIEWIvMcDwZ0htj3YPQXtgn6Cy7znCb2c5cHAxqICJnn\nuLCDBiJB5jks7KCBdiHzHBkGdIbY92D0F7aR/kLPPKeJ/dzlwYAGAkfmOV7soIHAkXkOUyE7aDOb\na2YPmdk2M3vazL5QXIkAekHmOW55VhzDkq5w95MknS7pc2Z2YrllNUfsezD6C92G2xRB5jlN/Ocu\nW+aAdvd97j6U/PlnknZIOq7swgBMrpN5Pv5ckXmO1pR20GbWJ+lhSSclw5odNFADMs/hKzQHbWZH\nSbpD0mUjwxlAbcg8t8D0PHcysxmS7pS0wd3vTrl9QNKu5PBlSUPuPpjc1i9JAR9fHlk/9Nes+ro4\nXjxHeiDJPH9kszT4IalJ9RVzPHoH3YR6CupnRdLSLuWQueIwM5N0m6SX3P2KlNujXnGYWf/INztG\n9BeWJPP8gKQ/lLRBsrUx9TdabOfurfLMzjwD+vcl/aukpySN3Plad38w75MAKAaZ53gUMqCLeBIA\nvUsyzzvUidV90l1ray4JPeBiSQWIPYtJf0EZd53nyPobI+be8mJAAwHgOs/txIoDaDgyz3FixQHE\ngcxzSzGgM8S+B6O/Zsu6znPo/U0m5t7yYkADDcV1nsEOGmgoMs9xYwcNBIrrPENiQGeKfQ9Gf401\nLvOcJuD+MsXcW14MaKBhyDxjBDtooEHIPLcHO2ggPGSecRgDOkPsezD6a46szHP614TT31TF3Fte\nDGigAcg8Iw07aKAByDy3DztoIABknjERBnSG2Pdg9NcIuTLPaQLprysx95YXAxqoEZlnTIYdNFAT\nMs/txg4aaDYyz5gUAzpD7Hsw+qtHN5nn9MdpZn9FiLm3vBjQQMXIPCMvdtBAxcg8Q2IHDTQOmWdM\nBQM6Q+x7MPqrXNeZ5zQN7K8wMfeWFwMaqAiZZ0wVO2igAmSe8VbsoIHmIPOMKWNAZ4h9D0Z/VdRQ\nTOY5/bHr768sMfeWFwMaKBGZZ/SCHTRQIjLPmAg7aKBGZJ7RKwZ0htj3YPRXqkIzz2liPn8x95YX\nAxooAZlnFIEdNFAwMs/Igx00UA8yzyhE5oA2s3Vm9lMz+1EVBTVN7Hsw+iv6+crLPKc/X7znL+be\n8srzCnq9pMVlFwKEjswzipZrB21mfZI2uvtvpdzGDhoQmWdMDTtooCJknlGG6UU8iJkNSNqVHL4s\nacjdB5Pb+iUp4OPLI+uH/kp4PsmXSZotfesp6cL/kV5XTP3VcTx6B92EegrqZ0XS0i7lwIojg5n1\nj3yzY0R/RTyHFkl6VJ3M80J3bS/z+cY+d7znL+bepHyzkwEN9IDMM7pVyA7azL4u6d8lvd/MdpvZ\nJUUVCESAzDNKkzmg3f1idz/O3d/u7nPdvZRrCjRV7FlM+uvlsavNPKfXEO/5i7m3vEhxAF0g84wq\ncC0OoAtkntErctBACcg8oyoM6Ayx78HoryulX+c5r5jPX8y95cWABqaA6zyjSuyggZzIPKNI7KCB\nYpF5RqUY0Bli34PRX97HqT/znCbm8xdzb3kxoIEMZJ5RF3bQQAYyzygDO2igR2SeUScGdIbY92D0\nl6kxmec0MZ+/mHvLiwENTIDMM+rGDhpIQeYZZWMHDXSPzDNqx4DOEPsejP7SvqaZmec0MZ+/mHvL\niwENjELmGU3CDhoYhcwzqsIOGpgCMs9oGgZ0htj3YPQ3RqMzz2liPn8x95YXAxoQmWc0EztotB6Z\nZ9SBHTSQD5lnNBIDOkPse7C29xdS5jlNzOcv5t7yYkCjtcg8o+nYQaO1yDyjTuyggQmQeUYIGNAZ\nYt+Dtbi/4DLPaWI+fzH3lhcDGq1D5hmhYAeNViHzjKZgBw2MR+YZwWBAZ4h9D9am/kLPPKeJ+fzF\n3FteDGi0AplnhIgdNFqBzDOahh00IDLPCFfmgDazxWb2YzP7LzP7syqKapLY92At6S+KzHOamM9f\nzL3lNemANrNpkv5O0mJJCyRdbGYnVlFYgyysu4CSRd7f752vuDPPMZ+/mHvLJesV9GmSnnX3Xe4+\nLOkbks4rv6xGeVfdBZQs2v46medFFyeHN7lre60FlSPa86e4e8tlesbt75W0e9TxHkkfKq+cyZnp\nNyS9u9pnXTDHTL9b7XNWKer+LpSOeo/IPCNQWQO6t4hH8a6X9Klqn/KDkvTpap+zSrH3t0uKJPM8\ngb66CyhRX90F1G3SmJ2ZnS5plbsvTo6vlXTI3W8adZ+mDXEACEJWzC5rQE+X9Iykj0r6X0mPS7rY\n3XcUWSQAYLxJVxzu/oaZfV7StyVNk7SW4QwA1ej5nYQAgHIU9k5CM7vUzHaY2dNmdlP2V4THzL5o\nZofM7Ji6aymSmf11cu5+aGbfNLN31l1Tr2J+g5WZzTWzh8xsW/L37Qt111QGM5tmZlvNbGPdtRTN\nzN5lZnckf++2J7/vG6eQAW1mH5H0cUmnuPvJkm4u4nGbxMzmSvqYpOfqrqUEmySd5O6/Lek/JV1b\ncz09acEbrIYlXeHuJ0k6XdLnIutvxGWStqt5abIi/K2k+939REmnSEpdHRf1CnqlpNXJm1nk7i8U\n9LhN8jeSrq67iDK4+2Z3H3mH3fckHV9nPQWI+g1W7r7P3YeSP/9Mnb/cx9VbVbHM7HhJSyTdKimq\ni7ElP6Ge4e7rpM7v+tz9lbT7FjWg50s608weM7NBMzu1oMdtBDM7T9Ied3+q7loq8MeS7q+7iB6l\nvcHqvTXVUioz65P0O+r8jzUmt0i6SorurfmSdIKkF8xsvZn9wMy+amZHpt0x640qh5nZZkm/nnLT\n9cnjzHL3083sg5L+RdL7uii8Nhn9XSvpnNF3r6SoAk3S33XuvjG5z/WSXnf3f6q0uOLF+CPxOGZ2\nlKQ7JF2WvJKOgpktlfS8u2+N9IJJ0yV9QNLn3f0JM1sj6RpJN6TdMRd3/9hEt5nZSknfTO73RPKL\ntF9z95emXHpNJurPzE5W5/94PzQzqfPj//fN7DR3D+aylZOdP0kysxXq/Ej50UoKKtdeSXNHHc9V\n51V0NMxshqQ7JW1w97vrrqdgiyR93MyWqPMPLMw0s39090/UXFdR9qjzE/kTyfEd6gzocYpacdwt\n6Q8kyczeL+mIkIbzZNz9aXef7e4nuPsJ6nxzPxDScM5iZovV+XHyPHc/WHc9BXhS0nwz6zOzIyRd\nJOlbNddUGOu
8Ulgrabu7r8m6f2jc/Tp3n5v8fVsm6bsRDWe5+z5Ju5NZKUlnS9qWdt/cr6AzrJO0\nzsx+JOl1SdF8M1PE+OPzlyUdIWlz8lPCf7j7n9RbUvda8AarD0v6I0lPmdnW5HPXuvuDNdZUphj/\nzl0q6WvJC4j/lnRJ2p14owoANBT/5BUANBQDGgAaigENAA3FgAaAhmJAA0BDMaABoKEY0ADQUAxo\nAGio/wfTqr8t10n9mwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot(testValues, ReLu(testValues), linewidth=2)\n", + "grid(1)\n", + "legend(['ReLU'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index 4d41aaf..18126ee 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -23,7 +23,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -33,8 +33,8 @@ "output_type": "stream", "text": [ "25000\n", - "200\n", - "200\n" + "25000\n", + "25000\n" ] } ], @@ -54,15 +54,15 @@ "test_data = all_data[25000:50000]\n", "print 
len(train_data)\n", "\n", - "train_data=train_data[:100]+train_data[12500:12600]\n", - "test_data=test_data[:100]+test_data[12500:12600]\n", + "#train_data=train_data[:100]+train_data[12500:12600]\n", + "#test_data=test_data[:100]+test_data[12500:12600]\n", "print len(train_data)\n", "print len(test_data)\n" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -71,8 +71,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "7142\n", - "6994\n" + "113562\n", + "113538\n" ] } ], @@ -105,16 +105,7 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -123,9 +114,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "0\n", - "200\n", - "200\n" + "25000\n", + "25000\n", + "25000\n" ] } ], @@ -151,7 +142,7 @@ "#test_vecs= get_sparse_vectors(test_data, word_space)\n", "\n", "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", + "print len(train_data)\n", "print len(train_vecs)\n", "print len(test_vecs)" ] @@ -177,8 +168,8 @@ "output_type": "stream", "text": [ "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", - "200\n", - "200\n" + "25000\n", + "25000\n" ] } ], @@ -195,8 +186,8 @@ "from random import shuffle, randint\n", "\n", "\n", - "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", "\n", "\n", "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", @@ -228,7 +219,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "(200, 7142)\n" + "(25000, 113562)\n" ] } ], @@ -249,56 +240,11 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\n", - "\n", - "Done fitting classifier on training data...\n", - "\n", - "================================================== \n", - "\n", - "Results with 5-fold cross validation:\n", - "\n", - "================================================== \n", - "\n", - "********************\n", - "\t accuracy_score\t0.715\n", - "********************\n", - "precision_score\t0.765432098765\n", - "recall_score\t0.62\n", - "\n", - "classification_report:\n", - "\n", - " precision recall f1-score support\n", - "\n", - " 0.0 0.68 0.81 0.74 100\n", - " 1.0 0.77 0.62 0.69 100\n", - "\n", - "avg / total 0.72 0.71 0.71 200\n", - "\n", - "\n", - "confusion_matrix:\n", - "\n", - "[[81 19]\n", - " [38 62]]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using gpu device 0: GeForce GT 750M\n" - ] - } - ], + "outputs": [], "source": [ "# Classification with scikit-learn\n", "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", @@ -331,7 +277,7 @@ "from sklearn.linear_model import LogisticRegression\n", "from sklearn import cross_validation\n", "import gensim\n", - "n_jobs = 2\n", + "n_jobs = 4\n", "\n", "#train_vecs=array(train_vecs)\n", "train_vecs=np.array(train_vecs)\n", @@ -355,8 +301,122 @@ "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", 
"print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", - " \n" + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "X= train_vecs\n", + "y=train_tags\n", + "y=y.astype(int)\n", + "num_examples = len(X) # training set size\n", + "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", + "nn_output_dim = 2 # output layer dimensionality\n", + "\n", + "# Gradient descent parameters (I picked these by hand)\n", + "epsilon = 0.01 # learning rate for gradient descent\n", + "reg_lambda = 0.01 # regularization strength \n", + "\n", + "\n", + "def forward(W1, b1, W2, b2, x):\n", + " z1 = x.dot(W1) + b1\n", + " a1 = np.tanh(z1)\n", + " z2 = a1.dot(W2) + b2\n", + " exp_scores = np.exp(z2)\n", + " # softmax\n", + " y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", + " return y_hat, z1, a1, z2\n", + "\n", + "def predict(model, x):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", + " return np.argmax(y_hat, axis=1)\n", + "\n", + "def calculate_loss(model):\n", + " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + " y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", + " correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", + " data_loss = np.sum(correct_logprobs)\n", + " return 1./num_examples * data_loss\n", + "\n", + "\n", + "# This function learns parameters for the neural network and returns the model.\n", + "# - nn_hdim: Number of nodes in the hidden layer\n", + "# - num_passes: Number of passes through the training data for gradient descent\n", + "# - print_loss: If True, print the loss every 1000 iterations\n", + "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", + " \n", + " # Initialize the parameters to random values. We need to learn these.\n", + " np.random.seed(0)\n", + " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", + " b1 = np.zeros((1, nn_hdim))\n", + " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", + " b2 = np.zeros((1, nn_output_dim))\n", + "\n", + " # This is what we return at the end\n", + " model = {}\n", + " \n", + " # Gradient descent. 
For each batch...\n",
+    "    for i in range(0, num_passes):\n",
+    "        # feedforward\n",
+    "        y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n",
+    "        \n",
+    "        # Backpropagation\n",
+    "        delta3 = y_hat\n",
+    "        delta3[range(num_examples), y] -= 1\n",
+    "        #print [range(num_examples), y]\n",
+    "        dW2 = (a1.T).dot(delta3)\n",
+    "        db2 = np.sum(delta3, axis=0, keepdims=True)\n",
+    "        delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n",
+    "        dW1 = np.dot(X.T, delta2)\n",
+    "        db1 = np.sum(delta2, axis=0)\n",
+    "\n",
+    "        # Gradient descent parameter update\n",
+    "        W1 += -epsilon * dW1\n",
+    "        b1 += -epsilon * db1\n",
+    "        W2 += -epsilon * dW2\n",
+    "        b2 += -epsilon * db2\n",
+    "        \n",
+    "        # Assign new parameters to the model\n",
+    "        model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n",
+    "        \n",
+    "        # Optionally print the loss.\n",
+    "        # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n",
+    "        if print_loss and i % 1000 == 0:\n",
+    "            print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n",
+    "            #print y_hat[:2]\n",
+    "    \n",
+    "    \n",
+    "    return model\n",
+    "\n",
+    "# Build a model with a 3-dimensional hidden layer\n",
+    "model = build_model(3, print_loss=True)\n",
+    "\n",
+    "\n"
+   ]
+  },
+  {
diff --git a/python_tutorial_part_9_neural_net_a.ipynb b/python_tutorial_part_9_neural_net_a.ipynb
new file mode 100644
index 0000000..6149e03
--- /dev/null
+++ b/python_tutorial_part_9_neural_net_a.ipynb
@@ -0,0 +1,677 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "# A Vector Space Model, with scikit-learn"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# This is code to build a vector space model, with SVMs on Andrew Maas' \n",
+    "# distribution of movie review sentiment data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "25000\n",
+      "200\n",
+      "200\n",
+      "7142\n",
+      "6994\n",
+      "0\n",
+      "200\n",
+      "200\n",
+      "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n",
0.]\n", + "200\n", + "200\n", + "(200, 7142)\n", + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], + "source": [ + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print 
+    "print len(train_vecs)\n",
+    "print len(test_vecs)\n",
+    "#-------------------------\n",
+    "# We would usually extract the tags automatically from the input data file.\n",
+    "# In the input data file we have, we know that the first 12500 data points are positive/1.0, the next 12500 are\n",
+    "# negative/0.0, then the next 12500 are positive, and the fourth chunk is negative.\n",
+    "# So basically train_data has 25K points (the first half positive and the second half negative),\n",
+    "# and test_data has the same setup for its class labels.\n",
+    "# The rest of the data in the file is unlabeled and we don't use that part.\n",
+    "# We could write code to extract the labels automatically, and we will do this based on a standardized format we work with\n",
+    "# later; for now we will hard-code the labels.\n",
+    "\n",
+    "from random import shuffle, randint\n",
+    "\n",
+    "\n",
+    "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n",
+    "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n",
+    "\n",
+    "\n",
+    "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n",
+    "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n",
+    "# Side note: If the first token in each line were the tag, we could get tags as follows:\n",
+    "# tags= [train_data[i].tag for i in range(len(train_data))]\n",
+    "print train_tags[-1], train_vecs[-1][:10]\n",
+    "print len(train_tags)\n",
+    "print len(test_tags)\n",
+    "#--------------------\n",
+    "train_vecs=np.array(train_vecs)\n",
+    "train_tags=np.array(train_tags)\n",
+    "print train_vecs.shape\n",
+    "#--------------------------------\n",
+    "# Classification with scikit-learn\n",
+    "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n",
+    "# Let's use sklearn to train an SVM classifier:\n",
+    "#-------------------------------------------------\n",
+    "\n",
+    "import argparse\n",
+    "import codecs\n",
+    "import time\n",
+    "import sys\n",
+    "import os, re, glob\n",
+    "import nltk\n",
+    "from collections import defaultdict\n",
+    "from random import shuffle, randint\n",
+    "import numpy as np\n",
+    "from numpy import array, arange, zeros, hstack, argsort\n",
+    "import unicodedata\n",
+    "from scipy.sparse import csr_matrix\n",
+    "from sklearn.svm import SVC, LinearSVC\n",
+    "from sklearn import preprocessing\n",
+    "from sklearn.cross_validation import StratifiedKFold\n",
+    "from sklearn.grid_search import GridSearchCV\n",
+    "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n",
+    "from sklearn import metrics\n",
+    "from sklearn.cross_validation import train_test_split\n",
+    "from sklearn.decomposition import TruncatedSVD\n",
+    "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n",
+    "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n",
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from sklearn.linear_model import LogisticRegression\n",
+    "from sklearn import cross_validation\n",
+    "import gensim\n",
+    "n_jobs = 2\n",
+    "\n",
+    "#train_vecs=array(train_vecs)\n",
+    "train_vecs=np.array(train_vecs)\n",
+    "train_tags=np.array(train_tags)\n",
+    "\n",
+    "print type(train_tags)\n",
+    "print type(train_vecs)\n",
+    "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n",
+    "clf.fit(train_vecs, train_tags)\n",
+    "print \"\\nDone fitting classifier on training data...\\n\"\n",
+    "\n",
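A side note on scale: the import block above pulls in `csr_matrix` but the feature vectors are built as dense numpy arrays, which gets memory-hungry at the full 25,000 x |vocabulary| size. Here is a sketch (a hypothetical helper of mine, not part of the notebook) of holding the same 0/1 bag-of-words features in scipy's CSR format, which scikit-learn's `SVC` accepts directly:

```python
import numpy as np
from scipy.sparse import csr_matrix

def get_sparse_matrix(data, space):
    # Collect the (row, col) coordinates of the 1-entries instead of
    # materializing any dense rows.
    rows, cols = [], []
    for i, data_point in enumerate(data):
        for w in set(data_point.words):
            if w in space:            # skip test-only words explicitly
                rows.append(i)
                cols.append(space[w])
    vals = np.ones(len(rows))
    return csr_matrix((vals, (rows, cols)), shape=(len(data), len(space)))

train_X = get_sparse_matrix(train_data, word_space)  # same shape as np.array(train_vecs)
```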
"#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# A lot of code taken from this tutorial: \n", + "# https://github.com/dennybritz/nn-from-scratch/blob/master/nn-from-scratch.ipynb\n", + "# Package imports\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import sklearn\n", + "import sklearn.datasets\n", + "import sklearn.linear_model\n", + "import matplotlib\n", + "\n", + "# Display plots inline and change default figure size\n", + "%matplotlib inline\n", + "matplotlib.rcParams['figure.figsize'] = (10.0, 8.0)" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 67, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd8XXX5wPHPc865M0mbjnSme0JpaUtZpVAoIEumKFtR\n8YcIOFmKCiooIvyAnyKIiIIosmRpmZVNoVDK6t5N90qbcW/uOOf5/ZHbmiY3bZo0Scfzfr36ejX3\nnPM9z7m5bZ58x/MVVcUYY4wxxuxaTnsHYIwxxhizN7IkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xp\nBZZkGWOMMca0AkuyjDHGGGNaQYuSLBHpIyKvishMEflMRL7dyHn/JyLzReRjERnTknsaY4wxxuwJ\nvBZenwG+p6ofiUghMF1EXlbV2VtOEJGTgcGqOkREDgXuAQ5r4X2NMcYYY3ZrLerJUtXVqvpR7u9V\nwGygV73TTgMezJ3zHlAsIt1bcl9jjDHGmN3dLpuTJSL9gTHAe/UO9QbK6ny9HCjdVfc1xhhjjNkd\ntXS4EIDcUOETwHdyPVoNTqn3dYO9fETE9vcxxhhjzB5DVevnN9tocZIlIiHgSeBhVX06zykrgD51\nvi7NvdbAjoI1uycRuVFVb2zvOEzz2Pdvz2bfvz2Xfe/2bE3pHGrp6kIB/gTMUtU7GzntWeDLufMP\nAzap6pqW3NcYY4wxZnfX0p6sI4ALgU9EZEbutR8BfQFU9Q+qOllEThaRBUA18NUW3tMYY4wxZrfX\noiRLVd+iCb1hqnpFS+5jdnuvtXcApkVea+8ATIu81t4BmGZ7rb0DMK1LVHeP+eYiojYnyxhjjDF7\ngqbkLbatjjHGGGNMK7AkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNa\ngSVZxhhjjDGtwJIsY4wxxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZ\nY4wxxrQCS7KMMcYYY1qBJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY\n0wosyTLGGGOMaQWWZBljjDHGtAJLsowxxhhjWoElWcYYY4wxrcCSLGOMMcaYVmBJljHGGGNMK7Ak\nyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNagSVZxhhjjDGtwJIsY4wx\nxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZY4wxxrQCS7KMMcYYY1qB\nJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMuYvZTktHccxhizr7Iky5i9jIgcWSCh1wWyAtkC\nCb0uIhPaOy5jjNnXiKq2dwwAiIiqqv3WbUwLiMgZUdy/nc/Q+MF0A2Aaa3iE+Yka/PNV9Zl2DtEY\nY/YKTclbLMkyZi8hIuEwzpqrGVM8SDpuc2yBbuY2ZpSnCbqraqadQjTGmL1GU/IWGy40Zu9xbHfi\nTv0EC2CwdKSEmAtMavuwjDFm32RJljF7j64lxBr9rapb7bEubRiPMcbs0yzJMmbvMXsBm50gzxSA\nQJUFbHaA2W0fljHG7JssyTJm7zE9jb9kCsuD+gdepszPECxS1RntEZgxxuyLbOK7MXsRERkUxnln\nOJ0KjqBngaK8zerquZRXpQnGq+qi9o7RGGP2Bra60Jh9kIgUARcW4H0JoJrsY8DDqlrZvpEZY8ze\nw5IsY4wxxphWYCUcjDHGGGPaiSVZxhhjjDGtoMVJlog8ICJrROTTRo4fLSKbRWRG7s+PW3pPY4wx\nxpjdnbcL2vgz8Fvgoe2c87qqnrYL7mWMMcYYs0docU+Wqr4JlO/gNJvQbowxxph9SlvMyVJgvIh8\nLCKTRWT/NrinMcYYY0y72hXDhTvyIdBHVRMichLwNDA034kicmOdL19T1ddaPzxjjDHGmO0TkaOB\no3fqml1RJ0tE+gPPqerIJpy7GDhIVTfWe93qZBljjDFmj7Bb1MkSke4iIrm/H0JtYrdxB5cZY4wx\nxuzRWjxcKCKPABOBriJSBtwAhABU9Q/A2cBlIpIFEsC5Lb2nMcYYY8zuzrbVMcYYY4zZSbvFcKEx\nxhhjzL7IkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY0wraouK7MaYJRCQEHAuUALNU\ndfoubLsUGAisVtV5u6pdY4wxjbOeLGNyRMRzRK6Ii7fYEycVE2+VJ85PRKSgDe59ehhnTR8KHx1L\nyd0dCL8eE+8zERnUwnZ7xSX0SgR3fh8Kn4njzYhL6GMRGb2rYjfGGJOf1ckyBhARN4r7XC8KJp7N\noHg/ilhNgmdZkpxL+bwa/PGqmmile0+I4r74A0bHB0lHAAJVprA8eJKF69MEQ1S1ohntFkVwZx1P\nnx6n0M+LiIuvAVNZow8ztypNMFZVF+zyBzLGmH2A1ckypunO7Ez0yOsYGx8unYiJxwDpwLcZGRtC\n8VAHubK1bhzHu+k8hmxNsAAcEY6XPs5wOhUIXNScdgUuHkZx57NkoBcRFwBXHCZITzmBvvEo7o93\nzRO0PxEZ6oj8xBPnNhH5Um7o1Rhj2pUlWcYABXhXnkK/Qk+2/SchIpxK/1gE5/LWuK+ISJLskYfQ\nPe/xCfQsiON9qTltx/EuOpre8XzHjqSn66NnNafd3YmIODHx7o/ifjSJ0p+cwYAfDKDD/WGcMhEZ\n0d7xGWP2bTbx3ZhaPbsRy3ugGzGyaOdWvHejY/YtHMyPRHDzH8BF0T3+338I5+puxM69lrGxmNQ+\nzin0L3pHVxc+xJxXRaSvqta0c5jGmH2U9WQZAwQwazEVeXOaxVQQwlnSGvdVVY3hvfUea/Ief5tV\n1dVkH21O22mCF6ezNp3v2AzWE8Kd2px2dxci4jlwzdfYr2BLgrXFeOkh/ekQA77YLsEZYwyWZBkD\nQJLs/z7HkmSFbpuTpNTnCRZWJ8j+prXunSB7/T+Yn1igm7e+FqjykpYFcyivBv7anHYzBL99k1WZ\nmbpxm9dXajWPsSCRJPvzFgXe/np7ONG+UpT34DhKCqO4x7RxTMYYs9UeP1xgzK6gqm9ExL3jx7z3\nvVO0X7Q/Rc4qEkxmaXUVmWdpZqLTxHu/LSLn38aMP3fTmFtCTBaw2ckQLE4TnKGqlc1st0xETv4/\nPnmmrxa6QymOr6A6OYtyNyD4lqq+vqufpY0lMwRuVgPqz6UDqCYb+Oz8qkxjjNlVrISDMXWIyPgY\n7nddnGEBWpYg+zvgRW2Dfyi5FXGTgK7AbFX9cBe1GwFOBwYDq4EnmlMSojWJiFBbiPVwIAk8paoL\nd3RdgYQ+vJChYw6THtu8ntGAa3inejPpY1X1vVYJ2hizT2tK3mJJljGmXYlInyjuK0WEex1Mt3g1\nmcy7rFGBR2rwv6Gq/naunRDBffHr7BcfSwmOCOs1yYPM
TS5k86s1+J9viwTZGLPvsSTLGLNbExEn\ngjvvFPr1P4V+bm2HFiQ1y+18lFhO1f+m1P/JDtqYGMP7PeiAGF62kozrIPel8K9V1bwT/40xpqUs\nyTLG7NZE5MQexB+7mUOLtiRYW6zRBD9lWlWGoKQpZRhEpD9QCCxqrer8xhizhVV8N8bs1gSOPIRu\nDRIsgO4SpwPhABjWlLZUdYmqfmYJljFmd2FJljGm3Sgkk2SzeY+pksJ3gVQbh2WMMbuEJVnGmPb0\n9NuszqTzzG2fTTlZgo3A3LYPq/2IiCsip8bFe6RAQk+LyGUijRQDM8bs1mxOljGmXcXEe3IAHU78\nOvvFO0sUVWUum/gdnyaTZC8IVJ9q7xjbioh0iOK+2pnI0GMoLYziMo211XMpT6UJjlbVT9s7RmNM\nLZv4bozZ7YlIKIp7m49+owvRdJKsm8KvSOF/O1B9sr3ja0tx8R4ZQ8mZX2O/iFNnntpUXa0PMmdN\nmqCPquYdXjXGtC1LsowxewwRKQRGUFuM9DNVDdo5pDYlIl1COMtv54hooYQaHP+pvle5nOoLVfXZ\ndgjPGFOPrS40xuwSIhIXkdNF5AIRGdoa91DVKlV9T1U/2dcSrJxh3Yil8iVYAKPoWgCMatuQjDEt\nYXsXGrMbEZHewDkCnRQ+BZ5u74KaIXGuCOHc0pdCv4iwzKXci4v3XhL/bFXd0J6x7WU2bSbtBao4\neUparCeZAja1fVjGmOay4UJjdgMiImGcnwFXH0x3KSEa+YQNlcupSqcJTlDV6e0RlytyUQci917D\nmHgPiQO1+wI+zoL0W6yaW4M/elf0OolIB2AoUAHM3xe3whERieHO+zr7Dx4rJdscK9carmFqWuAv\nDuKnCaYAz6pqpn2iNcbYnCxj9hCOyEVdiN57PePiHSW89fXpupY/MmtzmqC/qrZpL4aISBS37Lsc\n2HuoFG9zTFX5Ie9WriX5BVV9uQX3iEVx7/LRCzsRSVeTCQXoiiT+par6aosfYg8jIhMjuJPPY0j8\nMLrj4fAJG/gTs4IQTnYivb0IrvMuqyvXkNyUwj9KVZe0d9zG7IssyTJmD5DrwVj4LUYOGCGdGxz/\nrX6S+Ij1PwpU72rjuAbE8T77LUfG81Vk/7cu0WdZcnda/Sub2b5Ecf+zH50OvYhhsWKJEKjyMeu5\nj1mJFP6Jqvpmix9kDyMih8bxflODf7gD6uGk9qdT5DIOCLny32m0L+gy/1kWL67BH7ov9vwZ095s\n4rsxe4aCNEGf/emU9+DBdIvH8U5o45jawlFxvHHf4oBYsUQAcEQYIyV8hWHxON4dItJBRC4NiXun\nI3KNiPRq55hbnaq+V62ZowK0QxYd4qPeVxi+TYIFcAJ93EJCPYBj2idSY8yOWJJlTBsRkQ6uONcW\nSGh+TLxVcfGeFZHxQEaBNPmnNiXJEkBVE+/RWUQOFZFhkq/7aecsCdDy+WxucEBVeZNV1RmCZpcT\nCOOcewy9C+onDwDj6EaG4EAPWTmKLrefyYDvjKfHz8I4C8Pi/qC599yTqGoS6NedWKqozhDyFiLC\nWEqiwMFtHpwxpklsdaExbUBEukZw39+fTt0/R59YB8LMZOPnn2bxsWn8q0K4r7/NqkmTKN0mMQpU\nmcLyqiTZv+6g/aIo7r0hnLO6Ek1VkA4F6HIR+YaqvtGcmFVVXZEf3svMe6/VMfHu9Sa+V5BeDExp\nTtsALlIYx8ubCHri4Kp432CEN+a/k8CjZ+hAfsEHPxOR2ao6ubn33oNUVZN1VZV8OfNm0hmamIAb\nY9qeJVnGtIEo7h3j6dHrQhm2tUuiJwUySrvGf8p7tyfJnvUYCw6Pqxc/hO44IlRomn8wP7WR1Fyg\n0YRCRNwo7n9G03XkeQyJFEk4GqjyIeuG3s+s50XkGFWd1py4fdW/hsTp+FOm3dJXC/0OhGUO5R7w\nXk1tCYdmryxM4r8yjbVnHENpYf1ji7QCD4cD6brN610kyrk6uOBh5v2U7bwne5GPkmQr57KpcHi9\n4eRKTTODdQLsM9sOGbOnsYnvxrQyEYl7yIbbOCLaIc+wz4M6J/UWq37loy/F8O4TGFhIKFNOKuIi\nT9TgX6aqjfZWiMgpPYn/4xccWli/vtIbupLHWPBGtWYmtvQZgOOBQuADVW3xps0iEgvjLL2QYV0n\nSM+tgVdphluYHuxHJ+cCGdbguoRm+A5vpbIaRFsaw57AETkjhve3S9g/PoouOCIs1Ur+yKzq9STv\nTal/VXvHaMy+qCl5i/VkGdP6ukZw/XwJFkA/iiLTWDMkodmfASNFZFCCbCdgQboJZRtiuBdMorRB\nggVwGN15iLnjRSSWm+PTLKqaAJ5p7vWNtJkUkaMfZu4rL2tZ4Ri6Fm0ilX6XNYGDlA+luGe+66rJ\n4iCpXRnL7ixQfVpEvvhHZt6h0NdTx0/j1/joTT5tu+LUGLNzLMkypvWtTxG4FZomX6K1jMpUmmD+\nlq9VdeHONO4gBVHcvMdCODigAYSo3RMwL8n9SrYz990VVHWWiPQto+rzy6kao1AJPA4cN4UVd43T\nbgX15yK9xoqsgzzW1rG2J1WdLCLPA73BDwNLVdVv77iMMdtnqwuNaWWqmvCQJ59jSYPq3Os0yTus\nVh99oLntV5N94X3WVuc7NptyPJwV1CYv25BaF8UlNBfwPXGSMfH+KiL9mxtLc6hqVlWfDlRvUNXb\nVHUp8PelVC5/iLnpytyuQin1eUGX8QrLvYDgSxFx7xKR7m0Za3vSWstVdZElWMbsGWxOljFtQERK\nIrjvH0DnbsfnVhd+xgZ9hsXJFP41GQ3ubkHbRWGcpRczvPgw6bH139BmTXEz0xMbqLksUH2o/nVR\n8W7vSPjS8xlaMIJOVJJhCsv9lymrTBMcrKoLmhtTS4hIYQjnetBvgXQAKCTkV5Nxe1PIRQwlisfL\nlKXfZlV5muAgVV3RHrEaY/ZdVvHdmN2IiHR0kMuiuJcEaKEg05Nkf6Wqb7Ww3RhwUBjniR7E46Po\nWriBZOoD1onArWmCG+oPBYrI8Cjuh7cyPlYooW3a+7cuCSaz7KWEZk5qSVzNkdtm590RdB56JgOj\nvaSAqbqah5nL9YyjlxRsc/7juiD7KiueTGr23LaO1Rizb7OJ78bsRlR1M3BL7k+LicjIGN7tLnKM\ngwRAeRlVjy+jqgwoBx5X1ZX5rg3hXDyRXl79BAtgEqXOMyyeJCIdczG3GYFvDKDD4G9xQHTLXKwy\nqphEaYMEC+AYensvseyMfBP7RSQCeKqadyi1WfGJHBTFvdpBDhGoqCZ7H/CX3MIAY4zZhiVZxuyB\nRGR0GOfNMxhQcCQ9JYLLIiq6P8Tcc9aR/EdSszdu73oP6dWNWMMMC4iJR0TdTJZsR8hT7r0VxfAu\nO5l+2+yVuJk0+9WrEbVUK3mKRcyhHBcn4iFLPXFu8tHfAmPjeLc6yFGAxMVbnMT/qao+0pLYXHG+\nFsP97Sn0j46
gs7OZNC+y7DeLqbhCRMa39Qbexpjdn018N2YPFMf73TkMKThe+khUPESEQdKR6xhb\n4CDni8iI7V2fwp8xm/K8vS/rNLlli581rRH79gRo565sW/6qBzEWU7H160Vawe18xGi6chdHcg8T\nuYrRJT2J/9JDngzjvHE2gybdzVHefRztXsYBgzsTuT8s7o+bG5eIlLrI3T/h4PjJ0s/pJ0WMki5c\nxej4QZQMjODe2vynNsbsrSzJMmYPIyJdMgQHT6BHg7kAMfGYSK+Qh1xU53xHRPqJyNa6UwE8+DEb\nWFhvNDBQ5R/MrxH4k6q2eS0qB5k1j207hCbQi2msYUWuHuvjLOAcBnO09CYi7tYE81rGFHg4Z1zC\n/vEtxxwRDpAuXM+4OHC9iPRoTlwu8vXx9JAeua2FthARzmRgxEcvFJF9ojiqMabpLMkyZs/TIYKb\nCUn+2lidiHguTlcREU+cyyO4q+J4syI4i+MSmi0iJ6rqxgzBF3/DjMRfdW7qI13Pm7qSG5hWNZvy\nj9IE17XxMwGQIPvrp1iUqMiVbQDoJBHOYhA3MZ2Hda5fRhWH0rByQwUZwjgylpIGxzpJhLGUKPDF\n5sQVwR3en6JIvmOdJUoIR4EuzWnbGLP3sjlZxux5VqbwdY0m6F6vZwXgEzZUpfDfC+Pc3InIt7/B\niIIBFKHAx6wf/idmPykiF2htJfHhb7Hq8vdZe2SAbkqQvR94TlWzbf5UgKq+EBH3rut59zuf076R\nUgrcMqr8lyhL+QR/eZNVXeN4Z3viNPgFMUGWjkTIV/keoIRoFOpN7mqiNP6iFVSngQbVZCs0TQbf\nBTY2p21jzN7LerKM2cOoakrgnoeZl8zW25/5U93AXDYFwBSF713HQQUDpQMigiPCGCnhCkbGI7j3\niIijqmVp9a+r1PQR1Zo5RVWfaq8EC2oLpKYJpgew7F8s0T8yK/Nvln6WIHtyRoPLMwQXJ8jWrM+z\nQ1B34qwhQZU2qPkK1CafwGfNiSuL/ulNVgbleUZQJ7M04+E82dxti0SkpyfOTwsl9GxUvD+IyLjm\ntGOM2f1YnSxj9kAiEo7i/iuOd/ixlBZ0ICwfsT7xCRv8DMGJwKiD6Xb7ZXJAg64uVeVq3qncSGqS\nqn7QDuE3KirubwoJX/YlBhcMo5hNpHiJstR01q1L4R+kqmuj4t01lOJvXMnImCf//T3xQ13HH5np\nH0Q392vst02P1jRdow8we32aoLdqI1nYDoTFvS6G+5MvMCg+gs5sJs3LlKVmsG59qrYg6hqpXRZ5\nehzvh1mC4Q5SkSG430fvzFcOwxE508P522F0l+F0iq4n6b/C8poMweM1+F9rj62OjDFNY8VIjakn\n90NwUhjnC4J4KfznacfhsZbIPcvEKO5XXKQ4SfaNoLZmU7mIXH0cpTefL0Pzlmm4UadtXkbVGcDr\nwH5AETCnreti1SUio+J4797C4Q0KpP5N52XeYtVfazT7dRGJRHEnx/EOOZbSgkJCMp111XMoj5ZS\nmM0SRDwcjqAnUVymscafTXllmuAYVf2ohTF+Lo53vU8wSpDqLMEDWfQuVd0AW5LE0GVn55LEjaR4\ngaU1n7JxZQr/YFXdWKetfmGcWT/koHg/Kdp6j6Rm+RXTq1dQfVWgem9L4jXGtB5LsoypQ0Q6RnGn\nFBEeNpFeBS4i77C6ci3JdSn8o/amrVlE5JiuRJ/9NYcX1t9guUoz/IC3azIEXwl5sV87jlcSCRdm\nqxPrI47jPpzJ1ny7uUNfLREV93efo+83z5SBDWb0l2uKa3mnJot2UNXMlgQzgnuhhxQnyDKU4pOu\nZkxcUT5hAx+wjgw+C6lIl5P6tqr+oTXjF5Excby3buHweP0k8c86O/0ea+5PqX/5ltfC4v76KHp9\n5wIZ2mBC/Rwt5//4ZFlSs/1aM2ZjTPNZxXdj6ojhPXQQJQdczPDIlqGkE+hb9Kwujr3AsskiMnov\nGp55rZLM6pcoG3ACfbcmLYEqDzO3xkHeCXnRvxw17luxXt1GISIkazbx7sd/uWD1+lkDReS4prwX\nIjIWOASoBv6lquXNDdjDGdCTeN4lk50kgqg4oAXAplxsr+X+EJfQgtMZEK/9vgpjKGFMbpXhdF0b\n/gtzvg60apIVwf3m8fSJ5Kuifyr9w1NZfbGIXKlaO5Eugnv4/nTKu2JxGMXU4PcREW9P7GU1xtSy\nie9mnyAivX2Cz53HkEj91Wefp78XwR1EbbKwR5FaE2Li/blQQv8KifNjEemuqprCP/5pFq/8mb5f\n9ZKW8Zwu1mt4J/MJG2b5brj7+DGXxHp3P5AtPV2xaDETD74iFvJihwLjd3DfHjHxPigi9OYR9Pjf\nkXT5fQhnZVjcn0j9rrMmShN8uoiKdL5jq2t3rakBKvMd9wm6dCOWt90SYih0a05MO2N7SWJXiaG1\nKxO3zpHz0XWbyF+KbDNpXCQN+K0SrDGmTViSZfYVo/tRlIpJw85bR4SRdHGAg3a20VySc4wnzq89\ncW4RkSObm2Q0495uFPeJYsIvnEr/L5/P0FMOpfuPQjiLROQkVV2Swv/WcqpCr7E8O5dNMogOogT7\nKf7wPj0aPq7jeAzpf0zMdULn5O4xWES+KiIXiEjX3GtOBPe1SZSOvoMJ8a/L/rHvyYGFt3B4tJjI\ntS7yzeY8T4bgD2+w0l9TbxvAQJXHWVAj8AdVzZt0eDgLF9WpCl/XIioUmN3UOJr7/cvgz1xCZd5J\n9au0GgdJAFsfLkn2Ty9SVlV/hSjAy5RlPZxH96KeVWP2STZcaHYZEXGBkUAI+Kw95vVsR2UlmUZ/\neG4m7QNVO9OgiHSO4r5SQGjIBHoWKMpbrLo8QXa2iHyutfey85Cre1Nw4tWMiYdzhUkPp0dsovbm\nN8x4XEQmhHEevYoxkcHScetly7TS+6UzE8fJX8w07MUcEbdjXLzJUdxjDqCzn8IP5rApFBXvbuCt\nYsKDv8BAt24+0kkiXKojCn7DjBtF5L7GEqLGqOpiT5zv/Iz37zxJ+0X2o5O7iRTPs6x6FdWz0wQ3\nNnZtguytT7DwgRHauaBuIl2paZ5hcSJB9vbt3VtE+kRxf54lOAeIxsVbWIN/k8JDTU100gT3vsqK\nSydp71BX+W+vWqDKEyysAX6/Zagw54UK0lPv4OMJ5+mQWKkUUqlpXmG5P4Xlm9MEzd4GyBize7CJ\n76ZJRKQ7MBBYq6oL6x/3xLnYw7k1jhcL4QTlpByBO9MEN9T7wdIuRMQL46y9hrGdBkqHbY5t1Bp+\nyLs1GYJeOzOnKCruOz0pOKSQkBvF41C6MYou/I156fdZ95+EZk7a5Q+Sk+tNWnMdY7vWXZm2xf06\nKzmNNdOOpNdhF8mwbeb9BKp8132PiROupUvxgAbXTn7jZ5WbyxctG0e3QRczPLqlsnyFprmdj6pX\nk6g+jf7dTpH+eWP7tr5ZXUVmtKouaOazjYni/sBFxilsSpB9Cvi7qpZt
5xqJ4v4xinfuKfSL9yIu\ny6gK/s3SmgzBb2s022gFexE5OYzz6CRKY8fTx+1AiNmU83fmV5eT+kuNZq9oauwhcb8TxvnlqfSP\nDqPY2UiK51lavYLq2TX4E1W37aYTkXAI53qBKxUKApQwztNJ/GtUdWlT72uMaXu2utC0mIiUxHAf\nyKLHdyVas4l0GHR+Ev9iVZ0B4Inz9UJC/3cFI+ODcj0mazXBvcxMrCbxUFKzl7XrQ+S4IufHCf3x\nEvaPH0BnBFhIBfcxM7GZ9M1p9X/Z1LZE5NIwzr1H0YsRdKaCNK+xkigulzGCq5lak8IfoaqLWuNZ\nRKTYw1lznxzdoAI5wFRdzd+Zt/Hr7N95dO0o3zZe1DJeKKzmxCN/TCRcuPX1uYun6AczH9kU8f3I\nnRwZr1uHCmCtJvkx73IK/ThdBjZoV1W5kjfTCbLDVXVxnrhLgWNzX05R1eWNPF9BFPeuLHp+EaFs\nNZmwizMjSfZSVf2kkWsEODqGd6UDAwJ0bhL/LlWd2sj5vWO4z4IceDyl7hn1nqdaM1zL1GSC7GGN\n3bORdg+J4V4tyBiBjdVkfw88sr29IEXEAToC1aqad16aMWb30iZJlog8AJxCbQ/HyEbO+T/gJGrn\nI2z94bxsrRJbAAAgAElEQVSzwZq2JSKxCO6nR9Gr7xkMCMXEw9eAqazRh5lbnSYYBywK46z9IQcV\n1+9RSWiG7/N2TZpgSGM/TNuaiJwWw709gFIHUUXL0wQ/8TV4YCfa6BLCWXkNY8KD/jsMR6DKH5hJ\nF6KsoLriUzb8j6o+2krPEXGRyjuZECrIs5ptsi4N/sWSFecypM9R0qvB8U1aw9UyzRfHcfr1OkTi\n0U6sWvUhyeRG0n5NZhid5PsyOu90gqv0bQ3hyM0c1mALm9m6kd/yaUUNfnHdYbZc8dQ/BejZI+ic\nBZjJRs9BnswV3UzXOdeL4k4dRZcDzmFItJNEyGjA26zSfzC/Kk1wiKrOafabl4sngjvveEp7v8xy\n7zbGk+99fFIX+i9R9ru0+t9tyf2MMXufpuQtu2Li+5+BE7cTxMnAYFUdAvwPcM8uuKdpG+cNoKjH\nuQwObZnn4orDBOkpp9A/FsO9ETikmIibb8gqLiHGUBIAp7Vp1Nuhqs8m8Yem8AclyQ6vwe+zMwkW\ngANfOZCu1E2woHYC/dkM4i1WUUUa6kxy3tVUNRXG+fd/WNFgKDatPq9QlqzB//1LlFUFeX6RepWV\nvqfBgv38ovRBZasYNP8zLqjqwl3+YfyIcaH5bPYqGulQ8RCJ4fEgc0jWqS6wTCu5j1nU4D9bfx5T\nFPe+gXQ46w4mRK+UUYVXyqjCO5gQHUiHM6O499W7xee7Eh3+P4yIdpLakc6QOBwtveU0BhTEcJvc\n47gdZ/WioPOxlHphnLwJFkA3Ym4Ip2GWaowxTdDiie+q+qaI9N/OKacBD+bOfU9EinNLzNe09N6m\ndRXgXXwMpQX5FlsdRU/3ORafAfwxhtfonKs4ngfkrQXUXnIJwMrmXh/BG7MfnfIO05VIDE+FMqo8\nYEpz79EUSfyr/s2So1HtMIlSp0BCLNEK/sa8RA3+C8BvNlJz6t18OvZcHRItkRhJzfIqK4IXWFoJ\n0ucihkbqTtIG6EcRB2oXnmABVZplOVXE8TicHtRudZMOTqGfM5tNXMM7DNKOVJFhAzVUk8kAP6nb\nnoj0COGc+y1GRupOSo+Jx7f0gPj3ePtcEblOVVcDxPG+ehx9CvNt9DyRXs4/WXhqbt/FZs/1i+Gd\nPoEeRQXUJlerNUGPPJttz2FTMoXfoOfdGGOaoi1WF/YG6k5YXQ6UApZk7eYEiUbJvwItikeAesCM\nFVRHKjRNB9k27whUmc66DPBW60fbOnIrJrsAlVtWS2YIVq4jmQEadH8kNUsVGRR+Vn+S866mqgtF\nZNwLLPvNsyz5vKioi2zOEtzmo7erqi8ix81i469+zHuXhNUhhR8K4f4ng/4yjvtC/QRriywBc9nE\nWQzkXIZQTg0vU8ZzLNEswcMPMe/cvhSGB9CBrkTZj2LeYFV1Ddk/ZjRYUq+5icMoTsfFa5BsxyXE\nMC1Of8bGicCjAA5SVNDIf00xPBRcav9sN8nKzXMaB3QGZqnqsm3ePsATh6O0F0+wkMt0BG6dOWhL\ntIIPWIuP/ml79zHGmMa0VQmH+r+S5p0IJiI31vnyNVV9rbUCMjtWg//idNaNHEmXaP1jM1hHFO/D\nas2Ux8T76/3MuvBKHRnbshJNa5etZ9L4s1X1/TYPvoVyq75+Gsa5QpBIlsCJizc5if994C+vs+LK\nk7VfqH517ymU4eF8mtTsrW0RZ26l51kiEgaNZaGi7lBdLjH8rohcmyHoDmzOaGaziBSm8b2EZojX\ne4blWsVcNnETh9IxN1zXjRhDtZi7+CSYQ/mXRtA5OJweZAl4nZW8zaogTfBLhV/lC9NHG0xNyGqA\nAEHtfwdbY06SfXUG6w87iG4NMsBZbCSKuzCh2e1u8iwiJ0RwHyjAK+pEJFhOdSQuoTeTZC9Q1XVJ\nsv98mbLT5+qm+DIqSZLl53zAsVpKMWE+YYO+xapkluACVV27vXsZY/YNInI0cPROXbMrVhfmhguf\nyzfxXUTupTZh+kfu6znAxPrDhTbxffcjIj3DOPMuZ2ThSOmy9fW1muBmpicqyXxBVV/Ibdj7iCAn\njqeHF8Zx32VNIkl2YQ3+8aq6rh0fY6eJiBPFfWkQHcafx9BYLymgWjNMYbk/maUVaYIxEdxvdyR8\n6fkMLRhBJyqoPf4KZRVpgoPzlbnY3cQl9Mzn6HPK6TJgm+7KB3UOBXicLYO3OV9VuZ73OIE+TJTe\n2xx7WhdlX6bsvYRmJ9S/j4gcGsJ59zbGUyRhputanmcZS3PF20M4QQ3+Car6Su78bmGchVcyqnCE\ndN7aToWmuZkPEuuouVRVH27suURkfAT3lcs5IDaCzogIKfV5ikWZN1i5pAb/AA+5PIJ7+xkMkOF0\nppwanmIRG6jBR4MU/tNZ9AequmSn31hjzD6hzUo47CDJOhm4QlVPFpHDgDtV9bDmBGvanogcEcL5\nVz+K3OF0KlxFdeJjNrigV2U0uLveuftTOwcvRO2ecm/tiRWrReSkbsQeu4lDC+uXMHhSF/pTWP5I\nDf6XgQtieD9Jkh3iISkP59Ea/Bv2lPpGItIvjDN9Ir07Hkep14EwM9nIX5nLeQzhEOm+zflztZyH\nmcfPOWTrVjxbZDTgu7yZTOIfqKrz6x6LifdIXwrPEUT2pzNvsZJzGcKBdCVDwDus4jEWJlL4J6vq\n67nYjgrh/GsoxTKCzoVrSaansjpQ9M40wY+297kqkNCb5zB4wpH1VlWqKjfxQdViKn8Uwbn1Fxwa\nrTtcqqrczyyms25KSv3jmvu+GmP2DW1VwuERYCLQldp5VjeQm6uyZdd7EfkdtSsQq4GvquqHzQnW\ntA8RiQBnAsOAtcBjqrqhfaN
qPXEJPXk2A886RkobHNusKa7inbSPRrf8oJfch7fNA90FRKR3BPfH\nAXqBj8ZjuDKYjk4phXxBBm1z7qu6nKVUcbEMz9vWrfrh5jls+rKqPlv39bC4Vb/ksILnWcqbrOJm\nDqX+XLAZuo4/MmtRDf7gOu9rAXBuGOfALMGaoLYgaYPaW/WeJyyQvIeJzpYq+HW9oSv5O/PmHUWv\n/ufL0AaLFzbUFqZNZAmKVXW7Q5LGmH1bU/KWXbG68LwmnNPkislm95MroviP9o6jrbhIl46NLIjs\nQBgfDVE78ToLW1cr7pFUdQVwGXCZiIiPLh1Flz7PsJhJWsqWEgoABYRYTXVj7bCeGgdYX/9YgHpR\nXCK4jKBTgwQL4EC6EsbpXoM/Evgk12Y1sLOTzh0At8E00FohHEI4HftRlHd1aBeJ4igOUAzsUcPc\nxpjdj20QbUw9Kfx3ZrIxb3XuuWwiirtMtU6BqL2EqmoN/sWPsSAxlGK9mQ+YostZoVV8phv4D8vT\ni6kMlmvDLR5nUU4lmSrg3frHIrjT/sUSprCcbjQskwC1NcaKiWSoXQnYkmeoieHN/oT8Ha3vsrq6\nBv/jMqry9lJt1BqC2oWHm1sShzHGgCVZxjSQIbjnbVb5i7Vim9eTmuXvzKtO4edbQbfHy21LEyi8\nPIuNazeS0sdZwC+ZnrmXmcvms/n7WYJLb+HDxFRdTUYDajTLa7pCf8eniVTtVksNyiokyN74Oiv1\nZPqxoJHcJalZVpOIAnNb+hwJstc/yNzEmjoVNFSV13WlzmVT0kevfp2V2Y1as811qspzLEm7OH+z\nrW2MMbuC7V1oTB4icqaHPDyObs4IOkfXU+NPYXkqQ/BICv8b7TVEmEuERgOdgNmqumoXtRuN4v47\njnfIJEoLfAJ5izXpcsloVjO/AW5V1crcucfH8W5Kkh0HaBTvtSTZ61X1vUbaHteJyFu3cnjkR7zL\nmQzkMOmx9biq8jDzsu+y+vmEZpu1O4CIDAa+HMLpliVY4IArODcMp9jvSsybxcbMZtIbU/hXAmkX\nOTaOd/lZDIztRyfZRJoXWZacRfmqFP7BqrqxOXEYY/YdtkG02euISHfgQKASmKaq/q5uP4p7W5bg\nbBdHAlQcKAvQ/2TQe/Mt2mjmfTo5cHEUb2KAltfgP0RtqZO6+/3FBS6J4X1T0eIsukK9cN+QF43H\nIsX+5qqVEUfclzLZ5MWqWt6SeKLi3T2c4q9ezsjYc04Zr8hK+vcZT0Fhd1au/ZQ16+dU+UHmzC1l\nFnLxOdSOMm73PxER+dxAOjz2YxnXsUyruIOPGEoxYyghjc/rrGQF1WtS+PvvbHIjIvuHcB4H9h9F\nFwbRkWVU+tNZlwnQa320nNohyGQU93shnD4lxLKrSIQVXQpsAIYKUpHC/2OA3qNarwvTGGPysCTL\n7DVEpCiG+0AWPbWUwpoqMk4F6Zo0/rcC1Sd20T26RnA+nkjvkpPoF+ooYdZpkidYWPMpG+bU4B+W\nWwTQ0vscEcJ5fhRd3NF0jVeS0Vcoq06QfacG/1RVTYtIURT3nYF0GHgS/eKVpHnIXcQRB19Or26j\nEBEymSTTZz2aXrT8nfnZbM1o4PAo7jddnN4Z/BlpgrtVdUET4ikI4az9FYfF57OZx2LrOG7iT4lF\nOmw9Z836Obzy7m3Vvp8elpssvzPP2yeCO+9OJkQj4pLQLG+zinlswkOYTXlNBZkzVfWFnWx3qIdM\nj+AWXstYSqXwv/Fqgpv4IFFN9gRgVRhnxpcZVngYPcQRIasBU1gePMWiDWmC/fbm1bLGmNbRVhtE\nG7NTRCQkIj1zS/Sbcr5EcV8aTdfP38GEyE9kXMdfy+FFP2B0SQzvQRH5/K6IK4Rz1cF073KuDAl1\nzG0RVCIxLmVEtDcFQ4AdrqTdzjMUiUhnESkK4Uy+kpFFl8vI+BHSkxOlr9zC4YWD6HBkGOcXAGGc\nn4+iy5AfMDo+QjrzhruesaMuonf3A7fWqAqFYhw66ivhonhJXw+ZUkz4+dMZcN5XGT7xGHp/K4L7\nSUicrzUhvIFFhLKdJcpkbxVjRl24TYIF0L3rcAaWHu46jnfZzj67qpY5yOv/ZFFGVYmLx/HSh8tl\nJPvTWVMEa4GXdrbdKO4v+1JUcAJ9t0mwALpLnDMZGIvhXR/B/eHx9ImNl56yZT9ETxxOkL7OGEoK\nXeR/GruHiHgi0l2kkf2HjDFmOyzJMm1GRKIRcW8N4WyI4i70kI1x8Z4RkQE7uPToAkIHfJ39o/E6\nGwwPlo5cwv7xGO7tkm8X653kIF89nj4Najc4IpxI34ICvG/uqA0RcUTkvAIJTYuJtyYq7ry4eAtc\nZEMIZ5WHLBlGceSAOhX0ofaH/oUMi2ltKYWIwiVnMTAiImzWFPOCDQzofWi++zG0/7FFMSd2xK84\nvOAE6StjpYRzZEj4Bg6OuTi/E2mksNV/VSTIhjLqszxbTq9uDWoKA1DaY2w05EWP3tF7kE+S7IVv\nsnLxTXxQ9YauZKqu5g79uPpvzNuQwj9xZzd7FhEnTXB6Cl9G0iXvOaPoIgF6KHDmBHrmLVdzFD1j\nUdwL8rQfiYh7SwhnQwR3iYtsiov3TxHptzNxGmP2bW21d6HZx4mIG8V9cSjFB5/HkFh3iZOo3arm\nlH+zdIKIjKm3ge9WIZyzJtKrwMmTR42iCwH0BXoCK1sSY4AWdiRv+SRydbOKt3d9bjueRzoROeVM\nBhak8XmE+d3OZQiH0B0P4W4+7bxfI1UKukuciLpOhmCIopFuEmearuFB5gDgOPn/uXpumF5S6Ebq\nFd/sIXGO1dLQFJZ/h9paWHmp6tK4hBZOZ90BIXFJZaob9GQBpNJVNHe+kqquF5GRi6k8Yw0LzheI\nVJN9Fnh4y4T6nVSiqJsgyxMs5GjtzWi6bLPBczVZBBIBWug18vtkCBfNFU/OzTE7K4b7nSjuuMF0\nDJ/HEKenFFBV+1k97XmWHpX7rJblbdAYY+qwnizTVk7pRGTslYyMdZfaWklxCXGqDHCPo0/HKO7P\nG7tQIBzCydtTJYCLBOR+ULZECGfmHPLPH5/FRt8n/+q5Os4qJnLKDRxccBAlvEAZX2M/jpCehMRB\nROhJAeXU5L04rT4p/BCwWpD0DF3P35nHdRxEL7cDK9d+mve6hcvfZpyfP/8bTrHn4Ry0g7hJkv3m\nX5iT6Csdmb94SoPjqgGzF75Ylc5UP7CjtrbIDZFeGhH3HlecnwK9VfWxas2cUaWZk1T1nuYkWCJy\ncAhn3lhKOIfBHEQJz7OUXzODZJ3yZa9QlvXRh1yc16ezNu/k0/dZk84STM79EvDPnsT/fCS9JnQj\nFv0Oo5yeuRHtQglxugxwJ1HaMYJ7487GbIzZN1mSZdpEHO+Sz9Gn0JWGH7njKHUzBOc0NuSXJnhp\nKqvz/jBeRAU+Wgm0uGchQfbmR1lQXVGvRNIqreYFlqVq8P93e9fH8b57Gv0LwuKykmqS
ZDmQrtuc\ncyjdeYfV2yQDW7zNag3hvKeq6wXuf5z5wefoQx8p5PPZXkz/6C9UJf5bUF1VWbD0DV23Yb4OpWPe\nmNZTg7LtZuz5qOrbaYJJy4NNH346/1/MWvgi2WztHP9EciNvTr+3pjKxdh7wzI7aAhCRY0M4K0fR\n5fazGPjNo+l1fQR3ZkTcm5py/XbajYRwXvgmIzpcLiPlEOnO0dKbH3EQPYjzD+aT1CxP6SLeZ+2m\nDMEdSbK/eJrFyfp1zz7VDbzOynSa4E7gy92IHXcjBxduIsUkSmnks+r5BM2em9dcublhZzoivxCR\nq23Y0pg9gw0Xmjbh7Hirmgi1HVP5ehyeXUNy42RdGj+Jvu6WXGyTpriPWYkswY07O6cnH1V9JiLu\nndcx9ftHa+9QLwq8RWxOvc1qDdArVPXj7V4P/UqpnYCdxKcjYeoPcZZKIWO0hFuZwYU6lIF0oAaf\nt1ilT7KwOk1wJUCa4KebSV9+IF0dgEOkOxtTaZ6Zch29Sw4gGu9C2dpPNFmzabkEmTdeZcUXv8zw\nbcY6sxrwIsuqEmTv3fJabkjsSKAHMB+YsaUEQ67O1UEicvDHc/5514xZj40Nh+LpdCbhieP+LZut\n+V5T9vMTkV5hnGe+x4EFw6TTlpfDp+kAbuKD74rITFV9ZEftNOLMvhSGxkjJNi86Ipytg7iad5jK\najycaWmCs1V1HbBORC68hQ8f6q9F2ofCyAI2p1aTyKQJTlfVpQUSuuosBhWEcqsfGxs2LiZCFo22\n5X6VIjIijPNKD+IFYygpKieVepfVP4+Kd08K/wd78rZOxuztLMkybSKF/9ZnbBg3mq4NfnrNppwY\n7qKEZvMmSqqaEZGj/sWSF/7D8j4HatfQZlLZT9noCtzmU7sReXPkes8OASYBPvCkg3SewvKvuoiT\nIXBd5CkffX6HbcHS5VSVllJID+KsJkGVZiiUbUcyL2AoV/F26jY+SgZoPECdMO5/0gRXqeqnuWeu\niEuoLEF266KAE+nDhKAHH65ZRxlrWMC6tVmC/kCXqaw+xlEpOZl+oU5EWEIlj7IgsZn0W8DzuWed\nFMH9W0fCBT2I6xIq3TT+MhE5U1W3VlpX1feB8SJSkkxt7gwsVz+df9PCPDycyw6jh1snwQKgg4S5\nQIcW3MfMG4BmJVkCI0bSpSjfsQ4SppNGUmtJfiGj2X/XPaaqT4lIt/lsPm0+m3sBi4DJW5LGLEGf\nvrkEuQ+FzKa8QS8kwCw2bvmstlWCFQ/jvPZlhnUZLz23ZOyRs3UQt/Dh/6whsRC4uy1iMcbsPEuy\nTJvIEPz+LVZdcYT2DA+Q/06qTmiGvzOvugb/5u1dr6rLRGREDf4Rr7JiHFAFPK2qDTYkbioR6RTD\nnRzCHXkY3SNZAn2XNb+K4urljHQHSAc2acp5nmVnvsGKCSIyWlXXNtZeguwdz7B49BgtKSiUEGO1\nhMdYwMU6fJserams1gTZjWmCPkAHoCahmWT99tL4D77KiuuGUBzd8lqhhDiKXvxJZ6UC9L5cD946\nERnzNqt/8QYrL/LRSBh3fZbgdh/9X1UNROTAMM5zl3NAfASdERECVV5nxbBHWfC2iAyrXytqSy/Q\nzr6vUdxjDqRLNN+xA+hMEn+oiDjN6X1UKF9HsgZo0L6vAZWkA2BW3mtVk8Cj+Y65OGtWkSgqJsLR\n9OYXfMDh2oN+8t98rvazOn+Hn9Vd7JxBdIzWSbCA2s/BV3V4wW189GMRuWdX9OQaY3Y9K0Zq2oyI\nnBbCeeQgSmQ4nWLrSPqvsiLto39O4V/R1sMecfFeO5Tuh1/IsPCWJCijAfcxk0JCfKVO5YMHdU56\nKqt/n1L/e421l1td+LdiIqeewYCCbsR4gNlkUY6hN1FcprG2aj6batIEE1U1bzJQp70uYZyZJ9K3\ny4n09aLiUaNZXqLMn8zSDWmCA3KJUN1rBAjV33svLt4/T2XA6SdK3wYTjf6gM5MfsPbnWQ1uadIb\ntwMFEpp8DoNPOlJ6NThWoWl+wNtpH4025/stIr1DOAtv5fBIR9l2+HmqruZh5n2S0MyBO9uuI3Ll\nEDrecg1j444IH+o6/sxsxlLCYDqylmTwKitqfPTBFP7lbfVZjUvo719k0HlHS+8Gx1SVy3kjWYM/\naFdtr2SMaTqr+G52OyJS4iBfi+KOzRCsyhA8oKqftEMcI+J40+5kQtyrN8G5WjNcy1R+yWF0yBUl\nXa0JbmTappT6nfK1V6ddBzg7jvf9AO0nsCKJ/58obk8HiSXIvgA8oqpNGn4TkdIY3v0+wcSOhNOb\nSYddnDeTZC9prORFPmFxq37N4QXF0nBe3Ge6gT8w86MqzYxpans7iPnsPhT++QYOLqw/J+1ZXey/\nwLInkpo9t7nth8W9oQPhq7/MsIIRdCaF///snXd4HdXRh9/Z3dtVLMvdltwbbtgGFzrGYFoIoQYC\nhFBD+UIgkBBaIECAQCDUAAmhBggtlBB6sMHghgu49ypbslVsSbfv7nx/6NpRubIlS274vs9z7Ue7\ne86ZXenuzp4z8xu+YoO+wfJIAnecqk7fCZu9fszPCsga8UN6BjsTYiHlvM7yRBJnUxJ9K4n7zI7i\n8lobv1hP/4Ael5wo3RvcG211uZIvEjZup5aWVcqQIUPzaYrfklkuzLBbSc283Lez7VMzNd2pCZJf\n3YJlksOG0Y76DhZASDz00hxWUMmBqbicPHzYuFkNDq5Hyp7XUp8Wo6rrgONFpMMmYl2BIlWn0SXL\nRvuBRtVaBan5r7H9In5gApAPzANm7GAm551NRBc/zfxBZ2tff574iKvDF6zX/7A6nMC9pbn21yah\nzh0isvRJ5t0Rw+kloD6sTxK4N+6sE5QqZXTMMrZc9gTzfmHjdrAw1kawHwBe3FPLcXGcVz6n6JwJ\n2jAzdwYb8WLMSaqTcbAyZNhLyThZGfYZTJFz/Jj3GUi+AA66xRLjZlvdZwFE5Ogg1q8VHWwgpWHs\nx6l5QKarNxiPkT7QHiCGg6eWwsliNuPH2mEdwF1FKhas2c7VVjwY/53OxpOOpaCBMzWF4mgM5810\n7UyRcz0YT3YjS9vjNxezmXhNsPzJqrqiEVuTInLUd5Q9NJvSnwTVsiPYXg/GtATuFcBqETkriHWR\ngeTGcSYla+osNlmGQ1VfBl4WEb+CHdFkQ02MZpJaYn0s9dlbmBgmOesvzB91nvbztxEfriqzKeUF\nFkfiONftaQMzZMjQOJnlwgz7BJYYl2XheehyBgX7p4TXl1PJk8yLVJG8zYBsH+YNp9Ir0IdcKSPG\n+6wOr6N6fgznqFTQ8zZEpIMHY/UDHOLPlroJjxs0zL3M4gEOYWtK/118Ey4mcqWqvrD7zrp1SC1h\nDvdhfnEtw4L9pOb6qSpfU6wvsnhLArd//aB+ETk2iPX2rxkeLEwFgLuqfMo69y2Wb0rg9t7Rsmeq\nPmVXoEJVN4lIlh9zYgcC/Y+lICsLD99RFv+
KDU4C9zRV/WiXXIR9GBEJ+DEfcdCf5OFLVpO0FNZF\nsS9X1Yl72r4MGfZXMjFZGfZKRMQC3KYuwYiI34Ox8TYOzu5ar6b0Jo1yC9NiFuL+gbHB3FoOk6vK\nI3wXXUD5H5Pq3l6/X79YD3UgcOmVDA5tVaFfo1U8wne0x28fTTerhKjzCWvjNu6LMZwr9iVNIhE5\nLoh1VxR7JKAWxrcCfToQoDMhYzlbCGOXxnFO2SodUZuQeGacT/+DRkvHBn3/SedUz6f8OlX9a3Ns\n8ov11DDyL7iMQf7a8VrLdAv3MzucxO2qqlt24nS/94hIDtAXqFTVpXvangxNQ0SyDTGvNk3v5apu\nrmGYCxPJyD2q+t6eti1Dy8g4WRn2KkTkrADWHVHs/gY4PqwPo9g3qur8HbQ7oTvZr/5ODm5YUA+4\nV2clc/GYV8iQBgFWa7WaP/BNWUydBqJHImJ4MW5VuL4tPtdBpZKE7aAPGUjQi3Ggja6L4zyd0o7a\nZ7DEuNCP+fhP6BccSQdsXKZQrK+xLJrAvRMoBpYBX6VzHEXEA8QOIM8oJkI2XsbSiSPpgk9Mpmox\n/2DJR9WaPL6pNolI0INReg9jAm2locLDo/pdeDalv1HV/Vr3SUQ6eDF+C/zUQYM+zEUR7LuBN/Yl\nJz8DiEgby/RN69x+UMGgvicFQoG2lJQtZtb818KJZPjxpB37zZ62McPOkwl8z7DX4BPzlrb4fnse\n/YNDySeGbX3BhpPeZsVRInKUqs7cTvOs7O2UJszBY+biS1siqhsh4rj5ImKqqlN7X2om7Q4Rua+E\n6DBqxEi/bYqq+d6MiGRZGI/fyMjg1pk/Dwbj6Cb56g8+xfyrYjiF23tg+zD+FMJjHEZn+pBLKTE+\nYg0zKOF6HY5bI8zf3GDwLgFMJ52DBTCAvNACKpotv/B9QkS6+DBmjqVT22Mp8ObiYxEVw15l6bNV\nJEYBN+xpG/dHRCQfaA+s12YUSTdN7x2FXQ7qfujwy3xbK1X06nYIXdoPCb392a+vFpFXVXX2LjI7\nw15ApnZhhl2OiHRRuOVmDgoeKO0wRAiKh+OlUM6jf1YQa0dLTt8sZYs3UddHAmrS2BdQodLIA389\nETzIZuBQn5iPB8T6u4icK/I/LQNVjanqNFX9ZnsOloiMFJFrROQKESlo4unvCU7pQ65Tf2kVYCj5\nBIn7oaoAACAASURBVLDaAAc31lhExnoxL76DUYyRTrSTAAMkj18wlLb4+ZA1fMmG6jB2czMoy6M4\n3nia3yPARqIJG3e/1nvyYz5wNN3aXSADvJ0lRFAsRkh7buPgkIFxlYgM3tM27k+ISE+vJ/ixaXiK\nAv420w3DU+L1BF4RkbZNaGugevGw/j/a5mBtxe/LZmCv43yW6b9ilxmfYa8g42Rl2B38+GA6kJdG\no2kMHXHRAdsreKuqKw344lWWxt1aky+qylusSLrogimUxKrr+Uc1+5fHTIzKtvjeP5keV5xGr5/1\nIfdJL8YqEenbFONFJD8g1tRsPF8cSZf7RtPxAS/GkoBYT4uI2eSrsPto15FA2qk/EaEdfocaOYa0\n+DGvPoHu/lC9ckAiwg/owWesZRVVm2lEPb0xVLXcgzF5IkUNHOJKTTCZDY6DPt+cPr9PiIjPxj3t\neAobrDBkiYej6erxYFyyJ2zbHxGRrqbpnTG478njzjrhcd+ZEx7JPuO4h/w9u409zTL9U0RSgZyN\n43fV8WeHOqTd2Sanm2kaVu/WtzzD3kRmuTDDLseAdh0IpK0ObYlBjnoTMaL5wOrG+oji/HgqJZ/N\np7zv4dolZCAymQ1Vm4mvi+GM98Fvbmf65adr71ABWayhislsiK6iKjaIth1+ziD/Vp2h8RRkf67r\nQq+x/FMR6bm9AHwRET/mh4fSeeg59N2mDB9Rmz8x5yfrCZcBv23J9dkFLFxERVJV/fXfoJPqsJZq\nL7A4fVOwMHp3JZT2BawLIaqxAcbWz9hsChHsK/7FiulhtbPG0dXMwsM8ynmZJWFFH25MFmI/IcdA\nNEfSF6fuTNDyYBTuZpv2WyzTe2Of7kflDOn3g20vUn5fDqOHXuitrC7pWly64CfA9mbho4ZY4S1V\n63NysxtWPyjbvMp23OTCXWB6hr2IzExWhl2OC/PnU16dbl+VJqgg7qOmYG+jqGpFDGfkJmKnvseq\nR99h5WPFRM6K4QxW1Y1xnOvLiV/+IovLfs83vMYylrIl4KBtzqe/v76Q41F0NfLwtaVGZBOocahE\nZJSIXCIiZ6bkB0b7MAfWdrAAgmJxBYOCLvqL1HGtRsqOoSJyqIhsV2G+ET7bTGLLFIobxFy9xypb\nasREG73eNu7iNVSlXdNbSzU+zE0pkdRmo6pLErjDP2Xty7/m69hlTNS/sWBBKbGL4+rcvDN9NgcR\n6Soihzd1FnM3U6Fgl2gk7c5lbIkncBpkgWbYVci5/Xsc02BGWEQY2Ou4kNcTunR7rWtiHvWJWQte\nj9V/jwtHy1m88pOk7cT36ySP/YHMTFaG3cGbq6h67DstZaj8L8nPVeV1lsdN5K2E6uYddZIK1P4s\n9alP0Id592g65v+IXuSIl3laxussl3QzAyLCcG0X/IA1I4APRKS7H/PfXsyeB5BHOXF3JZWmg/vR\nKDr4DBE2apSNRMjBSwFZtJMA+epPFhMZBny901enrl0neCz/U6bpy/N7s5yq8Eaf1xP4Z9KOXana\nyNO3HqmC0Ce8wOIvZmupbwwdg0lcJrG+ehVVZXGc7Za0ieE8+iFrzjhcuwRz6klivMXyqI37aEvO\nUVVXAhcAF4iIhDW5yzPmRKRLAOt5L8ZhHQjGyon5guJZEsX+6e4uldMYqmr7xHz6dZZdeaUOqSNx\nUaRhvqZYbZonmZFh53HV9fu86Ys8pLbvsAKE4ybv3FA6/9gPJ989YHDfk0OhQD4lZYv0u8VvR111\nb1fVzEzW95yMhEOG3UJNMLXx0QjaWyNpH4hg8xnrqjcSXRHDObw5GTvpMESuHkjew7/iQGPrEtk6\nreYRvuM+xlJ/2Qzgr7ogNoXiG4GnfJhLf0CPzsdTaG59uKVESZOdCIoP01pNFQVkUUIUF+UA8phH\neXgLiaNbQ+JBRI7xWP73jjj46kCX9kMQEWLxSqZ++3xsw6a5M5J27MjmpPCLSJ4BPwvgOQU0GcZ+\nGXi1Kct8PjHv9GNdeyo9g33IlVJivM+qcBHhuTGco1U11qKT3Y2ISLYPc8GxdOt0Ej0sn5g46vI1\nxfoPllQncEeo6h5T86+NiAT9mJ93InjA8RRm5eJlARX2J6xNJHAud1Rf2tM27i/4vKHJo4acf2iv\ngkMb7Ju14LXkohWfPJO0YzsMXE8l2Zzn9YSuAm2rqnOSdvR+Vf1qV9idYfeR0cnKsFchIu1M5BI/\n5gSFcAT7OeCd1pBMCIi1ZDQd++bho4AshpKPgXAr0zmbPgyRunHeVZrgBr6OJXD7Akf1IfeJm2Rk\ndv
1+p2sJz7KQE+jOCXTHIwauKrPYxLMsxEBw0M9jOCc3daapMbze0HeHHHjRkO5dRtXZ7roO//r0\n+upwtOwkVf2iJWM0BxE5JoB1AzBIoDRSU6bohVT5mX0GQ+T/hpB/7y9lWINA5X/pCucT1r4UVfvC\nPWBaWkTEC5wRwrocaGujU+M4D6nqglbq30PNMnkhNXGQH6lqi8sSfd8QkQl+X85bJx5xezAr+L8Z\n+LLNK/lo8h8ithMfrqpL9qCJGfYwGScrw15LqtBzEIjV169qbj9ejLuAm0bRgTb4WEgFVST5P4ZS\nToy/sYDz6c9w2mGKwUqt5G8sCJcRezKuzvUh8bx+Jr3POFK6Nuj/U13Lt5TxKzmwwb4PdDXrqCaJ\nG5tP+b8iap/bgvPoYJreNeec+JTPMBomLH635F2du+TdJ2w7fvXOjrEvknI48oHNOxNoD5AlnhkX\nc8BBB0oDPVpKNcrNTKtKqJNW6Pb7hogc48V4rRNBqzvZnpVU2RuJxBO4p+9OB35fwTQ91wlyd89u\nY2mT3dW/sXxppKjkWxw3ea6qvrOn7cuwZ8mIkWbY6xARy0R+5cP8lY2bB7gBsV6P4dy4M8HUAhe3\nwXfNTYykdvzQV7qBh5jD3YzhEDrxPIv4G65tqZFw0GoH9y4bfQxAUbex9MI5lHIM3dLuO4RO3MQ0\n/shY/3V8dZqIdFTVkuaeQwqvIaabzsEC8Fh+ETF2lDL+vUFEsnyY93gwLjIRsXGNgFhvx3CuU9X1\nzezO5yf9dfVhouh+cR8UkYFejHevYWhwYC2Zp3lalv0Yc/8jIgfuLcumewuOk3xQRP65fO3k803D\nU2A78YXAP1S1Yk/blmHfYL+4uWTYO0jJIbzZjazx59A32FNy2KIJPmbN2Z+xboKIDG+Oo5Xq7/aL\nGBiqH9x+qHRmpm7iM9YynY3EcGIOOiCB6wJFtWUbojivf8H6E47SLtn1Y7eqSeJpJAnXg4GDS0g8\nFGpWbDmVw4EPm3FJarNe1a0qrVgRaJfXq8HOVeumVtl27NOd7HufQkR8fswvh5I/8Ax6+9pJgEpN\n8BFrTv+MdUeKyLD6xay3RwL3o5ls6jeAvAYyIrMpxYM5rXXPYO/Ej3njcRR4B9bT0Rws+YzXAu9n\nrL0e+PmesW7vRVWLgHv3tB0Z9k0yEg4ZdifjQnjG3cCBwZ5SszqTK17OlD7WOLq18WPe1cz+8h20\nfV9y0+48kHa8z2rCJBMuep6qrlbVtWl0sd4pJlL8OsuTyVq7lusWSogkp1KSNgZpJpsYQI3CQhhb\ngJ2OyVJV13XtOyfPeioci1fV3s7ilZ+5FZVrqoE3d7b/fYxzuhLqexmDfO0kAEBO6u9kDJ3aejB+\n3ZzOkriPfsn65Hwtr7N9g4Z5jWWRKPbtrWb53s1xB9Mx7Yv1KDp4DIwTd7dBGTJ838nMZGXYbQQw\nLz2OgpAnjUj6cRRYn7D2xyLys2Zk0CVcVGwUDw2XxSPYuOiGBO6pqjq9sU5UNSkih02i6PWJFI3q\npTmJCuKUEXMSuL/5ho0PDNf23pHSflubdVrNW6zg5wxitVZRTswFpjTR7rS46jweiZZ3f/OTa6/u\n3vlgAv423rXFs8KRaHm57SSOVdV4c/tMxb4d5cM810RCEezPgFdaGqS/KwlhXT6BwpCRJiN0PN28\n0yj5KXB9U/tT1TUictIjfPdOoWaZ/WgTKiIcXUCFoehVqjpJRPoBvYANwHffx0LMAq7TSLlJZ+dq\nUWbIkGEHZJysDDskVTrm+ADWTw3IjmJ/6sKzqvWmBnaAidGhLf60QYK5eHFRD+ABmpS9pqqVIfHM\nnk7JqEPpXGefq8pEiqrjuBduz8Gq1ddG4EgR6buAiiHAZuALVbVFZPZfmf/+2xpo04823lKirKCS\nc+mHIDzInIiDXtfSLMnUg/0GEfnzinVfnQ5kAzOBj7enSt8YIhLwY/4nhOego+kaCmLJN2z8wTK2\n/DFVlHtvFbbMzSW96nkbfDi4zRZ/VdUvRKTTcip/uJzKPkDCi/RT5IGAWH8LYEkXgpGNRCWJu15E\nzlXVb1p6InsTDvrW1xRfXkh2A4HNr9iQsHGbW4syQ4YMOyCTXZhhu4hIyI/5aR6+wcfQLSuIxWxK\nI3MotZO4E1R1alP78op57xF0+eVPpF+D2JhluoUHmbM+qnbDFL/t23eoD/PjyzggOIya4tOVmuAV\nlsa+pfTbGM4hO+OgpBlnhIU8pDAa8GZhJV2wk7ibEzjXO6qvtHSM1iYg1pMDybvgSgYHaiveT9Vi\nfY5FmxK43VpDPqO18Yv1zAQKLjhVejV4CZyuJbzA4m/Cmmy0wPWOEJH+Xoyph9Ap9C1lnmPpxngK\nsFLyHNMp4TkWbdXPWtqys9mhLWZLsmubOVZ3D8bcn9I/awydxBDBVeUrNug/WFKZwB2Uij/KkCFD\nE8hIOGRoMQGx/jqYtuf9nMF1FKi/1VKeYN7mJG6XpqbWi0h3L8bCmxgZKJT/SVLF1eE+ZkXWUH2T\no+7DzbVRRMYFMJ82MTpl47FLiflM5I0YzhWqmracT6pdW6A9sF5Vqxo7zhA5zYf54sn08B1IOzNM\nko9Yk5hLWTiJHgR0DmD9GhguUBHB/gvw/M5KDrQGIpLtwSi5l7GBdIW5f68zqlZRdZGqvrEHzGuU\nlHDjr4NYt2fjMQrIYhzdGCB5VGqCO5gRriB+nqq+vbNjhMQz5VR6jkrgGkWEuVQOaHDMv3SF8zFr\nX4yp/bMWnE5aRKSNF+M2hUuSuNk+zE027p8d9IFdrUEmIgf6MV/zYnbpQtAuImwlcdfGcM5U1Xm7\ncuwMGb5vZJysDC1CRLI8GBsbe1Dfp7OqF7P5KlV9oal9GiJnWBgvHEonYyBtfaVE3U9YG43hvB/D\nOXdn3+pTsUcDgFxg8fZSrEWkRwDrSRv3qCBWIoztsZB/xXCurr8EmnJWim9iZLC71NUq/UBXO++w\ncrUHo9MP6RkYQJ6UE+dDVkdWUbU8NYvWqJO3KxGR4e3xT7xPDkmr//SuruRdVt7jqN60u21rjNTy\n5sSuZA06ie6hdvhZymb+zWraE3DWUJVw0Qfj6tzSgjEK/ZiLHubwwP3M5of0ZFC9bDuAjRrhNqZv\njquzM7Ujtzd+rg/zmxG0LziFHr4OBFhNFa+zPLqSymkxnGOBAuBMgRytWS5+rzXFQlPflQNT46ze\nW8oKZciwr5HRycrQUnpk40nmiS+Qbudg2mYtZ8uBQJOdLFf1DRGZ9hXFP5/BxoMctCSG81dgckuC\njVNtd1gHTEQ6ezFmHE9h3ni6mQGxfJWa4G1WnD6FkoNSWkG1g8LP6E8bt76DBTCEtua7rOp1B6No\nK34AupHFEG0bfJL5/b6l9PfAdTt7Ti2kMoxtuaqkCyCvIJ5wa+LO9hos5Dd9aTPkGoYGttrcjSxG\naHtuZpqmnPCdnsFK0SUPX8IjRkBVtyPPYQKNiGu1AAu5bgh
tu13CQN9WuZAe5HCdDgvcwYyDNhB5\n10SOHkMnIw+fdzabqoqJhEXkmNZSfE99V2anPhkyZNiFZCQc9iFEJEtEClJLKruDzWFsr91ISFMZ\nsYSDlja3U1Vdm1Dn5mpNToiqfYGqflnfwRKREUGx3vaKWeUVszIg1j9FZNBOnsc2vBi/OYzOOT+Q\nHmZAat4xcsTL+fT39iKnK3B+vSaFPclJWwj2a0o4ii7bHKxatnMavXwKl6aSBnY7qrpcYfUsNjXY\nV61JplLiAntVoLOJXH06vQL1ncJc8XEcBeLDPLkVhlldRswXV4f+5DEzzfUB+IaNaiKTWmG8OlgY\nl51ED399PTZTDE6ke1YIa8IDHOq/UAZ4fyg9uV1GZZ9P/45ejIkikvZlJ0OGDHsvGSdrH0BECoNi\nvW0hpUGsRR6Mcr9Yj4tIs7OsmoOqrjORBVMobrCvShN8TbGr8HJrjysiJ/gwvzyVXqfcy9isexiT\nfRLdz/BiTBeRI1rUN3LeOLo1SF0TEY6lIBjCurTerjUrqEy75LeRKD1JX42lowRR1AekddB2B1Hs\ny59hYeQr3UBSXVSV5bqFe5kVBp5S1VV7yrb6iIgRx80vaORyFZJtejAGtnQcVd1gYnz5H1bbR9OV\nqRTzbb33hJVayb9YEY3i3NnS8epj4+bm40+7rx0BcvEaWVI3+e9Q6Sw9yfEDZ7W2PRn2DqSGzMrS\n95DML3UvR0S6eDFmHktB3nEUmEHxUKpR3mD5Rd9RNkZExu7KYNkI9mX/YMnEmDqhw+gsfkwWUcEL\nLA4Dj7f2g1pEPF6Mf1zLsGA/abNt+0n0MLpqVvAp5r8iIgWq6qZiSw4DBgKlwAc7CjZ3UX8WDTLY\nAUhtr/+Uf30Jmx9dpZV0J5u1VJPApQshqknoWqoZRccG63GlGgUkCXVjslI30jOCWFcKtHfQOTGc\nP+0KuQBV/VJExr/C0gefZ9FIE8MBNtu4dzvo4609XktQVdcn5ub1RNp0peG7w3rCro3bKiVfotg/\n+5g104sI551Ej8BzLKKN+uhBNkWEo6upcpK4FzRF+qO5eDGXLWXzkOG0b7BvCRUU0nBZGmAUHbLX\nUDUeeL61bcqw5xCRAsvy32WIebarjtfrCayz7fg9ij7VGlnRGfY8GSdrL8eHeevhdM49VXptW3Zq\nJwEu00H+u/im3yqqzmAXzCZtRVVnisgh77Dyvn+y7FgBPBjr4jh36K654R/XiaBZ28HayjDyycaT\nHccZKyJlfsz3Q3g69KeNUULEWUO1GCKXuqqvNta5B2P2XMoOq6+rBfAtpbZN3SK5qlotIuf9gVmv\nhrB8XgxCeFhPGEXja6lmvBb4c2uV9VFV3mVVwoDnagfyp8rFfNSewMgTKMxqR4AlbO77H1af4hHz\npqQ6zc6s3BGqOgUYKyJtbBw/sHFvvXkrPPUOK665QgfXWU6r1iQfsiYWw3miVcZRLRKRQXPYdPFi\nKi50UX8R1UtXUzUTmEtNoPkuyQyNYN/3OsufGqB5oUCtiYtyjfEf1nANQxtrpw57Jokiw65BRApN\n0zurf49jcgf2nmAFfLlsLFtcMH3uS/dXRTaOEZELv4+iuPsbmezCvRyfmJvvYFRuR2lYG3ialvAS\niydVa/Ko3WGLiHgALxDZVV9+EblkNB3/fLkMSrsU+qDOqZxH+dVejIfOoV/bI+gsWx/Ia7SKPzI7\nEsE+WVU/b6T/47Lx/Os2Dg7m14qlWq1V3MusSBxnpKouqtfmxCDWm1cxxD+ANogIWzTBcyyMLaKi\nyI/V6XR6hwbQhgrifMia6AIqiuI4o2pnOVpi3NqfNr+9lmF1dKtKNcptTI/GcA5qreDmfRERyfZj\nft2X3N4n0j3QjgBL2cybrAhXk/xrTO1r97SNLUVExIf5lB/z3OMpDHQkaKyk0v6UtUkXyn5K/25j\npFOdNra63MiU6nLiJ6rql3vI9AytjNcT+OeAXsedPnzgGXXiNpN2jH99en04Fq8ctytmUzO0Hpns\nwu8BDhrIaUT9uma7pC/ctwtICVe2inhlKiB8AjWp5JXAm6q6AVi2gi2uqlI/ONhVZTVVlsDgQbQN\nHCld6hxQKNmcq32DL7P0TmqWEdOdw8ceMW65mal/GK0d6UrIv4wtkW8pExv3vDQOlgQwH7mUA/wD\n5X/Z/Lni5Sod4r+erzttIXHTqyw93UEHG8iWOM7TLvqEqlbW7stCfnEmfeo4WFAzM3mMdvN8wtqr\ngKuadyW/P6hqlYiMmU/55cuovNJF25rIogj2fcC7e9q+1kBVVUQuj+M8/w4rr7Qwuidx58VxHgPa\nPM/ijzxqBoenhHUrNM5LLI5GsGcAk/e0/RlaBxHxGmKeOrDXhAaJMR7Lz8BexwXmLnnvUiDjZO3j\nZJysvRwf5sIFlA8bSYcG++ZSlrRxvtoDZjUJEfEIXObHujaJ29mDURzDfljhcx/mf/Lx5Q6lXbCC\neGIWm+73iXk/cHslyc3TKMkaQ6c6TtQkilwbd1UQz+ixdGo4tQccRAeeYeFYETEaWxZLqvuQiLz2\nFRvOtzAKE7gLgZca0dYqBLoMIb/BDksMjtDOgQ9Y0zWsySN3dC2A/MJGArt7kWN5MQdvr4/9AVUN\nAw+mPt9LUrPAX6U+dRCRk//OgicNpFtIPckK4l4TeSGG88vM0tH3ipCIgd+XPgYvFGxnGIbVMKYh\nwz5Hxsnay4lg3/Uqy57rq21CObXiftZoFZ+zLhmn+QrpuwMRsfyYH3QlNPY0egW7kcU6wr3eYvk9\na6n2/Jg+1pHSdasTFdiicf7ArOtKia6K45zyHIsmLtQK3xg6+l3gKzZEZ7EpksA9LYT8xSX98ya1\nXVOfRkmVD7m3Cafi82A46bSmAAJYhoGkTxeri+3BCG8iltWBhpn4xURcG3d1E/rJ8D1GVT8XkQFA\nnzB2NrA0sZ1qBBn2WbaISLiicq03L6egwc7iTQtjthOfsQfsytDKZCQc9nJU9Y0qEg/fyJToq7o0\nOVGLeFrnR+9mZjRRkwG1S2urtYCfdCQ45jeMCA6UtmSLl4GSx+F0DvYkx1PLwQJqtJAuYkDIh3kH\n8G0Cd8AUSu5/gnnznmTedzPYeFcCd4CqLg5jv/oF68PpBp1KifqxPm/Ft/4VMRx7XSMxx1MpqUri\npo3/qk1q4f7v77EyXn9fRG0+Yk00hvNkK9ibYR9Ha1iqqrO2V+4pQ3pExCMiXVpT4kZEDBE53jCs\n+wwxfi8iI1rSn6q6rrp/njH3pajj1hXzL9u8ipXrpqjr2n9tkdEZ9goyge/7CCLSz4NxkYVREMf5\nzkWfVdWNe9quxgiJZ9bFDBw+XOqmqv9dF9KLHI6ShnWgVZUrmBRP4BaoanqVSGqKVvswlpxEjw7H\nU2hZYqCqLKSCR5kbieOMU9VpqWMFCALRnc2q84r5m04Eb/s1w4OhlIaRqvIxa523WVkUx+ndlLIn\nIpLnw/xmGPldT6
C7Lz9VNuY1loW3kHgpVWtx7/hCZsiwjyEiAcv03q2qlxqGZTpu0jQNz4dJO3pN\nS6RmRKSLZfknBv1tOvXqdki27STcZasnxRzX/jJpR08FuoKcYxhmW9e1ZwFvqGqsCf16PFbgPZ83\n+9AD+hyfFfTnsWHT/MSyNV/YjpM8X9V9a2dtzrB7yNQuzLDbEZEg0N6HMe02Du7Yud7L5Au6mI4E\nmCCFDdra6nIlk5I22l5Vt+xgnIIA5huCDOlBdrKUmGwhEYnj/FRVPxIRvwfjFoGrHDRLIGliPB/H\nuUVVy5p5ToYP8xFFLz6YDuTg9c5kU3gLiY1xnPHNuYGLSBsL41cWcomNm+vFXBbBvhd4JeNgZciw\nc4iI5bH8kzq2Gzji4MHn+rNDHUkkwyxc/rEzb9n7WxwncaCqrt2JfsVj+b87oPcJA4b2P9Xamozj\nujYTpz8aLS5duAK0d++Cw4xgoK23qOTbqrItq2zHSRzXFO07ETGAYz1W4FLDMNvbdnyK4yb/oqrN\nCh0QkX7UxI+uVdXFzT3PDDtHxsnKsNsQkbZ+zIcc9EwvhhvHCfYmV37OIHJrVQGap2W8yjJ+z6gG\nNfWmaDH/YMmMsCZHNWPcAfxPjPSrlEipx485qS9tDjyL3oGukkWpRnmf1YmplKyP44xoJMh9R2MV\nAqcBIWqyfj7bWzWnMmTYnxCR09tkd3vu5KPvyjLqZe/OnP+qvXjlZy8k7djFO9HvYUF/3oenH/fn\nUP1s51i8kjc/vo4fHH03OVkdt21fs2EmX878yxbHSXTf0ctiSxGRfh4r8ArIwJysTomqcIlH1V2a\ntKPn7s9yMLuLjIRDht2CiGT5MKeNpmPhqfT05oqPak3yPqv4A7O4VQ9ia6mQXuSwhbj7VxY4P9F+\nnizxoKrMpYwXWByN4/yqkTFyDOQKP+alLpotyOwo9r2qOhFYVO/wszoSHFq70HA7CfBTBnjj6nT+\nhk3XAbc29zxVdQ3w5+a221tILZ2OBg4BosC7qQSA5vQRBE4G2gHzgAZ1JzNk2N14PcFLDuhzQgMH\nC2BAr2OthSs+/jHQbCcLGFPY+SBPfQcLwO/LoW1uIZFoWR0nq7DzSDq3H2QVFc85H3hsJ8ZsEiLS\nyTS9Uw4ceEab/j2ONgzDCriuw9LVk4Z+M//lr0VkULrvt4h0oEY+J48a8d2vVbVBrGiG1iHjZGVo\nMQIX9yO3ywX09269GWWJh7PpyxZN8HcWcJL2oIgw77EqbOO+/S2lxiw2ndZBA7EqElYCtzSOc2k6\nsUURaevDnH4AeV0mUBjIxctCKo57ixWHe8W8NaFOnXT/ENYVJ1AYSpcReDyFvjmUXspOOFl7K7XK\nCw2nRnPsnfozdSLSyWMFPrAsX9/CziM98UTYXls860GP5f+b7cSvacqMnCXGhR6Mx3qR47YnYC2k\nwgmT3CgiJ+6JJYpUiaLxQGdgORmHb79FxMgL+NLXEQ34cnFdxy+paYdmdh2JJaqcxnYm7Rim6Wuw\nvXvng0Iby5aMZxc6Wabh+UWvboeEBvY6dptnaRgm/XuOk81V6wLL1nxxHbDtpVVEDMvyP2iI9fOA\nP9eKJapMy/Rh2zHHsnxPOE7ihoyz1fpknKwMLSaIdcmxFATTve0dSwEPMDu2hG/XAisj2H8GPkyJ\nMrYrIjwA2ALMa+wG6MO8fwwdC2o7cR0JyhDND97C1LtF5B1VXV6rSbu2jRThbYsfG00vTrMPohEp\nNgAAIABJREFUIiI9/ZgfBLG6DiHfqiBuL6TiL14xf5dQ54+pY8Rj+T/r33N8v+EDT7dqwkDwxhNh\nPvn63p9tqd5QAty1g3GOD2E9fiMjgl2lRutLVfmC9aFXWDpZRHrXF19tgu0DgYOBKuDjlEZWU9tO\n8GK83IGA1ZmQuZJKrSJZLiI/VNU5zbFjV5FyficEsa4S6OKg38ZwHlbVb/e0bd83bCfx1fqNc4d3\n7TisgXLzhk3z8Vj+ZYlkZGcc8HfWFs/6UzxRjc9bV+NuU/kyknaU/LyeDRrFE2FVdXbpUqFhes7p\nU3hkQw8P6FN4hHfF2q/OppaTZZm+3wf9eZfGE9W+QX1Ook/3I7BML1Xhjea0756/alP50kEicmwm\nBKJ1yUg4ZGgxCtmNqdJn48GFaFiT/cKanKCqH2x1plS1VFUnq+rcxhwsEfE5uOeeQk9vfScuX/wc\nThfDg3FJ7e0OOmsxFWnfPhdTgRdjyc6c555CRPJF5DARGSq1LoKI+HyYX/6Qnn3u55Csn8oA/y9l\nWNYfGOPPxvM7Q+S81KHjfN7swuEDz9jqYAHg84Y4fOSVIeAGEUl7s95KEOvu8+m/zcFKjc+R0lUG\nkhcAzm/G+bQPivVlEGvmCNo/3pfc5zwYGz1i/F8T2x/ow3zrlwxr+3sZnXOFDA7dx9isn9K/wIsx\nUaReXZoWIDUcGxLPByHxLA+J5wsROUMkzbpU3XamH/PN9vhfP4PeJ/+cwSOOp/CCAObXXjH3+fJA\nexuOk3hsyeqJdtnmlXW2xxPVTJ/7UjhpR7f7EtEYqlokyNMff3VPeHPluq3b2LBpAf+d9qDbqd0g\n6i9Ruq7DopWfhJN27MWdPJ2mGmdZZvpi96bpAXTbJIqIhFTda7NDHYKD+57MgF7jscyae3Z2qAPj\nRl9r+LzZY4Bxu9Tm/ZDMTFaGFqPo1PmUFxSS3eDBM58KNZFZLei+jYWhJsL7uooiwgSwGEsnepND\nAVleD0bf2g1iOH/6gDU/HKUdg+3lf8KfYU3yGsvDEex7WmDPbkNEQgHMpzwYZ3QgEKsiaSVwykXk\nMlX9EDi9G6GcCVJYpzRHvvi5WA8IPsp3fxaRNwU5qme3QxoE7gLkZnfG782WcLRsIJB2BkhELIHh\nI2ifbjdj6RRaxpZTgcebcE6GH3PiEXTpezq9PVbqAVWiEe5j9r2mSIWj+tL2+ghg3nYKPf0DapU5\nEhHG0EkWaoVvCiVXArftyJYm2Co+zIeCWJecQo9gIVmyhupeH7FmxGYS54vIaVqrAHidtvDzTgQn\n/JYRQY/U/HoG0dY8RDsFf8f0u0Tkv5kZrdZDVVeKGOd++OVdLxd2OVg6tRsQqKwusZes+jyp6vwV\n2O7f1Pawnfi1ldXFxe9/cftvfJ6Q4bhJw3Xt8qQde2TV+qm3d2o/MNSz62gMwyIcLWf6dy9G44mq\n2cB/W+8MG6KqH69eP+OCvNzCBs/xNeu/cYDPam0aGQq2TxZvWsDhI69o0JdhWAzodWzg20X/+hnw\n6a6zev+jxU6WiBxPTTCwCfxNVe+rt/8o4B1gRWrTm6q6U28V+zqpWYihQBtg4d6sc9Ucojj3/5tV\nPxiu7YOdahWyLtMYb7E8GsG+uwXdVyRwjJuZygjaM5i2bCbB31hAb3IJYCYSOHXigVR1pkeMX9/G\n9PuP1q5WL3I8G4g4n7A2nsR9Bni9BfbsFkRE/JgfDyF/xHn092WJx6eqzKc
89ATz3hKRkwJYJ4yl\nU9qlzwG0QSHfMDxrXTf5iuPEXWq+o3VQVRzXFrZfk1IBdVBJd8NwasT1d6gTlmJCG3yFZ9GnTjBx\nRwlyuR4QfJjv7hGRf2wvdsZGx4+iY9qZpLF08s9k0zU+Ma9RVEGmpH7n7+1EvMm4AOYlt3Nw6GuK\n+QvzCZPEQUMBrJNiODcCaf+2/ZjXn0WfbQ7WVtpJgAla6P2QNb8EftZMezJsB1X3HRHpsapo6kVF\nJXMOdJzkesdNPtPSLLvU8tk9IvKniJPoS813ZWkq5OHLqXOee3HK7Gd6WJbXse2EGob5vO3Er9vV\ny262E7t/wfIPz+7cfpDVsd2Abds3lS9l3tJ/x20nXruihavqiIjgsRpWnADw+3JExGizK23eH2mR\nkyU1RX4foyb4tAiYISLvqurCeodOUtVTWjLWvo6IHOPHfMaL2S4Xr11MxBcU64MozkWqunlP29cS\nVHWmJcbVv2P6E6O1Iz3J8a+jOv4VG9RFb1HVHSqib4c8E8O8hmH0qVUL+xjtxkPMYQWVho3+rX6j\npLqPi8jH/2XdlZMxB9u4q2M4f1HVmS2wJS1SU5cwC6hsbGZjJzg6G8/Qyxjk3xrALyIMJp8LdUDg\nBRY/6KJzbNLfx10UxODgwT9pO2PeS+cuXzM5PnzgmUGz3vLCxvIl2E68Cqj/nd2Gqjoh8Xw5jZIj\nj6BLg/2TWF8dxn61KSflxfjB4XTOSjer1o82COQDBcCa7fWj2ymrlIMnZwyd+dQsxvSGjndEj4/G\ntkQs03uX4ybvbWrwcxDrulPoGXyd5WwkwhUMpqfkEFGbSRSZ77Dy9yLyptYrKg4QxSnYTJwZupF+\ntCG3Vkms3uRaJjK0KTZkaB4pEeP7dnjgzvWdAOZv/VlECizL/6zXCnTu2nGYRuNbdMPGeYaiJcAO\nxUhbgSWu6/z70yn3n902t7u2bdNDyjavpGLLGttxk+eo6txax84IR8sl6G/DxvIldMzv36CzdRtm\nx5J2dNJusHu/oqUzWaOAZVvFGEXkVeCHNLxh79f6VyIy1of57uUMCg4lHxEhojavsfSk6WycJCIj\nm6IYvjdjq/usiHw4heKfzWTjgATuCqdGlb5F9fgs5NKD6WD3kdw6f6s+MTlb+/JHZsVs0ouBak3J\noV0W/yIiXfyYfzSRMwxEFI37xVxlIPkg8STuyzbuIzuasRSRfoZhXWganq62E5+j6r7gxTj7SLqm\nzZAcQXv+xsIDbNx7J7L+jPFakFX/uDmU0ja7K/17jpPla7/0bK5ct3jSjEcHHDL8koA/lYVVtnkl\nk6Y/GnGcxPU7euuOYP/2FZZ+lq/+wAHkISIk1eU9VtmrqSoDXmviZXO1ZmYs7T1B6/yXHgv5YArF\nZ5xEjwazWVMpph1BvghUc/Tom8jLrRG93VK1PjhxxiM3hyNl2cBNTbS1n4UhC6ngLkbjS81KBcXi\nBLqDYrzP6geBE7c2kJqanX/2YJhTKcFAeI5FjNWO/Ji+WGKwiSgurG+iDRn2QkTEskzfl0P6ntxt\ncN+Tza0hepFoBR9OvuuGcLRsPfD0rrTBMn2/zw51OPmoUb+kbPNKCUdLyc/twaqiqclNFcvPB97d\neqyqxi3Ld2fSjt35zdyXfcce+hu8nv+tOqzfOI+1xbNtVfeZXWnz/kiLxEhF5Axggqpemvr5PGC0\nqv5frWOOBN4C1lEz23V9uunb77MYaUg8k39M30MPk7pF1VWV25heVUT4PFV9t5Hm+zVZ4vn3ufQ7\naWwjscyX6edNUohvbUSkkxdjzji65U+g0FpJJX9nIePpxgjaE8dhEutj0ympTuCOVtUVafoQy/Te\ngxi/6Nv9KCs71MFTUroosq54NpbrTjuT3kePk24NxlZVruSLWBynjx/z/VF0HPhj+nj9qcW8JbqZ\nx8xFjBl1FV06DGHukveYs+itRyzTm+W69rm52V3jSTsi0dgW23Hta13XfqGJ53y8D/PZbDyhfPzu\naqq8AjOjOGerapOcBhGZ0JHA639gTHb92ayFWs6jzF0Tw+mxvdkmERnsxZh2JYODQ1IvLW5NpiPv\nspK4CMcfczfZoQ512kVjm3nrk1/FHDfZpSlitCHxfNaLnHF9yOUUaZhBFlWbX/Bl0kHztmZGBsR6\ntCuhi37B0GB2avaqWpM8zXzaE+Bs+nAL06pLiZ2lqh805Zpl2PsQkdPa5nZ/7uSj7mywXL+pfCmf\nfH1/se3Euu6qJUMRyTINz8YfHnNfICvYrs4+247z2odXx2wnfoCqrqzVRgzD81tB7jBNy+pTeCSh\nYD5FJd/ZJaULo46bPFlVv9gV9n5f2R1ipE3x0GYBBaoaEZETgLeBfukOFJHba/04MSU0uU8jIkED\nGT2aDun2MU67Zr/B8p9Q660jw/9w0E2biaed+QhrcusfYHQ3m4UX49bD6Nz2LOljxdXh7yzkGobS\nu9aSZm9y/V005H2PVS8Ch6bp5my/L/fqE4/43bbZpQE9xwdLK5bzwZd3HTJNS8Lj6NagyO1yKhGo\nADbEcI6aQcnrUyke39NsTyUJwoZy0LCL6dJhCACxeKWNuuWJZOQaEbmhfMuq4dRcs+nNmUFV1Q9F\npGscZ0wpsXxq4gqXNeOyAXyyhcSqV1ja/0zt4/WkZgDWa5inWRBJ4Ny4o+U8VZ0nIif+hfmv5ODN\n6qxBXUWVT8BzCj2M/+Y4DRwsgIC/DR3zByTXb5p7AvDyjgyNYD+yisqjDqZD2vivgFhYajgOThYQ\nFpF8D8Yl/8dQf3at5cEs8fBzHcQNfM1SNkerSX4CfLSj8ZuKiIwKYN2ZwDkKwIs5KYp9q6bqd2Zo\nfSzTd1yvboekjYdsl9cHwzBycOjGDpa9W8DY3OwuiaxguwYBVpblo6DzCHfluikTgG1F51Pfqz+I\nyMOOmzhv4YqPjzUNT8J2Yl8A/9BMMfIdkooxP6o5bVrqZBVREz+xlQJqZqy2UfsXp6ofiMgTItJW\nVcvrd6aqt7fQnr0RSwCzEbUMLyYGkl7UKQMxnL9/wrozj9FuIW+9IOL/ss71Yvw7onZid9ulcN5x\nFHgAZrKRXuTUcbC2cgxdjXdZOVxEutdfOvV4greOGnJeyF9PRLFdXm/6Fh7JqtUT+VyL3KPoYmyd\n9anUBM+wIJzAvSP1lryZmtpnizw9D+4/rNNw2uX13pZWnkxGWbp6UlJR2+fNmuH1hHJddabYduyB\nnVmiTo35dXPb1W4vIkd/RfFrk9kwtr+2sStJ6DrChove4Ki+0sR+JolIt01ED91EtDOQ1w7//dl4\nsz3buat5PAED2K5cRS3ei+HYCyj3Hp4mFm2dVqNoAigVETGQh3uS7c+RhnImQfHQW3N1HuX/BC5p\nrRkOETnBh/nG6fQKjKKjCDCdjeNfZ9mhInK2qv67NcbJUBdFbcdNNrLsrbiuY9D0ZBBEZLDH8t/q\nuu4JoBiG9VnSjv5eVWc32mS7UT
iy7Z8G1tXMuj6V+mRoBqmJn4lbfxaR3+2oTUudrG+AviLSg5oY\ng7OBc2ofICIdgY2pTIxR1CxRNnCwvsdUeTFWL6C892DyG+ycRkl1GPv9XW2EiHT1YPzCg/EjQJO4\nbyVxH23qMk8Lxg0ApqpW72QXk2PYnzzAnON+ov2C3SWbsCb5L0Xu+6yuTODe0Jr2NhUHN5SXelaX\nEaOArLTHecSknfoT6wgXAnWcrGQyOqBzarapPgWdR/hWFk1Z9Jq9rM0nrG17oLYLVhBPzGKTIfCI\ni9aJ97Cd2GWLVnzyQZvsrsH8Nj1BDMq3rGHyrKfDQLJj/oDfHtD7+JDfl8uGTfN6zVv2/hmGGNcp\nuoGam/FkbWbh7J0lNc4xItL3W8pGUiNG+l9VbdaMZMpR+RJqpCGqSN5SSSK7ePMakskoHs//XvIT\nyQhLVv6XjetnB4NYt/jEHJbAfbj2ckoaenow7PlUeBdrBf1rSUYk1eEfLMFBZ6mq4xfz7gDW6b6G\nCZzbMJFqaupd7nRyhIgcHsS6PYFzCOB4MTy/ZKi3tm1H01W6aij4IHOeF5GO+3q8596I4yTeWrJq\n4k8H9T25QSmfopLvEDHWABua0peIHGma3v8M7vsDf6+CQwxBWFU07ZQ5i946LiUTkm7Wc+rmqiJv\nJFpOMNC2vm2sLZ4lwCc7eXoZWpEWF4hOLQFulXB4RlXvEZHLAVT1KRG5CriCGq8+AlynqlPT9PO9\njckyRM7Jw/e3mxgZbFtr0mqybtCXWFyewO2xIyckJf/gBRJNzY6q1XaEF+PzQ+nsG0snn6J8TXF8\nCsWxBO4RqvrdTp3Y//ofFcS6M4YzDhQf1hdR7NeCWBfEcEYLqBdzeRT7ZlV9cyf6tyzkBgP5lUK2\ni4oH4/0YzmvUOAgrganNvS4tISieRZczqP9Qyecr3cA3bOQaGdbguKS6XMOX0RjOAVsTRLZimp7I\nj8Y/EAjVu0kCLF8zmRnz/vFxIhk+ATgWGEGNM/Kmqqa9eYvIoV5P8GHHSQ4xTY/tuHYcZGlh55HD\nDhtxua92DNTmqiLen3gbha6/yoOhK6nymsizMZxf7KsP5a1/522NrKxgx/7GIQddgWFYRKLlfPLF\nnfSKmxytnfFhMotNic8pSiZwT1HVtHpGIjIoD9+UixmY/STzGUY+B9CWzcT5gvX4MFlH9YMu3GVh\nrL+Vg/z3MYt7GbutVudWImpzLZPjSdxeO/tiY4qc68f665n0Do6gPfMp5wPWcIekr6d+q06rKiJ8\nZu2HdEqotSdQvAMHM8N2EBHxWP4pBZ1GDBs19AL/1iDyTeVL+Wzqg5FEMtykWUQRMSzTt+6Ig6/u\n3K1j3ftHcekiPpv6QLnjJNI6yh4rcF9uduerjxnzq+DW2fCkHWfyzCejxaULPk4kI6e2yslmaJSm\n+C0tdrJai++zkwXgFfO3wG1DyXfa4ffNpTxaRqwyjnPc9nRcRCTkwbgVuMLGzbIwwsDTyZrloh2u\noacEFVf/jAEFo6RjnX1f6wZ9iSUrYzhjTeTSANaxikbC2M8D/0qlLO+o/xN8mG+cRZ/AaDqIIEyn\nhH+yjMPozJn0xsRgLmU8z6JINfbNSXV2qshySmU7Fxjuw3wpF29WF0Kspkqi2BtjOD9qqcPYDFsu\n6ETwiVsYGTIxuJ6vuI4D6Sl1l/4+1rXuO6ycHtHk2Pp9eDyBZwf0HH/eiAPOqjOjrOry70m3VVds\nWXORqjZb00tE2gMBoMw0PBtPHX9/MJ0jN23mXxlUtJ6T6U6lJniMuZF1VL8UVfvy5o65tyAiXSzk\nl4bpvVIMK9iz2yFasmGOHBoLyWnSq86xi7SCh/i2MonbUVUbpNyLiM+DsekORmWHsJjMBtZSTQCL\n0XTkRRZVFRE5DwgMJO/pG2R4ziu6lHVUcwWDtzlaEU3yGPPiq6h8Nar2hTt5XlkWRsltHBTsllLd\nn6LFfEspP5fBads8pnOrZ7HpSlV9UUQ6BzCfc9Aj8vHHK4h7BVkYxb6wXqp/hiYiItkeK/Cs69on\n5+UWxmLxKiMW3xK3ncSVqm6TvrcicnhWsP37Pxr/QINEEID3Pr+5sqJy7dlaIz5cv+3/s3ee0XWU\n1xp+9pTTJFmWbLnIstx7wd0GbMCmmN4JBJJACoQQEkIvITfk3iQkhBAIoQQIJSGBQMAEiCGAsTG4\ngMEF997kIsnq0qkzs++Pc2zUjLslm/OspeXlmTPz7Tl1z/ft/b6mZQUeUM/9fsd2/eKGabO9dKkt\nYr6ZcCLf2teZ4TT7TjrJamWkfvwuJOl+voikX5ub2hcEAkDlzhkZEQkEMOcMJLf/hfQM5EsG2zXM\nFNbFPqdsTQx3rO7B701EJrQnMPW3HNtEm0hVuYXZkRriOpqOMoK8YBSHaRTVbie8KYr7aCrWdSST\nrkijc5s+jOKfcEy7+urbAGu0ikdYzO84jp2q3qUa4ad8HHXwOut+aoOJyAAfxrwfMDhjpxyGqjKH\n7fpXVlbH8QbsbqbnYJJKXh+xkCtPoas/TMKcxXZOp5CRqe7CmWyNzWZ7Xaq7sEmBuIgUmqZv4dC+\n52X363GK4bOD1IZ38NnSF6JbSxYvSTjR41T1y0RC9xRjgW2HVn79zMdDze1ftWE65pL3+K6XFMyv\n0wQ3MSuaSM6sFu/vuK0FERkGXOrDuOUhJlh+abqUd69+VrOaqu/rbmrB/GL+XwGZN93CsFCgnhTr\nu7rZe5V1RTHcnsDlI2j/yPUyNMtVj3+yhtlspz85CLCYMgRmxPAm782Ny26u5RsDyHn0Vhm+q9h6\ng1bzGEu4l2NpLOHhqXIzs+qqiE8Clvsxl55MQeez6WYFxMJRj4/Ypi+wujaBN6y57tc0e0dqdnAo\nUAt8vC/LwSLytS4dhj558rG3NOtuPfPTR+o2bPn4R6r6zJecoz1JrUoLmKmqh6rYPk0jDkd3YZp9\nQJNCeQ2KDUVkaBDrfhOZKIhnITtMMX7loY8B3+5GVt8fMjiwM0HqJCGu1UH+P7Cox1LKrya5VPtl\n9OxOFs3dJYkIXTUzkE+GfE1679p+nHbKfJrlA1dS+eAYOhprqKpbT/XjInKRqtZf5z+pHQG7cYIF\n0FuyydMgyyhnKMkW4zwJMlBznM8pOx94ds/PGIhIG+DrFsYAB29zAPPY0ykMHCPt6z+G4+gsq7Uq\nMJvt1wM/3ZtzHwipRPg6EXnqbTZdbSEFUdxtU9nYaSobxwjEHfQfCbwHVXX7bs6xSURGL1n95p8W\nrZwyyTYDCceNiYjxrOPGbjuQBCtFhevGzWi8hoCvaSNUTXURPbwvvgIyxKa/tk0spvwU4O8HOPY+\nISJ9Apg/d9HzPdT2Y84L49yjqvtt8aGqC0UkM4/gD/xiNu1KAPrQNmM1VT2b2wcQx7tnK3WFNzHr\nkuO0k5WJbX9KaU0Z0fIY7iRNymjPXkqFHVcXn5hcTl/O0u6soII4Lp+zIxpHv7W/CVaKjvl
kNCjY\n70YWmdjMYAuTaCj18QFbvBhuETBP4Pp+tM29WHrterEtMTiJLlKm0eB7FP0U+O4BxNaqEZEJthX6\nmafOOEFioC86bvy3qlq056P3TOrz3exnfC9YW1a10VT1aGyHqarsqFjnAWv3MP4OYK/EgNMcftJJ\nVgsiIsN9GDMvpGfG8XQSPyZrqc5/jhW/20F0sIlMOJNuocYJkohwhnYLraP6OvacZG3ZQl2znUyq\nynbCcgaFbNIaplHERmoIYDKUdnxGqXUGhWSInbUyubTymogcU29WJq8Dwd1m8XkEqWnk1tKegA9o\nunbVDCJylo3x0kBytC9tM7ZQF/2UksA4Ojb7+GPp5J9HyUUchiRrJ6o6n2TN4f4evxY4Q0SyY15t\nW6C4uaWrvUFEevBF7dYMVa3z2aHXl66eesHIQZc2+KyHo5Ws2/QR39RjGvQgGYhwmMWDRWSYD2Pm\nZApDE+hsBrBYSOnxL7Lm36bIXR7sNPSerfuuh7atnKjPUW/XjGp9iqiN8CUFyqlZiStF5Nfvs+VC\ngQyFOcDbO2csVHVtSKz3n2PFpO/ogIApBtniY6Tm8QRLoybydgKd5BNzpINXqvCP1Ou+L6xeRWUM\nvnBiFxGu0UHcxwKWajnj6YwgzGJbeDFlkTjeeaqqmWJ/6yS6NJECATiBfOtdNl/MUZpkGYb1HZ8d\nenjEwEuDBR2HSdwJZ63eMP37qzbOuEJExmpSsLglme+6sc2rN37Qr2/3iQ0+d+u3zNForKaCVHNH\nmiOTdJLVgoSwHv0avTNPkC/aw3uTzZ06InQLs69StCaX5tUd2hHAQ/fGZ2p6OdHoEi1rM1gadjcu\noowoLjuI8jJrOZUCJtKFahJMpwgTYTO19CeHfpLDyVpgT6PoJhH5MXAaMHQd1T5PtdnlinVUMYku\nu7apKksojwN7rJsSkd4+jJduZXionjRCYNmXNKZKg3+OLFLJw34JqopIbhDzBT/mCb3JjlcTp5gw\nlhg3uOhPVq5/b0IsXps7oOepvkCgLdtKlrB46Uuc4XWhQz2vyYg6LKfCAg7EBmmfCWI9cxm9MydI\n/q7Xbpx2Yi1VoQ/Z/mBXQtVRPHMHEcsv5mNxvJv3VgIhmQDZyz9i2/CT6NLgvbFFa1lGhQH8ay/O\nsxLYrbF4BPeyBex4/UZmjRmrHW2AuWxPJPAWK0wcQM4pg8nNLCeWmMW2OwNi/jmGd9M+NGtMLSYc\nW6g7sobVm8XtKCGO1Y7Ou2zetIrKkmQszssePKNfCK4Ggrvpegxg4qE2gIi0BSakdn24v0v6rQUR\naWca9iNnnvDzQJvMpBB0iBxGD/mGnRFqn71oxZRngPEtGWOq6/7CeUv+PqukbFWwZ9fjA4YYrCua\nHV2/ZW7MdePnHs6GnjQHn3SS1UKISEcfxvBjaapkHhKbCZpvf8jW2tVU5nUho0nisIpKDGSPxqep\n5YxL/8Ti/5yp3fzj6GgqMJdi9y02uj1ow4us9t3JSPLli5vdIZrLM6xgJlvpT3I5cAR59vsUnePD\nuDiPYKAnbawF7LDfZTOTKWww7gdswY9JT74oNZjJNq0iXkYz7vQi4gcuCmKdA+qaSM5JdLEba08N\nph0fU8w5NFXgnsP2WALv1T09J0cTImIEMN8fR6cBl9Lb5xMzALBJa/g9Cx+pJfFNx40NXV80++YN\nWz7+tqobMg2rroNjtJ1Evn9nShpRh8dYEjGRV1V1y2GMv1cQq99xdGrwHn+FtWyilglSwBwpbpOb\nmU+eeuyo2/oTS82xInL83v74RHCufJHVs0o1EppAvhXAZAE79BXWRjy8a1W1+kCvI9WEMlFERkyj\n6FSSQs2zbIypP2Bwm3qJkX2e9rDv5bOrtxNewV5qFamqIyJnP86Sd8dpJ3s0HQIJPD5gS3gZ5WET\nYxlIgYeu85IC0LsSpATeO/Mp7duPnCYCXgvZgY35cVCsP9gY13YjKw7oRmr8AbEei+HeeiCSEy3M\nZQWdhnk7E6z69Os+yVi4/JWRItLlcL7fm0NVl4tIvw1bPr62qHjhJYA4bnyK5yUe3V2pQZojh3SS\n1XJkB7HithjNCiO2w28l8Ja8xrpOw7V9KFu+eFi1xnmVtXVhnL0yQlXVGSIy+m023fUWm84AEHgz\njvfUWqreP45ODRIsSC5FXKg9uZuPiamLX0xKiKCQfy2DjZ0/GmdpmN8wn+VawQnkI8AstkeWUOZr\nRyD6AVszbDWYS3HtaiojcbzTG89CiEg3P8asAjLbHEfnLA9lBlu8JZQZNdqN+urZk+lKug0JAAAg\nAElEQVTKb1lAD23Dzpk5VeVjinV2Msl6eG+ek6OIU7Lx9fwGfX31l5ULJYurdWDoMZb8LoLb23Hj\ndwB3QNLQugzziZv46LKBmusIsJRyy0SmRHAP97JRxxz8cVOMXaJWtZpgBlsZZXRmRRsfZ4/57S4t\noJq6YqbN+f2xdZHye3dez57QpEL8Me+z5a732XKRh/osjNkRnP9V1VkH82JSy8fzAUyRG4fSzqw/\n8wRJBfhvab+Mh/j8bhF5Ym+TRVX9WET6zWb7Dz+j5GwgHserDWCNPYtuZ3Yly9hC7TH/YePkKO5f\nReSHqqpxvD9+wNbvD9M834B69ZPbtI6XWBN2cCu6knXNjxgayBZfAKBKY/yRxd/fRp0J3HCQnp7D\niojRJadN12abPkzTRzCQHaupK+lEUlS7RUnV6/5f6i/NUUS6u7CFEJGgjVH6K8ZmtJcmzgj8ThfU\nLqfiBh9GgYlxx6kU2F3Jsoqodd9lc8zFeyiq7t4a3e6WoFjzL6HX8IlSQExdPqaY5VRgAMfQnhdZ\nzV2MpL0E+anOTQwk17hC+jZYe4iow5Ms09VUlgMrIzj/8uBvwIkhrMsFfHU4b5CsRWmgByYiEsBc\ncg7d+50h3XadV1V5gdWUEeVHMrRBzHN0G8+xUnMJ1HQhw9hADbUkymO45+vuFZKPSvxiPnYuPb5/\npnRr8tnxVPlhyuOwubt1Eckn2ZUkJEUyD0oh8L4gSQ/I9X9gfCCY6t6bpyXMZCtrjTrOm/wgfl/D\nG4Da8A7+Pe32mOsl2jd+P7UmMsV+43L6nt2c76aq8n1mJBy0feOZtJRUyck+jPMAM443FZjaeEZJ\nRE7Pwf+vXzAmo74uV1gd7uGTuh1Ev6mqU1KPnWhjvNaDLOlLTmYRteGllBsu3h0mxm/v57gGVkCQ\nvJm7ldnRBF6BHiah2oOJiHw3v8OQB0859tYmSsHxRISX374+5nqJrqkEJ02afSbdXdiKUdVIQKyn\nnmfVNdfrkGD9otzPdQdrqHKBF2PqhkXktf+y+YcW0tdB18RwHz1YyUQC740i6oYUa9i6n4V0JZOR\n5OHgMY0iorjE8Hhd1ztVxI2xdGxS3BEUi8u1j9zNx2ZM3foefa+k/r6McQHMbpMpbHBeEeEi7cWt\nzGaHRqifiG6iNgE8vZ3wy9sJdyEpMTHrq1i7IC
CpYvXm9iHJZatmPZ00KYq5V+bQhwpV3R4Se/ob\nbDjla/S2ATyUOhJ0aT+oSYIFkBlqT9s2BW5Z5foTgKmHO+a9xUWra2neeiWGi5fcHqu/XUTaBjDf\nz8LX5wQ6Z5gYMottV+wgul1ETqi/fBTCuvV8emQ0Fj4NicWF2jPjeVbdCkwBUNXpItJxFVUXrqKq\nN8luuJeA0waQHc9KzWDVp4346K3Z8eVUTAL2Wa+tFfDS9h0rHiotX0Nebu8GOxavej1hGNY0x423\n2gRLRE732aG7PM8ZKmLWuF7iL57nPFi/Vk5EjL2tT9yL8QySN11DgDKSsj37VSea5gvSSVYLEsO9\nYxWVQ+9i7uiTtSAjE1sWsCO8mDIngXeGqoYBUgKbh0Qg0kWf/oitt62gwjqTbkyULwrVT9B8XmA1\nv+JTT+AjQbq6eL2aO4+DIsj+fNiPGUSu0bhwHsAvJl01k9VU0Z4gMXWZwRZvOltq43i/UtXN+zHe\nUUUM7z+z2Hb5ZO3aRMxwVbIsp4xGfqKtjQjOVdPZ8vEmrcmbSEGGohRRSw/Tbvbx8UQYx40DjBCR\nGTs/J62NKO5fp1F07iTtkmk26mz8iG3qx3w/rIkGSVYQ8+9j6Djwm/Tz7/xMTNauWVNYF5xG0RvA\n6J2PVbR/T5pVpqAX2bhon/rbUl2rDYyxRcQ0MHZ7J24mE/jdewW1YlS1RkQufXf2b17q3/M0X9dO\nI6y4E2bl+mnh7aXLyhw31mo7Ki3L/z9Bf/btIwZdFurUfgCRaGX2srVv3V60feFVIjLRNOxrSTqr\nZNtWoNRT94+e5/xOVWN7PHkziMggy/S/HQrmZufnDfbX1BXHt+9Y/qhp2re6buKRg3x5XynSy4Ut\nTMouZ1IA81smkh3BmeHBs4ezs8cU45Ec/Nfdx7FN9LRi6vITPozF8PpZyLfH0vH278rAJne9r+pa\n512Knouq8719GVtEvj6AnD/XF1msz506x9lB1MvGF60l4TMx5kZwrmkFrdetAhExA5jLT6Ggx3n0\nsHb+mJdohN8yP1xF7Aeu6l9FxAQmW2bgIhGxE07kTeC1A9RuOmiISAZwRQbWVUAggZeF6et9yRl/\nwjSTy1ie57Jg+b9YtWE6WRl5qHo11bXbTUTudd34r1rbTGaqKWHGAHJGXUHfYK4EcNRjDtv5O6vq\n4njHq+qieo/v5sdc8SDjA42FUz1VfsJH4VoS43fOYmeIveB7DBzWuOYLYKmW8xhLVtZpov8eYsz3\nYaytv1y7k3pWQN2P5AJsEelrmf6bDMOaBEQSTuQZVe/pg9HwcCgQkf6WFZh/wcn3BYOBhg3kHy96\nLrF286xwQadjAkP7ne/PzuxMedUm5i97KbKjYs1nCSc6aV/19UQk2zR968YNvSqnZ9fjZedvQE1d\nMW99+H/haKz666r6+sG7wqOH9HLhEUDqh2Fa6q9F8NAVg8iNi0iT7qPkbFJWdA1Vgxz00XmU/Ki7\ntvGdRL5hioGnyjxKeIfN0Tjeb/Zj+P+socoq0XADOQFIqsZXEKty0X7lxDoDparuEa9Evj+IyGk+\nO/SLRCIyGjFcy/RNTTiRn6WKuk+YRtHrM9g6aJDmahUxby3VlqI/SyVYOZYVmJERyOnRp/vELENM\n1m7+6Pyqmq2/E5EJe6sQLSIjgDtMwzfOMMy4qvuW48Yf3A/NpyZo0rngidQfIuL3wfqP5j/RefyI\n72OaNp8u+QcVNUWcd/JvCCV/fLJq6kqYNvf3d9RFygzgfw80joOJqnoiMnkZFfffydxvZ6jlhHFs\nC2NxHO8H9ROsFCN60ibuT3WI1scQYbDmMpfiUcACgDDOw2+y4Y9DtV1G/ZlgT5U32RAO4+yxCURV\ntwbF+tdjLLnoOh0c3KlqH012m4Yt5F/xIzjBAlDVVcC1LR3H3mKavqv7dZ9kNU6wAIb0O89es+nD\n7OOGX4OVmult17Y7Jx97S3DqB/cML6/acCnw/L6NKN/Kzxvs71U4vkGykJXRkXFDrwrNWvjUL4F0\nkrWfpGey0iAi3xhE7qM3y7Ams0mqym3MqSkjOllV54hI3yDmywbSu4DMxHbCZgy3OIp7map+uj/j\n22L8KAP7N99lQGgguSjKAnbwLCvCEZwrPdU96hgdzZiGdbVtBx8cPeQbocJOI3HcOGs3f+gtXPFq\nxHXjp2jKcD1lIzOSpBjpWylZAXx2aGqPgnEnjx16VYMOxMWr3nCXrH5jRcKJDvmyWSARybSt4OvA\nSQWdhomqsqX4c9pkdPAqarZEPS9xvjZ0AjgopMZ9Eziuc94gtpYsti+e/CA+u3EhfCn/nnZH2PUS\nHXQPNlMthYiEgG4kbbN2Z/B9SgEZr/yvjG3WYuV+XVCzjIpdFkAiYgcw3+9Bm5EX0jPYlUy2UMcU\n1kXWULUkinvC3gjbiog/iPm0i144hHYuoIspswzk1Sjud/Z3CSrN/uH3ZU4ZPfjy83sVTmh2/4tT\nr+X8k+9jpyn0TjZuncechU/PjcVrm/ik7mG8d8YOvfLUHgXjmuzzPJfn3/iOBxpsLbPerYn0TFaa\nveX1lVQ+0dxs0goqqCVRB3wMu+4KjxGRQSup7Emy/XnBgSzVJNR72BDZ/hhL7nXQroqKD3NZGOc2\nVX3nAK7riEdEsg3DfuiMCT8L7tT7sSw/g3qfaYSCuRlzFz7znIj01yQLgYWNji+0TN/EkYMub5Bg\nAQzuc5a5Yt273RNOdCwwV0QCwBlAB2AFSR80ta3gv7p0PObE44dfLWbq7jmeiPDBvIeNws6jQpu3\nffqqiHQ82LVRqc7Bk0Sk/6Ztn97dtdOIi312RhPJk8xQHm3bFCTKKtdPAJoY6bYGUs/N8j08bGYJ\nEd2kNRRKw/udHRphNVUW8J9650yIyCkrqbzh9yy8IY7b0YdZksD7k4v+YW8SrNR5YsAVItL1M0pP\nTW1+N13z2DK4bnx5WeWGM3sVTmiyslAbLgUEn91UmSIz1B6Sn919Je56za8wep6DgCoclOL6ryLN\ndh2l+WqRrE3QW3/N/PBC3YGnSkI9Zus2HmZxJIb73cYdLKq6VFXfUNX5B6MWxlN9OYLbJ4GX76Ad\n6jQx7KueYKW4sHPeQLc5QcXu+WMwDKsLMPBLjh+a27ZHzLaayrGJGHTpONQAhosYF5uGr7h9Tq9n\ne3Y9/oHMUPs3LTOwXkTOAk48fvjVhlmvEN1nBxk/4vtsLVlEu7Y9FLjoQC90d6jqCuAD0/TtVhTT\nNGw4wm8aVTXuojf8noXhJVqGqqKqrNJKfsP8MPDzxnVEqhpz1bsvok4XV9WKqJPvqPdrbWTmvpfj\nb1bVp1N/6QSrhXC9xBOrN33g1dQ1rIxQVeYve0k75PbBMJq+1UvKVqmqt0c3jcbEE3X/WLXh/drm\nvsbXFc3GsoIfqKqzr+dNk+SI/lJKc/BIqPeIiGx9kmW/iuP2UZAA5idR3NtV9bB4Z6WStSNKjydV\nJdoZs
LuT0HS++eQ4IHq7clq2vLXNdLXqeq72W6PqLurjW5\nhSGLiKiLEpEQgCsBDASwA8ACVbUzWhTRaYIhi4iI6BSJSJYh5l2mGbjd9Zw80wzusu3YowB+p6pe\npuujzGDIIiIiAE0TmAIYAaBcVYsyXU9XISJZlhVeWpA3dtQZo6+O5GQX4GD1NqzZ+KeG2oaKtx0n\nfgOD1umpNbmlzQPfReRzIrJBRFwRmXKcdnNFZJOIbBGRe9t6PSIiOnkiMiAYiC62zNDm3j2HvBIK\nZK8KBqIbjve5Tf/PEPOufnljRl90zt2RPr1HIBTMQv++Z2De+T/MioR6zgFweaZrpM6rzT1ZqcVI\nPQD/BeAeVV2Tpo0JYDOAS9C0HMJKADem+yuKPVlERO1LRHIsM1Q0fuS8vhNHXWFZZhCeeti+exmW\nr/t9vesmp6pqcabr7MyCgcieOTPv7Z+fO+KYY1t3LsGqDc8vTCQbjli4WkTOBvDZ1OZbqrqyA0ql\nDtaa3NLmCesOLSchctzzTwewVVV3pNq+COBqAOyqJiLynXy5IH9cr8ljrzv8WW+IgRGDzkV9w77I\nhpI3fgjgpgwW2Om5ntO7R1ZB2mM52QUAZMChbRHpEbAiC8KhnCnDB54bBoBtuz+8LxiIrrGdxitU\ntbZjqqbOwu95sgYAKG22vTu1j4iIfBYMRL84euhF0XTHRgw+31TPvbqja+pqTCNYerB6e9pjB6q3\ne6re4U6DgBV5flDBlLNvuPSJrGkTbzSnTbzRvOGzT2YNKpw6PWBFXuiwoqnTOG5PlogsBJAuwn9f\nVV9rxflP6l6kiDzYbHOxqi4+mdcTEdERQqYZSnvAMkNQaJdcfqcj2U7sF2s2/umxfnmjs0wzeHh/\nY6IWnxbPj9tO4y8BQESGWWbokhmTbgkbhnm4nSEGZky6JbSrbOVsERmmqukTG3V6IjILwKyTec1x\n/4Gp6pxTqAdoGoc1qNn2IDT1ZrV0vQdP8XpERJTiusk3dpZ9NKawz/hjktauvavUNILLMlFXF/Pb\nuobyefPfe+CSM0ZflZ2TXYADVdt0ffH8mOsmHlPVD1PtZvTLH2tbVih89AksM4iCPuPt3eVrZwJg\nyOqiUh0/iw9ti8iPTvSa9vorpqWBWasAjBKRoQDKAPwTgBvb6ZpERHQcrmc/XVL6wR2DCs4KDeg3\n6fD+6trdWLPxpUbbiT2UwfK6BFV1ReR624lfuWr9c98AZICqV2Q7jc0DFgDEbbuxxbs3STumAOL+\nV0ydyak8XXgtgCcB5AOoAbBWVeeJSH8Av1bVy1Pt5gF4HIAJ4Leq+kgL5+PThURE7UxEzjWN4Pxe\nOQMC/fLGRKprd8crDm42PHVv8zz3D5mur7sQkRzTCJRfffHPItnR/COO1ccO4G+L7m10PbuAg9+7\nD05GSkREEJEggCsAjARQAeBlVa3LbFXdj2WFHoiGc++fNf2bWbk5TSNlqmp3Y/FHT8Ri8apHHCfx\n4wyXSO2IIYuIiKiDiIgYRuCbIvKjcLCHBQjiyVpX1XvI85wntLP8wqV2wZBFRETUwUQkAOCM1Oan\nXLS7e2LIIiIiIvKBr2sXEhEREVHLGLKIiIiIfMCQRUREROQDhiwiIiIiHzBkEREREfmAIYuIiIjI\nBwxZRERERD5gyCIiIiLyAUMWERERkQ8YsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER+YAhi4iI\niMgHDFlEREREPmDIIiIiIvIBQxYRERGRDxiyiIiIiHzAkEVERETkA4YsIiIiIh8wZBERERH5gCGL\niIiIyAcMWUREREQ+YMgiIiIi8gFDFhEREZEPGLKIiIiIfMCQRUREROQDhiwiIiIiHzBkEREREfmA\nIYuIiIjIBwxZRERERD5gyCIiIiLyAUMWERERkQ8YsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER\n+YAhi4iIiMgHDFlEREREPmDIIiIiIvIBQxYRERGRDxiyiIiIiHzAkEVERETkA4YsIiIiIh8wZBER\nERH5gCGLiIiIyAcMWUREREQ+YMgiIiIi8gFDFhEREZEPGLKIiIiIfMCQRUREROQDhiwiIiIiH7Q5\nZInI50Rkg4i4IjLlOO12iMgnIrJWRD5q6/Wo8xKRWZmugdqO71/Xxvev6+J71/2dSk/WpwCuBbDk\nBO0UwCxVPUtVp5/C9ajzmpXpAuiUzMp0AXRKZmW6AGqzWZkugPxltfWFqroJAESkNc1b1YiIiIio\nu+iIMVkK4B0RWSUiX++A6xERERFlnKhqywdFFgIoSHPo+6r6WqrNewDuUdU1LZyjUFX3ikgfAAsB\n3Kmq76dp13IhRERERJ2Mqh73Tt1xbxeq6px2KGBv6r/7ReQVANMBHBOyTlQoERERUVfSXrcL0wYk\nEYmKSI/U91kALkXTgHkiIiKibu1UpnC4VkRKAcwAsEBE3kjt7y8iC1LNCgC8LyIfA1gB4O+q+vap\nFk1ERETU2R13TBYRERERtU2nmfFdRB4VkSIRWSciL4tIz0zXRK3X2slpqfMQkbkisklEtojIvZmu\nh1pPRH4nIhUiwuEXXZCIDBKR91KfmetF5JuZrolaR0TCIrJCRD5OvXcPHq99pwlZAN4GMEFVJwEo\nBnB/huuhk9PayWmpExARE8DTAOYCGA/gRhEZl9mq6CT8Hk3vHXVNNoC7VXUCmobc3MF/f12DqsYB\nXKSqkwFMBjBXRM5pqX2nCVmqulBVvdTmCgADM1kPnRxV3aSqxZmug1ptOoCtqrpDVW0ALwK4OsM1\nUSulpsGpynQd1DaqWq6qH6e+rwdQBKB/Zqui1lLVWOrbIIAAAK+ltp0mZB3lKwBez3QRRN3YAACl\nzbZ3p/YRUQcSkaEAzkJT5wJ1ASJipB7oqwDwtqqubKltm5fVaYtWTm76AICkqj7fkbXRibXm/aMu\ng0+8EGWYiGQD+AuAu1I9WtQFpO66TU6NHX9FRCao6oZ0bTs0ZJ1oclMRuQXAZQAu7pCC6KS0x+S0\n1GnsATCo2fYgNPVmEVEHEJEAgL8C+KOqvprpeujkqWpNatWbuQDShqxOc7tQROYC+C6Aq1MDy6jr\n4uz9nd8qAKNEZKiIBAH8E4D5Ga6J6LQgIgLgtwA2qurjma6HWk9E8kWkV+r7CIA5aBpTl1anCVkA\nngKQDWChiKwVkV9luiBqvZYmp6XOSVUdAN8A8BaAjQBeUtUWPyiocxGRFwAsBTBaREpF5MuZrolO\nyrkAvgTgotTvu7Wpjgbq/AoBvCsi6wB8hKYxWS2OIedkpEREREQ+6Ew9WURERETdBkMWERERkQ8Y\nsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER+YAhi4iIiMgH/wfFhtau+2o+NAAAAABJRU5ErkJg\ngg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": 
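+ "# (make_moons draws two interleaving half-circles; noise=0.25 adds Gaussian\n", + "# jitter, so the two classes are not linearly separable)\n",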
"display_data" + } + ], + "source": [ + "# Generate a dataset and plot it\n", + "np.random.seed(0)\n", + "X, y = sklearn.datasets.make_moons(400, noise=0.25)\n", + "plt.scatter(X[:,0], X[:, 1], s=60, c=y, cmap=plt.cm.Spectral)" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XecleWZ//HPdeoUqkAEpaiggiJYEFExFiyoiUaNmmyM\ndU3bZDdr1jWbTTHZJJtfiomJ2SRqookxtmjsvaAoFlRARRSlqfQmZcqp1++P+wxTOGcozsMM8H2/\nXrxg5jznPPc5M8z5zn1fz3WbuyMiIiIiHSvW2QMQERER2REpZImIiIhEQCFLREREJAIKWSIiIiIR\nUMgSERERiYBCloiIiEgEPnLIMrM/mdlSM3u9xed2MbPHzGy2mT1qZr0+6nlEREREticdMZN1AzCx\nzee+CTzm7vsAT5Q+FhEREdlpWEc0IzWzPYD73P2A0sdvAUe7+1Iz6w9McvfhH/lEIiIiItuJqGqy\ndnX3paV/LwV2jeg8IiIiIl1SIuoTuLub2UbTZeU+JyIiItJVubttyfFRhaylZtbf3ZeY2QBgWbmD\ntnSwsvMysyvd/crOHod0ffpekS2h7xfZXFszORTVcuG9wAWlf18A3B3ReURERES6pI5o4XALMAXY\n18zeN7OLgJ8AJ5jZbOC40sciIiIiO42PvFzo7p+tcNPxH/WxRVqY1NkDkO3GpM4egGxXJnX2AGTH\n1SEtHLbqxGaumiwRERHZHmxNbtG2OiIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIB\nhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQioJAlIiIi\nEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERERCKgkCUi\nIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQ\nJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERE\nRCKgkCUiIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJE\nREREIqCQJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQU\nskREREQioJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEUhE+eBmNh9YCxSAnLuP\njfJ8IiIiIl1FpCELcOAYd18V8XlEREREupRtsVxo2+AcIiIiIl1K1CHLgcfN7GUzuzTic4mIiIh0\nGVEvFx7p7ovNrB/wmJm95e6Tm240sytbHDvJ3SdFPB4RERGRTTKzY4BjPtJjuHuHDGaTJzL7HrDe\n3X9R+tjdXUuJIrJTMrODIP0tyJ8KxRSk5kHmZ8AN7p7r7PGJSGtbk1siC1lmVgPE3X2dmdUCjwLf\nd/dHS7crZInITsks9mlI/hmOroJRMagC3gOeqoPlr0DmRHfPdPY4RaRZVwtZewL/KH2YAG529/9t\ncbtClojsdMxsACTmwD9XQ/82txaAvzXAgqvcc9/ujPGJSHldKmRt8sQKWSKyEzJLXAmjroDTq8of\nsRy4dg3kPubu2W04NBFpx9bkFnV8FxHZplITYUSFgAXQD6iKAcO21YhEJBoKWSIi25aH7jbtHmJs\n+iAR6eIUskREtqnsAzCzofLtS4FMDnh3W41IRKKhkCUisk0VroU3HT4oc1seeKge/Gq1cRDZ/qnw\nXURkGzOzT0DyNjg8BQcmQguHBcCkOvjwOch8QiFLpGvR1YUiItsJMxsBqf8APzM0I02+A40/A251\n90Jnj09EWlPIEhEREYmAWjiIiIiIdBEKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIi\nIhIBhSwR6XRmFjOzeGePQ0SkIylkiUinMbOTzaqfA3JAzqxqtpldYmb62SQi2z01IxWRTmGW/DZU\n/RecUAP7AXFgDvB4HXz4OGTOUudzEekq1PFdRLYLZjYWqp+Cr9RA9za35oA/1cHib7j7HzpjfCIi\nbanju4hsJ9KXw5FVGwcsgCRwQi2krzAz/SImItsthSwR6QxHwj7t/PzZE8gNAmq21YBERDqaQpaI\ndIYCFNu5uQi40f5BIiJdmkKWiHQCfxjezFe+fTaQnuXuDdtsSCIiHUwhS0Q6QfYqeCEHy8vc1gA8\nWgeN/7OtRyUi0pF0daGIdAqz+Och8Xs4IgUjE5AA3nF4uh6y10H2Mu+sH1AiIm2ohYOIbFfM7ABI\nfQM4BTwO8anQ+FN3f7KzxyYi0pJCloiIiEgE1CdLREREpItQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIJDp7ACIi25KZ7QPsA6wHprh7tpOHJCI7KIUskR2cmSWBT0H8MPAcFB8B\nnt7Zuqmb2ShI/xHS+0P/LNQbrMYs+WPI/3Rnez1EJHpqRiqyAzOzoyB5N/RLwojukHeYXgcNiyB7\nkrvP7+wxbgtmNhKSU2BiNxhtzb9fLgPuqIM1f3TP/FtnjlFEujZ1fBeRDcxsf0i+COfWwrAWtzjw\nfAGeWg65fd19bWeNcWuZWS1wKtAHeB94xN1zlY+vfgaOGw9jy/zMqQeuboDMge4+O6Ihi8h2Th3f\nRaSF9JXw8erWAQvAgCPisFd3sAu3/bi2nplZ0uy/E7BsCFx/IPyiP9ychKUxs89VuM8gKBwKB1X4\n4VgDHJKA5JcjHLqI7IRUkyWyAzKzBMROh0Pa+UVqbC0s+BLw6202sI8oCT/oAf9+HtT0bv509ULg\nr3B
tzKxYdL+lzd32hF0ykKyq/Mi7J2Ha/lGMWUR2XprJEtkx1YJ5mKUpJwcsBhhqVvOCWfr/zGx4\n26PMLG5mp5lVP2pW86ZZ9RNmdlapmH6bMrN+DpdfCLW929y2O/A5qEnAb8ws3ubmLNTVhGXSStY5\nFFd26IBFZKenmSyRHdN6IA+rU9A2kqwE/gr0Ak5JQfVhsOBgmHqhWepqyH3L3d3MukPqceixHxzZ\nLZQ/rR4BU8bC6rlmdoy7r96Gz+mzw6HYvcKNA4FekFoOE4BHAcysByRvgGICFgB7lLmnAy/VQeaG\nSEYtIjstzWSJ7IDcvQCxG+H5NsXgOULAOhK4ABgF7A0cn4R/rYZuX4PYxeHY9E0wfBR8pRscBAwG\nRgNf6gajhkP6jm31fABiMHhXqG7vmF3Dz7SBzZ9J/QRG7AkTDe4BPmxzDwceycH6ecDjHTxkEdnJ\nKWSJ7LByP4Rpa+C5AuRLn3uTMIM1pszxtcCnaiHxfTMbDMWT4NSqjX9MGDAxBRxhZvtGN34ws6Fm\ndoSZ7VGEJasg097xK6FA6MuAmdVA8QI4Lh3C5GHA7wlh6xVgMnA18OpqyE4Ajqwyu7vKbGG12fyU\n2VVmtmeUz09EdmxaLhT5iMwsBcTcvbGzx9KSuy82s7HwzC3wzCjYMw9LauDYtjVLLQwGkj0hexHs\nU4R0heMSwMgYvHIq8HZHj93MJkDVryA9FHpkYU0abPZrZOxEyk9nLQOWh38+VvrUUKjNh1AJMA4Y\nCUwHPgBSwKHA5HlJ+FYKLj0KavYCywGvw7+8Al80s3Pc/YGOfo4isuNTyBLZSmZ2BlR9B2x0+Lhq\nAWR+AvwxLNd1PnefB4wLRe1vjYGqb0NVO7NPBqQLUFcLVe2EMYB0Amjnir2tY2anQ/pvcFoN7AvE\nq8NM3MxRBe7P/5Fc4yVQ1TJorQZuDg2v/tvdm2a7cpCPhyXBpu4N3YDxLe75JkCvbnDppVDb8jKB\n3SE1ElJ/htvMbIS7v9/Rz1VEdmwKWSJbwSz1Y+jxr3BSLQwnvInP2xMevwpWnmpmZxIqxfciFKHP\n7MxtW9z9LeAts9QYmL8X7Fvh6sA6YG0aeBre/TJ4ujmgtPVOPWFaqMOEWcHEn+G8GhjU4pYEoR6s\nJrGC27M/J9e4H/guUL0Y6uaGNc3v5dyvaXGn2ZCrg4W1rcq0WpleF6dxl5PbBKwmA8NZE9PhK8B/\ndciTFJGdhmqyRLZQaauaf4Mv1sL+QJzwX2kocEkt7HICxKdBYgH0fRi6PQ+p983iF3bmuIPcb+GV\nAqyrcPuUPMTvAx6EhpWlmZ4y3gE+bAAe6eABnga7xloHrJaGAT0KBTj/dfjm0/CD2fD1PPTPuf+8\n5ZHuXoT8j+G++vKlXLOBuQWgZ9t2rS2NClN2Z27d0xGRnZlmskS2WPpyOLo6FIq3lQBOqIZ7D4Av\nA+uqQv3Pqm4w/f/MkkPcc9+PcnRmNgA4AMgCL7p7Q9Nt7v6OWeqncP1/wOk1sCdhpqqOELBeWgm5\nr5daOJwDdz8Ba2tDt/QqQliZ4fBYA+TOjmBZdDjsWe6FbXp2wJAkrOzv7q2aqJqZAXF3zzd/tvhr\nWD0arjkHxtfAEINGYFoDzMxD/jNxuKu93zYTrf4SEdl8+sEhssV8HAxrZ/+qPYG1wG2EUuy9gCKQ\nr4bYd83sHuA1YAIkz4JYLWRfAr/J3dv2GNhsZrYbpK+DxHGwawZyBqtiZqnflXpf5QHcs98zi82B\n2/4HYn0hnYf1KYjfA7nL3H1ROM5fMrPDYdJP4PHjoSoHmSQknobcN9192taOtR11UJejcsU9UJcn\nLME2Pe9Rafh2DE4vQjJttiIPvy7C1e6+zswugexf4MnLwEYBDZD7GxT+AKxwWL8QqnavcLZ3oFCE\n5zrwOYrITkIbRItsIbOqxXBJf/hYmVszwIuE9gDjCf2omn6XyRJaMU1fA74cuveHg2shbTCnHt41\nKFzqXrx5y8dku0JyOozrC0cmmuvRVwH31sOiJyB7esu6sNLMzzBCW/j57r6mncfvCfQDVkbZgNTM\n9oLkTLi8Klz911YD8ItGyA9x92VmNjEJdx4NVQdBrIbQx/4ZaJgL72dh3KbGGzf71hD49uehuu2M\n1jrgt1DfCEe6e4fWn4nI9mVrcotCluwUzGwkpP4FEiOhuBoabwDua720VPG+NcDJhEL29yF5Dhz+\neTiuzdV364A/E65mGwKcVubRHPgLkC7CubHWReVLgRsaoPGT7v7Elj2/1O/gwEvg1DIF7Tngt+vh\nw7Pc/dEtedzOYFZ1Nww7Cc6sCvVuTfLArQ3w/m3ujReZWY8ELDofage3eQwH7ofsG3BXo/tn2z+f\npdLwxAA4eALUDCQ023oLeATqG+DHOfcfdeiTFJHtjkKWSBthtib1W4hdCGOTsHsirDRNXQerlkH2\n403LY+Xvm7wC/NswoAi947CkAKsdij3gi7Sezfozoc/UK4Ru6v0qjGoO8ATwhTK3vQ488LJ7w6Fb\n8BxTkFgNX61p7gnV1ssOjz/i3nDy5j5uZzGzWkg/AOlDYFwt9DZYUYQXGyD7FGQ/7e4ZM/vK3vDT\nz5UvjqMO+CU05mGgu7e7L6GZpWLwrwm4rAB9HWIpmNYIP3D3+yJ5oiKyXdma3KKaLNnBJS6HXufD\nRdWtW1ge3B2eroYpT5rZeRA7E+LdIfcacKu7r4PkldD9G/C52jCJ1eQD4GaMPwCH4xxA6NS0DPgn\n4Gmgbztj6keo2SpnBHDvAWa2q7sv3cwn2S/M+FQKWAC7GfhhZjbU3eds5uN2CnevM7NjITMenv4i\nxAZCYR5kfwdMbVryrILj9qsQsCDc0A8yi0Pvhyc3cc4s8HMz+wXQHcg3uNd33LMSkZ2RQpbssMws\nCcn/grNqN+4RbsDYBLw4DGqehYOTUBODuXUw/1dmdhkkroAL09CjzX0HAv9Emr8wnCm8y0tkKZBl\nNJAk1EOtoXLoWUMogyonAaSzkOtJWD/cHOsgnwzLghXaX1FHNfQqwBtVZvdl4PMtmnZ2OaUgNbn0\np5JCcROPU2z112aft1ICFhHZIuqTJdsFM0uZ2blm1f8wq3nYLHFlqVVB2+NiZnayWdXfoWoqJGtC\nhU5bReAWYGQc/iMNx8fgCOC8Wri0BhK/hn3YOGA1GUQjPXkTYyR5DqNAbEP90AGEJcNKXiLspVdO\nHdCQJNRvY2a7mdk4M9u3VKi+EXdfC8kX4I2KZ0zyIseQsf+AqsHwiTRscXF9V9MI98+o3PCLNcDK\nUD3/8rYbVWtmNtos9j2z+E/N7Dwz6/AO+SLSdSlkSZdnZvtAcj7sfh2c8Ck49SQYfQUk5prFv9Li\nuL6QmgF97oLjz4JPjYYxqZAn7qf1hMYcQiH1KWz832BXYO8UDGinjQDA7mQ5hBcZyBQM421CoDsc\neBWY2eZ4J1x5+DaVQ9ZLBUjcAwwxq54EyTmhoWn1q5B+xyz2
qfL3a/w2PFwPS8rc9jJJ5jOakDjO\nCdN6p0S9ufM2cMciyM0qc0OB0E01Bje6+/oyh0TKzHYxq5oMNVPg8O/AMZfDkN9BYpmZVfgaisiO\nRoXv0qWFIujkXJjYDw5p8/2yCri+HurPBR6A1Cw4cN9wIWDLQzOEoLUXcEzpc3cRuopXqi9/FlgB\ntPd+eG1pDPuVPn6DcO6DgEXAHYQ8sw/hbf81IOeQew8G9IMzaqB36b45YGoRnloDuX+CxB1wQqkJ\naIoQEOcA/6iHxq+7F64r81qdC/EbYG+DfasgS4pXSLOaC8i1qhJ7GPIvwf8W3L/bzhPs8sxsTBKe\nGA3pgyFdS3jln4G6lTA9C8dv6427zSwOqWnhe/GkVOsrJD8AbqqHzKnuPmlbjktEPhpdXSg7HDO7\nFIZeBZ/vVv6IWcA9r0HjVdDtRriM8hO0qwmh6DJC3dJfCQGr0mTOGuAa4BuU3wN5OXA98O8tbl9C\nuMLwIOAwQv30DMLS4RKgRwbWzYDcBEh+F/wr0Dcf7r8oDbGXofEiSP8dThoFB5f5/7EC+F0WCruV\nu2LOzHqD/bKWxD/thSf3I88+tH6bB3gBeBKuz7hfWuEF2CJmFgNOr4LLC7CfQdbhrhxc5e6zO+Ic\n7Zx7twR8NQ7nF6FbHOY2wi+A2909F+W5K4znk/Cxm+HL3cvv+7jlV5CKSOfT1YWyA6q+GMZUCFhQ\nmiXaF5I/gEOovALeG+gJ/A6YUDpuCZVDVjVAAW7Owj+1uTLxQ+BvpcdpGcD6E9o63EpYFnSa32QT\nHu5XfBU4BHJXAFfCkvGlB3nD3eea2f4Q2xtGV/iP3BcYlorx9vyE2b/l3f/U8lZ3X21mf0iQO+tM\nSFb6abAEGnPwboWbt4iZxVNwWzeYeAzU7kFp8x24+EX4vJmd4+4PdMS5yim14PhW6U8XUPVFGFch\nYEHpCtKRZra7uy/cliMTkW1LIUu2ubCcQn/CGtgSb386tXvlK/EgzNEkc5Ab2P5xEK726ws8BDSs\ngQ9q4PBk+c7i0x1iz8LSt+Gq82FEEvrEQ5uGt4EDKb/U2As4G/gDYdYsTenN1uDddIzbvlRD7vws\nLMvCZwmFYbnSH4DhsFt+47mnlvZiGHO6LSH/m6RZn5z7z9oc8EIDrJ4D3cptfLweeAPMQ1fUjywG\nX+8LJ18ENS2vbTweksMheSPcXmodUa5gbAdk/UOgryQB1Gbhwz6AQpbIDkyF7xI5M+tjZqea2elm\n8R9Bcimk34HUXEi9Zxb7Smm5qen4YWap35ulVkN2BNxNWHIr15x9LZBJQLEA77czigLh/WwUcCkQ\nq42RjcW4idZX7BcJS3yP1UHmX90zX4TcMHjjfXiGUMeVAo6m8kzFLoQtdFJtjhlGkQn0J1lzKuyR\ngOf7wD2D4JYkvFNl9jhQA/XtruEbdfSmyMUhVf4gFPw389D06Qt3QP0cWl9buRK4EeoMfunui9s7\nz+Yws1gC/vOUNgGryUBgJMTiYYpvJ1GcC8vb+RpmgPVpSleQisiOSzNZskml1gEnAv8Kqd2B9yD7\nV+DuUhPHSvfrAenfQ+IM+FgGVnWHXWNwErAb4e1/wUB46Kew8iSz1DLgXEh0h24OYy2EokWE/Xlf\nJ0z+zCPUYmWAdUWwh6E4Ed5Nhpqlco1AXyPMMjV1aD8w0Z1XGc4iXuHXxOgPVOMspEi+WCB3lbu/\nBuDuC82qDXYn1Gp1JxS8d6/wzFcT8k+532EOYh6PcXbpEdZBt9MIkex5OGYyjMqzPBlu3aXM/YvA\nq4ygSC9gOPjM0F7+Fy2PcveHzezs2+C6Wug+AOxDKC6DmMGPcvD/Kgx+Sw2JQ7dKmysDjISqt8Me\nQ9/voHN2cZlrYMopMKa2/I/YaQ6JZ9xzy7f50ERkm1LIknaZ2emQuAWoDvvxjQCKo2HaqbBitZld\nAPEjILEXFBZC/s/u/rqZVUPqWRixDxydhj9XhfBzPmEprEiYmRoCHFMLd50G4wowOh5KlN6zcIXf\ne8CngaGEvlbXEHpXHUgIMgtisHwi2LswaD/4Sww+WTo+Rogv0whd2M9v8cyGsYa3yVHP5eSZywfk\nCCNsgNjt8Dkzu7J5KdNWwughYUxG6HU1pMKr9hKhyXg5aWIkaaDAGOA3wKlsmBuLN0LvF/CZzh0H\nwAWx1jVfToyHSZJhJrAHMAiqZ4cvykbc/UEzG5SF41bDp9NwQAJWZ8KL0puQ5D6qmIEbzdc/Li89\nn6YuY6WouTPNmk+GxqfhlmPhzOrmpvRFwhWoT9RB7rJOHJ+IbCMKWVKWmR0E8R9C8pTw/vg5wr58\nTcbG4PU+cO/9cEAWuqVgYREWfN0s+QEwCXYbCqenQw1UHhhHaOA9hzA7BaGgPAOcCwxrUYi0H7A3\n4SrAx4DjCIXjh9B6qW5/4ONVcP0wmJeBA6vgUQuVR70Js0qDCJM9u7YYfxboz2vM5Thap5RS/BtC\nmKoqrSU2/B6m/xIuqYXnCX+eAcbTnB+cEOhmAF+q8MrWUyRHDc2LiVmay+oPh8QLFPaNs6xo/CpW\nYCzObsBaUrxAL9ZxNjn+WHpF1kOhEFYBKzkgCbf0h/SB0D0BvA3HzoYfxszOL7r/vZ37bo73cpB7\nmfCV7UZ4teuBxwmvaxwyufBF3Cm4u5vZGfDBNfDLz8PgHFQbLDDILYLcZ929cudYEdlhqIWDtFLa\nnPcuiI+HQ2pgPjAcOLLCPW4nXKWXJ8weFQiF4RBq2wEW0LzdS4wQkg4iRItlhLCyFLiQ5t/6XyMs\nEa4jRJECIZZ8mY23yIFQj3VTBpgNsaHg1XCihaBWrmv7zcB+JHmbk3iLMYSwM5MwE/M8FB3GufvU\n5tcl+RYcPQCOjIfs9XfC38NLjzmLUL9eDXyVcpM3xtMMZzLnkmcdYSbrClqXuf8QcmMhPgpiU4mz\nggQ1FDmY3Ib5uVsI8fIhqG+AI919+kbnMuubhHdOh54j2xSQLQZugIYsTHD358u8QJstZnZjCi44\nl9CJrEkjoZpuLhSzMNTd53+U82yPzGwX4ATCN8VM4OVNXOghIl2UWjjIZild3TeBMFuzEnjYN2yG\nm74dhn4czqoK78s/Jmx6XM5awnLekcBYmqNCjjBxMa3076b5jfmEmqqWM2IfIywHPgI8DJwFPEV4\nPzoZ2JMQK1YDk4AbgYvYuHfVQKAqDXU1KbLzc9hQZ0q6uVFoS68RQt3ZFFhCA/AoMJVQtdUd6A+x\n5fBcyuzlHEx097VmdgQ88yhMHQQH1sJw4sygwFTC4t2ZpXH8FbiH0E2+qWl8EXiNJJOZUCrgf5mw\nAU/LgJUDHGLVEOsPfJICIWC2lgZeglwRXigXsAiv2qXDId02YAEMAE6AqifgSkKR3FZLw6hTaB2w\nIHyFPg38CnLZcMr5H+U82yN3XwXc1tnjEJHOoZC1kzCzgcAZwOFx+ERvYADEPoTCYoinzH6Ug3ug\n6lg4oyq8secJfz9
FeJ8eQugr1RQLnifMpxze5mxJQkBaRghhtYS3YKd1wGrp48CvCEXtrxCW21q2\nx+pN6L5+L6G+qm0usNLxdUP7AUtwBvIhC/hNaXyDgQbCUt5CwvJnihjv8SzhgvtRhLL2BYSy/EGQ\nfA87PIYvMbMj3X2ame1nZK/vxzMX7IHFZ2Gs43Ra12B9FngA+CUwGCNNgnl0J8M55OlTGsXLwCVt\nnsWb4ZnMfRcGHNX6BdigqfYpC6/l2mlJn4KLDik/7Qfh+dpDcJyZpbd2s2gz2yMNw/evcHsCGAfJ\nyeEL+pFmzEREtjeRhSwzm0h414wD17t7R13NJJsQlrb4OOENdjakr4Dkp2F4DHqkjAWsYglFoDcx\njiPLy/Dfq+AiqKoOF541NdLsVnqYNOE98hHgM4SlwOmEdghlR0GoV7qTMNM1l7B0V0k1IdpMIfSf\nKpcvDDgKuI5QkdSyaUAGWIExgu68wWogRqH0+YXhZSBJWNo7nTDPspA8yzmPMA/VZCrwIAmM3Sky\nFKirhhkvm1U9BZyZhtrD8PhgnGlUE+ajWkoTZrXWYtxDFW/gOHsQ5vbeIdQsXUjr6weXEvbby8NX\nF8LfPyDMi7X1GpCH+Vn3MeVeySYO3WrbuT0NxKBYbC6M2xoDekA23k6Y6wOxWOWrBEREdliRhKzS\nctQ1wPGEd7ipZnavu5fby1U6iJklIPljSPwLfKy0XcvCWtjd4JwNV6qFxarFrOJmVrEf8/kAZ3Et\n2N6hfukEwjLbh4QNWF4lRIIjCFdH3QxcTHhfLtdmoElfwkxYX8JK0cbLXq0VCAXr49s5ZhdCPFjX\n5txTAcPZld7M5BM4vwcOpcB05pPjbMJsmhEC5BzgDsaTbxWw3gEeIY1zEb6hpgzgxBjcdxTMeiRP\n5uX1UFwLsRi9qHzhXA+cUQzifU4gyxzCLNQngZuAO2DdGOieCuetfwesABe7+6Nmdt5f4NZToXp/\nwn/URuBV8KdgfXszWE0M3l4YNkksq9Q/oJHWjcK21NK1kCpQuX3qqhDk2mtiJiKyQ4pqJmss8G5T\noauZ3UqYPlDIioiZJSH+NFSNDdu39SLMzywlLF+1bRU5gLB6+AAFcoTAMrJ029OlP+cQlv16E2qM\nLi4d8x5DR7T+AAAgAElEQVRhVitGeH8uV1gOoY4qSViE24uwQDa2wrFrCAX0u9L+pEqxdHvTW3qG\nELBeAJwEC+mN041QdDYTOItGHuI2Gkhh9MZZTYosBbJMaPPoD5MizxnQKmBROt9pKZg/Mk/mpqnQ\nsDfUOutovX1Oa8ZqulOgH9AUdhyIQXYlXPoUTIxBdQaed/izu38I4O73mtnEh+Cn98OB1ZCth1Qc\nnsjBf2zOLyyNcNVkGDMSupX7jz45vHi/d/fiph6rEnefW202exaMHlnm9gLwAjRkQgt8EZGdSlQh\na3da/+b6AWHHXCkj7FfHSELR0FPuvm4L798XUlNhlz3CbFMvNlwjRxWte363tBeh1HoMYXWxKSgc\nQ5i9+gvwBcLy3XOEENQfOBj4EyFkvUSYsCznBULh+suEoPcYoaFo2+W1AqHovRehtcMrhC5L5cwl\nBK3rCZVUKwhhciyhnuvdDVFxBKEy6jxgX7IsIst61tONEA1fonU0WgmsIdbOuWPAuFp4anyOzPSX\nYEw12XSWuYQrK9sqkGAqB7eZwXsvPNIy4PaMe8WiaHd/BhhnZv1zIQUvyYVC6s31wHp49mb4+Ceh\npmnerx54GrKzYEkefrIFj1dWI/zbvfBgN6jZo8XnM8DdIWC9SPgGEhHZqUQVsjbrEmUzu7LFh5Pc\nfVIko+miQrhK3wTV+8LAfFi5WZw0S/8Bspe7e7l9ZMpI3wsHDoaJNMeGwYSGnXcS+lSdXm4EhPfu\nAbSOG0boR7WQEJCOIQSZDwghq2nm6gvADaXP7d/iMYqE2qp5hGXGuwltGs4lbKw8j9DCoZbQTGAK\nIQxeSghmzwJv0dwaocl6Qhg7kTBHlQO+Qvg2vpY4OY6iuGEHw7bPqGVXcid04mw5B1UPxKkl327f\nzJ4GsQEZOHUWPOBkD4nx96oiF9HcTR4gR5w7GUi21XnzwCPgOfjp5l7KX9rzb4v3/XP3opmd9gH8\nz//Bv/SCYgJ8BaTjcF8OvuLuq7f0ccuc52kzO/tmuKkPJPeE6jrIzQq1WA9k4QK1LRCR7Y2ZHUN4\nA9xqUYWshYRr9psMIrxDt+LuV0Z0/i7PzPaB5BQ4sTscaM3LX2uBu74Ai4aY2Vkt35ys1KSjxceH\nJ+AXUH14CB5tl6zihAqgqwmLZ22LyYs0bxNTzhhCL6hjCLNNTeFjCaFnVQ/CVXo3EdorjCgdN4sQ\noIYTQtN5hLA3o/S5lYROT3mgD2H2bb/SfacRZoXuLP19cOlc8wmBbwwhAI4GfkeYj5pBjAaOp8C4\nFvn+bUJ8LKeKMG04nzDXRulVKLCuNK5K/zWWFaAwx93XmNlRwKFGww/h2uNgUAH2SMVYC7yO4exH\njmzp0eYCTwIr4H2H/6twgg7l7jngm2b2/RXhhUsAr+fc22tgujXnedDMdl0Cpy4JX+Q64F53f68j\nzyMisq2UJn4mNX1sZt/b0seIKmS9DOxtZnsQ1n/OJawXyQbpq+CobnBIm2TUA/hcDVx9ImQPN7OX\ngG9A6gvAHmbxPCSeKG1APDFP1kLgqFR2XE2IEXPYeKuXdwlF5G3rj5r0JESRbOn+JxDmfp4t3X4X\n4UrDLwC/JSyEDSb0utqdEMb+SghyRxNC2luEGahs6bimXuvLCO0ZwlWCoR7sdsLsXqE0xs/T3LU9\nARxCjEn0IrthPqtJI6HxRNOjtdzNcAVhIXQI8A9CF7D+hMXKXYGFvEYId21lgRczkL0OQmdvQso7\nMVzROe9smH9+d3z8eZBcS4iYD5VetX7ASsgW4GR339RVAB3K3Rto/sJFdY48oXjvnijPIyKyvYgk\nZLl73sy+SrjePw78UVcWbthouRfQGxLHw5gK61JJ4IgaeOp2sN3DjFOBEGAOTsGUk8PszgTC/Eil\ngNUkTlgMa2kBYbZoApWKtkNdVw9Cq86BhPmfBwiTFAVCfl5EaL1wJiEkLSaErRzh6sRehOL7h0of\nQ1+K9MB4jzsp0psiudJjxgjLkqeXju1DWG6spC/9MZYTSt8PKX32beAJQiQaTKge6xv+5FZA4yKo\n2Qdi54DNILQ37Vc6toos8GDpuY5o8dqsA+7AyFYZTK4yeyQD33X3aQDuXgfcaGZ/bYQH/gFHHge1\nny2N43XwJ0Nivdzd32znSYmIyA5C2+psA6VwdTGk/xsKu4f9dGuT0N4esXMItUznEmaFCoRZoAcJ\n8y6fI4SnmYSIcWGFxykSmmLWE/pU1RCCUT0hViQo3w3AgVsJM0wNhCC1iNCM9OTSY44pHXdi6T5N\ns1GL2HANHf2BEcRZjfEaB1LkFArECLHqjtIze58EYbJzBqGn1a6EmbDLqfy7
wIuM4HEayGWXQSID\nMS+N9IDS6OKE2axfQCYbBj3D4KYrIFFVGvENpWOThHm9LPA8KRpJU2SgQz3GB3YAcHrpWszp4E9A\nQw7OcPdHW44qtNLg4jT8Zwb2Miim4KkM/NDdn67wZEREpAvTtjpdUAhYqVuh5+kwMR2W7tYR6ona\n6y60ljB71NSOMkGoW3qUsGzXdL99CbNE82ndUrPJa4TYMJEQIz4k1EQtJ8wwrSEsC46nuc1DljAX\nNI9Q1XRs6bZBhLqupiW8pqsBm/QjzPwcCTzPSeR5nEUMZyl9KHAQTu+Wr02Lex9BgancS45/JoS2\nhYQFvzcJvdjbKpLieQ4lx1OQaQS+CqnebDwv93b43Bx3/y8z6xkDb9qU5zlCRdhRbe4zniwfkOUW\nZrEHcBrNG/nEgXFgA6DmJvi7me1aWo5rHhzcnAldU83BG1X4LSKy01HIipCZ1SbggTw1R8M/07yP\nXU/ClWhvEa7Ka8sJbQyOaPP5dYT375bl3AlCv6vbCV3QRxGWEhsIpXHPEArPmxpuFwj9q6oIdVKZ\n0jE/IwS2AmEWrWkz5paxqMkMQpuD5aXnQmlcjwPLSbOIz1BgT8L2NjnyfJzW32w5wixWVemeh+Ek\nWcezXA2MpsBgQqi7v3SOIa3uHecePkYd/YDFIQF+70/wnbOhZjAhaOUJDSMehLpc2PAQYJ1BwwpI\n9iHMA/57mWfY1Ovewc5g4y5jlEY0EJgfCsj+bGbD0/DdWCg2i8UgY/DHHPwvW3F1oIiIbN8UsrZA\naRnoEMKa22x3X9jOsckUPBknNSbPcTQHrCZHE8qu+9C68LwprOQJoafVo5Zub9v8ciihfPsZwkxX\nihBj+hLmXRYQlgdXE/pf9STkgnTpPElC/VHTdXZHEa7+m8fGIesDQqPSswj73o4tjXcatWSZQK5V\nGf4nCJVfvyoduQuh8PzV0iMXCF24rgfGUeQ8irzOqyzgdVZTpEj+Hrjpk3H6xAoMI856jFnsiXMW\nOe6FhjjcmnP/Sczsg5vh/6WhR3corISUwcwcfNndX4bQ1iBpdu2z8LVTIV0kXAdZzmJCzCsXsJoM\nh+6L4EgzeycJjxwB1YdAvBuwChLPw5emw2fNbGxTc14REdk5qCarHWZ2AKT+HeLjwHtCrhfUFqC2\nCCvSEHsWMl9y9zll7vuZAXDdCuLdcvw75ffie51QSL4boV6qgdDCIAv8Cxt3UnfCVXyfoPzSIIQL\nuxKEgvbrCcuDSXpTZB1Onp6EgNeTMLnyHCGAjSUs81WXzvMGcF/puINLj/kuoah9AgleYDCrWAn0\nx3kf51zKb1C3mFBcPpKwbc1AwuLkboTZrMGEqrOXCB1sjTDPNxc+yLgPMrP94zBlF6geAsn9Sq/U\nZFi/Ct7NhieaKz2ZWYQmXL2ABe7+btvxmFmfJEw/FHadAckLaN3hqslbhC6aF1R4pQGeBZ8Evzc4\n+2zoW66N6bNQmAxTG93b7qTdLjPra3BeAoYXYEURbnH3mVvyGCIi0jFUk9WBzFLfCxsrj0vC0MSG\na8R4BziJEBFeOg6enmpmh7j7vJb3r4Kvj4du925YuCrnAELkuIYwnzKbMEP0BKFeqm3IMkLj/IcI\nW9y0nR1bSIgGXyLEAwP+kxS/41OsZhowm5XU8zRhtqsnIbD1KZ3z6tI4MoQ5ppGE+qtJQAyjO3G6\nE+NhxlPkKIr8jbDguYQw93UmMIxQ8u6EubC7gVMIc2VvEqrDmp7ZAYRwdRitN0N+BgrzwpQZ7j7T\nzPZYCZesgy+8Dj0MFuahJgb7joSfxsKrZw6vZ+DT7v5KhRcdd19pZmNegevycMqzED+zzHGDCF3C\n1lM+IjvwKqzPw/IBkK7UJ34cxCfDaDMb7u5vVRpXS0mzyxPwg+FQ3B1q1kH+VbisymxSBs4uXc0o\nIiJdmEJWGWZ2FvT4T7i0unWjzmGEeqXbCZ3Gx8eg0AOmXE2ojd7AYciuhIW8WbyOb1Ra3eQdwrJg\nFXA4odapgbAFze6woX95k10Jy37/R5iR2osQAGcQ6rhGEuaHcoS+UsuJs543CQHnDIr8g9Vk+SQh\n4jR1kfgkMe6hN29zKgUWAjOYyWpgAhn6A3U0kCYsKiZLo3yfsAS4B2FG6s7SiPuURmmEULUfoWfU\nYFpHxyo2jqDrgSmQycBVG17P0Jn858DPzaxnEl47HHY7GhJN38QF4Dk4eHLYkHyku6+o8KLj7kuB\n08xsxCx45jHo9XFINMXWOuAhaIzB+geg9tNQ3fYShalQXA8rDapHVO7oSiK8ZoW3wsroJkNWwuzS\nWrjyYqjq1eLTx0HiLjj2XXjWzH5DyLBPf5S9B0VEJDoKWWVVfR9OqSn/vjmUEIpeJez3NzYOk08w\ns74t39QNPlwL/ceT5x0mk2MErVtiQoghTxH6Qr1I8zYyowjNBa4FxtG8x+B0YDpDyLM/WSbzEPVA\nASPEnv6EWajxhOXH9cBt5MiTJcxb9QIuIs/t3M9qHifOXsTI4MxhKHAmBdKlM84hw+rS/fZqM3In\nzH3tQ1hYvJBwbeEqQorYmzAztRshRL0ITGbjRhNzaH6VC4S5vIegrgC/cPdXy3wBiMEXh0K/CW2+\nf+PAxyGxEnq9Af8GfKfc/Vs9D/dZZnbAy3DjS3D07pApgC0OmzHfnoXL5sA9f4DRR0G3/oTrPqdC\n/RxYn4MTDC4utLdLdHhuXnqK7TKzeBL+9zNQ06vNbQngLKj6BRw4CK5ZDfn1UGdmF7v7Q5t6bBER\n2bZUk9WGmfWBxCL4VoqKe9jNI0SMfy59/Os1sOr4puJqgJjZ1/eFH30GaqYBD5CkyKEUN+zx9ybG\nSyTJUyTeolaq5ebJCwg9sBYDcYzVXECOPQhB5k7CDFJfYCFJ3iOOcTBODxK8j/M2B1NgAk6MMC30\nVTZcNcf/g2wSbDwkh9N8nSCExg+PEDpiPUCYHxtDmIVaQghM6wn93q8Drijdz2neDbiWMKvVdFnd\nhbSufVoP/AZyBVhvUF2EeBJmNcL33f2uCi8+VWYLPgeDB1e4fQnwJ1iRce9X6THKMbOBpadZAKZ4\naeuZ0gUPp1fBZQ57GHyYgWsdbixtr3N0T7j/69Ct3Dd0I/BzyORhL3dfVOaQlmM4qg/c/7WN14o3\neLI0wOMJ34m3QkM29Ot6ZEuer4iIbD7VZHWMNMQLlQMWhHqmpkkJBxoShP4KGzjcMAe++SJUjYXY\nIHI8z4vM4RVyQAN5n0jB3gNWUcRYySJewluFrCE0l5K/SzW3s4RQqLSCEHzeJixiziCXS5JjDM8n\nM8ToS4HRNF859xJhqa6ptqgRyEGxAHc9A6cXoXoQxEqNH+rnQDEG7/+9dInjdFg6PVT8JxOwJAtv\nxeGwX0OvGMRyhLk0I8y9HUqoEMuyYfHTU6WZniKhT/39UF+EX+bhu4RVx1zefc2mvkB56Nd2TrCl\nvuG8u2zqcdpy9w8ov8dmnpBp76x
w12caYNGLMHRcm8ZnDjwCmTg8mNtEwCrp02MTG6z3IPQXM8IM\n46eh+k74nZkN1UbMIiJdh0LWxpZBMQPLqstfcwZh/qBpD725QHEZYaVrg9IMxxFPwGMvQL+DoFsf\nCraMwrrFUEjB4p4w4kzCzM+LOGEO5kVCGXhLa4hzNx8jy3LCtX4jwkB5E3gKnm2EH6Tga0vxCWdS\nqGkKU0VCL6hJwPktHnEK5BPwUAbOy8GRk+DrsTCNVt8INzr82d0/LHWrJ1vhzdvMUil4ciYceWCL\nz8cJoQ7gTmiogxnXwKgqKObCzYtL29LcXDqsYv1UW3H48EOoblut1iRcT8nazX28j8rd3cwmPgHP\nL4DuY6GmJ+Hr8yzULYM52eY+XZsyfxkkilSO+UtpPeu4N5CCfpkwCzd1q5+IiIh0KC0XlmGW/BEM\nuwzOrdq4zKYe+D1hu5tuwPX1sO4Sd7+1/GOZAcfF4RMxSOXCJr13xeAb+8O3zwo9EygSmiPcQZIc\nfclyKFBFnDkYr3EMeca3meBYCNwE8xrc9yqdK5WCXxfhgkGQq4b0e5BKEzbOGUQo6J4C+ZdgVQ4O\nbq/X1+a/XnZcFdz3z1DTdoZpFnAnrM2HCbf1hFKtDPD+1s66JMyuHAlXnNHchL2VByE7Da7Jun9j\nax5/a5lZL4OL0vDlIvSOwcLGsJXPre6e2czHsDS89SnYZ0SZ2+uA3xDaxLYMWjfB2jlwUXvLrCIi\nsvW2JrcoZJVhZrWQeh6G7g0TqsICVNMi10OEj2sbQ+US33HP/XwrztEvAfPOh9qWtUV5wuzUgyQL\nGWLFEeSSJ1KkbRE0wKOQmwrXZt2/2uaxexP2pqkGBqbgohj0rwqF0qk43JOByzZVH7Ql4mYXx+C3\no4C9oSoHzID170EuBye2rFf7qMysXxJmHg99xkKs6ZvICXsKPggf5uCAjgiQncHMjk7Bg2dCzb40\nx/wVhJYSexO6oLX0G1i7Ek7T3ohdj5mNIJQ3pgmXAT/i7pu8CEJEuhaFrA4UglbiO8CXIRmHfBxi\nqyE/H5KrIPcKFK79KG/kZnZSEu46DJKjIVlFKHWfBHVr4cks3NMXrr4Uatt2xFoGXBc2KB7t7u9s\n4jxGuCyyFpjvm1H3tJXPZ/c4fCkVWsZnG0Kvi1ui6OlkZsNS8EAV7HYAVBvYG1BfD8uz8Al3f7Oj\nz7ktmdmxafhjCvoNAFsLtesI7WIPp/X86kLgRliRg/568+46zGyXdKjjO2wkxNKQmA31a6A+F3q5\nPdvZYxSRzaeQFQEzSxIaVmWBxR1dWGxmw5LwDYOzHNJxeKsxbCR4F6FY/PpqOPcYqB3Khv34Cs9B\nJgeXFt3/1pHj2Z6UwuORwDGE3DGZ0Ddqhyj+Lj2/scCeCfjaEDjoXKhOtThmFXAD1NfBVwvuN3TO\nSKWt0rZar46GfU6CVMvi19nAHSFoHeHuMzprjCKyZRSydkClN9pTquA/C3AgkDd4IAs/d/fXOnt8\nsm2YWToNNzl8chTEe0LyA6ibAzGHb+bdf93ZY5RmZnbOAPjjFyq09XgB/Cl4uNH9lG0+OBHZKgpZ\nIjs4M9sLOCcOfQphM8lbo1r+la1XYzbpZDh6VIXbG4GfQbYA/dx9m10JKyJbT32yRHZw7j4X+Eln\nj0Pa57Bbe43aqoA05OrDDlQKWSI7qPY6boqIyNZ5v73Gbw1AJvTv3ez+cCKy/VHIEhHpYI1wzRRY\nX2nn7lehmICH3H1dhUNEZAegkCUi0vHu+xDeuhcy2RafdMKG6pOgPgPf6pyhici2osJ3EZEImFn3\nNNxchBP2hWIaYu9CvgFWZOHsjmzQKyLR09WFIiJdjJkNASYSOr5PA57dUXq5iexMFLJEREREIrA1\nuUU1WSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIBhSwRERGRCChkiYiIiERAIUtE\nREQkAgpZIiIiIhFQyBIRERGJgEKWyE7OzHrHza6oMpuXMltTbTY7ZvY1M+vW2WMTEdmeae9CkZ2Y\nme2RhBf2gR6HQXUvYAUwBerfg8VZONzdl3f2OEVEOps2iBaRzWZmloKZx8A+R0C85W0OPAbZV+C5\nRvfjOmeEIiJdhzaIFpEtcXgaBo9rE7AADDgWUg6Hm9nQThibiMh2TyFLZOd17AFQVemHQBLYGwrA\nsdtwTCIiOwyFLJGdVywWJq0qiofb9XNCRGQr6IenyM7rhZlQX6kqswDMDiHr+W04JhGRHYZClsjO\n64k6WP16qHPfyItQcJjl7q9v64GJiOwIdHWhyE7MzEYlYPJYqD4Ukr2AlcDzkH0N1uTgMHef19nj\nFBHpbGrhICJbzMyGJOGbRfh8AWoTsBa4Lg8/c/elnT0+EZGuQCFLRD4SM4u7e6GzxyEi0tWoT5aI\nfCQKWCIiHUchS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERERCKgkCUiIiISAYUsERER\nkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIBhSwR\nERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQiCVlmdqWZfWBm00p/JkZx\nHhEREZGuKhHR4zpwlbtfFdHji4iIiHRpUS4XWoSPLSIiItKlRRmyvmZmM8zsj2bWK8LziIiIiHQ5\n5u5bd0ezx4D+ZW76b+AFYHnp4/8BBrj7JW3u7+6u2S4RERHp8rYmt2x1TZa7n7A5x5nZ9cB9FW67\nssWHk9x90taO5/+3d38xlt51Hcc/X7cuthUUQ9L/hmpKYrmwJbGQUGGioaleUHqBRRJFMaZJxQtj\njLYktldGYvRGgyERkQutqUZIm1rt1rAJGBAJTW0oTdtIdYulcgGJFHC37deLOZDpOrvb+fPtOWf2\n9Uomc57zzDzP9+Lk5J3nmfMbAID9UlUbSTb2dIzdXsk67UGrLurupxePfyPJT3T3u0/6GVeyAIC1\n8LJeyTqDD1TVVdn8lOGXktw8dB4AgJU0ciXrJZ3YlSwAYE3splus+A4AMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANE
FgDA\nAJEFADBAZAEADNh1ZFXVO6vqC1X1fFW94aR9t1bV41X1aFVdt/cxAQDWyzl7+N2Hk9yY5ENbn6yq\nK5PclOTKJJckeaCqXtfdL+zhXAAAa2XXV7K6+9HufmybXTckubO7T3T3k0meSHLNbs8DALCOJv4m\n6+IkT23ZfiqbV7QAAM4ap71dWFVHkly4za7buvueHZynT3H8O7ZsHu3uozs4JgDAiKraSLKxl2Oc\nNrK6+227OOaXk1y2ZfvSxXPbHf+OXRwfAGDU4sLP0e9sV9XtOz3Gft0urC2P707yrqo6XFWXJ7ki\nyWf36TwAAGthL0s43FhVx5K8Kcm9VXVfknT3I0nuSvJIkvuS3NLd294uBAA4qGpZ/VNV3d115p8E\nAFiu3XSLFd8BAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAG7DqyquqdVfWFqnq+qt6w5fnXVtW3qurBxdcH\n92dUAID1cc4efvfhJDcm+dA2+57o7qv3cGwAgLW268jq7keTpKr2bxoAgANi6m+yLl/cKjxaVdcO\nnQMAYGWd9kpWVR1JcuE2u27r7ntO8Wv/leSy7v7a4m+1Pl5Vr+/u/9nm+Hds2Tza3Udf2tgAAHOq\naiPJxp6O0d17HeITSX6zuz+/k/1V1d3tXiMAsPJ20y37dbvwuyetqtdU1aHF4x9JckWSf9+n8wAA\nrIW9LOFwY1UdS/KmJPdW1X2LXW9N8lBVPZjkb5Lc3N1f3/uoAADrY8+3C3d9YrcLAYA1sczbhQDA\nCquqc6vql8+r+sy5VY+dW3VvVV1XVVpgiCtZAHDAVdUPH04+dXHy6jcm3//KJF9J+p+TZ59NPnU8\nuaG7jy97zlW2m24RWQBwgFVVHU4efUvyo9cmh7buey7Jnck3jyUf+d/u9y1pxLXgdiEAcLKfOj+5\n+M0nBVayuVjmDcl5LyTvrapXLWG2A01kAcAB9r3JO65Ozj/VJZhXJbkwOZHkLS/jWGcFkQUAB1gl\n571iy3qW23nFi76xX0QWABxgx5PPPJY8e6r9J5IcSw4neejlm+rsILIA4GC78z+y+Y+Ft/OvyQvf\nk3y+u594OYc6G4gsADjAuvsbzyW/8BfJtx7M5pWrJPlGkgeS5z+RfP3byS8ub8KDyxIOAHAWqKpr\nvy/5vRPJGw8nJ44nhw4lf3s8eX93/+ey51t11skCAE6rql6dzQ8VfrW7v7nsedaFyAIAGGAxUgCA\nFSGyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaILACAASILAGCAyAIA\nGOkZzZ4AAARrSURBVCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABuw6sqrqD6rqi1X1UFX9XVX9wJZ9\nt1bV41X1aFVdtz+jcjarqo1lz8B68FphJ7xemLSXK1n3J3l9d/94kseS3JokVXVlkpuSXJnk+iQf\nrCpXzNirjWUPwNrYWPYArJWNZQ/AwbXr+OnuI939wmLzX5Jcunh8Q5I7u/tEdz+Z5Ikk1+xpSgCA\nNbNfV5jem+TvF48vTvLUln1PJblkn84DALAWzjndzqo6kuTCbXbd1t33LH7m/UmOd/dfneZQfYrj\nb/s8bKeqbl/2DKwHrxV2wuuFKaeNrO5+2+n2V9UvJfnZJD+95ekvJ7lsy/ali+dOPna95CkBANbM\nXj5deH2S30pyQ3d/e8uuu5O8q6oOV9XlSa5I8tm9jQkAsF5OeyXrDP44yeEkR6oqST7d3bd09yNV\ndVeSR5I8l+SW7nZbEAA4q5T+AQDYf0tdv6qq7qiqp6rqwcXX9cuch9VTVdcvFrV9vKp+e9nzsNqq\n6smq+rfF+4k/U+BFqurPq+qZqnp4y3M/VFVHquqxqrq/qn5wmTOyGk7xWtlxsyx7kdBO8kfdffXi\n6x+WPA8rpKoOJfmTbC5qe2WSn6+qH1vuVKy4TrKxeD+xPh8n+0g230+2+p0kR7r7dUn+abEN271W\ndtwsy46sJPEpQ07lmiRPdPeT3X0iyV9nc7FbOB3vKWyruz+Z5GsnPf32JB9dPP5okne8rEOxkk7x\nWkl2+P6yCpH164v/f/hhl2k5ySVJjm3ZtrAtZ9JJHqiqz1XVry57GNbCBd39zOLxM0kuWOYwrLwd\nNct4ZC3udT+8zdfbk/xpksuTXJXk6SR/OD0Pa8WnMtipN3f31Ul+JsmvVdVPLnsg1sfik/DedziV\nHTfLXpZweEnOtKDpd1TVnyW5Z3gc1svJC9telhf/yyZ4ke5+evH9q1X1sWzecv7kcqdixT1TVRd2\n91eq6qIk/73sgVhN3f3d18ZLbZZlf7rwoi2bNyZ5+FQ/y1npc0muqKrXVtXhJDdlc7Fb+H+q6ryq\neuXi8flJrov3FM7s7iTvWTx+T5KPL3EWVthummX8StYZfKCqrsrm5dkvJbl5yfOwQrr7uap6X5J/\nTHIoyYe7+4tLHovVdUGSjy0WRz4nyV929/3LHYlVUlV3JnlrktdU1bEkv5vk95PcVVW/kuTJJD+3\nvAlZFdu8Vm5PsrHTZrEYKQDAgFX4dCEAwIEjsgAABogsAIABIgsAYIDIAgAYILIAAAaILACAAf8H\nYwJ7l+X9lxkAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Take a look here: http://stackoverflow.com/questions/28160335/plot-a-document-tfidf-2d-graph\n", + "from sklearn.decomposition 
+ "# (PCA is used here purely for visualization: it projects the high-dimensional\n", + "# bag-of-words vectors down to 2 components so the documents can be plotted)\n", + "import matplotlib.pyplot as plt\n", + " \n", + "pca = PCA(n_components=2).fit(train_vecs)\n", + "data2D = pca.transform(train_vecs)\n", + "plt.scatter(data2D[:,0], data2D[:,1], s=80, c=train_tags)" + ] + },
+ { + "cell_type": "code", + "execution_count": 119, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "LogisticRegressionCV(Cs=10, class_weight=None, cv=None, dual=False,\n", + "           fit_intercept=True, intercept_scaling=1.0, max_iter=100,\n", + "           multi_class='ovr', n_jobs=1, penalty='l2', refit=True,\n", + "           scoring=None, solver='lbfgs', tol=0.0001, verbose=0)" + ] + }, + "execution_count": 119, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Fit a logistic regression classifier\n", + "clf = sklearn.linear_model.LogisticRegressionCV()\n", + "clf.fit(train_vecs, train_tags)" + ] + },
+ { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loss after iteration 0: 0.333610\n", + "Loss after iteration 1000: 0.000334\n" + ] + } + ], + "source": [ + "X = train_vecs\n", + "y = train_tags\n", + "y = y.astype(int)\n", + "num_examples = len(X) # training set size\n", + "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", + "nn_output_dim = 2 # output layer dimensionality\n", + "\n", + "# Gradient descent parameters (I picked these by hand)\n", + "epsilon = 0.01 # learning rate for gradient descent\n", + "reg_lambda = 0.01 # regularization strength\n", + "\n", + "\n", + "def forward(W1, b1, W2, b2, x):\n", + "    z1 = x.dot(W1) + b1\n", + "    a1 = np.tanh(z1)\n", + "    z2 = a1.dot(W2) + b2\n", + "    exp_scores = np.exp(z2)\n", + "    # softmax: normalize the exponentiated scores into class probabilities\n", + "    y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", + "    return y_hat, z1, a1, z2\n", + "\n", + "def predict(model, x):\n", + "    W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + "    y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", + "    return np.argmax(y_hat, axis=1)\n", + "\n", + "def calculate_loss(model):\n", + "    W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", + "    y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", + "    correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", + "    data_loss = np.sum(correct_logprobs)\n", + "    return 1./num_examples * data_loss\n", + "\n", + "\n", + "# This function learns parameters for the neural network and returns the model.\n", + "# - nn_hdim: Number of nodes in the hidden layer\n", + "# - num_passes: Number of passes through the training data for gradient descent\n", + "# - print_loss: If True, print the loss every 1000 iterations\n", + "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", + "    \n", + "    # Initialize the parameters to random values. We need to learn these.\n", + "    np.random.seed(0)\n", + "    W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", + "    b1 = np.zeros((1, nn_hdim))\n", + "    W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", + "    b2 = np.zeros((1, nn_output_dim))\n", + "\n", + "    # This is what we return at the end\n", + "    model = {}\n", + "    \n", + "    # Gradient descent. For each batch...\n", + "    for i in range(0, num_passes):\n", + "        # feedforward\n", + "        y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", + "        \n",
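+ "        # Annotation: with a softmax output and cross-entropy loss, the gradient\n", + "        # of the loss w.r.t. z2 is (y_hat - y_onehot); subtracting 1 at the true\n", + "        # class index below computes exactly that.\n",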
+ "        # Backpropagation\n", + "        delta3 = y_hat\n", + "        delta3[range(num_examples), y] -= 1\n", + "        #print [range(num_examples), y]\n", + "        dW2 = (a1.T).dot(delta3)\n", + "        db2 = np.sum(delta3, axis=0, keepdims=True)\n", + "        delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", + "        dW1 = np.dot(X.T, delta2)\n", + "        db1 = np.sum(delta2, axis=0)\n", + "\n", + "        # Gradient descent parameter update\n", + "        W1 += -epsilon * dW1\n", + "        b1 += -epsilon * db1\n", + "        W2 += -epsilon * dW2\n", + "        b2 += -epsilon * db2\n", + "        \n", + "        # Assign new parameters to the model\n", + "        model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", + "        \n", + "        # Optionally print the loss.\n", + "        # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", + "        if print_loss and i % 1000 == 0:\n", + "            print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", + "            #print y_hat[:2]\n", + "    \n", + "    return model\n", + "\n", + "# Build a model with a 3-dimensional hidden layer\n", + "model = build_model(3, print_loss=True)\n", + "\n", + "\n" + ] + },
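+ { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Quick sanity check (a suggested extra step; this cell was not run, so it has\n", + "# no saved output): after 2000 passes the tiny 200-example training set should\n", + "# be fit almost perfectly, consistent with the final loss printed above.\n", + "print \"training accuracy:\", np.mean(predict(model, X) == y)" + ] + }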
"collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/theano_tutorial.ipynb b/theano_tutorial.ipynb new file mode 100644 index 0000000..3cb4eeb --- /dev/null +++ b/theano_tutorial.ipynb @@ -0,0 +1,2160 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#Theano Tutorial" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# The code is from Theano Tutorial: http://deeplearning.net/software/theano/tutorial/\n", + "# See this tutorial too" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from theano import *\n", + "import theano.tensor as T\n", + "from theano import function" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5.0\n", + "28.4\n" + ] + } + ], + "source": [ + "# A function to add to scalars\n", + "x = T.dscalar('x')\n", + "y = T.dscalar('y')\n", + "z = x + y\n", + "f = function([x, y], z)\n", + "print f(2, 3)\n", + "print f(16.3, 12.1)\n", + "# T.dscalar is the type we assign to “0-dimensional arrays (scalar) of doubles (d)”" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# x and y are instances of TensorVariable. \n", + "print type(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, scalar)\n" + ] + } + ], + "source": [ + "# x and y are are assigned the theano Type dscalar in their type field:\n", + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, scalar)\n" + ] + } + ], + "source": [ + "print z.type" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Adding two matrices:\n", + "x = T.dmatrix('x')\n", + "y = T.dmatrix('y')\n", + "z = x + y\n", + "f = function([x, y], z)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# Again x and y are instances of TensorVariable, but \n", + "# dmatrix is the Type for matrices of doubles. 
\n", + "print type(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print z.type" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 11. 22.]\n", + " [ 33. 44.]]\n" + ] + } + ], + "source": [ + "# Then we can use our new function on 2D arrays:. \n", + "print f([[1, 2], [3, 4]], [[10, 20], [30, 40]])\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 11., 22.],\n", + " [ 33., 44.]])" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# The variable is a NumPy array. We can also use NumPy arrays directly as inputs:\n", + "import numpy\n", + "f(numpy.array([[1, 2], [3, 4]]), numpy.array([[10, 20], [30, 40]]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Plural Constructors\n", + "from theano.tensor import *\n", + "x, y, z = dmatrices(3) # creates three matrix Variables with no names\n", + "x, y, z = dmatrices('x', 'y', 'z') # creates three matrix Variables named 'x', 'y' and 'z'" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TensorType(float64, matrix)\n" + ] + } + ], + "source": [ + "print x.type" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Random numbers, etc.: http://deeplearning.net/software/theano/tutorial/examples.html" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "gpu\n", + "float32\n" + ] + } + ], + "source": [ + "print(theano.config.device)\n", + "print(theano.config.floatX)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Logistic Regression Example" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + "Final model:\n", + "target values for D:\n", + "[0 1 1 1 0 0 1 1 1 0 1 1 0 1 1 1 0 0 1 1 1 0 0 1 0 1 1 1 0 0 1 0 1 1 0 0 1\n", + " 1 0 1 1 1 1 0 1 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 1 1 1 0 0 0 0 1 0 0 1 0 0\n", + " 0 0 1 0 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 0 1 0 1 0 0 1 0 0 0 0 0\n", + " 1 1 1 1 1 0 0 0 1 1 0 1 0 1 0 1 0 0 1 0 1 1 0 0 0 1 0 0 0 0 0 0 1 0 1 1 0\n", + " 0 0 0 1 0 1 0 1 1 1 0 1 1 1 1 1 1 
1 1 1 0 1 0 1 0 0 1 1 1 1 0 0 0 1 0 0 0\n", + " 1 1 1 1 1 1 0 1 0 1 1 1 0 1 1 0 1 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 1 0 0 1 0\n", + " 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 1 0 0 1 1 0 1 0 1 1 1 0 1 1 0 0 1 1 0 1 0 1\n", + " 0 0 1 1 0 0 0 1 1 1 1 1 0 0 1 1 0 1 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0\n", + " 0 1 0 1 0 1 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 0 1 1 1 0 0 1 0 0 0 1 1\n", + " 1 0 1 1 0 0 0 0 0 1 1 1 1 0 1 0 0 1 0 1 1 0 0 1 0 1 1 0 1 1 1 1 0 0 1 1 1\n", + " 1 1 1 1 1 1 0 0 0 0 1 1 1 0 1 1 0 0 1 1 1 0 0 1 1 1 0 0 1 0]\n", + "prediction on D:\n", + "[1 1 0 1 1 1 1 1 0 1 0 1 0 1 1 0 0 0 1 1 0 1 0 1 0 1 1 1 0 1 0 0 0 1 1 0 1\n", + " 1 1 0 0 0 1 0 1 0 0 1 1 1 1 1 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 1 1 0 0 0 1\n", + " 1 1 1 0 1 0 1 0 0 0 1 1 0 1 0 0 1 1 0 0 1 0 0 0 0 0 1 0 1 1 1 0 1 0 1 1 0\n", + " 1 0 1 1 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 1\n", + " 0 1 0 0 0 0 1 1 1 0 0 1 0 0 1 0 0 1 0 1 1 1 0 0 1 1 0 0 0 0 1 0 0 1 1 0 1\n", + " 0 1 1 1 1 1 1 0 1 1 0 1 1 1 1 0 1 1 1 1 1 0 1 0 1 0 0 0 1 1 0 0 0 0 1 0 0\n", + " 1 1 1 1 1 0 1 0 1 0 1 1 0 0 1 0 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 1 0 1 0 1\n", + " 0 0 1 1 1 0 1 0 1 1 1 0 1 0 1 0 1 1 1 0 1 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0\n", + " 0 1 0 0 0 1 0 1 0 1 1 0 1 1 0 1 1 0 0 0 1 0 0 1 0 1 0 1 0 0 1 0 1 0 0 1 1\n", + " 0 1 0 1 0 0 0 0 0 0 0 1 1 1 1 0 1 0 1 1 1 1 0 0 0 1 1 0 1 1 0 1 0 0 1 0 0\n", + " 0 0 0 1 1 1 0 1 1 0 1 1 0 1 1 1 0 1 0 0 0 1 1 1 0 1 1 1 1 0]\n" + ] + } + ], + "source": [ + "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "\n", + "N = 400 # training sample size\n", + "feats = 784 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 100\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final 
model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25000\n", + "200\n", + "200\n", + "7142\n", + "6994\n", + "0\n", + "200\n", + "200\n", + "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + "200\n", + "200\n", + "(200, 7142)\n", + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + } + ], + "source": [ + "# Get text data\n", + "#----------------\n", + "from collections import namedtuple\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", 
+ " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)\n", + "#-------------------------\n", + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. \n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)\n", + "#--------------------\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape\n", + "#--------------------------------\n", + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + 
"\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(200, 7142)\n", + "(200,)\n" + ] + } + ], + "source": [ + "# This creates an artficial dataset (code from the Theano tutorial):\n", + "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "#print D\n", + "print D[0].shape\n", + "print D[1].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(array([[ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " ..., \n", + " [ 0., 0., 0., ..., 1., 0., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.],\n", + " [ 0., 0., 0., ..., 1., 1., 1.]], dtype=float32), array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n", + "(200, 7142)\n", + "(200,)\n" + ] + } + ], + "source": [ + "#But let's use our data to construct D:\n", + "# Let's ensure our x is float32, for use with Theano:\n", + "x= train_vecs\n", + "x=x.astype(theano.config.floatX)\n", + "y=train_tags\n", + "y=y.astype(int)\n", + "# Now create the dataset, and check dimensions, etc.\n", + "D=(x,y)\n", + "print D\n", + "print D[0].shape\n", + "print D[1].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial model:\n", + 
"Final model:\n", + "target values for D:\n", + "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", + "prediction on D:\n", + "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", + " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", + " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n" + ] + } + ], + "source": [ + "# Use with logistic regression:\n", + "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", + "import numpy\n", + "import theano\n", + "import theano.tensor as T\n", + "rng = numpy.random\n", + "#theano.config.optimizer='fast_compile'\n", + "\n", + "#N = 400 # training sample size\n", + "#feats = 784# \n", + "feats = 7142 # number of input variables\n", + "\n", + "# generate a dataset: D = (input_values, target_class)\n", + "#D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", + "training_steps = 1000\n", + "#np.asarray(your_data, dtype=theano.config.floatX)\n", + "\n", + "# Declare Theano symbolic variables\n", + "x = T.matrix(\"x\")\n", + "y = T.vector(\"y\")\n", + "\n", + "# initialize the weight vector w randomly\n", + "#\n", + "# this and the following bias variable b\n", + "# are shared so they keep their values\n", + "# between training iterations (updates)\n", + "w = theano.shared(rng.randn(feats), name=\"w\")\n", + "\n", + "# initialize the bias term\n", + "b = theano.shared(0., name=\"b\")\n", + "#print b.eval()\n", + "print(\"Initial model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "\n", + "# Construct Theano expression graph\n", + "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", + "prediction = p_1 > 0.5 # The prediction thresholded\n", + "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", + "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", + "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", + " # w.r.t weight vector w and\n", + " # bias term b\n", + " # (we shall return to this in a\n", + " # following section of this tutorial)\n", + "\n", + "# Compile\n", + "train = theano.function(\n", + " inputs=[x,y],\n", + " outputs=[prediction, xent],\n", + " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", + " allow_input_downcast=True) # added downcasting...\n", + "predict = theano.function(inputs=[x], outputs=prediction)\n", + "\n", + "# Train\n", + "for i in range(training_steps):\n", + " pred, err = train(D[0], D[1])\n", + "\n", + "print(\"Final model:\")\n", + "#print(w.get_value())\n", + "#print(b.get_value())\n", + "print(\"target values for D:\")\n", + "print(D[1])\n", + "print(\"prediction on D:\")\n", + "print(predict(D[0]))\n", + "#----------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + 
"outputs": [], + "source": [ + "# Now try the code with different values of \"training_steps\" and see what you get.\n", + "# For example, you can try:\n", + "# training_steps= 100, training_steps=500, training_steps=10000" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We need to create some functions to load the IBDB data:\n", + "from collections import namedtuple\n", + "from collections import defaultdict\n", + "\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + "def get_data():\n", + " '''\n", + " \n", + " '''\n", + " all_data = [] \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " all_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + " all_tags+=all_tags\n", + " return all_data, all_tags\n", + " #--------------------------------------------------\n", + " \n", + "all_data, all_tags= get_data()\n", + "\n", + "#train_data=train_data[:100]+train_data[12500:12600]\n", + "#test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "\n", + "\n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]+ [ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "\n", + "\n", + "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "\n", + "\n", + "\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "from collections import defaultdict\n", + "\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "\n", + "\n", + "\n", + "\n", + "all_data = [] \n", + "DataDoc= namedtuple('DataDoc', 'tag words')\n", + "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as 
alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " #print my_data[line_no]\n", + " #break\n", + "train_data = all_data[:25000]\n", + "test_data = all_data[25000:50000]\n", + "print len(train_data)\n", + "\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", + "print len(train_data)\n", + "print len(test_data)\n", + "#--------------------\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "word_space=get_space(train_data)\n", + "print len(word_space)\n", + "print word_space[\"love\"]\n", + "#-------------------------\n", + "import numpy as np\n", + "\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "\n", + "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "#test_vecs= get_sparse_vectors(test_data, word_space)\n", + "\n", + "#print train_vecs, test_vecs[0]\n", + "print len(train_data[12500:12600])\n", + "print len(train_vecs)\n", + "print len(test_vecs)\n", + "#-------------------------\n", + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. 
\n", + "# The rest of the data in the file is unknown and we don't use that part.\n", + "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", + "# later, for now we will hard-code the labels.\n", + "\n", + "from random import shuffle, randint\n", + "\n", + "\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "\n", + "\n", + "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", + "# tags= [train_data[i].tag for i in range(len(train_data))]\n", + "print train_tags[-1], train_vecs[-1][:10]\n", + "print len(train_tags)\n", + "print len(test_tags)\n", + "#--------------------\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "print train_vecs.shape\n", + "#--------------------------------\n", + "# Classification with scikit-learn\n", + "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", + "# Let's use sklearn to train an svm classifier:\n", + "#-------------------------------------------------\n", + "\n", + "import argparse\n", + "import codecs\n", + "import time\n", + "import sys\n", + "import os, re, glob\n", + "import nltk\n", + "from collections import defaultdict\n", + "from random import shuffle, randint\n", + "import numpy as np\n", + "from numpy import array, arange, zeros, hstack, argsort\n", + "import unicodedata\n", + "from scipy.sparse import csr_matrix\n", + "from sklearn.svm import SVC, LinearSVC\n", + "from sklearn import preprocessing\n", + "from sklearn.cross_validation import StratifiedKFold\n", + "from sklearn.grid_search import GridSearchCV\n", + "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.decomposition import TruncatedSVD\n", + "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", + "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.linear_model import LogisticRegression\n", + "from sklearn import cross_validation\n", + "import gensim\n", + "n_jobs = 2\n", + "\n", + "#train_vecs=array(train_vecs)\n", + "train_vecs=np.array(train_vecs)\n", + "train_tags=np.array(train_tags)\n", + "\n", + "print type(train_tags)\n", + "print type(train_vecs)\n", + "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", + "clf.fit(train_vecs, train_tags)\n", + "print \"\\nDone fitting classifier on training data...\\n\"\n", + "\n", + "#------------------------------------------------------------------------------------------\n", + "print \"=\"*50, \"\\n\"\n", + "print \"Results with 5-fold cross validation:\\n\"\n", + "print \"=\"*50, \"\\n\"\n", + "#------------------------------------------------------------------------------------------\n", + "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", + "print \"*\"*20\n", + "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", + "print \"*\"*20\n", + "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", + "print 
\"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", + "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + "#----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "space_len: 975\n", + "train_vecs.shape: 25000, 975\n", + "dev_vecs.shape: 5000, 975\n", + "test_vecs.shape: 20000, 975\n", + "Subtensor{int64}.0\n", + "... building the model\n", + "... training the model\n", + "epoch 1, minibatch 41/41, validation error 52.083333 %\n", + " epoch 1, minibatch 41/41, test error of best model 50.505051 %\n", + "epoch 2, minibatch 41/41, validation error 52.083333 %\n", + "epoch 3, minibatch 41/41, validation error 52.083333 %\n", + "epoch 4, minibatch 41/41, validation error 52.083333 %\n", + "epoch 5, minibatch 41/41, validation error 52.083333 %\n", + "epoch 6, minibatch 41/41, validation error 52.062500 %\n", + " epoch 6, minibatch 41/41, test error of best model 50.484848 %\n", + "epoch 7, minibatch 41/41, validation error 51.895833 %\n", + " epoch 7, minibatch 41/41, test error of best model 50.328283 %\n", + "epoch 8, minibatch 41/41, validation error 51.687500 %\n", + " epoch 8, minibatch 41/41, test error of best model 50.111111 %\n", + "epoch 9, minibatch 41/41, validation error 51.354167 %\n", + " epoch 9, minibatch 41/41, test error of best model 49.944444 %\n", + "epoch 10, minibatch 41/41, validation error 51.020833 %\n", + " epoch 10, minibatch 41/41, test error of best model 49.641414 %\n", + "epoch 11, minibatch 41/41, validation error 50.500000 %\n", + " epoch 11, minibatch 41/41, test error of best model 49.363636 %\n", + "epoch 12, minibatch 41/41, validation error 50.125000 %\n", + " epoch 12, minibatch 41/41, test error of best model 49.000000 %\n", + "epoch 13, minibatch 41/41, validation error 49.687500 %\n", + " epoch 13, minibatch 41/41, test error of best model 48.661616 %\n", + "epoch 14, minibatch 41/41, validation error 49.125000 %\n", + " epoch 14, minibatch 41/41, test error of best model 48.257576 %\n", + "epoch 15, minibatch 41/41, validation error 48.645833 %\n", + " epoch 15, minibatch 41/41, test error of best model 47.853535 %\n", + "epoch 16, minibatch 41/41, validation error 48.291667 %\n", + " epoch 16, minibatch 41/41, test error of best model 47.585859 %\n", + "epoch 17, minibatch 41/41, validation error 47.979167 %\n", + " epoch 17, minibatch 41/41, test error of best model 47.217172 %\n", + "epoch 18, minibatch 41/41, validation error 47.812500 %\n", + " epoch 18, minibatch 41/41, test error of best model 47.000000 %\n", + "epoch 19, minibatch 41/41, validation error 47.604167 %\n", + " epoch 19, minibatch 41/41, test error of best model 46.737374 %\n", + "epoch 20, minibatch 41/41, validation error 47.395833 %\n", + " epoch 20, minibatch 41/41, test error of best model 46.520202 %\n", + "epoch 21, minibatch 41/41, validation error 47.041667 %\n", + " epoch 
21, minibatch 41/41, test error of best model 46.292929 %\n", + "epoch 22, minibatch 41/41, validation error 46.770833 %\n", + " epoch 22, minibatch 41/41, test error of best model 46.030303 %\n", + "epoch 23, minibatch 41/41, validation error 46.645833 %\n", + " epoch 23, minibatch 41/41, test error of best model 45.813131 %\n", + "epoch 24, minibatch 41/41, validation error 46.125000 %\n", + " epoch 24, minibatch 41/41, test error of best model 45.570707 %\n", + "epoch 25, minibatch 41/41, validation error 45.895833 %\n", + " epoch 25, minibatch 41/41, test error of best model 45.348485 %\n", + "epoch 26, minibatch 41/41, validation error 45.812500 %\n", + " epoch 26, minibatch 41/41, test error of best model 45.121212 %\n", + "epoch 27, minibatch 41/41, validation error 45.666667 %\n", + " epoch 27, minibatch 41/41, test error of best model 44.994949 %\n", + "epoch 28, minibatch 41/41, validation error 45.458333 %\n", + " epoch 28, minibatch 41/41, test error of best model 44.848485 %\n", + "epoch 29, minibatch 41/41, validation error 45.312500 %\n", + " epoch 29, minibatch 41/41, test error of best model 44.676768 %\n", + "epoch 30, minibatch 41/41, validation error 45.229167 %\n", + " epoch 30, minibatch 41/41, test error of best model 44.606061 %\n", + "epoch 31, minibatch 41/41, validation error 45.125000 %\n", + " epoch 31, minibatch 41/41, test error of best model 44.515152 %\n", + "epoch 32, minibatch 41/41, validation error 44.895833 %\n", + " epoch 32, minibatch 41/41, test error of best model 44.378788 %\n", + "epoch 33, minibatch 41/41, validation error 44.833333 %\n", + " epoch 33, minibatch 41/41, test error of best model 44.303030 %\n", + "epoch 34, minibatch 41/41, validation error 44.750000 %\n", + " epoch 34, minibatch 41/41, test error of best model 44.176768 %\n", + "epoch 35, minibatch 41/41, validation error 44.500000 %\n", + " epoch 35, minibatch 41/41, test error of best model 44.080808 %\n", + "epoch 36, minibatch 41/41, validation error 44.458333 %\n", + " epoch 36, minibatch 41/41, test error of best model 43.959596 %\n", + "epoch 37, minibatch 41/41, validation error 44.416667 %\n", + " epoch 37, minibatch 41/41, test error of best model 43.878788 %\n", + "epoch 38, minibatch 41/41, validation error 44.375000 %\n", + " epoch 38, minibatch 41/41, test error of best model 43.813131 %\n", + "epoch 39, minibatch 41/41, validation error 44.354167 %\n", + " epoch 39, minibatch 41/41, test error of best model 43.757576 %\n", + "epoch 40, minibatch 41/41, validation error 44.291667 %\n", + " epoch 40, minibatch 41/41, test error of best model 43.656566 %\n", + "epoch 41, minibatch 41/41, validation error 44.250000 %\n", + " epoch 41, minibatch 41/41, test error of best model 43.530303 %\n", + "epoch 42, minibatch 41/41, validation error 44.166667 %\n", + " epoch 42, minibatch 41/41, test error of best model 43.500000 %\n", + "epoch 43, minibatch 41/41, validation error 44.166667 %\n", + "epoch 44, minibatch 41/41, validation error 44.041667 %\n", + " epoch 44, minibatch 41/41, test error of best model 43.368687 %\n", + "epoch 45, minibatch 41/41, validation error 44.104167 %\n", + "epoch 46, minibatch 41/41, validation error 44.041667 %\n", + "epoch 47, minibatch 41/41, validation error 44.041667 %\n", + "epoch 48, minibatch 41/41, validation error 43.979167 %\n", + " epoch 48, minibatch 41/41, test error of best model 43.126263 %\n", + "epoch 49, minibatch 41/41, validation error 43.979167 %\n", + "epoch 50, minibatch 41/41, validation error 43.937500 %\n", + " epoch 
50, minibatch 41/41, test error of best model 43.035354 %\n", + "epoch 51, minibatch 41/41, validation error 43.875000 %\n", + " epoch 51, minibatch 41/41, test error of best model 43.015152 %\n", + "epoch 52, minibatch 41/41, validation error 43.833333 %\n", + " epoch 52, minibatch 41/41, test error of best model 42.984848 %\n", + "epoch 53, minibatch 41/41, validation error 43.708333 %\n", + " epoch 53, minibatch 41/41, test error of best model 42.868687 %\n", + "epoch 54, minibatch 41/41, validation error 43.729167 %\n", + "epoch 55, minibatch 41/41, validation error 43.708333 %\n", + "epoch 56, minibatch 41/41, validation error 43.645833 %\n", + " epoch 56, minibatch 41/41, test error of best model 42.772727 %\n", + "epoch 57, minibatch 41/41, validation error 43.541667 %\n", + " epoch 57, minibatch 41/41, test error of best model 42.727273 %\n", + "epoch 58, minibatch 41/41, validation error 43.541667 %\n", + "epoch 59, minibatch 41/41, validation error 43.520833 %\n", + " epoch 59, minibatch 41/41, test error of best model 42.691919 %\n", + "epoch 60, minibatch 41/41, validation error 43.520833 %\n", + "epoch 61, minibatch 41/41, validation error 43.500000 %\n", + " epoch 61, minibatch 41/41, test error of best model 42.616162 %\n", + "epoch 62, minibatch 41/41, validation error 43.520833 %\n", + "epoch 63, minibatch 41/41, validation error 43.500000 %\n", + "epoch 64, minibatch 41/41, validation error 43.520833 %\n", + "epoch 65, minibatch 41/41, validation error 43.437500 %\n", + " epoch 65, minibatch 41/41, test error of best model 42.515152 %\n", + "epoch 66, minibatch 41/41, validation error 43.437500 %\n", + "epoch 67, minibatch 41/41, validation error 43.416667 %\n", + " epoch 67, minibatch 41/41, test error of best model 42.474747 %\n", + "epoch 68, minibatch 41/41, validation error 43.395833 %\n", + " epoch 68, minibatch 41/41, test error of best model 42.429293 %\n", + "epoch 69, minibatch 41/41, validation error 43.395833 %\n", + "epoch 70, minibatch 41/41, validation error 43.375000 %\n", + " epoch 70, minibatch 41/41, test error of best model 42.373737 %\n", + "epoch 71, minibatch 41/41, validation error 43.354167 %\n", + " epoch 71, minibatch 41/41, test error of best model 42.368687 %\n", + "epoch 72, minibatch 41/41, validation error 43.312500 %\n", + " epoch 72, minibatch 41/41, test error of best model 42.328283 %\n", + "epoch 73, minibatch 41/41, validation error 43.270833 %\n", + " epoch 73, minibatch 41/41, test error of best model 42.313131 %\n", + "epoch 74, minibatch 41/41, validation error 43.229167 %\n", + " epoch 74, minibatch 41/41, test error of best model 42.282828 %\n", + "epoch 75, minibatch 41/41, validation error 43.229167 %\n", + "epoch 76, minibatch 41/41, validation error 43.229167 %\n", + "epoch 77, minibatch 41/41, validation error 43.208333 %\n", + " epoch 77, minibatch 41/41, test error of best model 42.202020 %\n", + "epoch 78, minibatch 41/41, validation error 43.187500 %\n", + " epoch 78, minibatch 41/41, test error of best model 42.186869 %\n", + "epoch 79, minibatch 41/41, validation error 43.166667 %\n", + " epoch 79, minibatch 41/41, test error of best model 42.166667 %\n", + "epoch 80, minibatch 41/41, validation error 43.166667 %\n", + "epoch 81, minibatch 41/41, validation error 43.145833 %\n", + " epoch 81, minibatch 41/41, test error of best model 42.151515 %\n", + "epoch 82, minibatch 41/41, validation error 43.125000 %\n", + " epoch 82, minibatch 41/41, test error of best model 42.146465 %\n", + "epoch 83, minibatch 41/41, 
validation error 43.083333 %\n", + " epoch 83, minibatch 41/41, test error of best model 42.126263 %\n", + "epoch 84, minibatch 41/41, validation error 43.020833 %\n", + " epoch 84, minibatch 41/41, test error of best model 42.116162 %\n", + "epoch 85, minibatch 41/41, validation error 43.020833 %\n", + "epoch 86, minibatch 41/41, validation error 43.020833 %\n", + "epoch 87, minibatch 41/41, validation error 42.979167 %\n", + " epoch 87, minibatch 41/41, test error of best model 42.070707 %\n", + "epoch 88, minibatch 41/41, validation error 42.958333 %\n", + " epoch 88, minibatch 41/41, test error of best model 42.045455 %\n", + "epoch 89, minibatch 41/41, validation error 42.937500 %\n", + " epoch 89, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 90, minibatch 41/41, validation error 42.916667 %\n", + " epoch 90, minibatch 41/41, test error of best model 42.035354 %\n", + "epoch 91, minibatch 41/41, validation error 42.895833 %\n", + " epoch 91, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 92, minibatch 41/41, validation error 42.875000 %\n", + " epoch 92, minibatch 41/41, test error of best model 42.030303 %\n", + "epoch 93, minibatch 41/41, validation error 42.875000 %\n", + "epoch 94, minibatch 41/41, validation error 42.854167 %\n", + " epoch 94, minibatch 41/41, test error of best model 42.010101 %\n", + "epoch 95, minibatch 41/41, validation error 42.854167 %\n", + "epoch 96, minibatch 41/41, validation error 42.833333 %\n", + " epoch 96, minibatch 41/41, test error of best model 42.005051 %\n", + "epoch 97, minibatch 41/41, validation error 42.791667 %\n", + " epoch 97, minibatch 41/41, test error of best model 41.984848 %\n", + "epoch 98, minibatch 41/41, validation error 42.770833 %\n", + " epoch 98, minibatch 41/41, test error of best model 41.979798 %\n", + "epoch 99, minibatch 41/41, validation error 42.770833 %\n", + "epoch 100, minibatch 41/41, validation error 42.770833 %\n", + "epoch 101, minibatch 41/41, validation error 42.770833 %\n", + "epoch 102, minibatch 41/41, validation error 42.770833 %\n", + "epoch 103, minibatch 41/41, validation error 42.770833 %\n", + "epoch 104, minibatch 41/41, validation error 42.770833 %\n", + "epoch 105, minibatch 41/41, validation error 42.791667 %\n", + "epoch 106, minibatch 41/41, validation error 42.791667 %\n", + "epoch 107, minibatch 41/41, validation error 42.791667 %\n", + "epoch 108, minibatch 41/41, validation error 42.791667 %\n", + "epoch 109, minibatch 41/41, validation error 42.770833 %\n", + "epoch 110, minibatch 41/41, validation error 42.770833 %\n", + "epoch 111, minibatch 41/41, validation error 42.770833 %\n", + "epoch 112, minibatch 41/41, validation error 42.770833 %\n", + "epoch 113, minibatch 41/41, validation error 42.770833 %\n", + "epoch 114, minibatch 41/41, validation error 42.750000 %\n", + " epoch 114, minibatch 41/41, test error of best model 41.868687 %\n", + "epoch 115, minibatch 41/41, validation error 42.750000 %\n", + "epoch 116, minibatch 41/41, validation error 42.729167 %\n", + " epoch 116, minibatch 41/41, test error of best model 41.863636 %\n", + "epoch 117, minibatch 41/41, validation error 42.750000 %\n", + "epoch 118, minibatch 41/41, validation error 42.750000 %\n", + "epoch 119, minibatch 41/41, validation error 42.750000 %\n", + "epoch 120, minibatch 41/41, validation error 42.729167 %\n", + " epoch 120, minibatch 41/41, test error of best model 41.853535 %\n", + "epoch 121, minibatch 41/41, validation error 42.729167 %\n", + "Optimization 
complete with best validation score of 42.729167 %,with test performance 41.853535 %\n", + "The code run for 122 epochs, with 10.362868 epochs/sec\n", + "The code for file best_model.pkl ran for 11.8s\n", + "Now predicting...\n", + "Predicted values for the first 10 examples in test set:\n", + "[0 0 0 0 0 0 1 0 0 0]\n" + ] + } + ], + "source": [ + "\"\"\"\n", + "This tutorial introduces logistic regression using Theano and stochastic\n", + "gradient descent.\n", + "\n", + "Logistic regression is a probabilistic, linear classifier. It is parametrized\n", + "by a weight matrix :math:`W` and a bias vector :math:`b`. Classification is\n", + "done by projecting data points onto a set of hyperplanes, the distance to\n", + "which is used to determine a class membership probability.\n", + "\n", + "Mathematically, this can be written as:\n", + "\n", + ".. math::\n", + " P(Y=i|x, W,b) &= softmax_i(W x + b) \\\\\n", + " &= \\frac {e^{W_i x + b_i}} {\\sum_j e^{W_j x + b_j}}\n", + "\n", + "\n", + "The output of the model or prediction is then done by taking the argmax of\n", + "the vector whose i'th element is P(Y=i|x).\n", + "\n", + ".. math::\n", + "\n", + " y_{pred} = argmax_i P(Y=i|x,W,b)\n", + "\n", + "\n", + "This tutorial presents a stochastic gradient descent optimization method\n", + "suitable for large datasets.\n", + "\n", + "\n", + "References:\n", + "\n", + " - textbooks: \"Pattern Recognition and Machine Learning\" -\n", + " Christopher M. Bishop, section 4.3.2\n", + "\n", + "\"\"\"\n", + "from collections import namedtuple, defaultdict\n", + "from random import shuffle, randint\n", + "#----------------------------------------------------\n", + "__docformat__ = 'restructedtext en'\n", + "\n", + "import cPickle\n", + "import gzip\n", + "import os\n", + "import sys\n", + "import timeit\n", + "\n", + "import numpy\n", + "import numpy as np\n", + "import theano\n", + "import theano.tensor as T\n", + "#----------------------------------------------------\n", + "\n", + "def get_data():\n", + " '''\n", + " \n", + " '''\n", + " all_data = [] \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " label=line.split()[0]\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " train_data = all_data[:25000]\n", + " dev_data = all_data[25000:27500]+all_data[47500:50000]\n", + " test_data=all_data[27500:47500]\n", + " # labels\n", + " train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + " dev_tags= [ 1.0 for i in range(2500)] + [ 0.0 for i in range(2500)]\n", + " test_tags= [ 1.0 for i in range(10000)] + [ 0.0 for i in range(10000)]\n", + " return train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " #--------------------------------------------------\n", + "#train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + "########################\n", + "\n", + "\n", + "# Let's get a dictionary of all the words in training data\n", + "# These will be our bag-of-words features\n", + "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", + "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", + "def get_space(train_data):\n", + " \"\"\"\n", + " input is a list of namedtuples\n", + " get a dict of word space\n", + " key=word\n", + " 
value=len of the dict at that point \n", + " (that will be the index of the word and it is unique since the dict grows as we loop)\n", + " \"\"\"\n", + " word_space=defaultdict(int)\n", + " for doc in train_data:\n", + " for w in doc.words:\n", + " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", + " # but that doesn't matter.\n", + " word_space[w]+=1\n", + " return word_space\n", + "\n", + "# train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + "# word_space=get_space(train_data)\n", + "# word_space={w: word_space[w] for w in word_space if word_space[w] > 500}\n", + "# space_len=len(word_space)\n", + "# print \"space_len: \", space_len\n", + "def get_sparse_vec(data_point, space):\n", + " # create empty vector\n", + " sparse_vec = np.zeros((len(space)))\n", + " for w in set(data_point.words):\n", + " # use exception handling such that this function can also be used to vectorize \n", + " # data with words not in train (i.e., test and dev data)\n", + " try:\n", + " sparse_vec[space[w]]=1\n", + " except:\n", + " continue\n", + " return sparse_vec\n", + "\n", + " \n", + "# train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + "# test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + "# dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", + "# #---------------------------\n", + "# train_vecs=np.array(train_vecs)\n", + "# train_tags=np.array(train_tags)\n", + "# dev_vecs=np.array(dev_vecs)\n", + "# dev_tags=np.array(dev_tags)\n", + "# test_vecs=np.array(test_vecs)\n", + "# test_tags=np.array(test_tags)\n", + "# print train_vecs.shape\n", + "# print dev_vecs.shape\n", + "# print test_vecs.shape\n", + "\n", + "\n", + "def load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags):\n", + " #------------------------------\n", + " # Modified from Theano tutorial.\n", + " # I basically pass data_x, data_y instead of data_xy\n", + " def shared_dataset(data_x, data_y, borrow=True):\n", + " \"\"\" Function that loads the dataset into shared variables\n", + "\n", + " The reason we store our dataset in shared variables is to allow\n", + " Theano to copy it into the GPU memory (when code is run on GPU).\n", + " Since copying data into the GPU is slow, copying a minibatch everytime\n", + " is needed (the default behaviour if the data is not in a shared\n", + " variable) would lead to a large decrease in performance.\n", + " \"\"\"\n", + " shared_x = theano.shared(numpy.asarray(data_x,\n", + " dtype=theano.config.floatX), borrow=borrow)\n", + " shared_y = theano.shared(numpy.asarray(data_y,\n", + " dtype=theano.config.floatX),\n", + " borrow=borrow)\n", + " # When storing data on the GPU it has to be stored as floats\n", + " # therefore we will store the labels as ``floatX`` as well\n", + " # (``shared_y`` does exactly that). But during our computations\n", + " # we need them as ints (we use labels as index, and if they are\n", + " # floats it doesn't make sense) therefore instead of returning\n", + " # ``shared_y`` we will have to cast it to int. 
This little hack\n", + " # lets ous get around this issue\n", + " return shared_x, T.cast(shared_y, 'int32')\n", + " #-----------------------------------------------------------------\n", + " train_set_x, train_set_y = shared_dataset(train_vecs, train_tags)\n", + " valid_set_x, valid_set_y = shared_dataset(dev_vecs, dev_tags)\n", + " test_set_x, test_set_y = shared_dataset(test_vecs, test_tags)\n", + "\n", + " rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),\n", + " (test_set_x, test_set_y)]\n", + " return rval\n", + "\n", + "#rval=load_data(train_vecs, train_tags)\n", + "#print rval\n", + "\n", + "\n", + "class LogisticRegression(object):\n", + " \"\"\"Multi-class Logistic Regression Class\n", + "\n", + " The logistic regression is fully described by a weight matrix :math:`W`\n", + " and bias vector :math:`b`. Classification is done by projecting data\n", + " points onto a set of hyperplanes, the distance to which is used to\n", + " determine a class membership probability.\n", + " \"\"\"\n", + "\n", + " def __init__(self, input, n_in, n_out):\n", + " \"\"\" Initialize the parameters of the logistic regression\n", + "\n", + " :type input: theano.tensor.TensorType\n", + " :param input: symbolic variable that describes the input of the\n", + " architecture (one minibatch)\n", + "\n", + " :type n_in: int\n", + " :param n_in: number of input units, the dimension of the space in\n", + " which the datapoints lie\n", + "\n", + " :type n_out: int\n", + " :param n_out: number of output units, the dimension of the space in\n", + " which the labels lie\n", + "\n", + " \"\"\"\n", + " # start-snippet-1\n", + " # initialize with 0 the weights W as a matrix of shape (n_in, n_out)\n", + " self.W = theano.shared(\n", + " value=numpy.zeros(\n", + " (n_in, n_out),\n", + " dtype=theano.config.floatX\n", + " ),\n", + " name='W',\n", + " borrow=True\n", + " )\n", + " # initialize the biases b as a vector of n_out 0s\n", + " self.b = theano.shared(\n", + " value=numpy.zeros(\n", + " (n_out,),\n", + " dtype=theano.config.floatX\n", + " ),\n", + " name='b',\n", + " borrow=True\n", + " )\n", + "\n", + " # symbolic expression for computing the matrix of class-membership\n", + " # probabilities\n", + " # Where:\n", + " # W is a matrix where column-k represent the separation hyperplane for\n", + " # class-k\n", + " # x is a matrix where row-j represents input training sample-j\n", + " # b is a vector where element-k represent the free parameter of\n", + " # hyperplane-k\n", + " self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)\n", + "\n", + " # symbolic description of how to compute prediction as class whose\n", + " # probability is maximal\n", + " self.y_pred = T.argmax(self.p_y_given_x, axis=1)\n", + " # end-snippet-1\n", + "\n", + " # parameters of the model\n", + " self.params = [self.W, self.b]\n", + "\n", + " # keep track of model input\n", + " self.input = input\n", + "\n", + " def negative_log_likelihood(self, y):\n", + " \"\"\"Return the mean of the negative log-likelihood of the prediction\n", + " of this model under a given target distribution.\n", + "\n", + " .. 
math::\n", + "\n", + " \\frac{1}{|\\mathcal{D}|} \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D}) =\n", + " \\frac{1}{|\\mathcal{D}|} \\sum_{i=0}^{|\\mathcal{D}|}\n", + " \\log(P(Y=y^{(i)}|x^{(i)}, W,b)) \\\\\n", + " \\ell (\\theta=\\{W,b\\}, \\mathcal{D})\n", + "\n", + " :type y: theano.tensor.TensorType\n", + " :param y: corresponds to a vector that gives for each example the\n", + " correct label\n", + "\n", + " Note: we use the mean instead of the sum so that\n", + " the learning rate is less dependent on the batch size\n", + " \"\"\"\n", + " # start-snippet-2\n", + " # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n", + " # number of examples (call it n) in the minibatch\n", + " # T.arange(y.shape[0]) is a symbolic vector which will contain\n", + " # [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of\n", + " # Log-Probabilities (call it LP) with one row per example and\n", + " # one column per class LP[T.arange(y.shape[0]),y] is a vector\n", + " # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n", + " # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n", + " # the mean (across minibatch examples) of the elements in v,\n", + " # i.e., the mean log-likelihood across the minibatch.\n", + " return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])\n", + " # end-snippet-2\n", + "\n", + " def errors(self, y):\n", + " \"\"\"Return a float representing the number of errors in the minibatch\n", + " over the total number of examples of the minibatch ; zero one\n", + " loss over the size of the minibatch\n", + "\n", + " :type y: theano.tensor.TensorType\n", + " :param y: corresponds to a vector that gives for each example the\n", + " correct label\n", + " \"\"\"\n", + "\n", + " # check if y has same dimension of y_pred\n", + " if y.ndim != self.y_pred.ndim:\n", + " raise TypeError(\n", + " 'y should have the same shape as self.y_pred',\n", + " ('y', y.type, 'y_pred', self.y_pred.type)\n", + " )\n", + " # check if y is of the correct datatype\n", + " if y.dtype.startswith('int'):\n", + " # the T.neq operator returns a vector of 0s and 1s, where 1\n", + " # represents a mistake in prediction\n", + " return T.mean(T.neq(self.y_pred, y))\n", + " else:\n", + " raise NotImplementedError()\n", + "\n", + "\n", + "def sgd_optimization(learning_rate=0.13, n_epochs=1000,\n", + " batch_size=600):\n", + " \"\"\"\n", + " Demonstrate stochastic gradient descent optimization of a log-linear\n", + " model\n", + "\n", + " This is demonstrated on MNIST.\n", + "\n", + " :type learning_rate: float\n", + " :param learning_rate: learning rate used (factor for the stochastic\n", + " gradient)\n", + "\n", + " :type n_epochs: int\n", + " :param n_epochs: maximal number of epochs to run the optimizer\n", + "\n", + " :type dataset: string\n", + " :param dataset: the path of the MNIST dataset file from\n", + " http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz\n", + "\n", + " \"\"\"\n", + " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", + " train_set_x, train_set_y = datasets[0]\n", + " valid_set_x, valid_set_y = datasets[1]\n", + " test_set_x, test_set_y = datasets[2]\n", + " print train_set_x.shape[0]\n", + " # compute number of minibatches for training, validation and testing\n", + " n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size\n", + " n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size\n", + " n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size\n", + 
"\n", + " ######################\n", + " # BUILD ACTUAL MODEL #\n", + " ######################\n", + " print '... building the model'\n", + "\n", + " # allocate symbolic variables for the data\n", + " index = T.lscalar() # index to a [mini]batch\n", + "\n", + " # generate symbolic variables for input (x and y represent a\n", + " # minibatch)\n", + " x = T.matrix('x') # data, presented as rasterized images\n", + " y = T.ivector('y') # labels, presented as 1D vector of [int] labels\n", + "\n", + " # construct the logistic regression class\n", + " # Each MNIST image has size 28*28\n", + " # MAM: We change size: n_in=space_len\n", + " classifier = LogisticRegression(input=x, n_in=space_len, n_out=2)\n", + "\n", + " # the cost we minimize during training is the negative log likelihood of\n", + " # the model in symbolic format\n", + " cost = classifier.negative_log_likelihood(y)\n", + "\n", + " # compiling a Theano function that computes the mistakes that are made by\n", + " # the model on a minibatch\n", + " test_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: test_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: test_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " validate_model = theano.function(\n", + " inputs=[index],\n", + " outputs=classifier.errors(y),\n", + " givens={\n", + " x: valid_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: valid_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + "\n", + " # compute the gradient of cost with respect to theta = (W,b)\n", + " g_W = T.grad(cost=cost, wrt=classifier.W)\n", + " g_b = T.grad(cost=cost, wrt=classifier.b)\n", + "\n", + " # start-snippet-3\n", + " # specify how to update the parameters of the model as a list of\n", + " # (variable, update expression) pairs.\n", + " updates = [(classifier.W, classifier.W - learning_rate * g_W),\n", + " (classifier.b, classifier.b - learning_rate * g_b)]\n", + "\n", + " # compiling a Theano function `train_model` that returns the cost, but in\n", + " # the same time updates the parameter of the model based on the rules\n", + " # defined in `updates`\n", + " train_model = theano.function(\n", + " inputs=[index],\n", + " outputs=cost,\n", + " updates=updates,\n", + " givens={\n", + " x: train_set_x[index * batch_size: (index + 1) * batch_size],\n", + " y: train_set_y[index * batch_size: (index + 1) * batch_size]\n", + " }\n", + " )\n", + " # end-snippet-3\n", + "\n", + " ###############\n", + " # TRAIN MODEL #\n", + " ###############\n", + " print '... 
training the model'\n", + " # early-stopping parameters\n", + " patience = 5000 # look as this many examples regardless\n", + " patience_increase = 2 # wait this much longer when a new best is\n", + " # found\n", + " improvement_threshold = 0.995 # a relative improvement of this much is\n", + " # considered significant\n", + " validation_frequency = min(n_train_batches, patience / 2)\n", + " # go through this many\n", + " # minibatche before checking the network\n", + " # on the validation set; in this case we\n", + " # check every epoch\n", + "\n", + " best_validation_loss = numpy.inf\n", + " test_score = 0.\n", + " start_time = timeit.default_timer()\n", + "\n", + " done_looping = False\n", + " epoch = 0\n", + " while (epoch < n_epochs) and (not done_looping):\n", + " epoch = epoch + 1\n", + " for minibatch_index in xrange(n_train_batches):\n", + "\n", + " minibatch_avg_cost = train_model(minibatch_index)\n", + " # iteration number\n", + " iter = (epoch - 1) * n_train_batches + minibatch_index\n", + "\n", + " if (iter + 1) % validation_frequency == 0:\n", + " # compute zero-one loss on validation set\n", + " validation_losses = [validate_model(i)\n", + " for i in xrange(n_valid_batches)]\n", + " this_validation_loss = numpy.mean(validation_losses)\n", + "\n", + " print(\n", + " 'epoch %i, minibatch %i/%i, validation error %f %%' %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " this_validation_loss * 100.\n", + " )\n", + " )\n", + "\n", + " # if we got the best validation score until now\n", + " if this_validation_loss < best_validation_loss:\n", + " #improve patience if loss improvement is good enough\n", + " if this_validation_loss < best_validation_loss * \\\n", + " improvement_threshold:\n", + " patience = max(patience, iter * patience_increase)\n", + "\n", + " best_validation_loss = this_validation_loss\n", + " # test it on the test set\n", + "\n", + " test_losses = [test_model(i)\n", + " for i in xrange(n_test_batches)]\n", + " test_score = numpy.mean(test_losses)\n", + "\n", + " print(\n", + " (\n", + " ' epoch %i, minibatch %i/%i, test error of'\n", + " ' best model %f %%'\n", + " ) %\n", + " (\n", + " epoch,\n", + " minibatch_index + 1,\n", + " n_train_batches,\n", + " test_score * 100.\n", + " )\n", + " )\n", + "\n", + " # save the best model\n", + " with open('best_model.pkl', 'w') as f:\n", + " cPickle.dump(classifier, f)\n", + "\n", + " if patience <= iter:\n", + " done_looping = True\n", + " break\n", + "\n", + " end_time = timeit.default_timer()\n", + " print(\n", + " (\n", + " 'Optimization complete with best validation score of %f %%,'\n", + " 'with test performance %f %%'\n", + " )\n", + " % (best_validation_loss * 100., test_score * 100.)\n", + " )\n", + " print 'The code run for %d epochs, with %f epochs/sec' % (\n", + " epoch, 1. 
* epoch / (end_time - start_time))\n", + " print ('The code for file ' +\n", + " 'best_model.pkl' +\n", + " ' ran for %.1fs' % ((end_time - start_time)))\n", + "\n", + "\n", + "def predict():\n", + " \"\"\"\n", + " An example of how to load a trained model and use it\n", + " to predict labels.\n", + " \"\"\"\n", + "\n", + " # load the saved model\n", + " classifier = cPickle.load(open('best_model.pkl'))\n", + "\n", + " # compile a predictor function\n", + " predict_model = theano.function(\n", + " inputs=[classifier.input],\n", + " outputs=classifier.y_pred)\n", + "\n", + " # We can test it on some examples from test test\n", + " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", + " #train_set_x, train_set_y = datasets[0]\n", + " #valid_set_x, valid_set_y = datasets[1]\n", + " test_set_x, test_set_y = datasets[2]\n", + " test_set_x = test_set_x.get_value()\n", + " predicted_values = predict_model(test_set_x[:10])\n", + " print (\"Predicted values for the first 10 examples in test set:\")\n", + " print predicted_values\n", + "\n", + "\n", + "if __name__ == '__main__':\n", + " train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", + " word_space=get_space(train_data)\n", + " word_space={w: word_space[w] for w in word_space if word_space[w] > 600}\n", + " space_len=len(word_space)\n", + " print(\"space_len: %d\" % space_len)\n", + " train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", + " test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", + " dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", + " #del word_space\n", + " #---------------------------\n", + " train_vecs=np.array(train_vecs)\n", + " train_tags=np.array(train_tags)\n", + " dev_vecs=np.array(dev_vecs)\n", + " dev_tags=np.array(dev_tags)\n", + " test_vecs=np.array(test_vecs)\n", + " test_tags=np.array(test_tags)\n", + " #del train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", + " print('train_vecs.shape: %d, %d' % train_vecs.shape)\n", + " print('dev_vecs.shape: %d, %d' % dev_vecs.shape)\n", + " print('test_vecs.shape: %d, %d' % test_vecs.shape)\n", + " sgd_optimization()\n", + " #------------------------------------------------------\n", + " print('Now predicting...')\n", + " predict()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + 
}, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From fe3364f5fb90f83e1292251567828bc204f41208 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sun, 7 Feb 2016 09:57:40 -0500 Subject: [PATCH 19/36] Delete neural_net_demystified_part_2_forward_propagation.ipynb --- ...mystified_part_2_forward_propagation.ipynb | 982 ------------------ 1 file changed, 982 deletions(-) delete mode 100644 
neural_net_demystified_part_2_forward_propagation.ipynb diff --git a/neural_net_demystified_part_2_forward_propagation.ipynb b/neural_net_demystified_part_2_forward_propagation.ipynb deleted file mode 100644 index 62a67e1..0000000 --- a/neural_net_demystified_part_2_forward_propagation.ipynb +++ /dev/null @@ -1,982 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Populating the interactive namespace from numpy and matplotlib\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "%pylab inline\n", - "# Neural Networks Demystified\n", - "# Part 1: Data + Architecture\n", - "\n", - "from IPython.display import YouTubeVideo\n", - "YouTubeVideo('bxe2T-V8XRs')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#Import code from last time\n", - "\n", - "#\n", - "from IPython.display import YouTubeVideo\n", - "YouTubeVideo('bxe2T-V8XRs')\n", - "# Supporting code for short YouTube series on artificial neural networks.\n", - "#\n", - "# Stephen Welch\n", - "# @stephencwelch\n", - "\n", - "import numpy as np\n", - "\n", - "# X = (hours sleeping, hours studying), y = Score on test\n", - "X = np.array(([3,5], [5,1], [10,2]), dtype=float)\n", - "y = np.array(([75], [82], [93]), dtype=float)\n", - "\n", - "# Normalize (by dividing by the maximum value in each array)\n", - "X = X/np.amax(X, axis=0)\n", - "y = y/100 #Max test score is 100" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(3, 2) (3, 1)\n" - ] - } - ], - "source": [ - "print X.shape, y.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 0.3 1. ]\n", - " [ 0.5 0.2]\n", - " [ 1. 
0.4]]\n",
-      "[[ 0.75]\n",
-      " [ 0.82]\n",
-      " [ 0.93]]\n"
-     ]
-    }
-   ],
-   "source": [
-    "print X\n",
-    "print y"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "class Neural_Network(object):\n",
-    "    def __init__(self):        \n",
-    "        #Define Hyperparameters\n",
-    "        self.inputLayerSize = 2\n",
-    "        self.outputLayerSize = 1\n",
-    "        self.hiddenLayerSize = 3\n",
-    "        \n",
-    "    def forward(self, X):\n",
-    "        #Propagate inputs through network\n",
-    "        pass"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "def sigmoid(z):\n",
-    "    #Apply sigmoid activation function to scalar, vector, or matrix\n",
-    "    return 1/(1+np.exp(-z))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<matplotlib.legend.Legend at 0x...>"
-      ]
-     },
-     "execution_count": 23,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "[base64 PNG omitted: sigmoid curve plotted over (-6, 6), legend 'sigmoid']",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x...>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "testInput = np.arange(-6,6,0.01)\n",
-    "plot(testInput, sigmoid(testInput), linewidth=2)\n",
-    "grid(1)\n",
-    "legend(['sigmoid'])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.7310585786300049"
-      ]
-     },
-     "execution_count": 17,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sigmoid(1)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 0.26894142,  0.5       ,  0.73105858])"
-      ]
-     },
-     "execution_count": 18,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sigmoid(np.array([-1,0,1]))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.46600501,  0.37731874,  0.5415919 ],\n",
-       "       [ 0.23157348,  0.41235015,  0.55084673],\n",
-       "       [ 0.37693986,  0.10342644,  0.711002  ]])"
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sigmoid(np.random.randn(3,3))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "# Whole Class with additions:\n",
-    "class Neural_Network(object):\n",
-    "    def __init__(self):        \n",
-    "        #Define Hyperparameters\n",
-    "        self.inputLayerSize = 2\n",
-    "        self.outputLayerSize = 1\n",
-    "        self.hiddenLayerSize = 3\n",
-    "        \n",
-    "        #Weights (parameters)\n",
-    "        self.W1 = np.random.randn(self.inputLayerSize,self.hiddenLayerSize)\n",
-    "        self.W2 = np.random.randn(self.hiddenLayerSize,self.outputLayerSize)\n",
-    "        \n",
-    "    def forward(self, X):\n",
-    "        #Propagate inputs through network\n",
-    "        self.z2 = np.dot(X, self.W1)\n",
-    "        self.a2 = self.sigmoid(self.z2)\n",
-    "        self.z3 = np.dot(self.a2, self.W2)\n",
-    "        yHat = self.sigmoid(self.z3) \n",
-    "        return yHat\n",
-    "        \n",
-    "    def sigmoid(self, z):\n",
-    "        #Apply sigmoid activation function to scalar, vector, or matrix\n",
-    "        return 1/(1+np.exp(-z))\n",
-    "        \n",
-    "    def sigmoidPrime(self, z):\n",
-    "        #Gradient of sigmoid\n",
-    "        return np.exp(-z)/((1+np.exp(-z))**2)\n",
-    "        \n",
-    "    def costFunction(self, X, y):\n",
-    "        #Compute cost for given X,y, use weights already stored in class.\n",
-    "        self.yHat = self.forward(X)\n",
-    "        J = 0.5*sum((y-self.yHat)**2)\n",
-    "        return J\n",
-    "        \n",
-    "    def costFunctionPrime(self, X, y):\n",
-    "        #Compute derivative with respect to W1 and W2 for a given X and y:\n",
-    "        self.yHat = self.forward(X)\n",
-    "        \n",
-    "        delta3 = np.multiply(-(y-self.yHat), self.sigmoidPrime(self.z3))\n",
-    "        dJdW2 = np.dot(self.a2.T, delta3)\n",
-    "        \n",
-    "        delta2 = np.dot(delta3, self.W2.T)*self.sigmoidPrime(self.z2)\n",
-    "        dJdW1 = np.dot(X.T, delta2)\n",
-    "        \n",
-    "        return dJdW1, dJdW2"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[[ 0.48736193]\n",
-      " [ 0.54813314]\n",
-      " [ 0.54630022]]\n"
-     ]
-    }
-   ],
-   "source": [
-    "NN = Neural_Network()\n",
-    "yHat = NN.forward(X)\n",
-    "print yHat"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[[ 0.75]\n",
-      " [ 0.82]\n",
-      " [ 0.93]]\n"
-     ]
-    }
-   ],
-   "source": [
-    "print y"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "# Third part"
-   ]
-  },
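A quick way to sanity-check the analytic gradients that costFunctionPrime computes above is to compare them against numerically estimated gradients. A minimal sketch, assuming numpy is available as np and that the Neural_Network class, X, and y from the cells above are in scope; numerical_gradients is an illustrative helper name, not part of the original notebook:

import numpy as np

def numerical_gradients(N, X, y, e=1e-4):
    # Estimate dJ/dw for every weight with a centered difference:
    # (J(w + e) - J(w - e)) / (2e), perturbing one weight at a time.
    grads = []
    for W in (N.W1, N.W2):
        g = np.zeros(W.shape)
        for idx in np.ndindex(*W.shape):
            saved = W[idx]
            W[idx] = saved + e
            loss_plus = N.costFunction(X, y)
            W[idx] = saved - e
            loss_minus = N.costFunction(X, y)
            W[idx] = saved  # restore the weight
            g[idx] = (loss_plus - loss_minus) / (2 * e)
        grads.append(g)
    return grads  # [numeric dJdW1, numeric dJdW2]

If the backpropagation math is right, numerical_gradients(NN, X, y) should agree with NN.costFunctionPrime(X, y) to several decimal places.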
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAF4hJREFUeJzt3X+MVeWdx/H3V/yBjoBULLMCdax1V9iklsVFss1SmnZb\nJFqSxeiincWupmazLNv+o9vNZqvpH5umm6axboyuFpqQFNMf21AEdLMpWdO0tliVWtBIGlaQQJFS\nQKTALN/9496h43Xm3jvPPHfOfb7zeSXEe+595vA8fJzvnPnec88xd0dERGI5r+oJiIhIfiruIiIB\nqbiLiASk4i4iEpCKu4hIQCruIiIBtSzuZvYNMztoZr9oMuYhM3vNzF4ys/l5pygiIqPVzpH7WmDp\nSC+a2TLgA+5+LfBZ4JFMcxMRkUQti7u7PwscaTLkU8A362OfAy4zs5l5piciIily9NxnAXuHbO8D\nZmfYr4iIJMr1hqo1bOuaBiIiFTo/wz7eAOYM2Z5df+4dzEwFX0Qkgbs3HkC3lKO4bwRWAxvMbBHw\nW3c/ONzAlAmWwswecPcHqp5Hp0ReX+S1gdZXutQD45bF3cy+BXwEmGFme4EvAhcAuPuj7r7ZzJaZ\n2W7gBPCZlIkE0Ff1BDqsr+oJdFBf1RPosL6qJ9BhfVVPoBu1LO7uvrKNMavzTEdERHLQJ1TzWVf1\nBDpsXdUT6KB1VU+gw9ZVPYEOW1f1BLqRjdfNOszMI/fcRUQ6IbV25nhDVQAzW+Lu26qeR6dEXl/k\ntUF3rU9nzTWX8wBYxV1ExpVu7Tk8s7yNDbVlRGTc1OtA1dPoSmY27JF7au3UG6oiIgGpuGdiZkuq\nnkMnRV5f5LVB/PXJ8FTcRUQCUs9dRMbNcD33efNu4MSJzv2dPT2wc+f2zv0FmeTuuetsGRGp1IkT\ncMUVnSu+hw7d0LF9dzO1ZTKJ3teMvL7Ia4P468vhK1/5Crfeeus7nluzZg2f+9znKprR2OnIXSS8\nyevMequeRFfr7+/nwQcf5OjRo0ybNo2BgQGefPJJtm7dWvXUkqm4Z9ItnwDslMjri7y2mmnAgTer\nnkWNXVX1DIbT29vL4sWL+fa3v80999zD1q1bueKKK5g/f37VU0umtoyICLBq1SrWr18PwPr16+nv\n7694RmOj4p5J9L5m5PVFXlvNqclVz6AEy5cvZ8eOHbz88ss89dRT3HnnnVVPaUxU3EVEgMmTJ3Pr\nrbdyxx13cOONNzJ79uyqpzQm6rlnEr1vG3l9kddWc9Hvqp5BMz09nT1dsaen/bGrVq3i8ccfZ+3a\ntR2bz3hRcReRSnXTB4zmzJnDxRdfzIoVK6qeypipLZNJ9L5t5PVFXluNeu7tOHv2LF/96ldZuXIl\nl156adXTGTMduYvIhHfixAlmzpzJ1VdfXfS57UPp2jIiwZn17umi89wX6Hruw9P13EVEpCUV90yi\n920jry/y2mrUc5+IVNxFRAJSz10kOPXcy6Ceu4iItKTinkn0vm3k9UVeW4167hORznMXkUrdMG8e\nnb7P3vadO5O/fMmSJfT393P33Xefe27btm309/ezd+/ell8/mrE5qbhnEv36JJHXF3ltNd19bRlO\nnGD7FVd0bPc3HDo0pq83M8zKe7tQbRkRmfDGepu9tWvXMm/ePKZOnco111zDY489BtQ++XrTTTex\nf/9+pkyZwtSpUzlw4ED2+Q9HxT2T6H3byOuLvLYa9dxb6e/vZ+vWrRw9ehTg3G32Vq1aBUCrM3xm\nzpzJU089xbFjx1i7di2f//zneeGFF+jp6WHr1q1ceeWVHD9+nGPHjtHbOz63PFRxF5EJb+ht9oB3\n3GbP3VmzZg3Tp08/9+eWW255R6tm2bJlXH311QAsXryYT3ziEzz77LNA6x8MnaLinkn0vm3k9UVe\nW02X99y7xEi32TMzvv71r3PkyJFzfzZt2vSOor1lyxYWLVrE5ZdfzvTp09m8eTOHDx+uZB2DVNxF\nRBjdbfaGFvZTp06xYsUK7rvvPn79619z5MgRli1bdm5MVW/GqrhnEr1vG3l9kddWo557O5rdZq9Z\na+X06dOcPn2aGTNmcN5557FlyxaeeeaZc6/PnDmTw4cPc+zYsY7Ov5FOhRSRavX0jPl0xVb7b9dI\nt9kb7uh78LkpU6bw0EMPcdttt3Hq1CluueUWli9ffm7cddddx8qVK3n/+9/P2bNn2blz57i8qdry\n2jJmthT4GjAJeNzdv9zw+jRgPTCH2g+Lf3P3dcPsR9eWKZjZlJ3Qc0nV8xjeibfdj8+rehbdSteW\nad/rr7/O3LlzOXjw4LjfjSn3tWWaHrmb2STgYeDjwBvAz8xso7vvGjLs74CX3f0WM5sBvGpm6919\nYLSTkW7Wc0n3FIhGvTOqnoGUb6LdZm8hsNvd9wCY2QZgOTC0uJ8FptYfTwUOT8TCbmZLYp91Ebdv\nq+wk4m32WhX3WcDQCyLsA25sGPMw8AMz2w9MAW7LNz0Rkc7r6enhrbfeqnoaWbUq7u00x5YCP3f3\nj5rZNcB/mdn17n68caCZrQP21Dd/C7w4eMQ0eMZCqduDz3XLfDqxPtg0BW4+/vvH0C3bY1mfu2+r\n+t+3s9sX/a7qfH6/La3Us7urvrkneT/N3twws0XAA+6+tL79BeDs0DdVzWwT8K/u/qP69n8D97v7\n9oZ96Q3VgnXXm3KNeme4H+irehbdqruy6+43VKs03jfr2A5ca2Z9ZnYhcDuwsWHM69TecMXMZgJ/\nBPxqtBMpnc6VLpeyk4iatmXcfcDMVgNPUzsV8gl332Vm99ZffxT4ErDOzHYABtzn7r/p8LxFpFAl\nXj63RLqHqrSlu361b6S2TDPKrmy6h6qIiJyj4p6J+rblUnZli59fGhV3EZGAVNwzif0JR4h8TXBl\nV7b4+aVRcRcRCUjFPZP4fb+4fVtlV7b4+aUZ1+u5107J6ka6ZKyIxDLON+vo3nNtx7qH+H2/uH1b\nZVe2+PmlUVtGRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM\n4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVd\nRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z\n4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf9\n4vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCQgFfdM4vf94vZtlV3Z4ueXRsVdRCSg\nlsXdzJaa2Stm9pqZ3T/CmCVm9oKZvWxm27LPsgDx+35x+7bKrmzx80tzfrMXzWwS8DDwceAN4Gdm\nttHddw0Zcxnw78An3X2fmc3o5IRFRKS1VkfuC4Hd7r7H
3c8AG4DlDWPuAL7r7vsA3P3N/NPsfvH7\nfnH7tsqubPHzS9OquM8C9g7Z3ld/bqhrgfeY2Q/NbLuZ9eecoIiIjF7TtgzgbezjAuBPgI8BlwA/\nNrOfuPtrY51cSeL3/eL2bZVd2eLnl6ZVcX8DmDNkew61o/eh9gJvuvtJ4KSZ/Q9wPTBMcV/RB1ed\nrj2eNgALTsLNx2vbm6bU/lvN9uCvdoP/o2j7ndu1X+03TemWvJTfaPPrrrx+v11T9b9PN23XH99V\n/6fZQyJzH/ng3MzOB16ldlS+H/gpsLLhDdXrqL3p+kngIuA54HZ339mwLwd/PnWindU7w/1A31j2\nYGZLIh9BmE0/AEcaf7B3ibHlp+yqpO+9VszM3d1G+3VNj9zdfcDMVgNPA5OAJ9x9l5ndW3/9UXd/\nxcy2AjuAs8B/NBZ2EREZX02P3LP+RcGP3KMz690DB7r0TCjl14yyK1vqkbs+oSoiEpCKeybxz7WN\ne660sitb/PzSqLiLiASk4p5J5Hfra+KeK63syhY/vzQq7iIiAam4ZxK/7xe3b6vsyhY/vzQq7iIi\nAbW6/IC0KX7fr3v7tpfw5pW9ZntSv34m0GujPo24LSfg7ePu8zqy87Z1b3Y5xP/eS6PiLsW7BLcD\n0JUf0ukF3d+gibH+YO6k7vjBnE7FPZPo17eI3LfdBFNuhuNVz6Nzuje7HD+YO5Vf6T+Y1XMXEQlI\nxT2T2EftELlvG/uoHSJnBxMhvzQq7iIiAam4ZxL/XNvu7duO1SaY0npUyeJmBxMhvzQq7iIiAam4\nZ6Kee7ni92zjZgcTIb80Ku4iIgGpuGeinnu54vds42YHEyG/NCruIiIBqbhnop57ueL3bONmBxMh\nvzQq7iIiAenaMuS5eNEpmHwRZD9C6p6LF8Xt2+raMmWLn18aFXd08SIRiUdtmUziHznE7dsqu7LF\nzy+NiruISEAq7pnEP9c2bt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8v\nbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJScc8kft8vbt9W2ZUtfn5pVNxFRAJS\ncc8kft8vbt9W2ZUtfn5pWhZ3M1tqZq+Y2Wtmdn+TcX9qZgNm9pd5pygiIqPVtLib2STgYWApMA9Y\naWZzRxj3ZWArYB2YZ9eL3/eL27dVdmWLn1+aVkfuC4Hd7r7H3c8AG4Dlw4z7e+A7wKHM8xMRkQSt\nivssYO+Q7X31584xs1nUCv4j9ac82+wKEr/vF7dvq+zKFj+/NK2KezuF+mvAP7q7U2vJTMi2jIhI\nN2l1m703gDlDtudQO3ofagGwwcygdku4m8zsjLtvfPfuVvTBVadrj6cNwIKTcHP9p+6met+smu3B\nvt3gUcBotx+E9y6Ak6lfP9L2IDNbAuDu26rYhremwqZT3ZJXzvyG/lvHzO/U5KrzGXm7vtWF+Z2C\nc+9VjGde9cd31f/qPSSy2gH3CC+anQ+8CnwM2A/8FFjp7rtGGL8W+IG7f2+Y1xz8+dSJdtIMJl1/\niLMvjWUfnbyH6gH3vtz7HS2z6QfgSOMP9q4w1vw6eYPlbsgvcnYwEb73zN191B2Rpkfu7j5gZquB\np4FJwBPuvsvM7q2//mjSbAOK3/eL27dVdmWLn1+aVm0Z3H0LsKXhuWGLurt/JtO8RERkDPQJ1Uzi\nn2sb91xpZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEX\nEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2\n+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/\nuH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlI\nxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+Pml\nUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQlIxT2T+H2/uH1b\nZVe2+PmlUXEXEQlIxT2T+H2/uH1bZVe2+PmlUXEXEQmoreJuZkvN7BUze83M7h/m9TvN7CUz22Fm\nPzKzD+afaneL3/eL27dVdmWLn1+alsXdzCYBDwNLgXnASjOb2zDsV8Bid/8g8CXgsdwTFRGR9rVz\n5L4Q2O3ue9z9DLABWD50gLv/2N2P1jefA2bnnWb3i9/3i9u3VXZli59fmnaK+yxg75DtffXnRnI3\nsHkskxIRkbE5v40x3u7OzOyjwN8AHx5+xIo+uOp07fG0AVhwEm6u/9TdVO+bVbM92LcbPAoY7faD\n8N4FcDL160faHmRmSwDcfVsV2/DWVNh0qlvyypnf0H/rmPmdmlx1PiNv17e6ML9TcO69ivHMq/74\nrvpfvYdE5t68dpvZIuABd19a3/4CcNbdv9ww7oPA94Cl7r57mP04+POpE+2kGUy6/hBnXxrLPjbB\nlE78etgLMw649+Xe72iZTT8AR/ZVPY/hjDW/TmUH3ZFf5OxgInzvmbu7jfbr2mnLbAeuNbM+M7sQ\nuB3Y2PCXv49aYf/0cIV9Iojf94vbt1V2ZYufX5qWbRl3HzCz1cDTwCTgCXffZWb31l9/FPgXYDrw\niJkBnHH3hZ2btoiINNNOzx133wJsaXju0SGP7wHuyTu1snTyV/vuEPdcaWVXtvj5pdEnVEVEAlJx\nzyT+kUPcvq2yK1v8/NKouIuIBKTinkn861vE7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+N\niruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsq\nu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE\n7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruI\nSEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LF\nzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF7dsqu7LFzy+NiruISEAq7pnE7/vF\n7dsqu7LFzy+NiruISEAti7uZLTWzV8zsNTO7f4QxD9Vff8nM5uefZveL3/eL27dVdmWLn1+apsXd\nzCYBDwNLgXnASjOb2zBmGfABd78W+CzwSIfm2tWeh4urnkNnnbmw6hl0irIrW/z80rQ6cl8I7Hb3\nPe5+BtgALG8
Y8yngmwDu/hxwmZnNzD7TLncUzq96Dp3lYVt4yq5s8fNL0yr0WcDeIdv76s+1GjN7\n7FMTEZFUrYq7t7kfS/y6MP4XQv/qC/8X9uhI2ZUtfn5pWoX+BjBnyPYcakfmzcbMrj83DFswuumN\njzcBgzHPzeDyDNN5937NuuSHZdz8OpUddEt+cbODifC9N3qtivt24Foz6wP2A7cDKxvGbARWAxvM\nbBHwW3c/2Lgjd288uhcRkQ5pWtzdfcDMVgNPA5OAJ9x9l5ndW3/9UXffbGbLzGw3cAL4TMdnLSIi\nTZl7sb91iIjICLKfIhX5Q0+t1mZmS8zsqJm9UP/zz1XMM4WZfcPMDprZL5qMKTI3aL2+krMDMLM5\nZvZDM/ulmb1sZmtGGFdkhu2sr9QMzWyymT1nZi/W1/bACONGl527Z/tDrXWzG+gDLgBeBOY2jFkG\nbK4/vhH4Sc45dOpPm2tbAmyseq6J6/tzYD7wixFeLzK3Uayv2Ozq8+8FPlR/fCnwapTvvVGsr9gM\ngUvq/z0f+Alw41izy33kHvlDT+2sDd59WmgR3P1Z4EiTIaXmBrS1Pig0OwB3P+DuL9YfvwXsAq5s\nGFZshm2uDwrN0N3frj+8kNrB49mGIaPOLndxj/yhp3bW5sCf1X9t2mxm88Ztdp1Xam7tCpNd/ey2\n+cBzDS+FyLDJ+orN0MzOM7MXgYPAM+7+s4Yho84u94cbIn/oqZ05/hyY4+5vm9lNwPeBP+zstMZV\nibm1K0R2ZnYp8B3gH+pHuO8a0rBdVIYt1ldshu5+FviQmU0D/tPM/tjdf9kwbFTZ5T5yz/yhp67S\ncm3ufnzw1yt33wJcYGbvGb8pdlSpubUlQnZmdgHwXWC9u39/mCFFZ9hqfREydPejwA+pXaxxqFFn\nl7u4n/vQk5ldSO1DTxsbxmwE/hqg2YeeulDLtZnZTDOz+uOF1E41/c34T7UjSs2tLaVnV5/7E8BO\nd//aCMOKzbCd9ZWaoZnNMLPL6o8vBv6C2nsKQ406u6xtGQ/8oad21gbcCvytmQ0AbwN/VdmER8nM\nvgV8BJhhZnuBL1J7Y6fo3Aa1Wh8FZ1f3YeDTwA4ze6H+3D8B74MQGbZcH+Vm+AfAN612ifXzgCfr\nWY2pbupDTCIiAYW+zrOIyESl4i4iEpCKu4hIQCruIiIBqbiLiASk4i4iEpCKu4hIQCruIiIB/T96\nyfLdDqRLRwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "#Compare estimate, yHat, to actually score\n", - "bar([0,1,2], y, width = 0.35, alpha=0.8)\n", - "bar([0.35,1.35,2.35],yHat, width = 0.35, color='r', alpha=0.8)\n", - "grid(1)\n", - "legend(['y', 'yHat'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Fourth part: https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%204%20Backpropagation.ipynb" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "def sigmoid(z):\n", - " #Apply sigmoid activation function to scalar, vector, or matrix\n", - " return 1/(1+np.exp(-z))\n", - "\n", - "def sigmoidPrime(z):\n", - " #Derivative of sigmoid function\n", - " return np.exp(-z)/((1+np.exp(-z))**2)" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXIAAAEACAYAAACuzv3DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYFNXV+PHvYTaGTTZZhl1ABQy+bohoDK6gUYxbEKO+\naFRcUGN+JgJGetoNiTFRI0YElbhE44tR0dc1Iq/7QkRRBAWVHRUQZR+28/vj1gzDMMz0DNVdXXfO\n53nmgeouus+hZs5Un6p7r6gqxhhj4qte1AEYY4zZPVbIjTEm5qyQG2NMzFkhN8aYmLNCbowxMWeF\n3BhjYq7aQi4iD4jItyLySRX73CUic0XkYxE5INwQjTHGVCWVM/IHgYG7elJETgS6qWp34GLgbyHF\nZowxJgXVFnJVfQNYVcUug4C/B/u+BzQVkdbhhGeMMaY6YfTI2wGLym0vBtqH8LrGGGNSENbFTqmw\nbeP+jTEmQ3JDeI0lQIdy2+2Dx3YgIlbcjTGmFlS14snyDsIo5FOA4cDjItIX+EFVv61NMHEmIsWq\nWhx1HOnic34+5wZ1Lz8R6gGtcG3f9hX+bBc8tyfQktrVwPGqXLKbYacslZPgapMQkceAnwEtRWQR\nkADyAFR1vKo+LyInisg8YB1w/u6FHVudow4gzTpHHUAadY46gDTrHHUAYROhIbAX0BUO+bkIrYBu\nbpsOBDUqBWuAFcDycl8rcTd4/Aj8EHyV//v34WUSjmoLuaoOSWGf4eGEY4wx24lQH9gX6AXsV+6r\n8/a9egIcXOGfrsS1eBdX+HMJ8C2uYK9QZWMaw8+YMForxpkUdQBpNinqANJoUtQBpNmkqANIhQh5\nuCLdBzgk+LMnkFPJ7puBr4Ev4eiNwOvu73wJzFdlfUaCzhKSqYUlRER97pEbY2omaI/0A47CtW8P\nBOpX2G0bMBf4tMLXPFW2ZC7a6KRSO62Qh0RE+qvqtKjjSBef88vW3OxOr7qnshqZSu201ooxWcyW\nYqw7RGp/nmtn5MZkqeBnJuowTIaISK3PyG0aW2OMiTkr5CERkf5Rx5BOPufnc26mbrBCbowJzZgx\nY7jooouy7n07d+7Mq6++msGIMst65MZkKeuRh6dLly7cf//9HH300VGHskvWIzfGmDrMCnlIfO+z\n+pyfz7ml09ixY2nfvj1NmjRh3333ZerUqRQXF3PuueeW7fPQQw/RqVMnWrZsyU033UTnzp2ZOnUq\nAMXFxZx55pmce+65NGnShN69ezN37lzGjBlD69at6dixI6+88krZay1dupRBgwbRokULunfvzsSJ\nE8ueq/i+Dz/8cNn73nLLLRn434iWFXJjYkoknK/a+Pzzzxk3bhzTp09n9erVvPzyy3Tu3HmHe6E/\n++wzLr/8ch577DGWLVvGjz/+yNKlS3d4neeee47zzjuPVatWccABBzBgwADAFe3Ro0czbNiwsn3P\nOussOnbsyLJly5g8eTKjRo3itddeC/4vdnzfyy67jEcffZSlS5eycuVKFi9eXLtEY8IKeUiycWRg\nmHzOz+fc0iUnJ4eSkhJmzZrF5s2b6dixI3vttdcOA5gmT57MoEGD6NevH3l5edxwww07DXo58sgj\nOe6448jJyeGMM85g+fLljBgxgpycHAYPHsz8+fNZvXo1ixYt4u2332bs2LHk5+ez//77c+GFF/LQ\nQw8B7PS+J598MkcccQT5+fnceOON1Kvnd6nzOztjPKYazldtdOvWjTvuuIPi4mJat27NkCFDWLZs\n2Q77LF26lPbtt6/6WFhYSIsWLXbYp1WrVjs837Jly7JiX1hYCMDatWtZunQpzZs3p2HDhmX7d+zY\nkSVLdlrDZqf3bdCgwU7v6xsr5CHxvc/qc34+55ZOQ4YM4Y033mDBggWICNdee+0OZ9xFRUU7tDQ2\nbNjAypUra/VeRUVFfP/996xdu7bssYULF+5QsMvvu2jR9mWE169fX+v3jQsr5MaYGvviiy+YOnUq\nJSUlFBQUUL9+fXJydpxt9vTTT+fZZ5/lnXfeYdOmTRQXF9d67pgOHTrQr18/Ro4cSUlJCTNnzuSB\nBx7gnHPO2Wnf008/neeee4633nqLTZs2MXr0aLZt21ar940LK+Qh8b3P6nN+PueWLiUlJYwcOZI9\n99yTtm3bsmLFCsaMGQNsv/DYq1cv/vrXv3LWWWdRVFRE48aNadWqFQUFBWX7VeyZV7X92GOPMX/+\nfIqKijjttNO44YYbyu4LL/9avXr1Yty4cZx99tkUFRXRvHlzOnTogM9sQJAxWcq3AUFr166lWbNm\nzJs3j06dOkUdTtaxAUFZwPc+q8/5+Zxb1J599lnWr1/PunXruOaaa+jdu7cV8TSwQm6MSZspU6bQ\nrl072rVrx5dffsnjjz8edUhestaKMVnKt9aKqZq1Vowxpg6zQh4S3/usPufnc26mbrBCbowxMWc9\ncmOylPXI6xbrkRtjTB1mhTwkvvdZfc7P59wyzcel3i699FJuuumm2oaWEdZaCYmI9Pd5qLfP+WVr\nbtZaCU/5pd6Ki4u5+eabqV+/Prm5ufTs2ZPbb7+dvn37RhqjtVayQDYWgjD5nJ/PuZmdiQhDhgxh\nzZo1LF++nCOOOILTTjut0n3jMtmWFXJjTK3Edak3VS2bhTE3N5fzzjuPb775hpUrVzJ06FAuvfRS\nTjzxRBo1asRrr73G0KFDuf766wGYNm0a7du357bbbqN169YUFRXxzDPP8Pzzz7PPPvvQokWLssnD\nSt/r1ltvpVu3brRs2ZLBgwezatWqEI+Ckxv6K9ZR2frxPCw+5xfX3CQZTqdSEzVv35Rf6q1NmzYs\nXLiQLVu28MYbb5TtU7rU20svvcQhhxzCqFGjKl3qbcqUKUyaNIkLLriAAQMGcNFFF7F06VIefPBB\nhg0bxldffQW4pd569+7N5MmTmT17Nscddxxdu3blqKOOqnSptxdeeIE+ffowcuTIXS71VlJSwqRJ\nk+jYsWPZ4hOPPfYYL7zwAocddhglJSU88sgjO7z+t99+S0lJSVmMF154IQMGDODDDz9kwYIFHHzw\nwZx99tl06tSJu+66iylTpvD666+z5557csUVV3D55Zfzj3/8o8b/51WxM3JjTI3Ffam3J554gmbN\nmtGxY0dmzJjBU089VfbcL37xCw477DCAsil3y79+Xl4e1113XVmMK1eu5KqrrqJhw4b07NmTnj17\n8vHHHwNw7733ctNNN1FUVEReXh6JRILJkyeH3rKxM/KQxPGMriZ8zi+uudXmTDos5Zd6mzVrFgMG\nDODPf/7zDvtkYqm36dOn7xRbKku9DR48uOyXQHkiUumqQ+W1aNFipxhbt269Qx6lKxktWLCAU089\ndYdfJLm5uXz77be0bdu2yvepCTsjN8bUSpyXeqvp3UAVP0mkqmPHjrz44ousWrWq7Gv9+vWhFnGw\nQh4a3+9F9jk/n3NLF1+XeqssvvIXR2vqkksuYdSoUSxc
uBCA5cuXM2XKlFq9VlWskBtjaizOS71V\n9r5VPVfxsepiLu+qq65i0KBBHH/88TRp0oTDDjuM999/f5f711a1A4JEZCBwB5ADTFTVsRWe3wN4\nBOiA67n/SVUnVfI6Xg8IMiZsvg0IsqXeqpa2AUEikgPcDQwEegJDRKRHhd0uBz5V1f8C+gO3i4hd\nRDXG2FJvGVJda6UPME9V56vqZuBx4JQK+2wDmgR/bwKsVNUt4YaZ/Xzvs/qcn8+5Rc2WesuM6s6c\n2wGLym0vBg6tsM/dwLMishRoDPwyvPCMqZtE8KINOWHCBCZMmBB1GN6rrpCn0qAbCHyoqkeJSFfg\nFRHZX1XXVNxRRCYB84PNH4CPSu/hLT0riut26WPZEo/ll/q2qk7LpngA4I47MXVS8L0wNNicn9K/\nqepiioj0BYpVdWCwPRLYVv6Cp4g8B4xR1beC7VeBa1V1eoXXsoudxqRAhEOBN0FyfbrYaaqWztkP\npwPdRaSziOQDg4GKN0EuBI4N3rA1sA/wVarB+8L3PqvP+WVTbiI0xV2LshsGTMqq/GZR1S0iMhx4\nCXf74f2qOltEhgXPjwduBCaJyExAgN+r6vdpjtsY7wR98fuAzsB/gINqO6LQ1C22sIQxWUKEYcC9\nwBrgQFXmRRySyQK2sIQxMSHCT3AD7wCGWRE3NWGFPCTZ1GdNB5/zizo3EQqAR4H6wP2qPBbu6/t7\n7MD//FJhhdyY6BUDPwHmAVdFG4qJI+uRGxMhEfoBpcvq/FSVt6OMx2Qf65Ebk8VEaAj8HfdzeJsV\ncVNbVshD4nufzuf8Isztj0A34BMgka438fnYgf/5pcIKuTEREOFY4DJgM3CuKiURh2RizHrkxmRY\n0FL5BOgC/EGVmyMOyWQx65Ebk52SuCL+Ma69YsxusUIeEt/7dD7nl8ncRDgIuBo3j/+FqmxO/3v6\ne+zA//xSYYXcmAwRIQ+YiPu5u1OV6dX8E2NSYj1yYzJEhN8DY3FzTO+nyrpoIzJxkErttEJuTAaI\n0BX4FDcMf6AqL0UckokJu9iZQb736XzOL925BdPT3oMr4o9muoj7fOzA//xSYYXcmPQ7FTget7zh\nbyOOxXjIWivGpJEIDYDZQEdguCrjIg7JxIy1VoyJ3ghcEf8It2iEMaGzQh4S3/t0PueXrtyCC5y/\nDzaHq7I1He9TfRz+HjvwP79UWCE3Jn3uAAqAh1R5K+pgjL+sR25MGohwEvAssBrYR5VvIg7JxJT1\nyI2JgAj1gTuDzYQVcZNuVshD4nufzuf80pDblcBewCyI/i4Vn48d+J9fKqyQGxMiEVoB1wWbV2di\nUixjrEduTIhEuAe4FHhBlROjjsfEn821YkwGidATmBls9lblsyjjMX6wi50Z5Hufzuf8QsztNiAH\nuC+birjPxw78zy8VVsiNCYEIxwMnAmuA4mijMXWNtVaM2U0i5AAzgJ8AI1QZG3FIxiPWWjEmM87H\nFfEFbL9/3JiMsUIeEt/7dD7ntzu5idAQuDHYHKHKxlCCCpHPxw78zy8VVsiN2T1XAW2AD4B/RhyL\nqaOsR25MLYnQAvgKaAIco8rUiEMyHrIeuTHpNQJXxF+xIm6iZIU8JL736XzOrza5idAeuCLYHBlq\nQCHz+diB//mlwgq5MbWTwM01/oQq/4k6GFO3VdsjF5GBuAnyc4CJqrrTPbLBb8S/AHnAClXtX8k+\n1iM3XhBhX9zMhgr0VOWLiEMyHkulduZW8wI5wN3AscAS4AMRmaKqs8vt0xQ3VecAVV0sIi13P3Rj\nstpNuE+z91kRN9mgutZKH2Ceqs5X1c3A48ApFfY5G3hSVRcDqOqK8MPMfr736XzOrya5iXAIcDqw\nEbghXTGFyedjB/7nl4rqCnk7YFG57cXBY+V1B5qLyGsiMl1Ezg0zQGOyhQgC3Bps3qnKkijjMaZU\nla0VXA+wOnnAgcAxQAPgHRF5V1Xn7m5wcaKq06KOIZ18zq8GuR0LHA38APGZT8XnYwf+55eK6gr5\nEqBDue0OuLPy8hbhLnBuADaIyOvA/sBOhVxEJgHzg80fgI9KD0LpxyPbtu3s3M7pD0/fCScD/BFk\nf5Fsis+2fdkO/j4UZz4pqPKuFRHJBT7HnW0vBd4HhlS42Lkv7oLoANztWO8Bg1X1swqv5fVdKyLS\n3+czA5/zSyU3EU4AngdWAF1UWZuJ2MLg87GDOpHf7t21oqpbRGQ48BLu9sP7VXW2iAwLnh+vqnNE\n5EXcyijbgAkVi7gxcRb0xksvbI6NUxE3dYPNtWJMNUQ4CXgW+A7YS5V1EYdk6hCba8WY3RScjSeD\nzVutiJtsZIU8JL7fy+pzftXkNgh3V9Y3wL0ZCShkPh878D+/VFghN2YXgrPx4mDzVlU2RBiOMbtk\nPXJjdkGEU4F/4e7Y6maF3ETBeuTG1JII9djeGx9jRdxkMyvkIfG9T+dzfrvI7TTcgspLgIkZDShk\nPh878D+/VFghN6aC4Gy8ONi8ORsXVDamPOuRG1OBCINxM30uArqrUhJxSKYOsx65MTUkQg5u9R+A\nm6yImziwQh4S3/t0PudXIbfBQA9gATApinjC5vOxA//zS4UVcmMCwdn46GDzJlU2RRmPMamyHrkx\nARHOAR4Gvgb2UWVzxCEZYz1yY1IlQi7be+M3WhE3cWKFPCS+9+l8zi/I7VdAN+BL3Fm5N3w+duB/\nfqmwQm4MTXKA64ONG1TZEmU0xtSU9chNnSfCBcD9uOUJe1ohN9nEeuTGVEOEPOAPwWbSiriJIyvk\nIfG9T+dxfv8N07oAc3CjOb3j8bED/M8vFVbITZ0lQj479sa3RhmPMbVlPXJTZ4kwDLfqz2dAbyvk\nJhtZj9yYXRChALgu2ExaETdxZoU8JL736TzM79dAB+BTyF8edTDp5OGx24Hv+aXCCrmpc0SoD4wK\nNothc2b6i8akifXITZ0jwhXAXcBM4ABVtkUckjG7lErttEJu6hQRCnHD8NsCp6nyVMQhGVMlu9iZ\nQb736TzKbxiuiM8AngavcquU5ec/K+SmzhChITAy2BytivXGjRestWLqDBGuAW4DPgAOtUJu4sB6\n5MYERGiEWzCiJXCCKi9GHJIxKbEeeQb53qfzIL/huCL+LvBS+Sc8yK1Klp//rJAb74nQBPhdsGm9\nceMda60Y74nwB+BG4E3gSCvkJk6sR27qPBH2AOYDTYGjVXkt2oiMqRnrkWeQ7326GOf3G1wRn7ar\nIh7j3FJi+fnPCrnxlgjNgN8Gm4koYzEmnay1Yrwlwo24Zdz+rcpxUcdjTG2E0loRkYEiMkdE5orI\ntVXsd4iIbBGR02oTrDFhEqEFrq0CdjZuPFdlIReRHOBuYCDQExgiIj12sd9Y4EWgTp51+96ni2F+\nvwMaAS+p8nZVO8Ywtxqx/PxX3Rl5H2Ceqs5X1c24xWlPqWS/K4DJgNcT9Jt4EKENcGWwOTrKWIzJ\nhOoKeTtgUbn
txcFjZUSkHa64/y14qE7eo6uq06KOIZ1ilt8fgELgaVXer27nmOVWY5af/6or5KkU\n5TuAEequmgp1tLVisoMIXYCLcd+710ccjjEZkVvN80tw6xqW6oA7Ky/vIOBxEYFgQiIR2ayqUyq+\nmIhMwg3OAPgB+Kj0t2lpnyvG27/xLJ9Y5gd6AZAHj78MQ1qWnotU9e/L91ijjj8d25ZfvLaDvw8N\nUppPCqq8/VBEcoHPgWOApcD7wBBVnb2L/R8EnlXVf1XynNe3H4pIf58/4sUhPxH2wy3ftgXYR5Wv\nU/t32Z/b7rD84i2V2lnlGbmqbhGR4bjZ4nKA+1V1togMC54fH1q0MefzNxLEJr+bcK29+1It4hCb\n3GrN8vOfDQgyXhDhUNwUtRuAvVT5JuKQjAmFzbWSQb7fyxqD/G4J/ryzpkU8BrntFsvPf1bITeyJ\ncCxwNO4C+h8jDseYjLPWiok1EQR4DzgEGKXKmIhDMiZUqdROK+Qm1kQYjBtx/A3QTZV1EYdkTKis\nR55BvvfpsjE/EQqAW4PN0bUt4tmYW5gsP/9ZITdxdjnQGfgMeDDaUIyJjrVWTCyJ0ByYBzQDTlLl\nfyMOyZi0sNaK8dl1uCI+FXg+4liMiZQV8pD43qfLpvxE2AsYHmz+TnX3ZtzMptzSwfLznxVyE0e3\nAPnAw6p8GHUwxkTNeuQmVsoNxS8B9lZlYcQhGZNW1iM3XgkG/9webP7FirgxjhXykPjep8uS/IYA\nhwPfsf3+8d2WJbmljeXnPyvkJhZEaATcFmyOVOXHKOMxJptYj9zEggg3A6OA6cChqmyLOCRjMsLm\nWjFeEKErbvRmPtBPlXciDsmYjLGLnRnke58u4vxuZ/vthqEXcTt28eZ7fqmwQm6ymgjHAacA64AR\nEYdjTFay1orJWiLkAx8BPYARqoyNOCRjMs5aKybursEV8bnAHRHHYkzWskIeEt/7dJnOL5hP5fpg\n8zJVStL3Xnbs4sz3/FJhhdxknWAE5zigPvCoKv+OOCRjspr1yE3WEeFM4AncYsr7qvJtxCEZExnr\nkZvYEaEJcGewOcKKuDHVs0IeEt/7dBnM7yagLW6GwwmZeEM7dvHme36psEJusoYIh+MWjNgKXGLD\n8I1JjfXITVYQoRB3z/jewM2q/CHikIzJCtYjN3FyA66IzwJujDgWY2LFzshDIiL9VXVa1HGkSzrz\nC1b9eTvY7KvKB6G+flJygP2Ag4GDgK5AR6ANUMDXFNCFEtxdMiuBecAc3CeENzShS8OMJ9PsezPe\nUqmduZkKxpjKiFAfeBD36fCPYRVxSUpD4KTgayDQspp/Uhh8tcUV/fKvNQ+YAjwJvKsJtd69ySp2\nRm4iJcIY3GRYnwP/pcrGWr9WUgToC/wa+CXQuNzTC3B3wkwHZgMLgaXABmAzUAA0BVrjWjz7Aofi\nViRqVO51FuPuppmgCV1W21iNSZXNR26ymghHAtMABX6qWtZeqdnrJKUecDJwLXBYuafeBSYDzwNz\nNFHzb3ZJSi6uoJ8OnAF0CJ7aErz2LZrQT2oTtzGpsEKeQXWgTxdqfiI0BT7G9aprdZdKcAb+C+Bm\n3ORaAKuAicCDmtDZqcWSWm7B+x0NXIabWjcneOpJ4AZN6MwaJZAh9r0Zb9YjN9lsHK6IfwAka/qP\nJSn9cGt49gseWgT8GZioCV0bVpDlBWf0rwKvSlI6AL8DLsadrZ8mSXkQuE4T+k063t+YXbEzcpNx\nIvwKeAS3WMQBqsxN+d8mpQ2uYA8JHlqO+0UwQRO6KexYU4inLa6lcxmQB6zFfUL4iyY0bTM2mrrD\nWism64jQGddSaQJcpMrElP6d64NfBNyKuyi5AfgT8CdN6Or0RJs6SUr3IJ5BwUOzgQs1obXq+xtT\nKrQBQSIyUETmiMhcEbm2kud/JSIfi8hMEXlLRHrXNui48n2+hzDyE6EA+B9cEX8KuD+lf5eUHsCb\nwL24Iv4C0EsTOjqMIh5GbprQuZrQU4DjgS9wPfs3JSl/laQ0rvpfp5d9b/qv2kIuIjnA3bh7cXsC\nQ0SkR4XdvgKOVNXeuFF594UdqPHCn3GDcuYDv1alyo+DkpR6kpQrgQ9xd6N8g7ut8Oea0K/THGut\naEJfAfYHbsHNGTMc+FSS8rNIAzNeq7a1IiKHAQlVHRhsjwBQ1Vt3sX8z4BNVbV/hcWut1GEinAU8\nBmwCDldlepX7J6U9MAk4JnhoEnC1JvSHNIYZKknK/rhPHQfhbrH8IzA6il6+ia+wWivtcHcElFoc\nPLYrv8bdt2sMACL0gLJe+G9SKOJDgE9wRXwFcJom9Pw4FXEATejHuE8SN+IK+bXAu0GryJjQpHL7\nYcpXQ0XkKOAC3Gi4yp6fhPtYDW5ei49K7/8s7XPFePs3nuUTSn6gHwKTYVpD+PZVGHzvLvdvTAH/\nj8HA+XwNbOAdenKaJvSbdOZXvseapv+/0XKwfEsPrqMbBwAfygC5h1d4Vrel//hlIL9It33LL/j7\n0CCl+aQgldZKX6C4XGtlJLBNVcdW2K838C9goKrOq+R1vG6t1IFBCTXOT4Qc3EXNk3F3cfRRpdJ7\nvCUp++IuhO4HbASuwt1SmPbbqjJ17CQppasfDQ0eegwYpgldk9b3te/NWAvl9kMRycXNg3EMbm6K\n94EhqttHzYlIR2AqcI6qvlvbYIxfys2jsgpXxHf6BQ8gSfkVMB5oiPteO9PnYe8V8v0Cl29Wjgo1\n0QvtPnIROQG4Azck+X5VHSMiwwBUdbyITAROxU1EBLBZVfvUNBjjj3KDfrYCx6sydad9klKIO0O9\nKHjoH8Al6T5DzQaSlH1wn0B+gvsEciVuVGpmBnaY2LABQRlUBz7epZyfCH2A13EzCg5XZdxO+yRl\nb+AJ3K16JbhClpFWyk6xRHTsKvlF9ijuF1moUwzY92a8hTYgyJhUidAdeA5XxMcD9+y0T1IGA//B\nFfF5QF9N6H117WxUE7pBE3oxcA5uuoJfAdMlKT+JNjITN3ZGbkIjQmvgHaAL8BJwsiqby55PSgFu\nUNBlwUNPABdlwxD7qFW42LsBuEwTOinSoExWsNaKyRgRGuPmFj8Qt3jDUeXvUJGkdMEVqoNwg4Ku\nBv5W187CqyJJaYAbRX1+8NAk4HJN6PrIgjKRs9ZKBvk+30NV+QVzqDyJK+JfAj+vUMQH4YbZH4S7\nL/ZwTeg92VLEs+XYaULXa0IvwBXyDbjbFN8LztZrLVvySxff80uFFXKzW0TIA/4JHAd8BwxQ5TsA\nSUqeJOWPwDO4ya6mAAdqQqsc2VnXBS2VPrhbMffD9c3PjjQok9WstWJqLSjij+EWVvgBOFqVGQCS\nlHbA48ARuFsQR+KmnM2Ks/A4kKQ0wl0wLi3i44HfaEJrva6piR/rkZu0ESEXd5/4YOBH4NjSOVQk\nKcfi7gnfEzeI7CxN6BtRxRpnwfJyF+NuUywAPsINIKp0cJXxj/XIM8j3
Pl35/IIz8b/jivgaXDtl\nuiQlR5IyGngZV8RfBQ7I9iKezcdOE6qa0PG4ybe+BP4L+I8k5fRUXyOb8wuD7/mlwgq5qRERCnFz\n6pyNW9ZsoCrvBWtYvsr29TeTwABN6HfRROoXTegM3MXiJ3ELc0yWpNwpScmPNjKTDay1YlImwh64\nC5ZHAt8DJ6jyviTlVNy8281wiz+cFyywYEIWtFqGA7fj1gj9ANe6+irSwEzaWI/chEaEVrgl1g7E\n9b2Po1jm4wb4DAt2+1/gfE3o8kiCrEMkKX1wA6o64T4ZXQlMsovJ/rEeeQb53KcTYT946WO23yd+\nOMVSejY4DDfA5yrg5DgW8TgeO03o+7jjMRloBDwAPClJaVlx3zjmVxO+55cKK+SmSiIMBN6GgjbA\n+7T6pD/Fci5u9GZPYA7QRxN6l50NZpYm9HvcGqb/jbvofCrwiSTlhEgDMxlnrRVTKREEuAL4C+4X\n/hNcsfdYWsy9D3fRDWAccK0mdF1EYZqAJKUz8BDw0+Chv+GOjfdTAvvOeuSmVkTYcSBKvU038ofC\nddTbdgOQDywALtCE7jTHuImOJCUHuAa3Rmgebn2AYZrQFyMNzOwW65FnkC99OhF64laBOhtYx8H3\njGR0wQlIFF/BAAAL4UlEQVQs2HYrrohPAHr7VMR9OXaa0K2a0LHAIbhpgjsCL8gZ8pIkpUW00aWP\nL8dvd1ghN4BrpYjw37gLmD0o+GEOV3d4kpMuvxk4mG0sB07QhF5s085mN03ox0Bf4PfARhpxPDBb\nkjIkuH3ReMZaKwYR9sS1Uk4FhUPG/R8nXtkN0XbANtwyf4mwV64x6SdJ6Yb7FNU/eOg14EpN6KeR\nBWVqxHrkploinALcB7Si9cx1DBk0n6YLegVPf4Drsc6ILkKzuyQp9YALgFuBFrhJzMbhfjn/EGVs\npnrWI8+guPXpRCgS4Z/A0zT8rhVn/HIpl+xfGBTxH3B3rBxWWsTjll9N+JwbAMUcqQmdCOyNK+CC\nG0D0hSTlYklKbqTx7Sbvj18KrJDXMSLkiDAcmE3e+l9yxC2bubrDRvb7nyIExa1Q000TercmdGvE\n4ZoQaUK/14QOxw0kegPKWmqzJClnWP88vqy1UoeI0B+4ndyNB3LQeOifLKFwVUHw9PPANZrQ2dFF\naDIlKNpnAjcD3YKHpwMjNKGvRhaY2Yn1yA1QdkvhWHJKTuKAB+FnN2yl8bKc4OnpwHWa0JcjDNFE\nRJKSB/waSABtgodfB24BXrbRutGzQp5BItJfVadFHUd5IuwFjKL+qvM56L569L1DafxN6TGYCVwP\nPJvKD2s25hcWn3OD1PKTpDTE9c1/j1uWD9y96LcAT2tCt6U1yN1QB45ftbUz1hc5TOVE2BsYRZOF\n59D3zhwOug8K1oK7yPUJbuTfk9n8w2kyK5hmYYwkZRxwKfBbts9/PkeSchfwsN2Cmp3sjNwTwdwo\n/ZCtV9Hl1dM5eHw99n0G6pVdr5wK3Aa8ZB+XTXUkKYW4WxZ/jxshCrAaeBAYpwmdG1VsdY21VuoA\nEeoDg2m09Lfs/0hvDhoPzYM1BpStCP8D3KYJ/TDKOE08BT3003C3ox5e7qmXcUX9GU3ohihiqyus\nkGdQJvt0wdn3ARSuvJC9nzuXXk80ouvLkLPF7bAtZwn1tv4NeEATuiyc9/S3D+lzbhBefpKUA3Cr\nE50N1A8e/hH4JzAJeDeKT3t14PhZIc+UTHwzidCBwhVn0/Xfl9D9fzvT4ynID2aQVdmG1nuRelvv\nAV4M+x5wn39YfM4Nws8vmIDrLGAocHC5p77Grec6GXg/U9dg6sDxs0IedyJ0o/kX59LtxXPoMm0v\nur4E+eu371DSaCYFaycAT9hCxybTJCm9cAtbnAO0LffUElxRfwZ4UxNaEkF4XrBCHkMiFNJgeX+6\nvXAubT4+hvbvtqL9u1Cv3MnN+uZfkr/2YXI3PaIJ/TK6aI1xgrnQDwPOwPXUO5R7ej0wDXgReAmY\naxfcU2eFPINq+/FOhHyaft2Xvf59Js3nHUfrmd3p9Ea9spYJwLacbaxtM5OC1ZMoWPOkJnRxiKGn\nGKe/H199zg0yn18wavQQXEE/AehdYZeFuCkC3gDeBGbvThumDhw/K+SZkuo3k+Sv3ZOuL/+cVrMG\n0fzLPrScXUTbGULO5h13XNN2BRua/R9NljxK/R9fjXoOcJ9/WHzODaLPT5LSFjgeGAgch5uBsbzv\ngbeA94APgf/UpE0YdX7pZoU8QiLUo+iDHnR4+yT2WPRTGi/pRdMFRbScnU9hhZlDVWB1+1Wsb/kR\nBasn0/zLp8K628SYbBK0YPYDjsCtL/pToKiSXZfgRpZ+CHwKfAbM04RurmRfr1khzwBpNasZnf7v\nZzRe1o8GK3rTcHlXGn7Thj0WNWSPRYJU8v+7odlmVrdbxMam71L4/ZO0+uxVTeiPmY/emGgFbZjO\nuMJ+EG5mxgOARpXsvgWYC8zGFfYvcHfKfA0s83WkciiFXEQG4laIyQEmqurYSva5C9cLWw8MVd15\nIYI4FnLp8loBLed0p8GK3hSs/gkFq/em/o+dqP9DawpXNqPhd4U0XlqPnC3uW6lLhRfYmgtr2q1l\nTdsFlDSeQb1tU2n74Yt66/exO9v2+eOrz7lB/PILFsLozvai3jP46oybZmJH7mevBLco+FfBIwuB\npRW+fozjRdbdLuQikgN8DhyL+6jzATBEdftUpyJyIjBcVU8UkUOBO1W1b22CSSfp9UQ9Gi9rTcGa\njuStb0+9TUXkbmpN7sa25G4oIm9da/LXNadgdRMKVjeg8Pt8GqyQskE2VVnbagtv5Gxgnx5z2dRo\nDlvzp1O46nX2enWmLx8FReQ3qnpH1HGkg8+5gT/5SVIaAPvginoP3PS7e/EmPTmChim8xAa2F/Xv\ngJXAiir+XJ0NZ/lhTJrVB5inqvODF3wcOAX30abUIODvAKr6nog0FZHWqvptrSPfDXLsqMNp/84/\nydtQSN76+uSvyadgTQ6n/5haUa5oYxNlQ4sS1rf4kY1Nl7Op0SI2F85jW/4scjbOoNObM/X2xRtF\npFjfXVocekLZo2n1u8SWz7mBJ/lpQtcDM4KvMiJSzBHcjjtj7xJ8tcf13ku/2gENga7BV0pvKUlZ\ngxu9Wv5riiZ0/O7mE6bqCnk7YFG57cXAoSns0x6IpJBTb1N9ukxrV+lzmwthY5OtbGq8hc0NStjc\nYANbCteyqeFKNjf4lq0FS9hSsIBtuV8h2+bQ5uMvdMK7GzOcgTGmhjSha3Aze36yq30kKY3ZXthb\nBl8tqvizSbmv8vfFZ92EYdUV8lT7SRVP+6PrQ2nODD789V/YmvcdmrOMrXmLqbd1EYXfL9YnH1lf\n/QvUWuc0vnY26Bx1AGnUOeoA0qxz1AGkWedUdgqK/efBV7WCO2waA3uU+2qK68Vnlep65H2BYlUd\nGGyPBLaVv+ApIvcC01T18WB
7DvCziq0Vkcpu3zDGGFOd3e2RTwe6i0hn3AWCwcCQCvtMwc2I9nhQ\n+H+orD8etztWjDEmLqos5Kq6RUSG4+ZHyAHuV9XZIjIseH68qj4vIieKyDxgHXB+2qM2xhhTJmMD\ngowxxqRHvUy+mYhcISKzReRTEdlpYJEPROT/icg2EWkedSxhEpHbgmP3sYj8S0T2iDqmMIjIQBGZ\nIyJzReTaqOMJk4h0EJHXRGRW8DN3ZdQxhU1EckRkhog8G3UsYQtu5Z4c/Nx9FrSuK5WxQi4iR+Hu\nOe+tqvsBf8rUe2eKiHTATQqUdVe1Q/Ay0EtV98cNjR4ZcTy7LRjwdjduMqeewBAR6RFtVKHaDFyt\nqr2AvsDlnuUHcBVuuL6PrYU7gedVtQduBsnZu9oxk2fklwJjVN1IR1VdnsH3zpQ/4xar9Y6qvqJa\nNsrtPdxYgbgrG/AWfF+WDnjzgqp+o6ofBX9fiysElU1QFUsi0h44EZhIZUP3Yyz4xPtTVX0A3PVK\n1V3Px5TJQt4dOFJE3hWRaSJycLX/IkZE5BRgsarOjDqWDLgAeD7qIEJQ2WC2ygeTxVxw59kBuF/C\nvvgL8Dsg8mH0adAFWC4iD4rIhyIyQUQa7Grn6m4/rBEReQVoU8lT1wXv1UxV+4rIIcATwF5hvn+6\nVZPfSNycy2W7ZySoEFWR3yhVfTbY5zpgk6r+I6PBpYePH8d3IiKNcOtoXhWcmceeiJwEfKeqM0Sk\nf9TxpEEubtKw4ar6gYjcAYwARu9q59Co6nG7ek5ELsWt4UcQ2DYRaaGqK8OMIZ12lZ+I7If7Dfqx\niIBrO/xHRPqoxmcdzaqOH4CIDMV9lD0mIwGl3xJ2HHrdAXdW7g0RyQOeBB5R1aejjidE/YBBwaR9\n9YEmIvKQqp4XcVxhWYz7hP9BsD0ZV8grlcnWytPA0QAisjeQH6ciXhVV/VRVW6tqF1XtgjsIB8ap\niFcnmM74d8ApqurL/DNlA95EJB834G1KxDGFRtxZxf3AZz7Mflieqo5S1Q7Bz9tZwFSPijiq+g2w\nKKiV4GagnbWr/UM9I6/GA8ADIvIJsAnw5j+9Ej5+ZP8rkA+8EnzqeEdVL4s2pN2zqwFvEYcVpsNx\nq9vPFJHSGQNHquqLEcaULj7+zF0BPBqcZHxJFYMtbUCQMcbEXEYHBBljjAmfFXJjjIk5K+TGGBNz\nVsiNMSbmrJAbY0zMWSE3xpiYs0JujDExZ4XcGGNi7v8DzCg4W/oGfTYAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "testValues = np.arange(-5,5,0.01)\n", - "plot(testValues, sigmoid(testValues), linewidth=2)\n", - "plot(testValues, sigmoidPrime(testValues), linewidth=2)\n", - "grid(1)\n", - "legend(['sigmoid', 'sigmoidPrime'])\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[-0.00916908, -0.00390022, -0.00285075],\n", - " [-0.00483125, -0.00184321, -0.0016088 ]])" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "NN = Neural_Network()\n", - "cost1 = NN.costFunction(X,y)\n", - "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", - "dJdW1" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[-0.02903 ],\n", - " [-0.02442499],\n", - " [-0.04018761]])" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dJdW2" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0175075086056 0.0293176938671\n" - ] - } - ], - "source": [ - "scalar = 3\n", - "NN.W1 = NN.W1 + scalar*dJdW1\n", - "NN.W2 = NN.W2 + scalar*dJdW2\n", - "cost2 = NN.costFunction(X,y)\n", - "print cost1, cost2" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0293176938671 0.0133981591651\n" - ] - } - ], - "source": [ - "dJdW1, dJdW2 = NN.costFunctionPrime(X,y)\n", - "NN.W1 = NN.W1 - scalar*dJdW1\n", - "NN.W2 = NN.W2 - scalar*dJdW2\n", - "cost3 = NN.costFunction(X, y)\n", - "print cost2, cost3" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - 
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Neural Networks Demystified\n",
-    "# Part 2: Forward Propagation"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "[embedded YouTube iframe omitted]"
-      ],
-      "text/plain": [
-       "<IPython.lib.display.YouTubeVideo at 0x...>"
-      ]
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# https://github.com/stephencwelch/Neural-Networks-Demystified/blob/master/Part%202%20Forward%20Propagation.ipynb\n",
-    "from IPython.display import YouTubeVideo\n",
-    "YouTubeVideo('UJwK6jAStmg')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "# Visualizations:"
-   ]
-  },
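The plotting cells in this notebook call plot, grid, and legend as bare names, which assumes the kernel was started with %pylab inline (or an equivalent from pylab import *). A self-contained version of the tanh cell below, with the imports spelled out explicitly; a minimal sketch assuming matplotlib is installed:

import numpy as np
import matplotlib.pyplot as plt

testValues = np.arange(-5, 5, 0.01)
plt.plot(testValues, np.tanh(testValues), linewidth=2)  # tanh squashes inputs into (-1, 1)
plt.grid(True)
plt.legend(['tanh'])
plt.show()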
B71ZTZWXNbiI4i5PPwP+W4SPkU9t8uTJ/OpXvwLg2Wef\nZaONNmLLLbfkueee46yzzlpn3dEO/G79BeFBX5Gcc9Cce4Ok/Z0OHEVxO77jo0uHUOa+/zpt7ty5\nfOYzn2HSpEk8/fTT7LDDDmy33XbstttuvO1tb1vnHb+k9S5GNvD1ga+1c/GyofiiZmY1JLE7xdmu\nE4D3RfCdxCVVxhc1G1w7FzXzO/qK5JyD5twbVN+fxHjgYoohf363h3zu+8886M3q6G+AvSluwP3x\nxLVYBhzdmNWIxM7A3cDGwMwIrk9cUuUc3QzO0Y1ZPs6jGPJf78Uhb93hQV+RnHPQnHuD6vqTOAw4\nDHiGCiOb3PefedCb1YLEBOCfy8W/i+C3KeuxvDijN6sBiY9QxDYPALtH8ELikpKRVI+hVENjzeg9\n6M0Sk3g18CDwGorbAH4/cUnWIP4wtkZyzkFz7g0q6e90iiH/U4obe1fK+y9/HvRmCUlswSsfvP6t\nr2Vj3eDoxiwhib8DPgXcGMHBqeux5nFGb1ZjEhOBh4GJFDf3/knikqyBnNHXSM45Yc69QVf7O5li\nyC9MOeS9//LnQW+WgMSGwEfLxS+krMXy5+jGLAGJWcAlwM+BPfwhrI2VoxuzGirvHNV3pM0XPeSt\n2zzoK5JzTphzb9CV/g4B3gQ8Cnyrw7971Lz/8udBb1a9U8vHr/XypQ6sOs7ozSoksT3wa+BlYGoE\njyUuyRrOGb1Z/ZxM8efu2x7yVhUP+orknBPm3Bt0rr/ykMq/Khe/1onf2Qnef/nzoDerzpHAFGAJ\n8OPEtVgPcUZvVhGJ7wPvAT4WwXmp67E8+Fo3ZjUhMQVYAQSwbQRPJC7JMuEPY2sk55ww596gY/2d\nBIwDrqnbkPf+y58HvVmXlWfCzi4XL0lXifUqRzdmXSaxN3A78DuK2MYnSVnHOLoxq4dZ5eM3PeQt\nBQ/6iuScE+bcG7TXn8R44MRy8dKOFNRh3n/5a3vQS5op6X5Jv5R0xiCvz5C0StKd5dcn292mWYMc\nTHHj718AdyauxXrU+HZ+WNI44F+BQ4FHgEWS5kXEfQNWvSkijmxnW00XEQtT19AtOfcGbfd3XPl4\nRV0vR+z9l79239HvCyyNiGUR8SJwOXDUIOv5Q1brOeUlD44pF69MWYv1tnYH/XYUNzfus6L8Xn8B\n7C/pLknXSnpjm9tspJxzwpx7g7b6OxSYBNwbwb2dq6izvP/y11Z0Ay39U/QOYGpE/F7SnwPfBXYZ\nbEVJlwDLysWVwOK+f3b17aymLgN7SqpNPV6uYvmyD5efw15Rj3q8nMNy+Xw2hWW0oK3j6CVNB86J\niJnl8lzg5Yj4/DA/8xCwd0Q8NeD7Po7esiGxEfA4sDnwJxE8kLgky1QVx9HfDuwsaZqkDYHjgXkD\nipgsSeXzfSn+cnlq/V9llpV3UQz5xR7yllpbgz4i1gBzgOspLr16RUTcJ+kUSaeUq70fuEfSYuA8\n4IR2ttlUOeeEOfcGY+7vveXjVR0spSu8//LXbkZPRFwHXDfge+f3e/5V4KvtbsesKSTGAYeXi1en\nrMUMfK0bs46TeDtwM/ArYOe6Hj9veagiozez9fWdS3K1h7zVgQd9RXLOCXPuDUbXX3lJ4qPLxUbE\nNt5/+fOgN+usXYGdKC5J/NPEtZgBzujNOkpiLvCPwKURa09qMesaZ/Rm1VubzyetwqwfD/qK5JwT\n5twbtN6fxDbAfsDzwA+7WVMnef/lz4PerHNmlo8LInguaSVm/TijN+sQiSuBY4E5ET5J0KrRyuz0\noDfrgPKWgU8AWwBviODBxCVZj/CHsTWSc06Yc2/Qcn/TKYb8A00b8t5/+fOgN+uMPy8frxt2LbME\nHN2YdYDEHcBewMwIrk9dj/UOZ/RmFZCYAvwG+AOwZQTPJy7Jeogz+hrJOSfMuTdoqb++wypvbOKQ\n9/7Lnwe9Wfv68vkfJK3CbAiObszaMOCwyp0jWJq4JOsxjm7Muu+tFEN+qYe81ZUHfUVyzglz7g1G\n7O/g8nF+BaV0RY/vv57gQW/Wnr5Bf0PSKsyG4YzebIwkNgaeBjYGto7gycQlWQ9yRm/WXdMphvzd\nHvJWZx70Fck5J8y5Nxi2v4PKx0bHNj28/3qGB73Z2PXl8zcmrcJsBM7ozcZAYjOKfH4csFUEKxOX\nZD3KGb1Z9xwATADu8JC3uvOgr0jOOWHOvcGQ/WVzWGWP7r+e4kFvNjZ9H8Q6n7fac0ZvNkoSmwNP\nAS8DkyJYnbgk62HO6M264x0Uf3Z+5iFvTeBBX5Gcc8Kce4NB+8smn4ee3H89x4PebPScz1ujOKM3\nGwWJrYAngT9S5PN/SFyS9Thn9Gadd2D5eIuHvDWFB31Fcs4Jc+4N1usvq3weem7/9SQPerPRyeJC\nZtZbnNGbtUhiCvAb4PcU+fwLiUsyc0Zv1mF97+Zv9pC3JvGgr0jOOWHOvcE6/WV5WGUP7b+e5UFv\n1rrsPoi13uCM3qwFEq8Dfg08Q3H9+TWJSzIDnNGbdVJfbHOTh7w1jQd9RXLOCXPuDdb2l2U+Dz2z\n/3qaB73ZiDYA5/PWYM7ozUYg8QZgKfA74LURvJy4JLO1nNGbdUbfu/mFHvLWRB70Fck5J8y5t8IV\nJ5RPssvnIf/9l3t/rWh70EuaKel+Sb+UdMYQ63y5fP0uSXu1u02zqkgItuz7f9b5vDVSWxm9pHHA\nL4BDgUeARcCJEXFfv3UOA+ZExGGS9gO+FBHTB/ldzuitdiTeCNwLPAZsG0E9PtQyK1WR0e8LLI2I\nZRHxInA5cNSAdY4ELgWIiNuALSRNbnO7ZlVZe1ilh7w1VbuDfjvg4X7LK8rvjbTO9m1ut3Fyzglz\n7g04GBZCxrFN5vsv+/5aMb7Nn2/1Hc7Af1YM+nOSLgGWlYsrgcURsbB8bQZAU5eBPSXVph4vt7I8\nQfDCDAB4z++la2fUqz4v9+Jy+Xw2hWW0oN2MfjpwTkTMLJfnAi9HxOf7rfN/gIURcXm5fD9wYEQ8\nPuB3OaO3WpHYE7gTWA5Mc3RjdVRFRn87sLOkaZI2BI4H5g1YZx7wF2VB04GVA4e8WU05n7cstDXo\nI2INMAe4HlgCXBER90k6RdIp5TrXAg9KWgqcD5zaZs2NlHNOmHFv5YlS//ho2jK6K+P9B+TfXyva\nzeiJiOuA6wZ87/wBy3Pa3Y5ZlSTGA39WLM2/E85KWo9ZO3ytG7NBSOwL3AYsjWDn1PWYDcXXujEb\nu77r22R52QPrLR70Fck5J8y0t74PYm/ItL+13F/+POjNBpDYEHh7ubgwYSlmHeGM3mwAibcDNwNL\nInhT6nrMhuOM3mxsfDcpy4oHfUVyzgkz7G2d+8Nm2N863F/+POjN+pHYBNif4npMNyUux6wjnNGb\n9SNxMLAAWByBb5JjteeM3mz0fPy8ZceDviI554SZ9XZI+big7xuZ9bce95c/D3qzksRE4K3AS8CP\
nE5dj1jHO6M1KEocD3wNuiWD/1PWYtcIZvdnorBfbmOXAg74iOeeEGfU26KDPqL9Bub/8edCbARKv\nBXYHngduTVyOWUc5ozcDJI4HLgd+FME7U9dj1ipn9Gatcz5v2fKgr0jOOWEmvfWdKLXeoM+kvyG5\nv/x50FvPk9gBeAOwCrgjcTlmHeeM3nqexIeAC4GrIzg6dT1mo+GM3qw1Q8Y2ZjnwoK9Izjlhk3uT\nEK98EDvojUaa3F8r3F/+POit1+0KTAEeA5YkrsWsK5zRW0+TOB34MnBZBCelrsdstJzRm43s3eXj\n/KRVmHWRB31Fcs4Jm9qbxEa8cn/YHw69XjP7a5X7y58HvfWyA4BNgXsieDR1MWbd4kFfkYhYmLqG\nbmlwb32xzfXDrdTg/lri/vLnQW+9rKVBb9Z0HvQVyTknbGJvEtsAbwb+APzn8Os2r7/RcH/586C3\nXvWu8nFhBM8nrcSsy3wcvfUkicuAE4GPRvCl1PWYjZWPozcbhMQGsPbmIs7nLXse9BXJOSdsYG9v\nAV4D/Br4xUgrN7C/UXF/+fOgt140s3y8PoJ6ZJdmXeSM3nqOxK3AfsDREVyduh6zdrQyOz3oradI\nTAZ+A7wAbBXBc4lLMmuLP4ytkZxzwob1dhgg4IZWh3zD+hs195c/D3rrNYeXj99PWoVZhRzdWM8o\nr1b5JPAqYIcIlicuyaxtjm7M1jWDYsjf7SFvvcSDviI554QN6q0vtvneaH6oQf2NifvLnwe99YTy\nJuBHlIvO562nOKO3niCxO3A38AQwJYKXE5dk1hHO6M1e8d7y8Xse8tZrPOgrknNO2JDe3l8+fnu0\nP9iQ/sbM/eVv/Fh/UNKWwBXADsAy4LiIWDnIesuAZ4CXgBcjYt+xbtNsLCR2BnYHVgELEpdjVrkx\nZ/SSzgWejIhzJZ0BTIqIMwdZ7yFg74h4aoTf54zeukLiTOCzwDci+GDqesw6qdsZ/ZHApeXzS4Gj\nh6ulje2YtWvMsY1ZDtoZ9JMj4vHy+ePA5CHWC+BHkm6X9FdtbK/Rcs4J69ybxDRgb+A5xniTkTr3\n1wnuL3/DZvSS5gNTBnnpE/0XIiIkDZUBHRARv5G0NTBf0v0RcfMQ27uEIu8HWAksjoiF5Wszym01\nchnYU1Jt6umVZYi9i8crfwbH7wf1qs/LXh7tcvl8NoVltKCdjP5+YEZEPCZpG+DGiPjTEX7mbGB1\nRPzTIK85o7eOk7gFmA4cF8G/p67HrNO6ndHPA2aVz2cB3x2kgE0lvbp8vhnwLuCeNrZp1jKJN1AM\n+eeAaxOXY5ZMO4P+c8A7JT0AHFwuI2lbSdeU60wBbpa0GLgN+H5E/LCdgpsq55ywxr2dVD7+Rzs3\nGKlxfx3h/vI35uPoy8MlDx3k+48C7ymfPwjsOebqzMaovLZN36D/RspazFLztW4sSxL7AIuA3wLb\nRbAmcUlmXeFr3Vgv+0D5+C0Peet1HvQVyTknrFtvEuOBE8rFb7b/++rVX6e5v/x50FuODqU4ge8B\n4PbEtZgl54zesiNxFfA+4JMR/EPqesy6qZXZ6UFvWZGYDKyguL7S6yJ4NHFJZl3lD2NrJOecsGa9\nzaI4bPiaTg35mvXXce4vfx70lo3y2Pn/Xi7+W8pazOrE0Y1lQ+JAYCHwCDDNh1VaL3B0Y73m1PLx\nYg95s1d40Fck55ywDr1JTKU40mYNcH5nf3f6/rrJ/eXPg95ycRowDrgqghWpizGrE2f01ngSmwEP\nA5OA6RHclrgks8o4o7de8QGKIX+rh7zZ+jzoK5JzTpiyN4lxwN+Ui1/qzjby3Xfg/nqBB7013bHA\nLsBDwFWJazGrJWf01lgSGwB3AbsBJ0f4JCnrPc7oLXdHUgz5FcDXE9diVlse9BXJOSdM0Vt5uYNP\nlovnRvDH7m0r330H7q8XeNBbU70f2Bt4DLggcS1mteaM3hpHYkPgXmAn4H9EdPZMWLMmcUZvuTqZ\nYsj/ArgwcS1mtedBX5Gcc8Iqe5OYCHyqXJxbxcXLct534P56gQe9Nc2nga2BnwLfTVyLWSM4o7fG\nkNiLV272vU8Ed6asx6wOnNFbNsqTo75G8f/sVzzkzVrnQV+RnHPCino7FdgPeJRXMvpK5LzvwP31\nAg96qz2JPwHOLRdPj+CZlPWYNY0zeqs1iQnAT4C3Al+PYFbiksxqxRm95eBTFEN+OfDhxLWYNZIH\nfUVyzgm71ZvEkRTXs3kZmBXBqm5sZ+Q68t134P56gQe91VKZy/+/cvGsCBYmLMes0ZzRW+1IbAX8\nJ/CnFDcTOS6CevyPalYzrcxOD3qrlfJG3z8CpgP3AAdE8Gzaqszqyx/G1kjOOWGneiuvSnkFxZBf\nDsysw5DPed+B++sFHvRWCxIbA/8BvAf4HfDuCB5NW5VZHhzdWHISr6K4QNkhFEP+nb7EgVlrWpmd\n46sqxmwwEjsA84A9KO4WdWgE96atyiwvjm4qknNOONbeJN4B/IxiyD8AvKOOQz7nfQfurxd40Fvl\nJMZLfBpYCLyW8iibCJYmLcwsU87orVISewDnUxxZE8DngLMjeDFpYWYN5YzeakNic+BsiuvVjANW\nAB/0Ga9m3efopiI554TD9SbxKom5wEPAxwABXwF2a8qQz3nfgfvrBX5Hb10hsS1wCvDXFPd4Bfgx\n8LEI7khWmFkPckZvHSOxETAT+ABwNK+8kbgV+Ftgga9ZY9ZZzuit6yS2pDjR6TDgvcDm5UsvAf8O\nfBX4sQe8WTpjzuglHSvpXkkvSXrLMOvNlHS/pF9KOmOs22u6XHJCie0kjpH4nMRtwJOw8EpgNsWQ\nXwycAewQwXER3NT0IZ/LvhuK+8tfO+/o76F4B3f+UCtIGgf8K3Ao8AiwSNK8iLivje021Z7QlA8f\nEUWuvguwK8XlgncF3gxsO2D1F+FHy2DGRcDVEeS4bxuz78bI/WVuzIM+Iu4HkIaNhvYFlkbEsnLd\ny4GjIMthMJItUm5cYhzFO+6tgC0HPE4Gtgemll/bAxsN8atWAosozmi9BbgJ/uF/RXzmc11tIK2k\n+64C7i9z3c7otwMe7re8Ativy9sclsTWwOspDvPr/8Ug3xvN1wg//45dJd43wu/YAJhQfo3v93zg\n8sDXNgQ2AzYd5nHTfnW2YiXwS+B+ir+Y7wfuBZZG8PKA/6ZmVmPDDnpJ84Epg7x0VkR8r4XfX8ds\n9kjgguo3+3qA46rf7jpWAk9RXCHyqX7Pn6D4C3lF32MEq0fxe6d1tszamZa6gC6blrqALpuWuoDU\nhh30EfHONn//IxRRQJ+pFMNkUJLq+BdDB12auoAtyq/Xj7TiaN+lS5o1tpKawf01W+79jaRT0c1Q\nY+F2YGdJ04BHgeOBEwdb0cfQm5l1RzuHV75X0sMUF6e6RtJ15fe3lXQNQESsAeYA1wNLgCt69Igb\nM7NkanNmrJmZdUetLmom6XRJ90n6uaTPp66nGyT9T0kv
S9oydS2dJOkL5b67S9J3JG0+8k/VX84n\n/EmaKunG8sTHn0v6cOqaOk3SOEl3Smrl4JFGkbSFpKvKP3dLJE0fat3aDHpJB1EcEbNHROwGfDFx\nSR0naSrwTuDXqWvpgh8Cb4qIN1PcLWpu4nra1u+Ev5nAG4ETJe2atqqOehH4WES8iSKCPS2z/gA+\nQhEb5xhdfAm4NiJ2pbhL25CxeG0GPcVVDj8bES8CRMQTievphn8G/nfqIrohIuZHRN/x9bdRnHTV\ndGtP+Cv/v+w74S8LEfFYRCwun6+mGBQDz3xuLEnbU1yD6QJGdw5J7ZX/Yn5HRFwExeehEbFqqPXr\nNOh3Bv5M0q2SFkraJ3VBnSTpKGBFRNydupYKfAi4NnURHTDYCX/bJaqlq8oj4/ai+Es6F/8CfBzW\nPcEvEzsCT0i6WNIdkv5N0qZDrVzp1SuHOQHrE2UtkyJiuqS3AlfSwvHedTJCf3OBd/VfvZKiOqiV\nE+gkfQJ4ISIuq7S47sjxn/vrkfQq4CrgI+U7+8aTdDjw24i4M9OLmo0H3gLMiYhFks4DzgQ+NdTK\nlRnuBCxJfw18p1xvUfmB5VYR8bvKCmzTUP1J2o3ib+C7ymsDbQ/8l6R9I+K3FZbYlpFOoJM0m+Kf\nyodUUlD3jeqEvyaSNAH4NvCNiPhu6no6aH/gSEmHARsDEyV9PSL+InFdnbKCIiFYVC5fRTHoB1Wn\n6Oa7wMEAknYBNmzSkB9ORPw8IiZHxI4RsSPFTnpLk4b8SCTNpPhn8lER8Xzqejpk7Ql/kjakOOFv\nXuKaOkbFu44LgSURcV7qejopIs6KiKnln7cTgBsyGvJExGPAw+WshOIKwfcOtX6dbjxyEXCRpHuA\nF4BsdsogcowEvkJxcbX55b9abomIU9OW1J6IWCOp74S/ccCFmZ3wdwDF3cDulnRn+b25EfGDhDV1\nS45/5k4Hvlm+CfkV8JdDregTpszMMlen6MbMzLrAg97MLHMe9GZmmfOgNzPLnAe9mVnmPOjNzDLn\nQW9mljkPejOzzP1/WZbWrJSu7KcAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import numpy as np\n", - "testValues = np.arange(-5,5,0.01)\n", - "\n", - "plot(testValues, np.tanh(testValues), linewidth=2)\n", - "grid(1)\n", - "legend(['tanh'])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "def ReLU(x):\n", - " return x * (x > 0)\n", - "# See here: http://stackoverflow.com/questions/32109319/how-to-implement-the-relu-function-in-numpy" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWgAAAEACAYAAACeQuziAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFKlJREFUeJzt3X2MXPV1xvHnxDZJKTgxDXFNsLtEclQMpW5KCHUKbBpC\nLcsEJJAwUh2ZKm9uQoAGKC8qdaUqBhUVV6mqVMH2UjlNWiBAzFvsOCyltARIvCHYDi2tjWw3Di81\n4JBYLPj0j7nr7LJ3996duW+/3/1+pBW+O7Mz5+yVD7Nnn7k2dxcAoHneVncBAIB0DGgAaCgGNAA0\nFAMaABqKAQ0ADcWABoCGmp7nTma2S9Krkt6UNOzup5VZFAAg54CW5JL63f3/yiwGAPBLU1lxWGlV\nAADGyTugXdJ3zOxJM/tUmQUBADryrjg+7O4/MbNjJW02sx+7+yNlFgYAbZdrQLv7T5L/vmBmd0k6\nTdIjkmRmXMwDALrg7pOujjMHtJkdKWmaux8ws1+VdI6kv5zKk4TMzFa5+6q66ygL/YUt5v6q6M1M\nsyTtkDRb0ifdtbbM5xv73NkvbvO8gp4t6S4zG7n/19x9U4+1haSv7gJK1ld3ASXrq7uAkvXVXUCJ\n+ip4jtXqzLhHJK2v4PmmJHNAu/tOSQsrqAUAKmOmRZI+I2lY0mfddajmksbhnYTZBuouoGQDdRdQ\nsoG6CyjZQN0FlGigrAc20wxJ/5Ac3uSu7WU9Vy+s1wv2m5nHvIMGEB8zXaPOeuNZSae46xfV15A9\nOxnQGcys390H666jLPQXtqb0R5prcmkzMs/szJuDBoBJ8c/npUsCFt19La+gAfQqmQN1l9FIZtb1\nK2h+SQgADcWAzmBm/XXXUCb6C1vs/bUdAxoAGooBnaEJvyEvE/2FLfb+itDX16cjjzxSRx99tObM\nmaNLLrlEr732WubX9ff3a+3a8e/87uvr05YtW8Z8bmBgQGeccUZhNY9gQAOImpnp3nvv1YEDBzQ0\nNKStW7dq9erVub4uLYEx0efLwIDOEPuOj/7CFnt/RZs9e7bOOeccDQ0NSZIee+wxLVq0SLNmzdLC\nhQv18MMP11zhWAxoAKUzK+6jGyMRwD179ujBBx/U/PnztXfvXi1dulQ33HCD9u/fr5tvvlkXXHCB\nXnrppQI77w0DOkPsOz76C1vs/RXB3XX++edr5syZmjdvnmbPnq1Vq1Zpw4YNWrJkiRYvXixJOvvs\ns3Xqqafqvvvuq7niX2JAAyide3EfU2Vmuueee/Tqq69qcHBQO3bs0IsvvqjnnntOt99+u2bNmnX4\n49FHH9W+ffsmfbzp06dreHh4zOeGh4c1Y8aMqReXgQGdIfYdH/2FLfb+inbmmWdqxYoVuvLKKzVv\n3jwtX75c+/fvP/xx4MABXX311ZM+xrx587Rz584xn9u5c6f6+voKr5cBDaBVLr/8cm3evFmLFi3S\nxo0btWnTJr355ps6ePCgBgcHtXfv3sP3HR4e1sGDBw9/DA8P66KLLtKaNWv0zDPPyN315JNPav36\n9Vq2bFnxxbp7Tx+dh+jtMfjgg4+wP5I50Eh9fX2+ZcuWMZ9buXKlX3jhhf7444/7WWed5cccc4wf\ne+yxvnTpUt+9e7e7u/f397uZjflYvny5Hzp0yG+88UafP3++z5w50xcsWODr1q2b8PknmpF5ZicX\nSwLQMy6WNDEullSi2Hd89Be22PtrOwY0ADQUKw4APWPFMTFWHAAQIQZ0hth3fPQXttj7azsGNAA0\nFDtoAD3jX/WeXLc7aP5VbwA9q+NFmpmukbRa0rOSTnHXL6quoWysODLEvuOjv7DF3N9kvZnpfZL+\nIjlcGeNwlhjQAAJjJpP095LeIWmDu75Tc0mlYQcNIChmWibp65L2S/pNdz1fc0ldIQcNICpmmiVp\nTXJ4VajDOS8GdIaYd3wS/YUu5v4m6G21pNmSHpG0vtKCasCABhAEMy2S9BlJw5I+665DNZdUOnbQ\nABrPTDMk/UDSyZL+yl1/XnNJPWMHDSAWX1RnOD8r6Us111IZBnSGmHd8Ev2FLub+RnprS+Y5DQMa\nQGO1KfOcJtcO2symSXpS0h53P/ctt7GDBlCKWDLPaYrcQV8mabskLogCoBJtyzynyRzQZna8pCWS\nbpXUulfKMe/4JPoLXdz9bbhNLco8p8nzCvoWSVdJ8WcOATRDJ/N8/LlqUeY5zaSXGzWzpZKed/et\nk19ZygYk7UoOX5Y05O6DyW39khTq8cjnmlIP/dFf/P3NnCa9skbql/TVb0iffo/k25tTX3fHyZ9X\nqGOXcpj0l4Rm9iVJyyW9oc5vUWdKutPdPzHqPvySEEBh2nCdZ6mAXxK6+3XuPtfdT5C0TNJ3Rw/n\nNoh7x0d/oYutv7GZ5z/9SqzDOa+p5qBJcQAoxVszz9It36+5pNpxLQ4AjRBz5jkN1+IAEAQyz+kY\n0Bli2/G9Ff2FLaL+xl3nOaLeusaABlCrNl7nOS920ABqE+N1nvNiBw2g6Vp5nee8GNAZYt+D0V/Y\nQu4v6zrPIfdWFAY0gMq1/TrPebGDBlC5tmWe07CDBtA4ZJ7zY0BniH0PRn9hC7S/cZnnNIH2VigG\nNIDKkHmeGnbQACrR5sxzGnbQAJqEzPMUMaAzxL4Ho7+whdJfVuY5/WvC6K1MDGgApSLz3D120ABK\nReY5HTtoALUi89wbBnSG2Pdg9Be2APrLlXlOE0BvpWNAAygFmefesYMGUDgyz9nYQQOoC5nnAjCg\nM8S+B6O/sDWxv24yz+mP07zeqsaABlAYMs/FYgcNoDBknvNjBw2gMmSei8eAzhD7Hoz+wtaw/rrO\nPKdpWG+1YEAD6BmZ53KwgwbQEzLP3WEHDaAKZJ5LwoDOEPsejP7CVnd/RWWe0x877nOXBwMaQFfI\nPJePHTSArpB57g07aAClIPNcDQZ0htj3YPQXthr7KzTznCb2c5cHAxrAlJB5rk7mDtrM3iHpYUlv\nlzRd0h3uvmrU7eyggZYg81ycPLNzetaDuPtBM/uIu//czKZL+jcze8Ddv1dYpQBCQea5QrlWHO7+\n8+SPR0iaIbXnR5rY92D0F7Yq+ysz85z+fHGfuzxyDWgze5uZDUn6qaRN7v5EuWUBaBIyz/WYUg7a\nzN4p6S5Jl7r7tuRz7KCByJF5Ll4hO+jR3P0VM3tI0mJJ20Y90YCkXcnhy5KG3H0wua0/+VqOOeY4\nyOMFR0nbkszzTbdK1yyQ/Pnm1BfGcfLnFZ3v4+F5Oak8KY53S3rD3V82s1+R9G1JN7r7/cntUb+C\nNrP+kW92jOgvbFX0Z6avqBOre0RSf1Wxuhacu0JeQc+RdJuZTVNnZ/3PI8MZQNzIPNeLa3EASEXm\nuVxciwNAL8g814wBnSH2LCb9ha2s/qrOPKfXEPe5y4MBDWAMMs/NwQ4awBhknqvBDhrAlHCd52Zh\nQGeIfQ9Gf2Erob/Sr/OcV+znLg8GNABJZJ6biB00ADLP
NWAHDSAvMs8NxIDOEPsejP7CVkR/Tcg8\np4n93OXBgAZajMxzs7GDBlqMzHN92EEDmBCZ5+ZjQGeIfQ9Gf2Hrsb/GZJ7TxH7u8mBAAy1E5jkM\n7KCBliHz3AzsoAGkIfMcCAZ0htj3YPQXtqn219TMc5rYz10eDGigJcg8h4cdNNASZJ6bhR00AElk\nnkPFgM4Q+x6M/sI2hf4anXlOE/u5y4MBDUSOzHO42EEDESPz3FzsoAGQeQ4YAzpD7Hsw+gvbZP2F\nlHlOE/u5y4MBDUSIzHMc2EEDESLz3HzsoIEWIvMcDwZ0htj3YPQXtgn6Cy7znCb2c5cHAxqICJnn\nuLCDBiJB5jks7KCBdiHzHBkGdIbY92D0F7aR/kLPPKeJ/dzlwYAGAkfmOV7soIHAkXkOUyE7aDOb\na2YPmdk2M3vazL5QXIkAekHmOW55VhzDkq5w95MknS7pc2Z2YrllNUfsezD6C92G2xRB5jlN/Ocu\nW+aAdvd97j6U/PlnknZIOq7swgBMrpN5Pv5ckXmO1pR20GbWJ+lhSSclw5odNFADMs/hKzQHbWZH\nSbpD0mUjwxlAbcg8t8D0PHcysxmS7pS0wd3vTrl9QNKu5PBlSUPuPpjc1i9JAR9fHlk/9Nes+ro4\nXjxHeiDJPH9kszT4IalJ9RVzPHoH3YR6CupnRdLSLuWQueIwM5N0m6SX3P2KlNujXnGYWf/INztG\n9BeWJPP8gKQ/lLRBsrUx9TdabOfurfLMzjwD+vcl/aukpySN3Plad38w75MAKAaZ53gUMqCLeBIA\nvUsyzzvUidV90l1ray4JPeBiSQWIPYtJf0EZd53nyPobI+be8mJAAwHgOs/txIoDaDgyz3FixQHE\ngcxzSzGgM8S+B6O/Zsu6znPo/U0m5t7yYkADDcV1nsEOGmgoMs9xYwcNBIrrPENiQGeKfQ9Gf401\nLvOcJuD+MsXcW14MaKBhyDxjBDtooEHIPLcHO2ggPGSecRgDOkPsezD6a46szHP614TT31TF3Fte\nDGigAcg8Iw07aKAByDy3DztoIABknjERBnSG2Pdg9NcIuTLPaQLprysx95YXAxqoEZlnTIYdNFAT\nMs/txg4aaDYyz5gUAzpD7Hsw+qtHN5nn9MdpZn9FiLm3vBjQQMXIPCMvdtBAxcg8Q2IHDTQOmWdM\nBQM6Q+x7MPqrXNeZ5zQN7K8wMfeWFwMaqAiZZ0wVO2igAmSe8VbsoIHmIPOMKWNAZ4h9D0Z/VdRQ\nTOY5/bHr768sMfeWFwMaKBGZZ/SCHTRQIjLPmAg7aKBGZJ7RKwZ0htj3YPRXqkIzz2liPn8x95YX\nAxooAZlnFIEdNFAwMs/Igx00UA8yzyhE5oA2s3Vm9lMz+1EVBTVN7Hsw+iv6+crLPKc/X7znL+be\n8srzCnq9pMVlFwKEjswzipZrB21mfZI2uvtvpdzGDhoQmWdMDTtooCJknlGG6UU8iJkNSNqVHL4s\nacjdB5Pb+iUp4OPLI+uH/kp4PsmXSZotfesp6cL/kV5XTP3VcTx6B92EegrqZ0XS0i7lwIojg5n1\nj3yzY0R/RTyHFkl6VJ3M80J3bS/z+cY+d7znL+bepHyzkwEN9IDMM7pVyA7azL4u6d8lvd/MdpvZ\nJUUVCESAzDNKkzmg3f1idz/O3d/u7nPdvZRrCjRV7FlM+uvlsavNPKfXEO/5i7m3vEhxAF0g84wq\ncC0OoAtkntErctBACcg8oyoM6Ayx78HoryulX+c5r5jPX8y95cWABqaA6zyjSuyggZzIPKNI7KCB\nYpF5RqUY0Bli34PRX97HqT/znCbm8xdzb3kxoIEMZJ5RF3bQQAYyzygDO2igR2SeUScGdIbY92D0\nl6kxmec0MZ+/mHvLiwENTIDMM+rGDhpIQeYZZWMHDXSPzDNqx4DOEPsejP7SvqaZmec0MZ+/mHvL\niwENjELmGU3CDhoYhcwzqsIOGpgCMs9oGgZ0htj3YPQ3RqMzz2liPn8x95YXAxoQmWc0EztotB6Z\nZ9SBHTSQD5lnNBIDOkPse7C29xdS5jlNzOcv5t7yYkCjtcg8o+nYQaO1yDyjTuyggQmQeUYIGNAZ\nYt+Dtbi/4DLPaWI+fzH3lhcDGq1D5hmhYAeNViHzjKZgBw2MR+YZwWBAZ4h9D9am/kLPPKeJ+fzF\n3FteDGi0AplnhIgdNFqBzDOahh00IDLPCFfmgDazxWb2YzP7LzP7syqKapLY92At6S+KzHOamM9f\nzL3lNemANrNpkv5O0mJJCyRdbGYnVlFYgyysu4CSRd7f752vuDPPMZ+/mHvLJesV9GmSnnX3Xe4+\nLOkbks4rv6xGeVfdBZQs2v46medFFyeHN7lre60FlSPa86e4e8tlesbt75W0e9TxHkkfKq+cyZnp\nNyS9u9pnXTDHTL9b7XNWKer+LpSOeo/IPCNQWQO6t4hH8a6X9Klqn/KDkvTpap+zSrH3t0uKJPM8\ngb66CyhRX90F1G3SmJ2ZnS5plbsvTo6vlXTI3W8adZ+mDXEACEJWzC5rQE+X9Iykj0r6X0mPS7rY\n3XcUWSQAYLxJVxzu/oaZfV7StyVNk7SW4QwA1ej5nYQAgHIU9k5CM7vUzHaY2dNmdlP2V4THzL5o\nZofM7Ji6aymSmf11cu5+aGbfNLN31l1Tr2J+g5WZzTWzh8xsW/L37Qt111QGM5tmZlvNbGPdtRTN\nzN5lZnckf++2J7/vG6eQAW1mH5H0cUmnuPvJkm4u4nGbxMzmSvqYpOfqrqUEmySd5O6/Lek/JV1b\ncz09acEbrIYlXeHuJ0k6XdLnIutvxGWStqt5abIi/K2k+939REmnSEpdHRf1CnqlpNXJm1nk7i8U\n9LhN8jeSrq67iDK4+2Z3H3mH3fckHV9nPQWI+g1W7r7P3YeSP/9Mnb/cx9VbVbHM7HhJSyTdKimq\ni7ElP6Ge4e7rpM7v+tz9lbT7FjWg50s608weM7NBMzu1oMdtBDM7T9Ied3+q7loq8MeS7q+7iB6l\nvcHqvTXVUioz65P0O+r8jzUmt0i6SorurfmSdIKkF8xsvZn9wMy+amZHpt0x640qh5nZZkm/nnLT\n9cnjzHL3083sg5L+RdL7uii8Nhn9XSvpnNF3r6SoAk3S33XuvjG5z/WSXnf3f6q0uOLF+CPxOGZ2\nlKQ7JF2WvJKOgpktlfS8u2+N9IJJ0yV9QNLn3f0JM1sj6RpJN6TdMRd3/9hEt5nZSknfTO73RPKL\ntF9z95emXHpNJurPzE5W5/94PzQzqfPj//fN7DR3D+aylZOdP0kysxXq/Ej50UoKKtdeSXNHHc9V\n51V0NMxshqQ7JW1w97vrrqdgiyR93MyWqPMPLMw0s39090/UXFdR9qjzE/kTyfEd6gzocYpacdwt\n6Q8kyczeL+mIkIbzZNz9aXef7e4nuPsJ6nxzPxDScM5iZovV+XHyPHc/WHc9BXhS0nwz6zOzIyRd\nJOlbNddUGOu
8Ulgrabu7r8m6f2jc/Tp3n5v8fVsm6bsRDWe5+z5Ju5NZKUlnS9qWdt/cr6AzrJO0\nzsx+JOl1SdF8M1PE+OPzlyUdIWlz8lPCf7j7n9RbUvda8AarD0v6I0lPmdnW5HPXuvuDNdZUphj/\nzl0q6WvJC4j/lnRJ2p14owoANBT/5BUANBQDGgAaigENAA3FgAaAhmJAA0BDMaABoKEY0ADQUAxo\nAGio/wfTqr8t10n9mwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "plot(testValues, ReLu(testValues), linewidth=2)\n", - "grid(1)\n", - "legend(['ReLU'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 2", - "language": "python", - "name": "python2" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} From 99eb59b22eff8ca6975043384fd276858eec3f81 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sun, 7 Feb 2016 09:58:04 -0500 Subject: [PATCH 20/36] Delete python_tutorial_part_9_neural_net_a.ipynb --- python_tutorial_part_9_neural_net_a.ipynb | 677 ---------------------- 1 file changed, 677 deletions(-) delete mode 100644 python_tutorial_part_9_neural_net_a.ipynb diff --git a/python_tutorial_part_9_neural_net_a.ipynb b/python_tutorial_part_9_neural_net_a.ipynb deleted file mode 100644 index 6149e03..0000000 --- 
From 99eb59b22eff8ca6975043384fd276858eec3f81 Mon Sep 17 00:00:00 2001
From: Muhammad Abdul-Mageed
Date: Sun, 7 Feb 2016 09:58:04 -0500
Subject: [PATCH 20/36] Delete python_tutorial_part_9_neural_net_a.ipynb

---
 python_tutorial_part_9_neural_net_a.ipynb | 677 ----------------------
 1 file changed, 677 deletions(-)
 delete mode 100644 python_tutorial_part_9_neural_net_a.ipynb

diff --git a/python_tutorial_part_9_neural_net_a.ipynb b/python_tutorial_part_9_neural_net_a.ipynb
deleted file mode 100644
index 6149e03..0000000
--- a/python_tutorial_part_9_neural_net_a.ipynb
+++ /dev/null
@@ -1,677 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "# A Vector Space Model, with scikit-learn"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "# This is code to build a vector space model, with SVMs on Andrew Maas's\n",
-    "# distribution of movie review sentiment data."
-   ]
-  },
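The deleted notebook below builds its bag-of-words matrix and its labels by hand, which is instructive to read; for comparison, the same train-and-cross-validate pipeline can be written with scikit-learn's built-in vectorizer. A minimal sketch under that substitution (the documents and names here are illustrative, not from the notebook):

from sklearn.feature_extraction.text import CountVectorizer
from sklearn.svm import LinearSVC
from sklearn.cross_validation import cross_val_predict  # sklearn.model_selection in newer releases
from sklearn import metrics

# Hypothetical stand-ins for the movie-review texts and their 1.0/0.0 sentiment labels:
train_texts = ["a great movie", "i love this film", "what a fine cast",
               "terrible acting", "an awful script", "i hated every minute"]
train_tags = [1.0, 1.0, 1.0, 0.0, 0.0, 0.0]

vectorizer = CountVectorizer(binary=True)            # binary=True mirrors the 0/1 vectors built by hand below
train_vecs = vectorizer.fit_transform(train_texts)   # sparse document-term matrix
clf = LinearSVC(C=1)
predicted = cross_val_predict(clf, train_vecs, train_tags, cv=3)
print metrics.accuracy_score(train_tags, predicted)

CountVectorizer handles tokenization, vocabulary indexing, and sparse storage in one object, which is why the hand-rolled get_space/get_sparse_vec pair below is presented as a teaching device rather than the recommended tool.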
"#-------------------------\n", - "import numpy as np\n", - "\n", - "def get_sparse_vec(data_point, space):\n", - " # create empty vector\n", - " sparse_vec = np.zeros((len(space)))\n", - " for w in set(data_point.words):\n", - " # use exception handling such that this function can also be used to vectorize \n", - " # data with words not in train (i.e., test and dev data)\n", - " try:\n", - " sparse_vec[space[w]]=1\n", - " except:\n", - " continue\n", - " return sparse_vec\n", - "\n", - " \n", - "\n", - "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - "#test_vecs= get_sparse_vectors(test_data, word_space)\n", - "\n", - "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", - "print len(train_vecs)\n", - "print len(test_vecs)\n", - "#-------------------------\n", - "# We should usually get tags automatically based on input data file.\n", - "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", - "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", - "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", - "# and test_data with the same setup for class label. \n", - "# The rest of the data in the file is unknown and we don't use that part.\n", - "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", - "# later, for now we will hard-code the labels.\n", - "\n", - "from random import shuffle, randint\n", - "\n", - "\n", - "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "\n", - "\n", - "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", - "# tags= [train_data[i].tag for i in range(len(train_data))]\n", - "print train_tags[-1], train_vecs[-1][:10]\n", - "print len(train_tags)\n", - "print len(test_tags)\n", - "#--------------------\n", - "train_vecs=np.array(train_vecs)\n", - "train_tags=np.array(train_tags)\n", - "print train_vecs.shape\n", - "#--------------------------------\n", - "# Classification with scikit-learn\n", - "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", - "# Let's use sklearn to train an svm classifier:\n", - "#-------------------------------------------------\n", - "\n", - "import argparse\n", - "import codecs\n", - "import time\n", - "import sys\n", - "import os, re, glob\n", - "import nltk\n", - "from collections import defaultdict\n", - "from random import shuffle, randint\n", - "import numpy as np\n", - "from numpy import array, arange, zeros, hstack, argsort\n", - "import unicodedata\n", - "from scipy.sparse import csr_matrix\n", - "from sklearn.svm import SVC, LinearSVC\n", - "from sklearn import preprocessing\n", - "from sklearn.cross_validation import StratifiedKFold\n", - "from sklearn.grid_search import GridSearchCV\n", - "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", - "from sklearn import metrics\n", - "from sklearn.cross_validation import train_test_split\n", - "from sklearn.decomposition import TruncatedSVD\n", - "from sklearn.feature_selection import 
-  [two empty code cells elided]
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "# A lot of code taken from this tutorial: \n",
-    "# https://github.com/dennybritz/nn-from-scratch/blob/master/nn-from-scratch.ipynb\n",
-    "# Package imports\n",
-    "import matplotlib.pyplot as plt\n",
-    "import numpy as np\n",
-    "import sklearn\n",
-    "import sklearn.datasets\n",
-    "import sklearn.linear_model\n",
-    "import matplotlib\n",
-    "\n",
-    "# Display plots inline and change default figure size\n",
-    "%matplotlib inline\n",
-    "matplotlib.rcParams['figure.figsize'] = (10.0, 8.0)"
-   ]
-  },
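The next cell's recorded outputs are an execute_result (whose inline text repr did not survive extraction) followed by a PNG, i.e., a plot. In the nn-from-scratch tutorial credited above, the corresponding cell generates and scatters a toy two-class dataset; a sketch along those lines (the exact arguments used in this deleted notebook are not recoverable from the diff):

np.random.seed(0)
X, y = sklearn.datasets.make_moons(200, noise=0.20)   # 200 points in two interleaving half-moons
plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)

The names np, sklearn.datasets, and plt all come from the import cell directly above.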
"iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd8XXX5wPHPc865M0mbjnSme0JpaUtZpVAoIEumKFtR\n8YcIOFmKCiooIvyAnyKIiIIosmRpmZVNoVDK6t5N90qbcW/uOOf5/ZHbmiY3bZo0Scfzfr36ejX3\nnPM9z7m5bZ58x/MVVcUYY4wxxuxaTnsHYIwxxhizN7IkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xp\nBZZkGWOMMca0AkuyjDHGGGNaQYuSLBHpIyKvishMEflMRL7dyHn/JyLzReRjERnTknsaY4wxxuwJ\nvBZenwG+p6ofiUghMF1EXlbV2VtOEJGTgcGqOkREDgXuAQ5r4X2NMcYYY3ZrLerJUtXVqvpR7u9V\nwGygV73TTgMezJ3zHlAsIt1bcl9jjDHGmN3dLpuTJSL9gTHAe/UO9QbK6ny9HCjdVfc1xhhjjNkd\ntXS4EIDcUOETwHdyPVoNTqn3dYO9fETE9vcxxhhjzB5DVevnN9tocZIlIiHgSeBhVX06zykrgD51\nvi7NvdbAjoI1uycRuVFVb2zvOEzz2Pdvz2bfvz2Xfe/2bE3pHGrp6kIB/gTMUtU7GzntWeDLufMP\nAzap6pqW3NcYY4wxZnfX0p6sI4ALgU9EZEbutR8BfQFU9Q+qOllEThaRBUA18NUW3tMYY4wxZrfX\noiRLVd+iCb1hqnpFS+5jdnuvtXcApkVea+8ATIu81t4BmGZ7rb0DMK1LVHeP+eYiojYnyxhjjDF7\ngqbkLbatjjHGGGNMK7AkyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNa\ngSVZxhhjjDGtwJIsY4wxxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZ\nY4wxxrQCS7KMMcYYY1qBJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY\n0wosyTLGGGOMaQWWZBljjDHGtAJLsowxxhhjWoElWcYYY4wxrcCSLGOMMcaYVmBJljHGGGNMK7Ak\nyxhjjDGmFViSZYwxxhjTCizJMsYYY4xpBZZkGWOMMca0AkuyjDHGGGNagSVZxhhjjDGtwJIsY4wx\nxphWYEmWMcYYY0wrsCTLGGOMMaYVWJJljDHGGNMKLMkyxhhjjGkFlmQZY4wxxrQCS7KMMcYYY1qB\nJVnGGGOMMa3AkixjjDHGmFZgSZYxxhhjTCuwJMuYvZTktHccxhizr7Iky5i9jIgcWSCh1wWyAtkC\nCb0uIhPaOy5jjNnXiKq2dwwAiIiqqv3WbUwLiMgZUdy/nc/Q+MF0A2Aaa3iE+Yka/PNV9Zl2DtEY\nY/YKTclbLMkyZi8hIuEwzpqrGVM8SDpuc2yBbuY2ZpSnCbqraqadQjTGmL1GU/IWGy40Zu9xbHfi\nTv0EC2CwdKSEmAtMavuwjDFm32RJljF7j64lxBr9rapb7bEubRiPMcbs0yzJMmbvMXsBm50gzxSA\nQJUFbHaA2W0fljHG7JssyTJm7zE9jb9kCsuD+gdepszPECxS1RntEZgxxuyLbOK7MXsRERkUxnln\nOJ0KjqBngaK8zerquZRXpQnGq+qi9o7RGGP2Bra60Jh9kIgUARcW4H0JoJrsY8DDqlrZvpEZY8ze\nw5IsY4wxxphWYCUcjDHGGGPaiSVZxhhjjDGtoMVJlog8ICJrROTTRo4fLSKbRWRG7s+PW3pPY4wx\nxpjdnbcL2vgz8Fvgoe2c87qqnrYL7mWMMcYYs0docU+Wqr4JlO/gNJvQbowxxph9SlvMyVJgvIh8\nLCKTRWT/NrinMcYYY0y72hXDhTvyIdBHVRMichLwNDA034kicmOdL19T1ddaPzxjjDHGmO0TkaOB\no3fqml1RJ0tE+gPPqerIJpy7GDhIVTfWe93qZBljjDFmj7Bb1MkSke4iIrm/H0JtYrdxB5cZY4wx\nxuzRWjxcKCKPABOBriJSBtwAhABU9Q/A2cBlIpIFEsC5Lb2nMcYYY8zuzrbVMcYYY4zZSbvFcKEx\nxhhjzL7IkixjjDHGmFZgSZYxxhhjTCuwJMsYY4wxphVYkmWMMcYY0wraouK7MaYJRCQEHAuUALNU\ndfoubLsUGAisVtV5u6pdY4wxjbOeLGNyRMRzRK6Ii7fYEycVE2+VJ85PRKSgDe59ehhnTR8KHx1L\nyd0dCL8eE+8zERnUwnZ7xSX0SgR3fh8Kn4njzYhL6GMRGb2rYjfGGJOf1ckyBhARN4r7XC8KJp7N\noHg/ilhNgmdZkpxL+bwa/PGqmmile0+I4r74A0bHB0lHAAJVprA8eJKF69MEQ1S1ohntFkVwZx1P\nnx6n0M+LiIuvAVNZow8ztypNMFZVF+zyBzLGmH2A1ckypunO7Ez0yOsYGx8unYiJxwDpwLcZGRtC\n8VAHubK1bhzHu+k8hmxNsAAcEY6XPs5wOhUIXNScdgUuHkZx57NkoBcRFwBXHCZITzmBvvEo7o93\nzRO0PxEZ6oj8xBPnNhH5Um7o1Rhj2pUlWcYABXhXnkK/Qk+2/SchIpxK/1gE5/LWuK+ISJLskYfQ\nPe/xCfQsiON9qTltx/EuOpre8XzHjqSn66NnNafd3YmIODHx7o/ifjSJ0p+cwYAfDKDD/WGcMhEZ\n0d7xGWP2bTbx3ZhaPbsRy3ugGzGyaOdWvHejY/YtHMyPRHDzH8BF0T3+338I5+puxM69lrGxmNQ+\nzin0L3pHVxc+xJxXRaSvqta0c5jGmH2U9WQZAwQwazEVeXOaxVQQwlnSGvdVVY3hvfUea/Ief5tV\n1dVkH21O22mCF6ezNp3v2AzWE8Kd2px2dxci4jlwzdfYr2BLgrXFeOkh/ekQA77YLsEZYwyWZBkD\nQJLs/z7HkmSFbpuTpNTnCRZWJ8j+prXunSB7/T+Yn1igm7e+FqjykpYFcyivBv7anHYzBL99k1WZ\nmbpxm9dXajWPsSCRJPvzFgXe/np7ONG+UpT34DhKCqO4x7RxTMYYs9UeP1xgzK6gqm9ExL3jx7z3\nvVO0X7Q/Rc4qEkxmaXUVmWdpZqLTxHu/LSLn38aMP3fTmFtCTBaw2ckQLE4TnKGqlc1st0xETv4/\nPnmmrxa6QymOr6A6OYtyNyD4lqq+vqufpY0lMwRuVgPqz6UDqCYb+Oz8qkxjjNlVrISDMXWIyPgY\n7nddnGEBWpYg+zvgRW2Dfyi5FXGTgK7AbFX9cBe1GwFOBwYDq4EnmlMSojWJiFBbiPVwIAk8paoL\nd3RdgYQ+vJChYw6THtu8ntGAa3inejPpY1X1vVYJ2hizT2tK3mJJljGmXYlInyjuK0WEex1Mt3g1\nmcy7rFGBR2rwv6Gq/naunRDBffHr7BcfSwmOCOs1yYPM
TS5k86s1+J9viwTZGLPvsSTLGLNbExEn\ngjvvFPr1P4V+bm2HFiQ1y+18lFhO1f+m1P/JDtqYGMP7PeiAGF62kozrIPel8K9V1bwT/40xpqUs\nyTLG7NZE5MQexB+7mUOLtiRYW6zRBD9lWlWGoKQpZRhEpD9QCCxqrer8xhizhVV8N8bs1gSOPIRu\nDRIsgO4SpwPhABjWlLZUdYmqfmYJljFmd2FJljGm3Sgkk2SzeY+pksJ3gVQbh2WMMbuEJVnGmPb0\n9NuszqTzzG2fTTlZgo3A3LYPq/2IiCsip8bFe6RAQk+LyGUijRQDM8bs1mxOljGmXcXEe3IAHU78\nOvvFO0sUVWUum/gdnyaTZC8IVJ9q7xjbioh0iOK+2pnI0GMoLYziMo211XMpT6UJjlbVT9s7RmNM\nLZv4bozZ7YlIKIp7m49+owvRdJKsm8KvSOF/O1B9sr3ja0tx8R4ZQ8mZX2O/iFNnntpUXa0PMmdN\nmqCPquYdXjXGtC1LsowxewwRKQRGUFuM9DNVDdo5pDYlIl1COMtv54hooYQaHP+pvle5nOoLVfXZ\ndgjPGFOPrS40xuwSIhIXkdNF5AIRGdoa91DVKlV9T1U/2dcSrJxh3Yil8iVYAKPoWgCMatuQjDEt\nYXsXGrMbEZHewDkCnRQ+BZ5u74KaIXGuCOHc0pdCv4iwzKXci4v3XhL/bFXd0J6x7WU2bSbtBao4\neUparCeZAja1fVjGmOay4UJjdgMiImGcnwFXH0x3KSEa+YQNlcupSqcJTlDV6e0RlytyUQci917D\nmHgPiQO1+wI+zoL0W6yaW4M/elf0OolIB2AoUAHM3xe3whERieHO+zr7Dx4rJdscK9carmFqWuAv\nDuKnCaYAz6pqpn2iNcbYnCxj9hCOyEVdiN57PePiHSW89fXpupY/MmtzmqC/qrZpL4aISBS37Lsc\n2HuoFG9zTFX5Ie9WriX5BVV9uQX3iEVx7/LRCzsRSVeTCQXoiiT+par6aosfYg8jIhMjuJPPY0j8\nMLrj4fAJG/gTs4IQTnYivb0IrvMuqyvXkNyUwj9KVZe0d9zG7IssyTJmD5DrwVj4LUYOGCGdGxz/\nrX6S+Ij1PwpU72rjuAbE8T77LUfG81Vk/7cu0WdZcnda/Sub2b5Ecf+zH50OvYhhsWKJEKjyMeu5\nj1mJFP6Jqvpmix9kDyMih8bxflODf7gD6uGk9qdT5DIOCLny32m0L+gy/1kWL67BH7ov9vwZ095s\n4rsxe4aCNEGf/emU9+DBdIvH8U5o45jawlFxvHHf4oBYsUQAcEQYIyV8hWHxON4dItJBRC4NiXun\nI3KNiPRq55hbnaq+V62ZowK0QxYd4qPeVxi+TYIFcAJ93EJCPYBj2idSY8yOWJJlTBsRkQ6uONcW\nSGh+TLxVcfGeFZHxQEaBNPmnNiXJEkBVE+/RWUQOFZFhkq/7aecsCdDy+WxucEBVeZNV1RmCZpcT\nCOOcewy9C+onDwDj6EaG4EAPWTmKLrefyYDvjKfHz8I4C8Pi/qC599yTqGoS6NedWKqozhDyFiLC\nWEqiwMFtHpwxpklsdaExbUBEukZw39+fTt0/R59YB8LMZOPnn2bxsWn8q0K4r7/NqkmTKN0mMQpU\nmcLyqiTZv+6g/aIo7r0hnLO6Ek1VkA4F6HIR+YaqvtGcmFVVXZEf3svMe6/VMfHu9Sa+V5BeDExp\nTtsALlIYx8ubCHri4Kp432CEN+a/k8CjZ+hAfsEHPxOR2ao6ubn33oNUVZN1VZV8OfNm0hmamIAb\nY9qeJVnGtIEo7h3j6dHrQhm2tUuiJwUySrvGf8p7tyfJnvUYCw6Pqxc/hO44IlRomn8wP7WR1Fyg\n0YRCRNwo7n9G03XkeQyJFEk4GqjyIeuG3s+s50XkGFWd1py4fdW/hsTp+FOm3dJXC/0OhGUO5R7w\nXk1tCYdmryxM4r8yjbVnHENpYf1ji7QCD4cD6brN610kyrk6uOBh5v2U7bwne5GPkmQr57KpcHi9\n4eRKTTODdQLsM9sOGbOnsYnvxrQyEYl7yIbbOCLaIc+wz4M6J/UWq37loy/F8O4TGFhIKFNOKuIi\nT9TgX6aqjfZWiMgpPYn/4xccWli/vtIbupLHWPBGtWYmtvQZgOOBQuADVW3xps0iEgvjLL2QYV0n\nSM+tgVdphluYHuxHJ+cCGdbguoRm+A5vpbIaRFsaw57AETkjhve3S9g/PoouOCIs1Ur+yKzq9STv\nTal/VXvHaMy+qCl5i/VkGdP6ukZw/XwJFkA/iiLTWDMkodmfASNFZFCCbCdgQboJZRtiuBdMorRB\nggVwGN15iLnjRSSWm+PTLKqaAJ5p7vWNtJkUkaMfZu4rL2tZ4Ri6Fm0ilX6XNYGDlA+luGe+66rJ\n4iCpXRnL7ixQfVpEvvhHZt6h0NdTx0/j1/joTT5tu+LUGLNzLMkypvWtTxG4FZomX6K1jMpUmmD+\nlq9VdeHONO4gBVHcvMdCODigAYSo3RMwL8n9SrYz990VVHWWiPQto+rzy6kao1AJPA4cN4UVd43T\nbgX15yK9xoqsgzzW1rG2J1WdLCLPA73BDwNLVdVv77iMMdtnqwuNaWWqmvCQJ59jSYPq3Os0yTus\nVh99oLntV5N94X3WVuc7NptyPJwV1CYv25BaF8UlNBfwPXGSMfH+KiL9mxtLc6hqVlWfDlRvUNXb\nVHUp8PelVC5/iLnpytyuQin1eUGX8QrLvYDgSxFx7xKR7m0Za3vSWstVdZElWMbsGWxOljFtQERK\nIrjvH0DnbsfnVhd+xgZ9hsXJFP41GQ3ubkHbRWGcpRczvPgw6bH139BmTXEz0xMbqLksUH2o/nVR\n8W7vSPjS8xlaMIJOVJJhCsv9lymrTBMcrKoLmhtTS4hIYQjnetBvgXQAKCTkV5Nxe1PIRQwlisfL\nlKXfZlV5muAgVV3RHrEaY/ZdVvHdmN2IiHR0kMuiuJcEaKEg05Nkf6Wqb7Ww3RhwUBjniR7E46Po\nWriBZOoD1onArWmCG+oPBYrI8Cjuh7cyPlYooW3a+7cuCSaz7KWEZk5qSVzNkdtm590RdB56JgOj\nvaSAqbqah5nL9YyjlxRsc/7juiD7KiueTGr23LaO1Rizb7OJ78bsRlR1M3BL7k+LicjIGN7tLnKM\ngwRAeRlVjy+jqgwoBx5X1ZX5rg3hXDyRXl79BAtgEqXOMyyeJCIdczG3GYFvDKDD4G9xQHTLXKwy\nqphEaYMEC+AYensvseyMfBP7RSQCeKqadyi1WfGJHBTFvdpBDhGoqCZ7H/CX3MIAY4zZhiVZxuyB\nRGR0GOfNMxhQcCQ9JYLLIiq6P8Tcc9aR/EdSszdu73oP6dWNWMMMC4iJR0TdTJZsR8hT7r0VxfAu\nO5l+2+yVuJk0+9WrEbVUK3mKRcyhHBcn4iFLPXFu8tHfAmPjeLc6yFGAxMVbnMT/qao+0pLYXHG+\nFsP97Sn0j46
gs7OZNC+y7DeLqbhCRMa39Qbexpjdn018N2YPFMf73TkMKThe+khUPESEQdKR6xhb\n4CDni8iI7V2fwp8xm/K8vS/rNLlli581rRH79gRo565sW/6qBzEWU7H160Vawe18xGi6chdHcg8T\nuYrRJT2J/9JDngzjvHE2gybdzVHefRztXsYBgzsTuT8s7o+bG5eIlLrI3T/h4PjJ0s/pJ0WMki5c\nxej4QZQMjODe2vynNsbsrSzJMmYPIyJdMgQHT6BHg7kAMfGYSK+Qh1xU53xHRPqJyNa6UwE8+DEb\nWFhvNDBQ5R/MrxH4k6q2eS0qB5k1j207hCbQi2msYUWuHuvjLOAcBnO09CYi7tYE81rGFHg4Z1zC\n/vEtxxwRDpAuXM+4OHC9iPRoTlwu8vXx9JAeua2FthARzmRgxEcvFJF9ojiqMabpLMkyZs/TIYKb\nCUn+2lidiHguTlcREU+cyyO4q+J4syI4i+MSmi0iJ6rqxgzBF3/DjMRfdW7qI13Pm7qSG5hWNZvy\nj9IE17XxMwGQIPvrp1iUqMiVbQDoJBHOYhA3MZ2Hda5fRhWH0rByQwUZwjgylpIGxzpJhLGUKPDF\n5sQVwR3en6JIvmOdJUoIR4EuzWnbGLP3sjlZxux5VqbwdY0m6F6vZwXgEzZUpfDfC+Pc3InIt7/B\niIIBFKHAx6wf/idmPykiF2htJfHhb7Hq8vdZe2SAbkqQvR94TlWzbf5UgKq+EBH3rut59zuf076R\nUgrcMqr8lyhL+QR/eZNVXeN4Z3viNPgFMUGWjkTIV/keoIRoFOpN7mqiNP6iFVSngQbVZCs0TQbf\nBTY2p21jzN7LerKM2cOoakrgnoeZl8zW25/5U93AXDYFwBSF713HQQUDpQMigiPCGCnhCkbGI7j3\niIijqmVp9a+r1PQR1Zo5RVWfaq8EC2oLpKYJpgew7F8s0T8yK/Nvln6WIHtyRoPLMwQXJ8jWrM+z\nQ1B34qwhQZU2qPkK1CafwGfNiSuL/ulNVgbleUZQJ7M04+E82dxti0SkpyfOTwsl9GxUvD+IyLjm\ntGOM2f1YnSxj9kAiEo7i/iuOd/ixlBZ0ICwfsT7xCRv8DMGJwKiD6Xb7ZXJAg64uVeVq3qncSGqS\nqn7QDuE3KirubwoJX/YlBhcMo5hNpHiJstR01q1L4R+kqmuj4t01lOJvXMnImCf//T3xQ13HH5np\nH0Q392vst02P1jRdow8we32aoLdqI1nYDoTFvS6G+5MvMCg+gs5sJs3LlKVmsG59qrYg6hqpXRZ5\nehzvh1mC4Q5SkSG430fvzFcOwxE508P522F0l+F0iq4n6b/C8poMweM1+F9rj62OjDFNY8VIjakn\n90NwUhjnC4J4KfznacfhsZbIPcvEKO5XXKQ4SfaNoLZmU7mIXH0cpTefL0Pzlmm4UadtXkbVGcDr\nwH5AETCnreti1SUio+J4797C4Q0KpP5N52XeYtVfazT7dRGJRHEnx/EOOZbSgkJCMp111XMoj5ZS\nmM0SRDwcjqAnUVymscafTXllmuAYVf2ohTF+Lo53vU8wSpDqLMEDWfQuVd0AW5LE0GVn55LEjaR4\ngaU1n7JxZQr/YFXdWKetfmGcWT/koHg/Kdp6j6Rm+RXTq1dQfVWgem9L4jXGtB5LsoypQ0Q6RnGn\nFBEeNpFeBS4i77C6ci3JdSn8o/amrVlE5JiuRJ/9NYcX1t9guUoz/IC3azIEXwl5sV87jlcSCRdm\nqxPrI47jPpzJ1ny7uUNfLREV93efo+83z5SBDWb0l2uKa3mnJot2UNXMlgQzgnuhhxQnyDKU4pOu\nZkxcUT5hAx+wjgw+C6lIl5P6tqr+oTXjF5Excby3buHweP0k8c86O/0ea+5PqX/5ltfC4v76KHp9\n5wIZ2mBC/Rwt5//4ZFlSs/1aM2ZjTPNZxXdj6ojhPXQQJQdczPDIlqGkE+hb9Kwujr3AsskiMnov\nGp55rZLM6pcoG3ACfbcmLYEqDzO3xkHeCXnRvxw17luxXt1GISIkazbx7sd/uWD1+lkDReS4prwX\nIjIWOASoBv6lquXNDdjDGdCTeN4lk50kgqg4oAXAplxsr+X+EJfQgtMZEK/9vgpjKGFMbpXhdF0b\n/gtzvg60apIVwf3m8fSJ5Kuifyr9w1NZfbGIXKlaO5Eugnv4/nTKu2JxGMXU4PcREW9P7GU1xtSy\nie9mnyAivX2Cz53HkEj91Wefp78XwR1EbbKwR5FaE2Li/blQQv8KifNjEemuqprCP/5pFq/8mb5f\n9ZKW8Zwu1mt4J/MJG2b5brj7+DGXxHp3P5AtPV2xaDETD74iFvJihwLjd3DfHjHxPigi9OYR9Pjf\nkXT5fQhnZVjcn0j9rrMmShN8uoiKdL5jq2t3rakBKvMd9wm6dCOWt90SYih0a05MO2N7SWJXiaG1\nKxO3zpHz0XWbyF+KbDNpXCQN+K0SrDGmTViSZfYVo/tRlIpJw85bR4SRdHGAg3a20VySc4wnzq89\ncW4RkSObm2Q0495uFPeJYsIvnEr/L5/P0FMOpfuPQjiLROQkVV2Swv/WcqpCr7E8O5dNMogOogT7\nKf7wPj0aPq7jeAzpf0zMdULn5O4xWES+KiIXiEjX3GtOBPe1SZSOvoMJ8a/L/rHvyYGFt3B4tJjI\ntS7yzeY8T4bgD2+w0l9TbxvAQJXHWVAj8AdVzZt0eDgLF9WpCl/XIioUmN3UOJr7/cvgz1xCZd5J\n9au0GgdJAFsfLkn2Ty9SVlV/hSjAy5RlPZxH96KeVWP2STZcaHYZEXGBkUAI+Kw95vVsR2UlmUZ/\neG4m7QNVO9OgiHSO4r5SQGjIBHoWKMpbrLo8QXa2iHyutfey85Cre1Nw4tWMiYdzhUkPp0dsovbm\nN8x4XEQmhHEevYoxkcHScetly7TS+6UzE8fJX8w07MUcEbdjXLzJUdxjDqCzn8IP5rApFBXvbuCt\nYsKDv8BAt24+0kkiXKojCn7DjBtF5L7GEqLGqOpiT5zv/Iz37zxJ+0X2o5O7iRTPs6x6FdWz0wQ3\nNnZtguytT7DwgRHauaBuIl2paZ5hcSJB9vbt3VtE+kRxf54lOAeIxsVbWIN/k8JDTU100gT3vsqK\nSydp71BX+W+vWqDKEyysAX6/Zagw54UK0lPv4OMJ5+mQWKkUUqlpXmG5P4Xlm9MEzd4GyBize7CJ\n76ZJRKQ7MBBYq6oL6x/3xLnYw7k1jhcL4QTlpByBO9MEN9T7wdIuRMQL46y9hrGdBkqHbY5t1Bp+\nyLs1GYJeOzOnKCruOz0pOKSQkBvF41C6MYou/I156fdZ95+EZk7a5Q+Sk+tNWnMdY7vWXZm2xf06\nKzmNNdOOpNdhF8mwbeb9BKp8132PiROupUvxgAbXTn7jZ5WbyxctG0e3QRczPLqlsnyFprmdj6pX\nk6g+jf7dTpH+eWP7tr5ZXUVmtKouaOazjYni/sBFxilsSpB9Cvi7qpZt
5xqJ4v4xinfuKfSL9yIu\ny6gK/s3SmgzBb2s022gFexE5OYzz6CRKY8fTx+1AiNmU83fmV5eT+kuNZq9oauwhcb8TxvnlqfSP\nDqPY2UiK51lavYLq2TX4E1W37aYTkXAI53qBKxUKApQwztNJ/GtUdWlT72uMaXu2utC0mIiUxHAf\nyKLHdyVas4l0GHR+Ev9iVZ0B4Inz9UJC/3cFI+ODcj0mazXBvcxMrCbxUFKzl7XrQ+S4IufHCf3x\nEvaPH0BnBFhIBfcxM7GZ9M1p9X/Z1LZE5NIwzr1H0YsRdKaCNK+xkigulzGCq5lak8IfoaqLWuNZ\nRKTYw1lznxzdoAI5wFRdzd+Zt/Hr7N95dO0o3zZe1DJeKKzmxCN/TCRcuPX1uYun6AczH9kU8f3I\nnRwZr1uHCmCtJvkx73IK/ThdBjZoV1W5kjfTCbLDVXVxnrhLgWNzX05R1eWNPF9BFPeuLHp+EaFs\nNZmwizMjSfZSVf2kkWsEODqGd6UDAwJ0bhL/LlWd2sj5vWO4z4IceDyl7hn1nqdaM1zL1GSC7GGN\n3bORdg+J4V4tyBiBjdVkfw88sr29IEXEAToC1aqad16aMWb30iZJlog8AJxCbQ/HyEbO+T/gJGrn\nI2z94bxsrRJbAAAgAElEQVSzwZq2JSKxCO6nR9Gr7xkMCMXEw9eAqazRh5lbnSYYBywK46z9IQcV\n1+9RSWiG7/N2TZpgSGM/TNuaiJwWw709gFIHUUXL0wQ/8TV4YCfa6BLCWXkNY8KD/jsMR6DKH5hJ\nF6KsoLriUzb8j6o+2krPEXGRyjuZECrIs5ptsi4N/sWSFecypM9R0qvB8U1aw9UyzRfHcfr1OkTi\n0U6sWvUhyeRG0n5NZhid5PsyOu90gqv0bQ3hyM0c1mALm9m6kd/yaUUNfnHdYbZc8dQ/BejZI+ic\nBZjJRs9BnswV3UzXOdeL4k4dRZcDzmFItJNEyGjA26zSfzC/Kk1wiKrOafabl4sngjvveEp7v8xy\n7zbGk+99fFIX+i9R9ru0+t9tyf2MMXufpuQtu2Li+5+BE7cTxMnAYFUdAvwPcM8uuKdpG+cNoKjH\nuQwObZnn4orDBOkpp9A/FsO9ETikmIibb8gqLiHGUBIAp7Vp1Nuhqs8m8Yem8AclyQ6vwe+zMwkW\ngANfOZCu1E2woHYC/dkM4i1WUUUa6kxy3tVUNRXG+fd/WNFgKDatPq9QlqzB//1LlFUFeX6RepWV\nvqfBgv38ovRBZasYNP8zLqjqwl3+YfyIcaH5bPYqGulQ8RCJ4fEgc0jWqS6wTCu5j1nU4D9bfx5T\nFPe+gXQ46w4mRK+UUYVXyqjCO5gQHUiHM6O499W7xee7Eh3+P4yIdpLakc6QOBwtveU0BhTEcJvc\n47gdZ/WioPOxlHphnLwJFkA3Ym4Ip2GWaowxTdDiie+q+qaI9N/OKacBD+bOfU9EinNLzNe09N6m\ndRXgXXwMpQX5FlsdRU/3ORafAfwxhtfonKs4ngfkrQXUXnIJwMrmXh/BG7MfnfIO05VIDE+FMqo8\nYEpz79EUSfyr/s2So1HtMIlSp0BCLNEK/sa8RA3+C8BvNlJz6t18OvZcHRItkRhJzfIqK4IXWFoJ\n0ucihkbqTtIG6EcRB2oXnmABVZplOVXE8TicHtRudZMOTqGfM5tNXMM7DNKOVJFhAzVUk8kAP6nb\nnoj0COGc+y1GRupOSo+Jx7f0gPj3ePtcEblOVVcDxPG+ehx9CvNt9DyRXs4/WXhqbt/FZs/1i+Gd\nPoEeRQXUJlerNUGPPJttz2FTMoXfoOfdGGOaoi1WF/YG6k5YXQ6UApZk7eYEiUbJvwItikeAesCM\nFVRHKjRNB9k27whUmc66DPBW60fbOnIrJrsAlVtWS2YIVq4jmQEadH8kNUsVGRR+Vn+S866mqgtF\nZNwLLPvNsyz5vKioi2zOEtzmo7erqi8ix81i469+zHuXhNUhhR8K4f4ng/4yjvtC/QRriywBc9nE\nWQzkXIZQTg0vU8ZzLNEswcMPMe/cvhSGB9CBrkTZj2LeYFV1Ddk/ZjRYUq+5icMoTsfFa5BsxyXE\nMC1Of8bGicCjAA5SVNDIf00xPBRcav9sN8nKzXMaB3QGZqnqsm3ePsATh6O0F0+wkMt0BG6dOWhL\ntIIPWIuP/ml79zHGmMa0VQmH+r+S5p0IJiI31vnyNVV9rbUCMjtWg//idNaNHEmXaP1jM1hHFO/D\nas2Ux8T76/3MuvBKHRnbshJNa5etZ9L4s1X1/TYPvoVyq75+Gsa5QpBIlsCJizc5if994C+vs+LK\nk7VfqH517ymU4eF8mtTsrW0RZ26l51kiEgaNZaGi7lBdLjH8rohcmyHoDmzOaGaziBSm8b2EZojX\ne4blWsVcNnETh9IxN1zXjRhDtZi7+CSYQ/mXRtA5OJweZAl4nZW8zaogTfBLhV/lC9NHG0xNyGqA\nAEHtfwdbY06SfXUG6w87iG4NMsBZbCSKuzCh2e1u8iwiJ0RwHyjAK+pEJFhOdSQuoTeTZC9Q1XVJ\nsv98mbLT5+qm+DIqSZLl53zAsVpKMWE+YYO+xapkluACVV27vXsZY/YNInI0cPROXbMrVhfmhguf\nyzfxXUTupTZh+kfu6znAxPrDhTbxffcjIj3DOPMuZ2ThSOmy9fW1muBmpicqyXxBVV/Ibdj7iCAn\njqeHF8Zx32VNIkl2YQ3+8aq6rh0fY6eJiBPFfWkQHcafx9BYLymgWjNMYbk/maUVaYIxEdxvdyR8\n6fkMLRhBJyqoPf4KZRVpgoPzlbnY3cQl9Mzn6HPK6TJgm+7KB3UOBXicLYO3OV9VuZ73OIE+TJTe\n2xx7WhdlX6bsvYRmJ9S/j4gcGsJ59zbGUyRhputanmcZS3PF20M4QQ3+Car6Su78bmGchVcyqnCE\ndN7aToWmuZkPEuuouVRVH27suURkfAT3lcs5IDaCzogIKfV5ikWZN1i5pAb/AA+5PIJ7+xkMkOF0\nppwanmIRG6jBR4MU/tNZ9AequmSn31hjzD6hzUo47CDJOhm4QlVPFpHDgDtV9bDmBGvanogcEcL5\nVz+K3OF0KlxFdeJjNrigV2U0uLveuftTOwcvRO2ecm/tiRWrReSkbsQeu4lDC+uXMHhSF/pTWP5I\nDf6XgQtieD9Jkh3iISkP59Ea/Bv2lPpGItIvjDN9Ir07Hkep14EwM9nIX5nLeQzhEOm+zflztZyH\nmcfPOWTrVjxbZDTgu7yZTOIfqKrz6x6LifdIXwrPEUT2pzNvsZJzGcKBdCVDwDus4jEWJlL4J6vq\n67nYjgrh/GsoxTKCzoVrSaansjpQ9M40wY+297kqkNCb5zB4wpH1VlWqKjfxQdViKn8Uwbn1Fxwa\nrTtcqqrczyyms25KSv3jmvu+GmP2DW1VwuERYCLQldp5VjeQm6uyZdd7EfkdtSsQq4GvquqHzQnW\ntA8RiQBnAsOAtcBjqrqhfaN
qPXEJPXk2A886RkobHNusKa7inbSPRrf8oJfch7fNA90FRKR3BPfH\nAXqBj8ZjuDKYjk4phXxBBm1z7qu6nKVUcbEMz9vWrfrh5jls+rKqPlv39bC4Vb/ksILnWcqbrOJm\nDqX+XLAZuo4/MmtRDf7gOu9rAXBuGOfALMGaoLYgaYPaW/WeJyyQvIeJzpYq+HW9oSv5O/PmHUWv\n/ufL0AaLFzbUFqZNZAmKVXW7Q5LGmH1bU/KWXbG68LwmnNPkislm95MroviP9o6jrbhIl46NLIjs\nQBgfDVE78ToLW1cr7pFUdQVwGXCZiIiPLh1Flz7PsJhJWsqWEgoABYRYTXVj7bCeGgdYX/9YgHpR\nXCK4jKBTgwQL4EC6EsbpXoM/Evgk12Y1sLOTzh0At8E00FohHEI4HftRlHd1aBeJ4igOUAzsUcPc\nxpjdj20QbUw9Kfx3ZrIxb3XuuWwiirtMtU6BqL2EqmoN/sWPsSAxlGK9mQ+YostZoVV8phv4D8vT\ni6kMlmvDLR5nUU4lmSrg3frHIrjT/sUSprCcbjQskwC1NcaKiWSoXQnYkmeoieHN/oT8Ha3vsrq6\nBv/jMqry9lJt1BqC2oWHm1sShzHGgCVZxjSQIbjnbVb5i7Vim9eTmuXvzKtO4edbQbfHy21LEyi8\nPIuNazeS0sdZwC+ZnrmXmcvms/n7WYJLb+HDxFRdTUYDajTLa7pCf8eniVTtVksNyiokyN74Oiv1\nZPqxoJHcJalZVpOIAnNb+hwJstc/yNzEmjoVNFSV13WlzmVT0kevfp2V2Y1as811qspzLEm7OH+z\nrW2MMbuC7V1oTB4icqaHPDyObs4IOkfXU+NPYXkqQ/BICv8b7TVEmEuERgOdgNmqumoXtRuN4v47\njnfIJEoLfAJ5izXpcsloVjO/AW5V1crcucfH8W5Kkh0HaBTvtSTZ61X1vUbaHteJyFu3cnjkR7zL\nmQzkMOmx9biq8jDzsu+y+vmEZpu1O4CIDAa+HMLpliVY4IArODcMp9jvSsybxcbMZtIbU/hXAmkX\nOTaOd/lZDIztRyfZRJoXWZacRfmqFP7BqrqxOXEYY/YdtkG02euISHfgQKASmKaq/q5uP4p7W5bg\nbBdHAlQcKAvQ/2TQe/Mt2mjmfTo5cHEUb2KAltfgP0RtqZO6+/3FBS6J4X1T0eIsukK9cN+QF43H\nIsX+5qqVEUfclzLZ5MWqWt6SeKLi3T2c4q9ezsjYc04Zr8hK+vcZT0Fhd1au/ZQ16+dU+UHmzC1l\nFnLxOdSOMm73PxER+dxAOjz2YxnXsUyruIOPGEoxYyghjc/rrGQF1WtS+PvvbHIjIvuHcB4H9h9F\nFwbRkWVU+tNZlwnQa320nNohyGQU93shnD4lxLKrSIQVXQpsAIYKUpHC/2OA3qNarwvTGGPysCTL\n7DVEpCiG+0AWPbWUwpoqMk4F6Zo0/rcC1Sd20T26RnA+nkjvkpPoF+ooYdZpkidYWPMpG+bU4B+W\nWwTQ0vscEcJ5fhRd3NF0jVeS0Vcoq06QfacG/1RVTYtIURT3nYF0GHgS/eKVpHnIXcQRB19Or26j\nEBEymSTTZz2aXrT8nfnZbM1o4PAo7jddnN4Z/BlpgrtVdUET4ikI4az9FYfF57OZx2LrOG7iT4lF\nOmw9Z836Obzy7m3Vvp8elpssvzPP2yeCO+9OJkQj4pLQLG+zinlswkOYTXlNBZkzVfWFnWx3qIdM\nj+AWXstYSqXwv/Fqgpv4IFFN9gRgVRhnxpcZVngYPcQRIasBU1gePMWiDWmC/fbm1bLGmNbRVhtE\nG7NTRCQkIj1zS/Sbcr5EcV8aTdfP38GEyE9kXMdfy+FFP2B0SQzvQRH5/K6IK4Rz1cF073KuDAl1\nzG0RVCIxLmVEtDcFQ4AdrqTdzjMUiUhnESkK4Uy+kpFFl8vI+BHSkxOlr9zC4YWD6HBkGOcXAGGc\nn4+iy5AfMDo+QjrzhruesaMuonf3A7fWqAqFYhw66ivhonhJXw+ZUkz4+dMZcN5XGT7xGHp/K4L7\nSUicrzUhvIFFhLKdJcpkbxVjRl24TYIF0L3rcAaWHu46jnfZzj67qpY5yOv/ZFFGVYmLx/HSh8tl\nJPvTWVMEa4GXdrbdKO4v+1JUcAJ9t0mwALpLnDMZGIvhXR/B/eHx9ImNl56yZT9ETxxOkL7OGEoK\nXeR/GruHiHgi0l2kkf2HjDFmOyzJMm1GRKIRcW8N4WyI4i70kI1x8Z4RkQE7uPToAkIHfJ39o/E6\nGwwPlo5cwv7xGO7tkm8X653kIF89nj4Najc4IpxI34ICvG/uqA0RcUTkvAIJTYuJtyYq7ry4eAtc\nZEMIZ5WHLBlGceSAOhX0ofaH/oUMi2ltKYWIwiVnMTAiImzWFPOCDQzofWi++zG0/7FFMSd2xK84\nvOAE6StjpYRzZEj4Bg6OuTi/E2mksNV/VSTIhjLqszxbTq9uDWoKA1DaY2w05EWP3tF7kE+S7IVv\nsnLxTXxQ9YauZKqu5g79uPpvzNuQwj9xZzd7FhEnTXB6Cl9G0iXvOaPoIgF6KHDmBHrmLVdzFD1j\nUdwL8rQfiYh7SwhnQwR3iYtsiov3TxHptzNxGmP2bW21d6HZx4mIG8V9cSjFB5/HkFh3iZOo3arm\nlH+zdIKIjKm3ge9WIZyzJtKrwMmTR42iCwH0BXoCK1sSY4AWdiRv+SRydbOKt3d9bjueRzoROeVM\nBhak8XmE+d3OZQiH0B0P4W4+7bxfI1UKukuciLpOhmCIopFuEmearuFB5gDgOPn/uXpumF5S6Ebq\nFd/sIXGO1dLQFJZ/h9paWHmp6tK4hBZOZ90BIXFJZaob9GQBpNJVNHe+kqquF5GRi6k8Yw0LzheI\nVJN9Fnh4y4T6nVSiqJsgyxMs5GjtzWi6bLPBczVZBBIBWug18vtkCBfNFU/OzTE7K4b7nSjuuMF0\nDJ/HEKenFFBV+1k97XmWHpX7rJblbdAYY+qwnizTVk7pRGTslYyMdZfaWklxCXGqDHCPo0/HKO7P\nG7tQIBzCydtTJYCLBOR+ULZECGfmHPLPH5/FRt8n/+q5Os4qJnLKDRxccBAlvEAZX2M/jpCehMRB\nROhJAeXU5L04rT4p/BCwWpD0DF3P35nHdRxEL7cDK9d+mve6hcvfZpyfP/8bTrHn4Ry0g7hJkv3m\nX5iT6Csdmb94SoPjqgGzF75Ylc5UP7CjtrbIDZFeGhH3HlecnwK9VfWxas2cUaWZk1T1nuYkWCJy\ncAhn3lhKOIfBHEQJz7OUXzODZJ3yZa9QlvXRh1yc16ezNu/k0/dZk84STM79EvDPnsT/fCS9JnQj\nFv0Oo5yeuRHtQglxugxwJ1HaMYJ7487GbIzZN1mSZdpEHO+Sz9Gn0JWGH7njKHUzBOc0NuSXJnhp\nKqvz/jBeRAU+Wgm0uGchQfbmR1lQXVGvRNIqreYFlqVq8P93e9fH8b57Gv0LwuKykmqS
ZDmQrtuc\ncyjdeYfV2yQDW7zNag3hvKeq6wXuf5z5wefoQx8p5PPZXkz/6C9UJf5bUF1VWbD0DV23Yb4OpWPe\nmNZTg7LtZuz5qOrbaYJJy4NNH346/1/MWvgi2WztHP9EciNvTr+3pjKxdh7wzI7aAhCRY0M4K0fR\n5fazGPjNo+l1fQR3ZkTcm5py/XbajYRwXvgmIzpcLiPlEOnO0dKbH3EQPYjzD+aT1CxP6SLeZ+2m\nDMEdSbK/eJrFyfp1zz7VDbzOynSa4E7gy92IHXcjBxduIsUkSmnks+r5BM2em9dcublhZzoivxCR\nq23Y0pg9gw0Xmjbh7Hirmgi1HVP5ehyeXUNy42RdGj+Jvu6WXGyTpriPWYkswY07O6cnH1V9JiLu\nndcx9ftHa+9QLwq8RWxOvc1qDdArVPXj7V4P/UqpnYCdxKcjYeoPcZZKIWO0hFuZwYU6lIF0oAaf\nt1ilT7KwOk1wJUCa4KebSV9+IF0dgEOkOxtTaZ6Zch29Sw4gGu9C2dpPNFmzabkEmTdeZcUXv8zw\nbcY6sxrwIsuqEmTv3fJabkjsSKAHMB+YsaUEQ67O1UEicvDHc/5514xZj40Nh+LpdCbhieP+LZut\n+V5T9vMTkV5hnGe+x4EFw6TTlpfDp+kAbuKD74rITFV9ZEftNOLMvhSGxkjJNi86Ipytg7iad5jK\najycaWmCs1V1HbBORC68hQ8f6q9F2ofCyAI2p1aTyKQJTlfVpQUSuuosBhWEcqsfGxs2LiZCFo22\n5X6VIjIijPNKD+IFYygpKieVepfVP4+Kd08K/wd78rZOxuztLMkybSKF/9ZnbBg3mq4NfnrNppwY\n7qKEZvMmSqqaEZGj/sWSF/7D8j4HatfQZlLZT9noCtzmU7sReXPkes8OASYBPvCkg3SewvKvuoiT\nIXBd5CkffX6HbcHS5VSVllJID+KsJkGVZiiUbUcyL2AoV/F26jY+SgZoPECdMO5/0gRXqeqnuWeu\niEuoLEF266KAE+nDhKAHH65ZRxlrWMC6tVmC/kCXqaw+xlEpOZl+oU5EWEIlj7IgsZn0W8DzuWed\nFMH9W0fCBT2I6xIq3TT+MhE5U1W3VlpX1feB8SJSkkxt7gwsVz+df9PCPDycyw6jh1snwQKgg4S5\nQIcW3MfMG4BmJVkCI0bSpSjfsQ4SppNGUmtJfiGj2X/XPaaqT4lIt/lsPm0+m3sBi4DJW5LGLEGf\nvrkEuQ+FzKa8QS8kwCw2bvmstlWCFQ/jvPZlhnUZLz23ZOyRs3UQt/Dh/6whsRC4uy1iMcbsPEuy\nTJvIEPz+LVZdcYT2DA+Q/06qTmiGvzOvugb/5u1dr6rLRGREDf4Rr7JiHFAFPK2qDTYkbioR6RTD\nnRzCHXkY3SNZAn2XNb+K4urljHQHSAc2acp5nmVnvsGKCSIyWlXXNtZeguwdz7B49BgtKSiUEGO1\nhMdYwMU6fJserams1gTZjWmCPkAHoCahmWT99tL4D77KiuuGUBzd8lqhhDiKXvxJZ6UC9L5cD946\nERnzNqt/8QYrL/LRSBh3fZbgdh/9X1UNROTAMM5zl3NAfASdERECVV5nxbBHWfC2iAyrXytqSy/Q\nzr6vUdxjDqRLNN+xA+hMEn+oiDjN6X1UKF9HsgZo0L6vAZWkA2BW3mtVk8Cj+Y65OGtWkSgqJsLR\n9OYXfMDh2oN+8t98rvazOn+Hn9Vd7JxBdIzWSbCA2s/BV3V4wW189GMRuWdX9OQaY3Y9K0Zq2oyI\nnBbCeeQgSmQ4nWLrSPqvsiLto39O4V/R1sMecfFeO5Tuh1/IsPCWJCijAfcxk0JCfKVO5YMHdU56\nKqt/n1L/e421l1td+LdiIqeewYCCbsR4gNlkUY6hN1FcprG2aj6batIEE1U1bzJQp70uYZyZJ9K3\ny4n09aLiUaNZXqLMn8zSDWmCA3KJUN1rBAjV33svLt4/T2XA6SdK3wYTjf6gM5MfsPbnWQ1uadIb\ntwMFEpp8DoNPOlJ6NThWoWl+wNtpH4025/stIr1DOAtv5fBIR9l2+HmqruZh5n2S0MyBO9uuI3Ll\nEDrecg1j444IH+o6/sxsxlLCYDqylmTwKitqfPTBFP7lbfVZjUvo719k0HlHS+8Gx1SVy3kjWYM/\naFdtr2SMaTqr+G52OyJS4iBfi+KOzRCsyhA8oKqftEMcI+J40+5kQtyrN8G5WjNcy1R+yWF0yBUl\nXa0JbmTappT6nfK1V6ddBzg7jvf9AO0nsCKJ/58obk8HiSXIvgA8oqpNGn4TkdIY3v0+wcSOhNOb\nSYddnDeTZC9prORFPmFxq37N4QXF0nBe3Ge6gT8w86MqzYxpans7iPnsPhT++QYOLqw/J+1ZXey/\nwLInkpo9t7nth8W9oQPhq7/MsIIRdCaF///snXd4HdXRh9/Z3dtVLMvdltwbbtgGFzrGYFoIoQYC\nhFBD+UIgkBBaIECAQCDUAAmhBggtlBB6sMHghgu49ypbslVsSbfv7nx/6NpRubIlS274vs9z7Ue7\ne86ZXenuzp4z8xu+YoO+wfJIAnecqk7fCZu9fszPCsga8UN6BjsTYiHlvM7yRBJnUxJ9K4n7zI7i\n8lobv1hP/4Ael5wo3RvcG211uZIvEjZup5aWVcqQIUPzaYrfklkuzLBbSc283Lez7VMzNd2pCZJf\n3YJlksOG0Y76DhZASDz00hxWUMmBqbicPHzYuFkNDq5Hyp7XUp8Wo6rrgONFpMMmYl2BIlWn0SXL\nRvuBRtVaBan5r7H9In5gApAPzANm7GAm551NRBc/zfxBZ2tff574iKvDF6zX/7A6nMC9pbn21yah\nzh0isvRJ5t0Rw+kloD6sTxK4N+6sE5QqZXTMMrZc9gTzfmHjdrAw1kawHwBe3FPLcXGcVz6n6JwJ\n2jAzdwYb8WLMSaqTcbAyZNhLyThZGfYZTJFz/Jj3GUi+AA66xRLjZlvdZwFE5Ogg1q8VHWwgpWHs\nx6l5QKarNxiPkT7QHiCGg6eWwsliNuPH2mEdwF1FKhas2c7VVjwY/53OxpOOpaCBMzWF4mgM5810\n7UyRcz0YT3YjS9vjNxezmXhNsPzJqrqiEVuTInLUd5Q9NJvSnwTVsiPYXg/GtATuFcBqETkriHWR\ngeTGcSYla+osNlmGQ1VfBl4WEb+CHdFkQ02MZpJaYn0s9dlbmBgmOesvzB91nvbztxEfriqzKeUF\nFkfiONftaQMzZMjQOJnlwgz7BJYYl2XheehyBgX7p4TXl1PJk8yLVJG8zYBsH+YNp9Ir0IdcKSPG\n+6wOr6N6fgznqFTQ8zZEpIMHY/UDHOLPlroJjxs0zL3M4gEOYWtK/118Ey4mcqWqvrD7zrp1SC1h\nDvdhfnEtw4L9pOb6qSpfU6wvsnhLArd//aB+ETk2iPX2rxkeLEwFgLuqfMo69y2Wb0rg9t7Rsmeq\nPmVXoEJVN4lIlh9zYgcC/Y+lICsLD99RFv+
KDU4C9zRV/WiXXIR9GBEJ+DEfcdCf5OFLVpO0FNZF\nsS9X1Yl72r4MGfZXMjFZGfZKRMQC3KYuwYiI34Ox8TYOzu5ar6b0Jo1yC9NiFuL+gbHB3FoOk6vK\nI3wXXUD5H5Pq3l6/X79YD3UgcOmVDA5tVaFfo1U8wne0x28fTTerhKjzCWvjNu6LMZwr9iVNIhE5\nLoh1VxR7JKAWxrcCfToQoDMhYzlbCGOXxnFO2SodUZuQeGacT/+DRkvHBn3/SedUz6f8OlX9a3Ns\n8ov11DDyL7iMQf7a8VrLdAv3MzucxO2qqlt24nS/94hIDtAXqFTVpXvangxNQ0SyDTGvNk3v5apu\nrmGYCxPJyD2q+t6eti1Dy8g4WRn2KkTkrADWHVHs/gY4PqwPo9g3qur8HbQ7oTvZr/5ODm5YUA+4\nV2clc/GYV8iQBgFWa7WaP/BNWUydBqJHImJ4MW5VuL4tPtdBpZKE7aAPGUjQi3Ggja6L4zyd0o7a\nZ7DEuNCP+fhP6BccSQdsXKZQrK+xLJrAvRMoBpYBX6VzHEXEA8QOIM8oJkI2XsbSiSPpgk9Mpmox\n/2DJR9WaPL6pNolI0INReg9jAm2locLDo/pdeDalv1HV/Vr3SUQ6eDF+C/zUQYM+zEUR7LuBN/Yl\nJz8DiEgby/RN69x+UMGgvicFQoG2lJQtZtb818KJZPjxpB37zZ62McPOkwl8z7DX4BPzlrb4fnse\n/YNDySeGbX3BhpPeZsVRInKUqs7cTvOs7O2UJszBY+biS1siqhsh4rj5ImKqqlN7X2om7Q4Rua+E\n6DBqxEi/bYqq+d6MiGRZGI/fyMjg1pk/Dwbj6Cb56g8+xfyrYjiF23tg+zD+FMJjHEZn+pBLKTE+\nYg0zKOF6HY5bI8zf3GDwLgFMJ52DBTCAvNACKpotv/B9QkS6+DBmjqVT22Mp8ObiYxEVw15l6bNV\nJEYBN+xpG/dHRCQfaA+s12YUSTdN7x2FXQ7qfujwy3xbK1X06nYIXdoPCb392a+vFpFXVXX2LjI7\nw15ApnZhhl2OiHRRuOVmDgoeKO0wRAiKh+OlUM6jf1YQa0dLTt8sZYs3UddHAmrS2BdQodLIA389\nETzIZuBQn5iPB8T6u4icK/I/LQNVjanqNFX9ZnsOloiMFJFrROQKESlo4unvCU7pQ65Tf2kVYCj5\nBIn7oaoAACAASURBVLDaAAc31lhExnoxL76DUYyRTrSTAAMkj18wlLb4+ZA1fMmG6jB2czMoy6M4\n3nia3yPARqIJG3e/1nvyYz5wNN3aXSADvJ0lRFAsRkh7buPgkIFxlYgM3tM27k+ISE+vJ/ixaXiK\nAv420w3DU+L1BF4RkbZNaGugevGw/j/a5mBtxe/LZmCv43yW6b9ilxmfYa8g42Rl2B38+GA6kJdG\no2kMHXHRAdsreKuqKw344lWWxt1aky+qylusSLrogimUxKrr+Uc1+5fHTIzKtvjeP5keV5xGr5/1\nIfdJL8YqEenbFONFJD8g1tRsPF8cSZf7RtPxAS/GkoBYT4uI2eSrsPto15FA2qk/EaEdfocaOYa0\n+DGvPoHu/lC9ckAiwg/owWesZRVVm2lEPb0xVLXcgzF5IkUNHOJKTTCZDY6DPt+cPr9PiIjPxj3t\neAobrDBkiYej6erxYFyyJ2zbHxGRrqbpnTG478njzjrhcd+ZEx7JPuO4h/w9u409zTL9U0RSgZyN\n43fV8WeHOqTd2Sanm2kaVu/WtzzD3kRmuTDDLseAdh0IpK0ObYlBjnoTMaL5wOrG+oji/HgqJZ/N\np7zv4dolZCAymQ1Vm4mvi+GM98Fvbmf65adr71ABWayhislsiK6iKjaIth1+ziD/Vp2h8RRkf67r\nQq+x/FMR6bm9AHwRET/mh4fSeeg59N2mDB9Rmz8x5yfrCZcBv23J9dkFLFxERVJV/fXfoJPqsJZq\nL7A4fVOwMHp3JZT2BawLIaqxAcbWz9hsChHsK/7FiulhtbPG0dXMwsM8ynmZJWFFH25MFmI/IcdA\nNEfSF6fuTNDyYBTuZpv2WyzTe2Of7kflDOn3g20vUn5fDqOHXuitrC7pWly64CfA9mbho4ZY4S1V\n63NysxtWPyjbvMp23OTCXWB6hr2IzExWhl2OC/PnU16dbl+VJqgg7qOmYG+jqGpFDGfkJmKnvseq\nR99h5WPFRM6K4QxW1Y1xnOvLiV/+IovLfs83vMYylrIl4KBtzqe/v76Q41F0NfLwtaVGZBOocahE\nZJSIXCIiZ6bkB0b7MAfWdrAAgmJxBYOCLvqL1HGtRsqOoSJyqIhsV2G+ET7bTGLLFIobxFy9xypb\nasREG73eNu7iNVSlXdNbSzU+zE0pkdRmo6pLErjDP2Xty7/m69hlTNS/sWBBKbGL4+rcvDN9NgcR\n6Soihzd1FnM3U6Fgl2gk7c5lbIkncBpkgWbYVci5/Xsc02BGWEQY2Ou4kNcTunR7rWtiHvWJWQte\nj9V/jwtHy1m88pOk7cT36ySP/YHMTFaG3cGbq6h67DstZaj8L8nPVeV1lsdN5K2E6uYddZIK1P4s\n9alP0Id592g65v+IXuSIl3laxussl3QzAyLCcG0X/IA1I4APRKS7H/PfXsyeB5BHOXF3JZWmg/vR\nKDr4DBE2apSNRMjBSwFZtJMA+epPFhMZBny901enrl0neCz/U6bpy/N7s5yq8Eaf1xP4Z9KOXana\nyNO3HqmC0Ce8wOIvZmupbwwdg0lcJrG+ehVVZXGc7Za0ieE8+iFrzjhcuwRz6klivMXyqI37aEvO\nUVVXAhcAF4iIhDW5yzPmRKRLAOt5L8ZhHQjGyon5guJZEsX+6e4uldMYqmr7xHz6dZZdeaUOqSNx\nUaRhvqZYbZonmZFh53HV9fu86Ys8pLbvsAKE4ybv3FA6/9gPJ989YHDfk0OhQD4lZYv0u8VvR111\nb1fVzEzW95yMhEOG3UJNMLXx0QjaWyNpH4hg8xnrqjcSXRHDObw5GTvpMESuHkjew7/iQGPrEtk6\nreYRvuM+xlJ/2Qzgr7ogNoXiG4GnfJhLf0CPzsdTaG59uKVESZOdCIoP01pNFQVkUUIUF+UA8phH\neXgLiaNbQ+JBRI7xWP73jjj46kCX9kMQEWLxSqZ++3xsw6a5M5J27MjmpPCLSJ4BPwvgOQU0GcZ+\nGXi1Kct8PjHv9GNdeyo9g33IlVJivM+qcBHhuTGco1U11qKT3Y2ISLYPc8GxdOt0Ej0sn5g46vI1\nxfoPllQncEeo6h5T86+NiAT9mJ93InjA8RRm5eJlARX2J6xNJHAud1Rf2tM27i/4vKHJo4acf2iv\ngkMb7Ju14LXkohWfPJO0YzsMXE8l2Zzn9YSuAm2rqnOSdvR+Vf1qV9idYfeR0cnKsFchIu1M5BI/\n5gSFcAT7OeCd1pBMCIi1ZDQd++bho4AshpKPgXAr0zmbPgyRunHeVZrgBr6OJXD7Akf1IfeJm2Rk\ndv
1+p2sJz7KQE+jOCXTHIwauKrPYxLMsxEBw0M9jOCc3daapMbze0HeHHHjRkO5dRtXZ7roO//r0\n+upwtOwkVf2iJWM0BxE5JoB1AzBIoDRSU6bohVT5mX0GQ+T/hpB/7y9lWINA5X/pCucT1r4UVfvC\nPWBaWkTEC5wRwrocaGujU+M4D6nqglbq30PNMnkhNXGQH6lqi8sSfd8QkQl+X85bJx5xezAr+L8Z\n+LLNK/lo8h8ithMfrqpL9qCJGfYwGScrw15LqtBzEIjV169qbj9ejLuAm0bRgTb4WEgFVST5P4ZS\nToy/sYDz6c9w2mGKwUqt5G8sCJcRezKuzvUh8bx+Jr3POFK6Nuj/U13Lt5TxKzmwwb4PdDXrqCaJ\nG5tP+b8iap/bgvPoYJreNeec+JTPMBomLH635F2du+TdJ2w7fvXOjrEvknI48oHNOxNoD5AlnhkX\nc8BBB0oDPVpKNcrNTKtKqJNW6Pb7hogc48V4rRNBqzvZnpVU2RuJxBO4p+9OB35fwTQ91wlyd89u\nY2mT3dW/sXxppKjkWxw3ea6qvrOn7cuwZ8mIkWbY6xARy0R+5cP8lY2bB7gBsV6P4dy4M8HUAhe3\nwXfNTYykdvzQV7qBh5jD3YzhEDrxPIv4G65tqZFw0GoH9y4bfQxAUbex9MI5lHIM3dLuO4RO3MQ0\n/shY/3V8dZqIdFTVkuaeQwqvIaabzsEC8Fh+ETF2lDL+vUFEsnyY93gwLjIRsXGNgFhvx3CuU9X1\nzezO5yf9dfVhouh+cR8UkYFejHevYWhwYC2Zp3lalv0Yc/8jIgfuLcumewuOk3xQRP65fO3k803D\nU2A78YXAP1S1Yk/blmHfYL+4uWTYO0jJIbzZjazx59A32FNy2KIJPmbN2Z+xboKIDG+Oo5Xq7/aL\nGBiqH9x+qHRmpm7iM9YynY3EcGIOOiCB6wJFtWUbojivf8H6E47SLtn1Y7eqSeJpJAnXg4GDS0g8\nFGpWbDmVw4EPm3FJarNe1a0qrVgRaJfXq8HOVeumVtl27NOd7HufQkR8fswvh5I/8Ax6+9pJgEpN\n8BFrTv+MdUeKyLD6xay3RwL3o5ls6jeAvAYyIrMpxYM5rXXPYO/Ej3njcRR4B9bT0Rws+YzXAu9n\nrL0e+PmesW7vRVWLgHv3tB0Z9k0yEg4ZdifjQnjG3cCBwZ5SszqTK17OlD7WOLq18WPe1cz+8h20\nfV9y0+48kHa8z2rCJBMuep6qrlbVtWl0sd4pJlL8OsuTyVq7lusWSogkp1KSNgZpJpsYQI3CQhhb\ngJ2OyVJV13XtOyfPeioci1fV3s7ilZ+5FZVrqoE3d7b/fYxzuhLqexmDfO0kAEBO6u9kDJ3aejB+\n3ZzOkriPfsn65Hwtr7N9g4Z5jWWRKPbtrWb53s1xB9Mx7Yv1KDp4DIwTd7dBGTJ838nMZGXYbQQw\nLz2OgpAnjUj6cRRYn7D2xyLys2Zk0CVcVGwUDw2XxSPYuOiGBO6pqjq9sU5UNSkih02i6PWJFI3q\npTmJCuKUEXMSuL/5ho0PDNf23pHSflubdVrNW6zg5wxitVZRTswFpjTR7rS46jweiZZ3f/OTa6/u\n3vlgAv423rXFs8KRaHm57SSOVdV4c/tMxb4d5cM810RCEezPgFdaGqS/KwlhXT6BwpCRJiN0PN28\n0yj5KXB9U/tT1TUictIjfPdOoWaZ/WgTKiIcXUCFoehVqjpJRPoBvYANwHffx0LMAq7TSLlJZ+dq\nUWbIkGEHZJysDDskVTrm+ADWTw3IjmJ/6sKzqvWmBnaAidGhLf60QYK5eHFRD+ABmpS9pqqVIfHM\nnk7JqEPpXGefq8pEiqrjuBduz8Gq1ddG4EgR6buAiiHAZuALVbVFZPZfmf/+2xpo04823lKirKCS\nc+mHIDzInIiDXtfSLMnUg/0GEfnzinVfnQ5kAzOBj7enSt8YIhLwY/4nhOego+kaCmLJN2z8wTK2\n/DFVlHtvFbbMzSW96nkbfDi4zRZ/VdUvRKTTcip/uJzKPkDCi/RT5IGAWH8LYEkXgpGNRCWJu15E\nzlXVb1p6InsTDvrW1xRfXkh2A4HNr9iQsHGbW4syQ4YMOyCTXZhhu4hIyI/5aR6+wcfQLSuIxWxK\nI3MotZO4E1R1alP78op57xF0+eVPpF+D2JhluoUHmbM+qnbDFL/t23eoD/PjyzggOIya4tOVmuAV\nlsa+pfTbGM4hO+OgpBlnhIU8pDAa8GZhJV2wk7ibEzjXO6qvtHSM1iYg1pMDybvgSgYHaiveT9Vi\nfY5FmxK43VpDPqO18Yv1zAQKLjhVejV4CZyuJbzA4m/Cmmy0wPWOEJH+Xoyph9Ap9C1lnmPpxngK\nsFLyHNMp4TkWbdXPWtqys9mhLWZLsmubOVZ3D8bcn9I/awydxBDBVeUrNug/WFKZwB2Uij/KkCFD\nE8hIOGRoMQGx/jqYtuf9nMF1FKi/1VKeYN7mJG6XpqbWi0h3L8bCmxgZKJT/SVLF1eE+ZkXWUH2T\no+7DzbVRRMYFMJ82MTpl47FLiflM5I0YzhWqmracT6pdW6A9sF5Vqxo7zhA5zYf54sn08B1IOzNM\nko9Yk5hLWTiJHgR0DmD9GhguUBHB/gvw/M5KDrQGIpLtwSi5l7GBdIW5f68zqlZRdZGqvrEHzGuU\nlHDjr4NYt2fjMQrIYhzdGCB5VGqCO5gRriB+nqq+vbNjhMQz5VR6jkrgGkWEuVQOaHDMv3SF8zFr\nX4yp/bMWnE5aRKSNF+M2hUuSuNk+zE027p8d9IFdrUEmIgf6MV/zYnbpQtAuImwlcdfGcM5U1Xm7\ncuwMGb5vZJysDC1CRLI8GBsbe1Dfp7OqF7P5KlV9oal9GiJnWBgvHEonYyBtfaVE3U9YG43hvB/D\nOXdn3+pTsUcDgFxg8fZSrEWkRwDrSRv3qCBWIoztsZB/xXCurr8EmnJWim9iZLC71NUq/UBXO++w\ncrUHo9MP6RkYQJ6UE+dDVkdWUbU8NYvWqJO3KxGR4e3xT7xPDkmr//SuruRdVt7jqN60u21rjNTy\n5sSuZA06ie6hdvhZymb+zWraE3DWUJVw0Qfj6tzSgjEK/ZiLHubwwP3M5of0ZFC9bDuAjRrhNqZv\njquzM7Ujtzd+rg/zmxG0LziFHr4OBFhNFa+zPLqSymkxnGOBAuBMgRytWS5+rzXFQlPflQNT46ze\nW8oKZciwr5HRycrQUnpk40nmiS+Qbudg2mYtZ8uBQJOdLFf1DRGZ9hXFP5/BxoMctCSG81dgckuC\njVNtd1gHTEQ6ezFmHE9h3ni6mQGxfJWa4G1WnD6FkoNSWkG1g8LP6E8bt76DBTCEtua7rOp1B6No\nK34AupHFEG0bfJL5/b6l9PfAdTt7Ti2kMoxtuaqkCyCvIJ5wa+LO9hos5Dd9aTPkGoYGttrcjSxG\naHtuZpqmnPCdnsFK0SUPX8IjRkBVtyPPYQKNiGu1AAu5bgh
tu13CQN9WuZAe5HCdDgvcwYyDNhB5\n10SOHkMnIw+fdzabqoqJhEXkmNZSfE99V2anPhkyZNiFZCQc9iFEJEtEClJLKruDzWFsr91ISFMZ\nsYSDlja3U1Vdm1Dn5mpNToiqfYGqflnfwRKREUGx3vaKWeUVszIg1j9FZNBOnsc2vBi/OYzOOT+Q\nHmZAat4xcsTL+fT39iKnK3B+vSaFPclJWwj2a0o4ii7bHKxatnMavXwKl6aSBnY7qrpcYfUsNjXY\nV61JplLiAntVoLOJXH06vQL1ncJc8XEcBeLDPLkVhlldRswXV4f+5DEzzfUB+IaNaiKTWmG8OlgY\nl51ED399PTZTDE6ke1YIa8IDHOq/UAZ4fyg9uV1GZZ9P/45ejIkikvZlJ0OGDHsvGSdrH0BECoNi\nvW0hpUGsRR6Mcr9Yj4tIs7OsmoOqrjORBVMobrCvShN8TbGr8HJrjysiJ/gwvzyVXqfcy9isexiT\nfRLdz/BiTBeRI1rUN3LeOLo1SF0TEY6lIBjCurTerjUrqEy75LeRKD1JX42lowRR1AekddB2B1Hs\ny59hYeQr3UBSXVSV5bqFe5kVBp5S1VV7yrb6iIgRx80vaORyFZJtejAGtnQcVd1gYnz5H1bbR9OV\nqRTzbb33hJVayb9YEY3i3NnS8epj4+bm40+7rx0BcvEaWVI3+e9Q6Sw9yfEDZ7W2PRn2DqSGzMrS\n95DML3UvR0S6eDFmHktB3nEUmEHxUKpR3mD5Rd9RNkZExu7KYNkI9mX/YMnEmDqhw+gsfkwWUcEL\nLA4Dj7f2g1pEPF6Mf1zLsGA/abNt+0n0MLpqVvAp5r8iIgWq6qZiSw4DBgKlwAc7CjZ3UX8WDTLY\nAUhtr/+Uf30Jmx9dpZV0J5u1VJPApQshqknoWqoZRccG63GlGgUkCXVjslI30jOCWFcKtHfQOTGc\nP+0KuQBV/VJExr/C0gefZ9FIE8MBNtu4dzvo4609XktQVdcn5ub1RNp0peG7w3rCro3bKiVfotg/\n+5g104sI551Ej8BzLKKN+uhBNkWEo6upcpK4FzRF+qO5eDGXLWXzkOG0b7BvCRUU0nBZGmAUHbLX\nUDUeeL61bcqw5xCRAsvy32WIebarjtfrCayz7fg9ij7VGlnRGfY8GSdrL8eHeevhdM49VXptW3Zq\nJwEu00H+u/im3yqqzmAXzCZtRVVnisgh77Dyvn+y7FgBPBjr4jh36K654R/XiaBZ28HayjDyycaT\nHccZKyJlfsz3Q3g69KeNUULEWUO1GCKXuqqvNta5B2P2XMoOq6+rBfAtpbZN3SK5qlotIuf9gVmv\nhrB8XgxCeFhPGEXja6lmvBb4c2uV9VFV3mVVwoDnagfyp8rFfNSewMgTKMxqR4AlbO77H1af4hHz\npqQ6zc6s3BGqOgUYKyJtbBw/sHFvvXkrPPUOK665QgfXWU6r1iQfsiYWw3miVcZRLRKRQXPYdPFi\nKi50UX8R1UtXUzUTmEtNoPkuyQyNYN/3OsufGqB5oUCtiYtyjfEf1nANQxtrpw57Jokiw65BRApN\n0zurf49jcgf2nmAFfLlsLFtcMH3uS/dXRTaOEZELv4+iuPsbmezCvRyfmJvvYFRuR2lYG3ialvAS\niydVa/Ko3WGLiHgALxDZVV9+EblkNB3/fLkMSrsU+qDOqZxH+dVejIfOoV/bI+gsWx/Ia7SKPzI7\nEsE+WVU/b6T/47Lx/Os2Dg7m14qlWq1V3MusSBxnpKouqtfmxCDWm1cxxD+ANogIWzTBcyyMLaKi\nyI/V6XR6hwbQhgrifMia6AIqiuI4o2pnOVpi3NqfNr+9lmF1dKtKNcptTI/GcA5qreDmfRERyfZj\nft2X3N4n0j3QjgBL2cybrAhXk/xrTO1r97SNLUVExIf5lB/z3OMpDHQkaKyk0v6UtUkXyn5K/25j\npFOdNra63MiU6nLiJ6rql3vI9AytjNcT+OeAXsedPnzgGXXiNpN2jH99en04Fq8ctytmUzO0Hpns\nwu8BDhrIaUT9uma7pC/ctwtICVe2inhlKiB8AjWp5JXAm6q6AVi2gi2uqlI/ONhVZTVVlsDgQbQN\nHCld6hxQKNmcq32DL7P0TmqWEdOdw8ceMW65mal/GK0d6UrIv4wtkW8pExv3vDQOlgQwH7mUA/wD\n5X/Z/Lni5Sod4r+erzttIXHTqyw93UEHG8iWOM7TLvqEqlbW7stCfnEmfeo4WFAzM3mMdvN8wtqr\ngKuadyW/P6hqlYiMmU/55cuovNJF25rIogj2fcC7e9q+1kBVVUQuj+M8/w4rr7Qwuidx58VxHgPa\nPM/ijzxqBoenhHUrNM5LLI5GsGcAk/e0/RlaBxHxGmKeOrDXhAaJMR7Lz8BexwXmLnnvUiDjZO3j\nZJysvRwf5sIFlA8bSYcG++ZSlrRxvtoDZjUJEfEIXObHujaJ29mDURzDfljhcx/mf/Lx5Q6lXbCC\neGIWm+73iXk/cHslyc3TKMkaQ6c6TtQkilwbd1UQz+ixdGo4tQccRAeeYeFYETEaWxZLqvuQiLz2\nFRvOtzAKE7gLgZca0dYqBLoMIb/BDksMjtDOgQ9Y0zWsySN3dC2A/MJGArt7kWN5MQdvr4/9AVUN\nAw+mPt9LUrPAX6U+dRCRk//OgicNpFtIPckK4l4TeSGG88vM0tH3ipCIgd+XPgYvFGxnGIbVMKYh\nwz5Hxsnay4lg3/Uqy57rq21CObXiftZoFZ+zLhmn+QrpuwMRsfyYH3QlNPY0egW7kcU6wr3eYvk9\na6n2/Jg+1pHSdasTFdiicf7ArOtKia6K45zyHIsmLtQK3xg6+l3gKzZEZ7EpksA9LYT8xSX98ya1\nXVOfRkmVD7m3Cafi82A46bSmAAJYhoGkTxeri+3BCG8iltWBhpn4xURcG3d1E/rJ8D1GVT8XkQFA\nnzB2NrA0sZ1qBBn2WbaISLiicq03L6egwc7iTQtjthOfsQfsytDKZCQc9nJU9Y0qEg/fyJToq7o0\nOVGLeFrnR+9mZjRRkwG1S2urtYCfdCQ45jeMCA6UtmSLl4GSx+F0DvYkx1PLwQJqtJAuYkDIh3kH\n8G0Cd8AUSu5/gnnznmTedzPYeFcCd4CqLg5jv/oF68PpBp1KifqxPm/Ft/4VMRx7XSMxx1MpqUri\npo3/qk1q4f7v77EyXn9fRG0+Yk00hvNkK9ibYR9Ha1iqqrO2V+4pQ3pExCMiXVpT4kZEDBE53jCs\n+wwxfi8iI1rSn6q6rrp/njH3pajj1hXzL9u8ipXrpqjr2n9tkdEZ9goyge/7CCLSz4NxkYVREMf5\nzkWfVdWNe9quxgiJZ9bFDBw+XOqmqv9dF9KLHI6ShnWgVZUrmBRP4BaoanqVSGqKVvswlpxEjw7H\nU2hZYqCqLKSCR5kbieOMU9VpqWMFCALRnc2q84r5m04Eb/s1w4OhlIaRqvIxa523WVkUx+ndlLIn\nIpLnw/xmGPldT6
C7Lz9VNuY1loW3kHgpVWtx7/hCZsiwjyEiAcv03q2qlxqGZTpu0jQNz4dJO3pN\nS6RmRKSLZfknBv1tOvXqdki27STcZasnxRzX/jJpR08FuoKcYxhmW9e1ZwFvqGqsCf16PFbgPZ83\n+9AD+hyfFfTnsWHT/MSyNV/YjpM8X9V9a2dtzrB7yNQuzLDbEZEg0N6HMe02Du7Yud7L5Au6mI4E\nmCCFDdra6nIlk5I22l5Vt+xgnIIA5huCDOlBdrKUmGwhEYnj/FRVPxIRvwfjFoGrHDRLIGliPB/H\nuUVVy5p5ToYP8xFFLz6YDuTg9c5kU3gLiY1xnPHNuYGLSBsL41cWcomNm+vFXBbBvhd4JeNgZciw\nc4iI5bH8kzq2Gzji4MHn+rNDHUkkwyxc/rEzb9n7WxwncaCqrt2JfsVj+b87oPcJA4b2P9Xamozj\nujYTpz8aLS5duAK0d++Cw4xgoK23qOTbqrItq2zHSRzXFO07ETGAYz1W4FLDMNvbdnyK4yb/oqrN\nCh0QkX7UxI+uVdXFzT3PDDtHxsnKsNsQkbZ+zIcc9EwvhhvHCfYmV37OIHJrVQGap2W8yjJ+z6gG\nNfWmaDH/YMmMsCZHNWPcAfxPjPSrlEipx485qS9tDjyL3oGukkWpRnmf1YmplKyP44xoJMh9R2MV\nAqcBIWqyfj7bWzWnMmTYnxCR09tkd3vu5KPvyjLqZe/OnP+qvXjlZy8k7djFO9HvYUF/3oenH/fn\nUP1s51i8kjc/vo4fHH03OVkdt21fs2EmX878yxbHSXTf0ctiSxGRfh4r8ArIwJysTomqcIlH1V2a\ntKPn7s9yMLuLjIRDht2CiGT5MKeNpmPhqfT05oqPak3yPqv4A7O4VQ9ia6mQXuSwhbj7VxY4P9F+\nnizxoKrMpYwXWByN4/yqkTFyDOQKP+alLpotyOwo9r2qOhFYVO/wszoSHFq70HA7CfBTBnjj6nT+\nhk3XAbc29zxVdQ3w5+a221tILZ2OBg4BosC7qQSA5vQRBE4G2gHzgAZ1JzNk2N14PcFLDuhzQgMH\nC2BAr2OthSs+/jHQbCcLGFPY+SBPfQcLwO/LoW1uIZFoWR0nq7DzSDq3H2QVFc85H3hsJ8ZsEiLS\nyTS9Uw4ceEab/j2ONgzDCriuw9LVk4Z+M//lr0VkULrvt4h0oEY+J48a8d2vVbVBrGiG1iHjZGVo\nMQIX9yO3ywX09269GWWJh7PpyxZN8HcWcJL2oIgw77EqbOO+/S2lxiw2ndZBA7EqElYCtzSOc2k6\nsUURaevDnH4AeV0mUBjIxctCKo57ixWHe8W8NaFOnXT/ENYVJ1AYSpcReDyFvjmUXspOOFl7K7XK\nCw2nRnPsnfozdSLSyWMFPrAsX9/CziM98UTYXls860GP5f+b7cSvacqMnCXGhR6Mx3qR47YnYC2k\nwgmT3CgiJ+6JJYpUiaLxQGdgORmHb79FxMgL+NLXEQ34cnFdxy+paYdmdh2JJaqcxnYm7Rim6Wuw\nvXvng0Iby5aMZxc6Wabh+UWvboeEBvY6dptnaRgm/XuOk81V6wLL1nxxHbDtpVVEDMvyP2iI9fOA\nP9eKJapMy/Rh2zHHsnxPOE7ihoyz1fpknKwMLSaIdcmxFATTve0dSwEPMDu2hG/XAisj2H8GPkyJ\nMrYrIjwA2ALMa+wG6MO8fwwdC2o7cR0JyhDND97C1LtF5B1VXV6rSbu2jRThbYsfG00vTrMPohEp\nNgAAIABJREFUIiI9/ZgfBLG6DiHfqiBuL6TiL14xf5dQ54+pY8Rj+T/r33N8v+EDT7dqwkDwxhNh\nPvn63p9tqd5QAty1g3GOD2E9fiMjgl2lRutLVfmC9aFXWDpZRHrXF19tgu0DgYOBKuDjlEZWU9tO\n8GK83IGA1ZmQuZJKrSJZLiI/VNU5zbFjV5FyficEsa4S6OKg38ZwHlbVb/e0bd83bCfx1fqNc4d3\n7TisgXLzhk3z8Vj+ZYlkZGcc8HfWFs/6UzxRjc9bV+NuU/kyknaU/LyeDRrFE2FVdXbpUqFhes7p\nU3hkQw8P6FN4hHfF2q/OppaTZZm+3wf9eZfGE9W+QX1Ook/3I7BML1Xhjea0756/alP50kEicmwm\nBKJ1yUg4ZGgxCtmNqdJn48GFaFiT/cKanKCqH2x1plS1VFUnq+rcxhwsEfE5uOeeQk9vfScuX/wc\nThfDg3FJ7e0OOmsxFWnfPhdTgRdjyc6c555CRPJF5DARGSq1LoKI+HyYX/6Qnn3u55Csn8oA/y9l\nWNYfGOPPxvM7Q+S81KHjfN7swuEDz9jqYAHg84Y4fOSVIeAGEUl7s95KEOvu8+m/zcFKjc+R0lUG\nkhcAzm/G+bQPivVlEGvmCNo/3pfc5zwYGz1i/F8T2x/ow3zrlwxr+3sZnXOFDA7dx9isn9K/wIsx\nUaReXZoWIDUcGxLPByHxLA+J5wsROUMkzbpU3XamH/PN9vhfP4PeJ/+cwSOOp/CCAObXXjH3+fJA\nexuOk3hsyeqJdtnmlXW2xxPVTJ/7UjhpR7f7EtEYqlokyNMff3VPeHPluq3b2LBpAf+d9qDbqd0g\n6i9Ruq7DopWfhJN27MWdPJ2mGmdZZvpi96bpAXTbJIqIhFTda7NDHYKD+57MgF7jscyae3Z2qAPj\nRl9r+LzZY4Bxu9Tm/ZDMTFaGFqPo1PmUFxSS3eDBM58KNZFZLei+jYWhJsL7uooiwgSwGEsnepND\nAVleD0bf2g1iOH/6gDU/HKUdg+3lf8KfYU3yGsvDEex7WmDPbkNEQgHMpzwYZ3QgEKsiaSVwykXk\nMlX9EDi9G6GcCVJYpzRHvvi5WA8IPsp3fxaRNwU5qme3QxoE7gLkZnfG782WcLRsIJB2BkhELIHh\nI2ifbjdj6RRaxpZTgcebcE6GH3PiEXTpezq9PVbqAVWiEe5j9r2mSIWj+tL2+ghg3nYKPf0DapU5\nEhHG0EkWaoVvCiVXArftyJYm2Co+zIeCWJecQo9gIVmyhupeH7FmxGYS54vIaVqrAHidtvDzTgQn\n/JYRQY/U/HoG0dY8RDsFf8f0u0Tkv5kZrdZDVVeKGOd++OVdLxd2OVg6tRsQqKwusZes+jyp6vwV\n2O7f1Pawnfi1ldXFxe9/cftvfJ6Q4bhJw3Xt8qQde2TV+qm3d2o/MNSz62gMwyIcLWf6dy9G44mq\n2cB/W+8MG6KqH69eP+OCvNzCBs/xNeu/cYDPam0aGQq2TxZvWsDhI69o0JdhWAzodWzg20X/+hnw\n6a6zev+jxU6WiBxPTTCwCfxNVe+rt/8o4B1gRWrTm6q6U28V+zqpWYihQBtg4d6sc9Ucojj3/5tV\nPxiu7YOdahWyLtMYb7E8GsG+uwXdVyRwjJuZygjaM5i2bCbB31hAb3IJYCYSOHXigVR1pkeMX9/G\n9PuP1q5WL3I8G4g4n7A2nsR9Bni9BfbsFkRE/JgfDyF/xHn092WJx6eqzKc
89ATz3hKRkwJYJ4yl\nU9qlzwG0QSHfMDxrXTf5iuPEXWq+o3VQVRzXFrZfk1IBdVBJd8NwasT1d6gTlmJCG3yFZ9GnTjBx\nRwlyuR4QfJjv7hGRf2wvdsZGx4+iY9qZpLF08s9k0zU+Ma9RVEGmpH7n7+1EvMm4AOYlt3Nw6GuK\n+QvzCZPEQUMBrJNiODcCaf+2/ZjXn0WfbQ7WVtpJgAla6P2QNb8EftZMezJsB1X3HRHpsapo6kVF\nJXMOdJzkesdNPtPSLLvU8tk9IvKniJPoS813ZWkq5OHLqXOee3HK7Gd6WJbXse2EGob5vO3Er9vV\ny262E7t/wfIPz+7cfpDVsd2Abds3lS9l3tJ/x20nXruihavqiIjgsRpWnADw+3JExGizK23eH2mR\nkyU1RX4foyb4tAiYISLvqurCeodOUtVTWjLWvo6IHOPHfMaL2S4Xr11MxBcU64MozkWqunlP29cS\nVHWmJcbVv2P6E6O1Iz3J8a+jOv4VG9RFb1HVHSqib4c8E8O8hmH0qVUL+xjtxkPMYQWVho3+rX6j\npLqPi8jH/2XdlZMxB9u4q2M4f1HVmS2wJS1SU5cwC6hsbGZjJzg6G8/Qyxjk3xrALyIMJp8LdUDg\nBRY/6KJzbNLfx10UxODgwT9pO2PeS+cuXzM5PnzgmUGz3vLCxvIl2E68Cqj/nd2Gqjoh8Xw5jZIj\nj6BLg/2TWF8dxn61KSflxfjB4XTOSjer1o82COQDBcCa7fWj2ymrlIMnZwyd+dQsxvSGjndEj4/G\ntkQs03uX4ybvbWrwcxDrulPoGXyd5WwkwhUMpqfkEFGbSRSZ77Dy9yLyptYrKg4QxSnYTJwZupF+\ntCG3Vkms3uRaJjK0KTZkaB4pEeP7dnjgzvWdAOZv/VlECizL/6zXCnTu2nGYRuNbdMPGeYaiJcAO\nxUhbgSWu6/z70yn3n902t7u2bdNDyjavpGLLGttxk+eo6txax84IR8sl6G/DxvIldMzv36CzdRtm\nx5J2dNJusHu/oqUzWaOAZVvFGEXkVeCHNLxh79f6VyIy1of57uUMCg4lHxEhojavsfSk6WycJCIj\nm6IYvjdjq/usiHw4heKfzWTjgATuCqdGlb5F9fgs5NKD6WD3kdw6f6s+MTlb+/JHZsVs0ouBak3J\noV0W/yIiXfyYfzSRMwxEFI37xVxlIPkg8STuyzbuIzuasRSRfoZhXWganq62E5+j6r7gxTj7SLqm\nzZAcQXv+xsIDbNx7J7L+jPFakFX/uDmU0ja7K/17jpPla7/0bK5ct3jSjEcHHDL8koA/lYVVtnkl\nk6Y/GnGcxPU7euuOYP/2FZZ+lq/+wAHkISIk1eU9VtmrqSoDXmviZXO1ZmYs7T1B6/yXHgv5YArF\nZ5xEjwazWVMpph1BvghUc/Tom8jLrRG93VK1PjhxxiM3hyNl2cBNTbS1n4UhC6ngLkbjS81KBcXi\nBLqDYrzP6geBE7c2kJqanX/2YJhTKcFAeI5FjNWO/Ji+WGKwiSgurG+iDRn2QkTEskzfl0P6ntxt\ncN+Tza0hepFoBR9OvuuGcLRsPfD0rrTBMn2/zw51OPmoUb+kbPNKCUdLyc/twaqiqclNFcvPB97d\neqyqxi3Ld2fSjt35zdyXfcce+hu8nv+tOqzfOI+1xbNtVfeZXWnz/kiLxEhF5Axggqpemvr5PGC0\nqv5frWOOBN4C1lEz23V9uunb77MYaUg8k39M30MPk7pF1VWV25heVUT4PFV9t5Hm+zVZ4vn3ufQ7\naWwjscyX6edNUohvbUSkkxdjzji65U+g0FpJJX9nIePpxgjaE8dhEutj0ympTuCOVtUVafoQy/Te\ngxi/6Nv9KCs71MFTUroosq54NpbrTjuT3kePk24NxlZVruSLWBynjx/z/VF0HPhj+nj9qcW8JbqZ\nx8xFjBl1FV06DGHukveYs+itRyzTm+W69rm52V3jSTsi0dgW23Hta13XfqGJ53y8D/PZbDyhfPzu\naqq8AjOjOGerapOcBhGZ0JHA639gTHb92ayFWs6jzF0Tw+mxvdkmERnsxZh2JYODQ1IvLW5NpiPv\nspK4CMcfczfZoQ512kVjm3nrk1/FHDfZpSlitCHxfNaLnHF9yOUUaZhBFlWbX/Bl0kHztmZGBsR6\ntCuhi37B0GB2avaqWpM8zXzaE+Bs+nAL06pLiZ2lqh805Zpl2PsQkdPa5nZ/7uSj7mywXL+pfCmf\nfH1/se3Euu6qJUMRyTINz8YfHnNfICvYrs4+247z2odXx2wnfoCqrqzVRgzD81tB7jBNy+pTeCSh\nYD5FJd/ZJaULo46bPFlVv9gV9n5f2R1ipE3x0GYBBaoaEZETgLeBfukOFJHba/04MSU0uU8jIkED\nGT2aDun2MU67Zr/B8p9Q660jw/9w0E2biaed+QhrcusfYHQ3m4UX49bD6Nz2LOljxdXh7yzkGobS\nu9aSZm9y/V005H2PVS8Ch6bp5my/L/fqE4/43bbZpQE9xwdLK5bzwZd3HTJNS8Lj6NagyO1yKhGo\nADbEcI6aQcnrUyke39NsTyUJwoZy0LCL6dJhCACxeKWNuuWJZOQaEbmhfMuq4dRcs+nNmUFV1Q9F\npGscZ0wpsXxq4gqXNeOyAXyyhcSqV1ja/0zt4/WkZgDWa5inWRBJ4Ny4o+U8VZ0nIif+hfmv5ODN\n6qxBXUWVT8BzCj2M/+Y4DRwsgIC/DR3zByTXb5p7AvDyjgyNYD+yisqjDqZD2vivgFhYajgOThYQ\nFpF8D8Yl/8dQf3at5cEs8fBzHcQNfM1SNkerSX4CfLSj8ZuKiIwKYN2ZwDkKwIs5KYp9q6bqd2Zo\nfSzTd1yvboekjYdsl9cHwzBycOjGDpa9W8DY3OwuiaxguwYBVpblo6DzCHfluikTgG1F51Pfqz+I\nyMOOmzhv4YqPjzUNT8J2Yl8A/9BMMfIdkooxP6o5bVrqZBVREz+xlQJqZqy2UfsXp6ofiMgTItJW\nVcvrd6aqt7fQnr0RSwCzEbUMLyYGkl7UKQMxnL9/wrozj9FuIW+9IOL/ss71Yvw7onZid9ulcN5x\nFHgAZrKRXuTUcbC2cgxdjXdZOVxEutdfOvV4greOGnJeyF9PRLFdXm/6Fh7JqtUT+VyL3KPoYmyd\n9anUBM+wIJzAvSP1lryZmtpnizw9D+4/rNNw2uX13pZWnkxGWbp6UlJR2+fNmuH1hHJddabYduyB\nnVmiTo35dXPb1W4vIkd/RfFrk9kwtr+2sStJ6DrChove4Ki+0sR+JolIt01ED91EtDOQ1w7//dl4\nsz3buat5PAED2K5cRS3ei+HYCyj3Hp4mFm2dVqNoAigVETGQh3uS7c+RhnImQfHQW3N1HuX/BC5p\nrRkOETnBh/nG6fQKjKKjCDCdjeNfZ9mhInK2qv67NcbJUBdFbcdNNrLsrbiuY9D0ZBBEZLDH8t/q\nuu4JoBiG9VnSjv5eVWc32mS7UT
prlnMCDIkn2Pprseb+vp\nFQhZspfNnfJwTK/wfu3atu37SDuUu/xVTffc3ZlYf84Q0a29e6X95czJDzvbLrpu2PmmN6dow9sC\nSTJgXszMoiBIazXd/UxnTlAS0QAAlxGJIczGVgBft/Zcs7DoDCf1dGFPY4ksC4v2EJEiS7bFQY6o\ncUMHnOcMcvTC4apDZnrmNx7D8P1ON3zt3NI76CdNlhzbLj7zRbuqHF1tJD1zobk3c8Fin69p3kmZ\nRA+gKM4tE4dfPy4pbhyKDu9CXvFmGIYPvcL7weWu1rIL172h6Z47enpcvyATuvIFLAjSg1Hh/Z4+\nc/LDjtY5ZPWNZfhm1ZNu3fCOZOZDPR3rTw0ickiibXtS/Li+owZdqjrs4fD6GrEva5GekbOkjpnV\nIf3PsfVPPEMSBQUFpdt4Z8Znbk33XMdsWtt+FicdS2RZWPwEICIJwCWK7LwdQDSzuVvT3X/tip+P\nJCovDEo5894xQ65slyKgaW58/N0dPtPUo46zFdad2AnABAAxAA4xc0Y3+znfbgv78Nxpv3c67N87\nxVTV5mLxumdduuEddaoIFyKSZMn2lU0NnTak/7ygIEcvlFUeaHaXN7XftnYCJ6JRBLoIRCqzuQrA\nEstn6XuaLUzUv5psXimJiqkbXkkU5EXM5tAJI67r3y9h6lHfGVW1efhu7R+aDFOLZuYubydaWHQF\nS2RZWFgAABTZ8e7otCuuGdh3VsD2jxbd5vJpTYN60qGciKZLku09VXaGBTujjeq6ApnZPKDp7kuZ\nOber/Umi8hiInuyXMJVCg+LU8uqDrqKyXWSY+lXM5lc9FXdP4HdOP1uRHbcRCTGGoW3XDe8/WkQm\nEamyZP9cEMSZ/ROnqZJkE3OLNjS43LVluuGZyczFxxniROPrK4rK3aIgnwXAq+nu/zGbbzFz3ckc\nt7v4Hfaj0XyyM0lVgjdfdvbLToHan91avP7PDYcrM25l5g9+6Dgtfl6csE+WhYXFTwPd8O4oq9x/\n8cC+s9oVw21oOgzD8JkAerLsyQhRVBZNG3ObIz5mJIgIpqkjI2fJiN0H5m8iotSufqHrhu9ZIvog\nM3/1taIoJ+i6dx/A/+Nu1FI82fhXoxb5f9ohS/aXoyNTZ80Yf7ddFJo/hoenXhi859BX9v1Zi5YQ\n0dCT5aVFRLNEUfl6YPIsJTF2rKwZHhzKXfmH0or0+4lo/MkWeN2Bmd0A8oHmw1VBjig9kMACgPCQ\nPurhyozYgI0WFj8wJ2LhYGFhcZrAbL5TVLYT5dVH57abpoGtez9wE9Eb3IXiuMdDluy/HznoEluf\n3qOO+E0JgoQh/eeJsVFDgwC6oTv9MnOeaerPaJr7ZmbzpVNRYB0PIgozTf1Xk0fdfERg+a9jeOqF\nkiw7EgFMPUlj20RB/mL2hPucY4deLUdHpiI+ejhmTrjbkdb/nGhZsv/vBPoW/DlsJ5uc+sZSxTAC\np8lVVGd6AWT/AHFYWBwXS2RZWPwMYOZqw9QuWbr+edfGXf/xFpRux6G8lViw8vHGw1UZ23XD91hP\njmeY2tyUPpMDfr70T5ruUGXnVT053ukCEQ0WReUdWbYrG3a8juzC9TBMvXU7kmLH2QBM7kRfkUQ0\nhYiGdsHz7JKIsGS5d1Rau4ah/c+VmM3JRNSns/PxxzFGkZ2LCeQDSFMV504iOq8rfXQFZs4GsDsj\nZ0m7k6HF5XtQ21CiATjuKVALix8Ca7vQwuJnAjN/R0Sp2YXrbiso3TaV2az1aa63ACw6loljNwcT\nAtWHAwChebHjh1jxOKUQBPFKSVTfGpwyV42LHia4vfU4mLccmXkrMXvSA5Cl5ko+Xl+DBqDDpG0i\nCpIl+2uiIF8S7Iz2eLwNkmFqFUR0MzMvO8ZzI4mE16IjBtgDtUuSiiBntLe2vjAZQFFn5kRE0yRR\n+XZU2qX2fn2mkCjKKCzbOXLz7nc+FgXpAcPUX+1MP11F091X7Tn4xeaq2tzgAUkz7KIoI79kiy8z\nf7XPMHwXWnYMFqcKlsiysPgZ4c+3eeJkjyOK6tr8kq2zU5NntlthyS3a4NZ0zymVqB4I/6nONAAM\nYP+JCFEi6i0K8n/OmfY7e+uSLklxY7Fux+vYfWA+xg69Gl5fE/JLtxE6cJUnIkGWbMv6xIwcMW74\nNSqB1KyCtSg+vMdZWZP1LRHdB+CfbfO5iChEFJWV/RKmOhuaAlZAg2nqaHJVKQBKOzknkiXbO1NG\n3+JIjB3Tak7jEB6S6Ph65WMvEtEHJyOZnpnziGhQQem2m0sr9l0DQDEM30LD1P7RUZFwC4sfA2u7\n0MLiJwg1c6YiOz61qcFrJEn9KxG1d8c8SWi666nt+z5yV9Z8byTPzMgt2sj5pdt8Juv//qFi6SpE\nRKIo3yOKSrnDFrHObgvbIIlqmShIv+lunwKJNybHT6S2NfOIBIwafAmyC9ehpq4ASzc810QQ/nOM\nU55n2tTQIVPG3GKrayjBV8sfRnVdPvolTMHwgRdJDnvES7Jk+46IlKPGAV0bGzVEHp12Bcoq96O2\nvv1CVVbBWgb4gH87rjOMkES1V0Lv0e0aQoJiEBuVZgC4uJN9dRlmrjFN4wWvr3G419c4SDd891sC\ny+JUw1rJsrD4ieH3afpcUYJmDel3jtNpj6SyqowJh/JW3ioI4q9N0/jwZMfAzBuIhF8tXvent8ND\nkzg0KFYqrz5kuD21tYbhO5+ZK052DN1FFJWnHLbwB2aMu8sZHpoIAKiqzQ1aueXvLwuCNJfZ6PKX\nuSipw6MjB9gCtQU5omCaJhau+X0DEb1gGL5nO+pHkmxXDUye7TR0L1ZvfRlTx9yKuOhhR9oH9ztL\nWLHpxTPKqzOfAfBIy3VZdsxNih3nVGQ7xg+7Fks3vIDhAy9CQuxo6LoXmfkrcSB3WaNh+K7twrR6\nOewRRkfpYCHOGLUYiASatzgBulGRHTcDHMzMGzXd/X/MvCPQs0TkADAYgBtAxsk6aWlhcbKxRJaF\nxU8MgcR7w4L7zJk75RGHKDYvaCTEjlb6J07HojVPv0VEG5k572THwWzOJ6KFlTVZ8yprsmIAHAKw\nqjtmm0SUJonqrYIg9TMMLcMwfa+djDJARBQmCNLDc6c8anPaI45cjwzri7mTH1G+WfXkJcziuYrs\n+EzT3Td2NvfHNLSC+sYyDUC7wtEeXwMM0+czTT3heFtrRIJNklTKKdqImMiBRwksABAFCROGX2tf\nsPKJ24nody0nRpnNJk1vLoqckjAZQY5e2J/9LXYd+ByCIMHrazRNUxvHzAc7Mx8/B+oaSlRN90KW\n1HaNJRX7PAD2EVGEJKqboyNT49L6ne2wqSEoKd+bsPfQggsEQbrdNPV3vp8fyZKo/lkU5NvstnBd\n1z2iYfqqiYQ7mc2vuxCbhcUpgbVdaGHxE4KISBCkB8YNu+aIwGohPKQPBiROF0RRuf2HioeZvcz8\nBTP/m5lXdEdgSZL6e1mybxu
UMve28cN/NW9g39l3SaK6WxSVk1H/7+zoiFSttcBqISSoNyLDUzBl\n9G9sEaFJF8uS7e+d7dQwtbcO5a3Q3QEKHmdkfWeIgrygM7lLmuZakle8ubGmvgAxkYMD3hMSFAtR\nVAQAR7yiNN393oHcZY0tL390ZCpmjL8bV5zzCkYMvBCiIK/posACMxcRCWt2H/hca7vQlFu0kZtc\nlY0AlsiS7W99+0xOnD3xAUdc9DBEhCZh6IDzhHnTnnIIJLxKRL1bnpMl23uRYX1vu3D2846Lz/xL\nyGVnv+ycMf7uBEW2f0QkXNCV+CwsTgUskWVh8dNCNQytV6/wlICNMb0GqZKojAnYeApCROcosuP+\ni2a/YB+ddpmU0mcyxg69Srlg1p/tkqg8S0STenhIm6o4O/xcVCQHiIBp4+50mKZxAxGFd3Rva5j5\noMnm3xatfqopv2QLfJoLdQ2l2Lz7Hd/+nMVVmu6+p5PxfVRRk+VuclfD7akJeIOue6EbXhlAa0X3\nnctTk7Fu+2ueFqFnsonc4k3Ymv6BS9NdD3Zy/KPQdPc1h/JWFXy37g+N2YXrkV+yFau3vuzasOut\nOt3wngNANUz98pGDfqG03VYMDY5DcvxEIhJuAAAiGgII58/+//buPLyq8s4D+Pd3zrlrQiAkQMK+\n7woCIsUNUSy4b+3UaX202+horbW2VWv7VGdqbWvHunaemW4ztm5d1GJxQ5SissgmCgQCYQuEhCV7\nbu69Z/nNH7kwAW4ghJzcJHw/z5PHnHPee84v3oebb97znvedcU80O5qP1D4U9pmAC6Z9IxKwwk+e\nxFQVRJ0CbxcSdS9JEbFjjVWh5mv8HdIQO6ie57bq6bHOIBiIfn/KuM9nRcI9j9ifHc3HmWOuCa/b\n9PJ30b6Dq5eX7VtvuK6N5os7A4DtxFFxcBOmn/klREI5yM0ZlDhQvW06gLdac2LHSTwgIquXffy7\nHzhOcrxhGDEFnnXd5E9Vtbw151DVmIhcWL5/w+L9lVv6Thx1BQKBI2dk2Fr6vppmcLnr2lXNXueK\nyOzdFWuf2bl31eezIr0TiWRdUFV3uG7y66q6KjVY/hrLDM1W9eKuZ/8JwLLjjYdS1f0iMnF/5dYb\naur2flEg4aQTe03V+72qVonIEMsMuZFwr7Svz88dHt61d+Xk1Oa1IwefH7DS3Hos7DMBhmHmAxgN\n4KR63IgyiT1ZRN2IqnqGGXhhY8kbztHHHCeBjSVvxGyn8b8yUVtbeJ47oV/+2LTHCvLHGYC0a6+c\nqm4CsHzl+ueSze9seurho0/+gP59z0BWJA8A4Hq2ADjm//MJzv9yItkwxfXssO0kejtO4lutDVjN\nzlHkenZ/z3MWvvnBw/b+yhKoKmy7EUUlb+nqDS822HbszjSvq0/ajTd7nl1Y11BxSdKOnZm0Y+NV\n9UMRGWGZoe15vYb9ZvLY626dMOryOyPhXm8HrPDC1LqBx6snrqp/TCTr58WTdRd5nvuYqh4KeAcd\nN2HFk3VpX1tZsxOu61wfDGRtBOSMYCCa9g9/EYFlhl0Ax62FqLNhTxZRN+M48QeKd7w7T1Xzxo+c\nZ0XDudhXWYxV65+P2U7jAgAfZLrG1hIxahvjVbmHbh81F4tXQUTS3zM7BbbTeP323cve2l2+dsLw\nQedGVRU7yz5CTnYhLpx2BwCgtr4ctfXlBoAP2/v6rZHqmZpbXbfnzoVLf3a/5zm5qp5hBcLVphHY\nJ5BbReQJVS1O89pqAKsObYuIYZmhRVPGf75g7PA5h/7wNs4cfVXW4pVPzSw/UPQkgK+3sc76YCD6\n2oYtC66ZOuELR/y+qY8dwM6ylbj64p+Z1bWl4z5c++vhxTsWx88cc0346LuCdQ0ViCdqDACb2lIH\nUaZIZ3kytjWrWRNR64hIf8sMPeR57pc8dUIBK1zmuMmfq3pPt2XweaaYhvXAwIKzfnDh2Xce8YtX\nVfH20p/GKg4UfVtV271nLjX2Z6Yh5lPhUK+JMybfEhjYbxIAoLZ+LxYtf6wh1lj1b66X/J2I8dWA\nFblA1a2ynfj/AFjUkVMOiEjIssLvZkfyJk0YeVlWJJyL8gMb7U3bFjquZ3/N89znT/D6eTnZhS9d\nPfunPY4ON43xary88J6469kFbZ1UVEQKLTO0anD/aXljh80JhUM52Lt/Az7Z/CrGj5iHcSMuBQAc\nqNqGtz78iXfmmGt04sjLzUO12E4c7yx7NFZVs+sJ24m36/JPRKeiNbmFIYuoG0uFBVNVT+q2Vmch\nIjmWGVo7dMCMgWeOuTqYHc1HXUMF1hb9JbG7Yl2x48TPUdVGH69vWVb4F+q5/9Kzx4Ck5zmoa6gw\nIPLvrptcaRqB1wYXTjMGFEyKxhO1WlTydkPSrl9mO/Er2nPB7eOxzODD/fLH3T37nLsjzZcyqq7d\njQVLHmx03eSY40xuChH54cRRVzw4Zfzn0w4f+duie2tq6vfOVdXlba1RRPIMw/qWaVi3A9K7X/5Y\njBv+WRQetYbi3xbdV18X21+bHc3rMbT/OT0SyXp72+6ljkL/6jjxW9p9+SeiU9Ca3MLbhUTdWKpH\npUsGLABQ1VoROXvHnuWPbNv94U2AmALYEPmN6yZ/4GfASl3fEZF/MwzLq63f+xVP3YhlBouTduyg\nIdZrs2fck90sKMiYYZdkL1z6s/MOVJW8Ew71CKlqImk3PAvgeVVtcT3CthIR0zSDd0ybcGPk6LUi\ne+UMxIhB5xlbdy25FcdfSqm+MV6TBHDMZKmqioTdYAGoP5U6VfUggB+KSOnwgef+8rypt0bTtYuE\ne7k19WV31NaXxz8pnv8ZQBsBvHKy00sQdRYMWUTUqalqJYBbReQOANkA6jqqR0NE8iwztHpQ4dSC\niSMvC0Ujeag4uGniqvUv/KchhhzdE1NVsxM1dWWRof3POX/owBlw3QQ2b393yoGqkgdEZIaqVrRz\niTlQjfTKGZD2YN/eo0M79qyYdIJzvLyjbMVPpk28EaFg9hEHyvZ/CtdNVgLY0D7lYm3Z/vXqqQdD\njuw4c5wEDlSVhAB8nJos9812uiZRxvDpQiLqElTVUdXqjrxlZJmhHw8bOLPw/Km3hXJ7DkYomIXB\nhVNxxayHgrYbDzRfm9HzHCz+6CnMnPxVnDf1VgzsNwlD+k/HpefelzVm2CUDAlbkDz6U2KDqaWM8\n/XCpuoYK1/OcYxcqbEZVdwrkv9/64CcNB6t3AGh6mnLX3tVYsvKZRtuJ/2s7jjFb5brJkvVb/n5E\n76qqYvXGl5Iixvt+r0YgImdbVujpUDDrBRHjmyKSfn4JonbAMVlERGmIiGEYVv01F/88ku7pxk+K\n5yPWWIkZk24BAOwqW4WNJW9i7vnH3pmznQT+/OYdccdNjlTVPe1ZZzAQeW700NmfP/rpvaTdgFcW\nfjeWsOvPBxAKWJG7DDFHe+put53Gp9G0xJGmflYxDOvbhpj3m2Yg7Lq2KWJus53Yt1R1
YXvWKyKD\nLDP0fk52Ye+Rg8/v4amLLTsW1zXEK3c6TnxW6tZiuxMRM2CFnzONwJVjhl0SioR7mXv2fdJQVvGJ\n53r2Faq6xI/rUvfFMVlERC1IzdY+DUASwHJVTRzVJKzqBdMFLADomV2IA5Ulh7dr6veiT++RadsG\nrBBysgsTlTU7RwJo15BlO/Hvbt7+zsWOm+w9bvhnA9FILsoPFGHV+ucaXM9+1jJDnzPN4Dcnjro8\n3LvnEKO6ds/k9Vv/frljxytDgex9njrLADzhuvZ/iMgTjpsYBCChqmXtWechqloqIqMqa3Zcs7ao\n4kqFOo4T/yuAN/3spTQM67s9ewy48tKZ90UPTXg6euhFWWX71uO9Fb/8u4gMaTa/F1G7YMgiotOK\niAQDVvhJ0wjc3CtnYMJxE1LfcEBMM/gD100+2axpo2FYtVW1pbm5OYOOOc/+yi3ewert7r7KLYE+\nuSMRsMLYX7k17TU99dDQeDAAYH97/zyqWiYik0t2vf+DrTuX3OSpk2WZ4a22E/sJgLJIqOerV170\ncDQcygEAFPaZICMGnx96Y8lDhUP6Ty/01Bm/advCmw3D/LKqvgRge3vXmKZmG8CfU1++ExHTMkPf\nmTHplujRM8r37zsRA/pNMkvL19wM4PGOqIdOHwxZRHRaCViRZ/N6Dbvq/Gm3hyOhnDAAVNfuwaLl\nv3jEMoOu4yafAZqezDTN4NOrN7z4ndnnfPuIp/fqGvaheMd7CcdNPvrO0ke/7qmT73muGIZl1Nbv\nNXKyC4+45s49K+B5bqmqbvTjZ0rNGv+N1NdhwUDWm5PGXns4YP3//ggmjb0OxTsW4dJz7w8MGzAj\n8PqSh34vIv842Rnou4g8AFm9ew5Je3BAv0nR8gNFM8GQRe2MA9+J6LQhIqMUevXsc+6ORJoFj145\nAzB7xrejEPmxiBxetNDz7If3V25ZveAfP6rfvnsZ9h0sxieb/+b+ffEPY56696h6P3Lc+ADPc/IA\nzYJ6t73x/r/HtpUuhe0kEE/UYv2WBd7Sj3/bYDuNN3f8z4tx+bkj044Z6dN7BGrqm/JUbs/BGDpg\nhhhifrVDC+w4Mc9zTNuJpz3YmKjxPM+p7OCa6DTAniwiOp1cPrT/dKRbhDg3ZxCi4VyjrqFiGoBl\nAKCqCRGZXVW76/oVnzz7rwDyPXXXOE78MVVdm2qjAA4tzvdrEdnx0afPPvTh2v+eLhDHNIOvuW7y\nQVVtr2kQToLsq2uoGNy75+BjjtQ17EPzoNk3b3S4tHz1mR1ZXUdpWt4na/HWnf+4ZNyIzx4ROh03\nic3bFjY6buJ/M1UfdV8MWUR0OgmYZrDFHnzTCCiO+lxMjR96MfV1Qqmn8Vp8Ik9ELAAXAOgNYIOq\nFrXmvG2RtBue+rT4tV8NKpiS1fx2p6qHDVtfx/BB5x7eV1u/13Zdu8WZ4bs624ndvaboz8sByRo5\n5EIJWCFU1ZZixbr/jdluYiGANs9oT9QSTuFARKcNEZkaDvZYcsPcp6JHT4bZ0HgQr77zvZjr2f1U\n9ZRmOD/O9a+yzNDvopHegexoPg5UlQRU9RPbaby+vad2SF0vELDCC3v3HHr2lPGfi+b2HIKauj34\ntHg+GhO1mDPzXlhmEPFEHV555zuNttM41c/Ql2kiMjEYiD7uOMkLTNNyPPUSUH3c9eyHu+rSU5Q5\nXLuQiOgowUB06bCBn5k6/Yybgod6d5J2DO8sezRWXbv7GduJf8+P64rIBQEr8sbsGd+O9ssbA6Bp\nAtNPi19zNmx9vcxxE2P9WCZIRIIi5l2WGbjLcZMFphGQXjkDdcakL5s9on2wd/8GrFz/fEMiWfu0\n7STua+/rd0Yi0gNNqwfsZ7iitmLIIiI6iojkBqzIfBFjyuDCaYbjxr3S8rWGiPFHx4nf5tdcTaFg\n1odnT/zSzBGDzzvm2JsfPFy/7+Dmb6iq7+OCRKTQMoPfV+BL6rlRywptStqxH6tqh0ynQNRdMGQR\nEbVARCahaWxUEsACVT3u8jOneK2AQOL/fMWvDdMMHnO8ZNcHWLn+udcTyfrL/aqBiNoXZ3wnImqB\nqq4DsK6DLicKQCT9mPvUfjPtQSLqsjhPFhFEkGglAAAN0klEQVSRz1Q1GQxE1u7auzrt8ZLSD+qT\ndsNfO7gsIvIZQxYRUQdI2rHvL1/3P7Gqml2H96l6KCp529tfWdwA4PnMVUdEfuCYLCKiDmIY5k2G\nmL/K7TnE6xHtY5YfLILjxEttJ36Fqpac+AxE1Flw4DsRUTsTEQFgtvXRfxGJAJiL1GSkAFZoZ/kg\nJqJWY8giImonIpJvmsEHVfUWz7Ojlhna73nOY566j6VmhSei0whDFhFROxCRPMsMrRk2cEbBxFFX\nBbOj+ThYvR2rNrwQq6rZ+YHtxC/za34tIuqcWpNbOPCdiOgETCN4/5D+0/t9ZvJXgz2y+kBEkJ87\nHHNm3huNhHNnAuD8VkR0DIYsIqITEMFXJoy6LHT0ftOwMHHk5dnBQPT2TNTVHYlIH9OwfhQKZm8M\nBbOLLSv0SxEZkum6iNqCk5ESEZ2A6zk9siP5aY9lRfMhYvTt4JK6JREZbZrBpUP6n501cvCFYcMw\nsWPPitu37Fz8NRG5VFWXZbpGopPBkEVEdAKWGdqxr3LLyP59Jx5zrOLgJsf1nLUZKKvbCViRP00e\nd0PuuOFzDt9l6dt7VLB/nwnBf6x65m8i0p8LOlNXwtuFREQnYDuNP121/vkG24kfsb+uoQJFJW/a\njhN/PEOlQUQmishcERmbqRrag4hMEDFGjRk6+5jfSwMLzkKPaN8wgEszUBpRm7Eni4joxH5f33jg\nwlcXfe+6CSMui/bI6iv7KrfYm7cvcjx17lHVTzu6IBE5M2BFXggFs4fmZBfaNXVlgWAgWmw7jV9Q\n1c0dXU87GNYrZ6BtGOmXcMzvPcKqrts9vINrIjolDFlERCegqp6I3Ow48d+s2/zKbYZhDnRde53j\nJp5R1U0dXY+IDDbN4PtnT/xij+GDzxNDDHiei+Id7565euNLS0VkvKpWdHRdp6istr7cUvXSLqRd\nXVNqA9jT8WURtR1vFxIRtYI2WZK0Y/8cT9RdYDvxOzMRsADANIPfGzN0dmTkkAvESAUSwzAxdvgc\nY9iAz0QNw7ozE3WdorWumyjfsWfFMQf2VW5BVW2pAni948siajuGLCKiLsYQ4/oRgy8IpDs2csgF\nYcsMfqGjazpVqqq2E//C0o9/W7+26C9Obf1e1Mf2Y8PW1713lv485nr2F1U1kek6iU4GbxcSEXUx\nqmqZRvqPb9MIAF30s11VV4nIWUUlb91XVPLWtQAMEWOR4yYeVlU+wUldDnuyiIi6GBHjnZ1lK9Mu\n47Oz7CPb89w3Orqm9qKqW20n/jXbiefZTjw3acduYMCiroohi4ioi7Gdxkc+LZ6f2Fe55Yj9e/dv\nQNG2t5OOm/iPDJVGRM1wgWgioi5IRC4zjcA
LuT0HS++eQ4IHq7clq2vLXNdLXqeq72W6PqLurjW5\nhSGLiKiLEpEQgCsBDASwA8ACVbUzWhTRaYIhi4iI6BSJSJYh5l2mGbjd9Zw80wzusu3YowB+p6pe\npuujzGDIIiIiAE0TmAIYAaBcVYsyXU9XISJZlhVeWpA3dtQZo6+O5GQX4GD1NqzZ+KeG2oaKtx0n\nfgOD1umpNbmlzQPfReRzIrJBRFwRmXKcdnNFZJOIbBGRe9t6PSIiOnkiMiAYiC62zNDm3j2HvBIK\nZK8KBqIbjve5Tf/PEPOufnljRl90zt2RPr1HIBTMQv++Z2De+T/MioR6zgFweaZrpM6rzT1ZqcVI\nPQD/BeAeVV2Tpo0JYDOAS9C0HMJKADem+yuKPVlERO1LRHIsM1Q0fuS8vhNHXWFZZhCeeti+exmW\nr/t9vesmp6pqcabr7MyCgcieOTPv7Z+fO+KYY1t3LsGqDc8vTCQbjli4WkTOBvDZ1OZbqrqyA0ql\nDtaa3NLmCesOLSchctzzTwewVVV3pNq+COBqAOyqJiLynXy5IH9cr8ljrzv8WW+IgRGDzkV9w77I\nhpI3fgjgpgwW2Om5ntO7R1ZB2mM52QUAZMChbRHpEbAiC8KhnCnDB54bBoBtuz+8LxiIrrGdxitU\ntbZjqqbOwu95sgYAKG22vTu1j4iIfBYMRL84euhF0XTHRgw+31TPvbqja+pqTCNYerB6e9pjB6q3\ne6re4U6DgBV5flDBlLNvuPSJrGkTbzSnTbzRvOGzT2YNKpw6PWBFXuiwoqnTOG5PlogsBJAuwn9f\nVV9rxflP6l6kiDzYbHOxqi4+mdcTEdERQqYZSnvAMkNQaJdcfqcj2U7sF2s2/umxfnmjs0wzeHh/\nY6IWnxbPj9tO4y8BQESGWWbokhmTbgkbhnm4nSEGZky6JbSrbOVsERmmqukTG3V6IjILwKyTec1x\n/4Gp6pxTqAdoGoc1qNn2IDT1ZrV0vQdP8XpERJTiusk3dpZ9NKawz/hjktauvavUNILLMlFXF/Pb\nuobyefPfe+CSM0ZflZ2TXYADVdt0ffH8mOsmHlPVD1PtZvTLH2tbVih89AksM4iCPuPt3eVrZwJg\nyOqiUh0/iw9ti8iPTvSa9vorpqWBWasAjBKRoQDKAPwTgBvb6ZpERHQcrmc/XVL6wR2DCs4KDeg3\n6fD+6trdWLPxpUbbiT2UwfK6BFV1ReR624lfuWr9c98AZICqV2Q7jc0DFgDEbbuxxbs3STumAOL+\nV0ydyak8XXgtgCcB5AOoAbBWVeeJSH8Av1bVy1Pt5gF4HIAJ4Leq+kgL5+PThURE7UxEzjWN4Pxe\nOQMC/fLGRKprd8crDm42PHVv8zz3D5mur7sQkRzTCJRfffHPItnR/COO1ccO4G+L7m10PbuAg9+7\nD05GSkREEJEggCsAjARQAeBlVa3LbFXdj2WFHoiGc++fNf2bWbk5TSNlqmp3Y/FHT8Ri8apHHCfx\n4wyXSO2IIYuIiKiDiIgYRuCbIvKjcLCHBQjiyVpX1XvI85wntLP8wqV2wZBFRETUwUQkAOCM1Oan\nXLS7e2LIIiIiIvKBr2sXEhEREVHLGLKIiIiIfMCQRUREROQDhiwiIiIiHzBkEREREfmAIYuIiIjI\nBwxZRERERD5gyCIiIiLyAUMWERERkQ8YsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER+YAhi4iI\niMgHDFlEREREPmDIIiIiIvIBQxYRERGRDxiyiIiIiHzAkEVERETkA4YsIiIiIh8wZBERERH5gCGL\niIiIyAcMWUREREQ+YMgiIiIi8gFDFhEREZEPGLKIiIiIfMCQRUREROQDhiwiIiIiHzBkEREREfmA\nIYuIiIjIBwxZRERERD5gyCIiIiLyAUMWERERkQ8YsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER\n+YAhi4iIiMgHDFlEREREPmDIIiIiIvIBQxYRERGRDxiyiIiIiHzAkEVERETkA4YsIiIiIh8wZBER\nERH5gCGLiIiIyAcMWUREREQ+YMgiIiIi8gFDFhEREZEPGLKIiIiIfMCQRUREROQDhiwiIiIiH7Q5\nZInI50Rkg4i4IjLlOO12iMgnIrJWRD5q6/Wo8xKRWZmugdqO71/Xxvev6+J71/2dSk/WpwCuBbDk\nBO0UwCxVPUtVp5/C9ajzmpXpAuiUzMp0AXRKZmW6AGqzWZkugPxltfWFqroJAESkNc1b1YiIiIio\nu+iIMVkK4B0RWSUiX++A6xERERFlnKhqywdFFgIoSHPo+6r6WqrNewDuUdU1LZyjUFX3ikgfAAsB\n3Kmq76dp13IhRERERJ2Mqh73Tt1xbxeq6px2KGBv6r/7ReQVANMBHBOyTlQoERERUVfSXrcL0wYk\nEYmKSI/U91kALkXTgHkiIiKibu1UpnC4VkRKAcwAsEBE3kjt7y8iC1LNCgC8LyIfA1gB4O+q+vap\nFk1ERETU2R13TBYRERERtU2nmfFdRB4VkSIRWSciL4tIz0zXRK3X2slpqfMQkbkisklEtojIvZmu\nh1pPRH4nIhUiwuEXXZCIDBKR91KfmetF5JuZrolaR0TCIrJCRD5OvXcPHq99pwlZAN4GMEFVJwEo\nBnB/huuhk9PayWmpExARE8DTAOYCGA/gRhEZl9mq6CT8Hk3vHXVNNoC7VXUCmobc3MF/f12DqsYB\nXKSqkwFMBjBXRM5pqX2nCVmqulBVvdTmCgADM1kPnRxV3aSqxZmug1ptOoCtqrpDVW0ALwK4OsM1\nUSulpsGpynQd1DaqWq6qH6e+rwdQBKB/Zqui1lLVWOrbIIAAAK+ltp0mZB3lKwBez3QRRN3YAACl\nzbZ3p/YRUQcSkaEAzkJT5wJ1ASJipB7oqwDwtqqubKltm5fVaYtWTm76AICkqj7fkbXRibXm/aMu\ng0+8EGWYiGQD+AuAu1I9WtQFpO66TU6NHX9FRCao6oZ0bTs0ZJ1oclMRuQXAZQAu7pCC6KS0x+S0\n1GnsATCo2fYgNPVmEVEHEJEAgL8C+KOqvprpeujkqWpNatWbuQDShqxOc7tQROYC+C6Aq1MDy6jr\n4uz9nd8qAKNEZKiIBAH8E4D5Ga6J6LQgIgLgtwA2qurjma6HWk9E8kWkV+r7CIA5aBpTl1anCVkA\nngKQDWChiKwVkV9luiBqvZYmp6XOSVUdAN8A8BaAjQBeUtUWPyiocxGRFwAsBTBaREpF5MuZrolO\nyrkAvgTgotTvu7Wpjgbq/AoBvCsi6wB8hKYxWS2OIedkpEREREQ+6Ew9WURERETdBkMWERERkQ8Y\nsoiIiIh8wJBFRERE5AOGLCIiIiIfMGQRERER+YAhi4iIiMgH/wfFhtau+2o+NAAAAABJRU5ErkJg\ngg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": 
"display_data" - } - ], - "source": [ - "# Generate a dataset and plot it\n", - "np.random.seed(0)\n", - "X, y = sklearn.datasets.make_moons(400, noise=0.25)\n", - "plt.scatter(X[:,0], X[:, 1], s=60, c=y, cmap=plt.cm.Spectral)" - ] - }, - { - "cell_type": "code", - "execution_count": 102, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 102, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAAHfCAYAAABj+c0fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XecleWZ//HPdeoUqkAEpaiggiJYEFExFiyoiUaNmmyM\ndU3bZDdr1jWbTTHZJJtfiomJ2SRqookxtmjsvaAoFlRARRSlqfQmZcqp1++P+wxTOGcozsMM8H2/\nXrxg5jznPPc5M8z5zn1fz3WbuyMiIiIiHSvW2QMQERER2REpZImIiIhEQCFLREREJAIKWSIiIiIR\nUMgSERERiYBCloiIiEgEPnLIMrM/mdlSM3u9xed2MbPHzGy2mT1qZr0+6nlEREREticdMZN1AzCx\nzee+CTzm7vsAT5Q+FhEREdlpWEc0IzWzPYD73P2A0sdvAUe7+1Iz6w9McvfhH/lEIiIiItuJqGqy\ndnX3paV/LwV2jeg8IiIiIl1SIuoTuLub2UbTZeU+JyIiItJVubttyfFRhaylZtbf3ZeY2QBgWbmD\ntnSwsvMysyvd/crOHod0ffpekS2h7xfZXFszORTVcuG9wAWlf18A3B3ReURERES6pI5o4XALMAXY\n18zeN7OLgJ8AJ5jZbOC40sciIiIiO42PvFzo7p+tcNPxH/WxRVqY1NkDkO3GpM4egGxXJnX2AGTH\n1SEtHLbqxGaumiwRERHZHmxNbtG2OiIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIB\nhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQioJAlIiIi\nEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERERCKgkCUi\nIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQ\nJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERE\nRCKgkCUiIiISAYUsERERkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJE\nREREIqCQJSIiIhIBhSwRERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQU\nskREREQioJAlIiIiEgGFLBEREZEIKGSJiIiIREAhS0RERCQCClkiIiIiEUhE+eBmNh9YCxSAnLuP\njfJ8IiIiIl1FpCELcOAYd18V8XlEREREupRtsVxo2+AcIiIiIl1K1CHLgcfN7GUzuzTic4mIiIh0\nGVEvFx7p7ovNrB/wmJm95e6Tm240sytbHDvJ3SdFPB4RERGRTTKzY4BjPtJjuHuHDGaTJzL7HrDe\n3X9R+tjdXUuJIrJTMrODIP0tyJ8KxRSk5kHmZ8AN7p7r7PGJSGtbk1siC1lmVgPE3X2dmdUCjwLf\nd/dHS7crZInITsks9mlI/hmOroJRMagC3gOeqoPlr0DmRHfPdPY4RaRZVwtZewL/KH2YAG529/9t\ncbtClojsdMxsACTmwD9XQ/82txaAvzXAgqvcc9/ujPGJSHldKmRt8sQKWSKyEzJLXAmjroDTq8of\nsRy4dg3kPubu2W04NBFpx9bkFnV8FxHZplITYUSFgAXQD6iKAcO21YhEJBoKWSIi25aH7jbtHmJs\n+iAR6eIUskREtqnsAzCzofLtS4FMDnh3W41IRKKhkCUisk0VroU3HT4oc1seeKge/Gq1cRDZ/qnw\nXURkGzOzT0DyNjg8BQcmQguHBcCkOvjwOch8QiFLpGvR1YUiItsJMxsBqf8APzM0I02+A40/A251\n90Jnj09EWlPIEhEREYmAWjiIiIiIdBEKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIi\nIhIBhSwR6XRmFjOzeGePQ0SkIylkiUinMbOTzaqfA3JAzqxqtpldYmb62SQi2z01IxWRTmGW/DZU\n/RecUAP7AXFgDvB4HXz4OGTOUudzEekq1PFdRLYLZjYWqp+Cr9RA9za35oA/1cHib7j7HzpjfCIi\nbanju4hsJ9KXw5FVGwcsgCRwQi2krzAz/SImItsthSwR6QxHwj7t/PzZE8gNAmq21YBERDqaQpaI\ndIYCFNu5uQi40f5BIiJdmkKWiHQCfxjezFe+fTaQnuXuDdtsSCIiHUwhS0Q6QfYqeCEHy8vc1gA8\nWgeN/7OtRyUi0pF0daGIdAqz+Och8Xs4IgUjE5AA3nF4uh6y10H2Mu+sH1AiIm2ohYOIbFfM7ABI\nfQM4BTwO8anQ+FN3f7KzxyYi0pJCloiIiEgE1CdLREREpItQyBIRERGJgEKWiIiISAQUskREREQi\noJAlIiIiEgGFLBEREZEIJDp7ACIi25KZ7QPsA6wHprh7tpOHJCI7KIUskR2cmSWBT0H8MPAcFB8B\nnt7Zuqmb2ShI/xHS+0P/LNQbrMYs+WPI/3Rnez1EJHpqRiqyAzOzoyB5N/RLwojukHeYXgcNiyB7\nkrvP7+wxbgtmNhKSU2BiNxhtzb9fLgPuqIM1f3TP/FtnjlFEujZ1fBeRDcxsf0i+COfWwrAWtzjw\nfAGeWg65fd19bWeNcWuZWS1wKtAHeB94xN1zlY+vfgaOGw9jy/zMqQeuboDMge4+O6Ihi8h2Th3f\nRaSF9JXw8erWAQvAgCPisFd3sAu3/bi2nplZ0uy/E7BsCFx/IPyiP9ychKUxs89VuM8gKBwKB1X4\n4VgDHJKA5JcjHLqI7IRUkyWyAzKzBMROh0Pa+UVqbC0s+BLw6202sI8oCT/oAf9+HtT0bv509ULg\nr3B
tzKxYdL+lzd32hF0ykKyq/Mi7J2Ha/lGMWUR2XprJEtkx1YJ5mKUpJwcsBhhqVvOCWfr/zGx4\n26PMLG5mp5lVP2pW86ZZ9RNmdlapmH6bMrN+DpdfCLW929y2O/A5qEnAb8ws3ubmLNTVhGXSStY5\nFFd26IBFZKenmSyRHdN6IA+rU9A2kqwE/gr0Ak5JQfVhsOBgmHqhWepqyH3L3d3MukPqceixHxzZ\nLZQ/rR4BU8bC6rlmdoy7r96Gz+mzw6HYvcKNA4FekFoOE4BHAcysByRvgGICFgB7lLmnAy/VQeaG\nSEYtIjstzWSJ7IDcvQCxG+H5NsXgOULAOhK4ABgF7A0cn4R/rYZuX4PYxeHY9E0wfBR8pRscBAwG\nRgNf6gajhkP6jm31fABiMHhXqG7vmF3Dz7SBzZ9J/QRG7AkTDe4BPmxzDwceycH6ecDjHTxkEdnJ\nKWSJ7LByP4Rpa+C5AuRLn3uTMIM1pszxtcCnaiHxfTMbDMWT4NSqjX9MGDAxBRxhZvtGN34ws6Fm\ndoSZ7VGEJasg097xK6FA6MuAmdVA8QI4Lh3C5GHA7wlh6xVgMnA18OpqyE4Ajqwyu7vKbGG12fyU\n2VVmtmeUz09EdmxaLhT5iMwsBcTcvbGzx9KSuy82s7HwzC3wzCjYMw9LauDYtjVLLQwGkj0hexHs\nU4R0heMSwMgYvHIq8HZHj93MJkDVryA9FHpkYU0abPZrZOxEyk9nLQOWh38+VvrUUKjNh1AJMA4Y\nCUwHPgBSwKHA5HlJ+FYKLj0KavYCywGvw7+8Al80s3Pc/YGOfo4isuNTyBLZSmZ2BlR9B2x0+Lhq\nAWR+AvwxLNd1PnefB4wLRe1vjYGqb0NVO7NPBqQLUFcLVe2EMYB0Amjnir2tY2anQ/pvcFoN7AvE\nq8NM3MxRBe7P/5Fc4yVQ1TJorQZuDg2v/tvdm2a7cpCPhyXBpu4N3YDxLe75JkCvbnDppVDb8jKB\n3SE1ElJ/htvMbIS7v9/Rz1VEdmwKWSJbwSz1Y+jxr3BSLQwnvInP2xMevwpWnmpmZxIqxfciFKHP\n7MxtW9z9LeAts9QYmL8X7Fvh6sA6YG0aeBre/TJ4ujmgtPVOPWFaqMOEWcHEn+G8GhjU4pYEoR6s\nJrGC27M/J9e4H/guUL0Y6uaGNc3v5dyvaXGn2ZCrg4W1rcq0WpleF6dxl5PbBKwmA8NZE9PhK8B/\ndciTFJGdhmqyRLZQaauaf4Mv1sL+QJzwX2kocEkt7HICxKdBYgH0fRi6PQ+p983iF3bmuIPcb+GV\nAqyrcPuUPMTvAx6EhpWlmZ4y3gE+bAAe6eABnga7xloHrJaGAT0KBTj/dfjm0/CD2fD1PPTPuf+8\n5ZHuXoT8j+G++vKlXLOBuQWgZ9t2rS2NClN2Z27d0xGRnZlmskS2WPpyOLo6FIq3lQBOqIZ7D4Av\nA+uqQv3Pqm4w/f/MkkPcc9+PcnRmNgA4AMgCL7p7Q9Nt7v6OWeqncP1/wOk1sCdhpqqOELBeWgm5\nr5daOJwDdz8Ba2tDt/QqQliZ4fBYA+TOjmBZdDjsWe6FbXp2wJAkrOzv7q2aqJqZAXF3zzd/tvhr\nWD0arjkHxtfAEINGYFoDzMxD/jNxuKu93zYTrf4SEdl8+sEhssV8HAxrZ/+qPYG1wG2EUuy9gCKQ\nr4bYd83sHuA1YAIkz4JYLWRfAr/J3dv2GNhsZrYbpK+DxHGwawZyBqtiZqnflXpf5QHcs98zi82B\n2/4HYn0hnYf1KYjfA7nL3H1ROM5fMrPDYdJP4PHjoSoHmSQknobcN9192taOtR11UJejcsU9UJcn\nLME2Pe9Rafh2DE4vQjJttiIPvy7C1e6+zswugexf4MnLwEYBDZD7GxT+AKxwWL8QqnavcLZ3oFCE\n5zrwOYrITkIbRItsIbOqxXBJf/hYmVszwIuE9gDjCf2omn6XyRJaMU1fA74cuveHg2shbTCnHt41\nKFzqXrx5y8dku0JyOozrC0cmmuvRVwH31sOiJyB7esu6sNLMzzBCW/j57r6mncfvCfQDVkbZgNTM\n9oLkTLi8Klz911YD8ItGyA9x92VmNjEJdx4NVQdBrIbQx/4ZaJgL72dh3KbGGzf71hD49uehuu2M\n1jrgt1DfCEe6e4fWn4nI9mVrcotCluwUzGwkpP4FEiOhuBoabwDua720VPG+NcDJhEL29yF5Dhz+\neTiuzdV364A/E65mGwKcVubRHPgLkC7CubHWReVLgRsaoPGT7v7Elj2/1O/gwEvg1DIF7Tngt+vh\nw7Pc/dEtedzOYFZ1Nww7Cc6sCvVuTfLArQ3w/m3ujReZWY8ELDofage3eQwH7ofsG3BXo/tn2z+f\npdLwxAA4eALUDCQ023oLeATqG+DHOfcfdeiTFJHtjkKWSBthtib1W4hdCGOTsHsirDRNXQerlkH2\n403LY+Xvm7wC/NswoAi947CkAKsdij3gi7Sezfozoc/UK4Ru6v0qjGoO8ATwhTK3vQ488LJ7w6Fb\n8BxTkFgNX61p7gnV1ssOjz/i3nDy5j5uZzGzWkg/AOlDYFwt9DZYUYQXGyD7FGQ/7e4ZM/vK3vDT\nz5UvjqMO+CU05mGgu7e7L6GZpWLwrwm4rAB9HWIpmNYIP3D3+yJ5oiKyXdma3KKaLNnBJS6HXufD\nRdWtW1ge3B2eroYpT5rZeRA7E+LdIfcacKu7r4PkldD9G/C52jCJ1eQD4GaMPwCH4xxA6NS0DPgn\n4Gmgbztj6keo2SpnBHDvAWa2q7sv3cwn2S/M+FQKWAC7GfhhZjbU3eds5uN2CnevM7NjITMenv4i\nxAZCYR5kfwdMbVryrILj9qsQsCDc0A8yi0Pvhyc3cc4s8HMz+wXQHcg3uNd33LMSkZ2RQpbssMws\nCcn/grNqN+4RbsDYBLw4DGqehYOTUBODuXUw/1dmdhkkroAL09CjzX0HAv9Emr8wnCm8y0tkKZBl\nNJAk1EOtoXLoWUMogyonAaSzkOtJWD/cHOsgnwzLghXaX1FHNfQqwBtVZvdl4PMtmnZ2OaUgNbn0\np5JCcROPU2z112aft1ICFhHZIuqTJdsFM0uZ2blm1f8wq3nYLHFlqVVB2+NiZnayWdXfoWoqJGtC\nhU5bReAWYGQc/iMNx8fgCOC8Wri0BhK/hn3YOGA1GUQjPXkTYyR5DqNAbEP90AGEJcNKXiLspVdO\nHdCQJNRvY2a7mdk4M9u3VKi+EXdfC8kX4I2KZ0zyIseQsf+AqsHwiTRscXF9V9MI98+o3PCLNcDK\nUD3/8rYbVWtmNtos9j2z+E/N7Dwz6/AO+SLSdSlkSZdnZvtAcj7sfh2c8Ck49SQYfQUk5prFv9Li\nuL6QmgF97oLjz4JPjYYxqZAn7qf1hMYcQiH1KWz832BXYO8UDGinjQDA7mQ5hBcZyBQM421CoDsc\neBWY2eZ4J1x5+DaVQ9ZLBUjcAwwxq54EyTmhoWn1q5B+xyz2
qfL3a/w2PFwPS8rc9jJJ5jOakDjO\nCdN6p0S9ufM2cMciyM0qc0OB0E01Bje6+/oyh0TKzHYxq5oMNVPg8O/AMZfDkN9BYpmZVfgaisiO\nRoXv0qWFIujkXJjYDw5p8/2yCri+HurPBR6A1Cw4cN9wIWDLQzOEoLUXcEzpc3cRuopXqi9/FlgB\ntPd+eG1pDPuVPn6DcO6DgEXAHYQ8sw/hbf81IOeQew8G9IMzaqB36b45YGoRnloDuX+CxB1wQqkJ\naIoQEOcA/6iHxq+7F64r81qdC/EbYG+DfasgS4pXSLOaC8i1qhJ7GPIvwf8W3L/bzhPs8sxsTBKe\nGA3pgyFdS3jln4G6lTA9C8dv6427zSwOqWnhe/GkVOsrJD8AbqqHzKnuPmlbjktEPhpdXSg7HDO7\nFIZeBZ/vVv6IWcA9r0HjVdDtRriM8hO0qwmh6DJC3dJfCQGr0mTOGuAa4BuU3wN5OXA98O8tbl9C\nuMLwIOAwQv30DMLS4RKgRwbWzYDcBEh+F/wr0Dcf7r8oDbGXofEiSP8dThoFB5f5/7EC+F0WCruV\nu2LOzHqD/bKWxD/thSf3I88+tH6bB3gBeBKuz7hfWuEF2CJmFgNOr4LLC7CfQdbhrhxc5e6zO+Ic\n7Zx7twR8NQ7nF6FbHOY2wi+A2909F+W5K4znk/Cxm+HL3cvv+7jlV5CKSOfT1YWyA6q+GMZUCFhQ\nmiXaF5I/gEOovALeG+gJ/A6YUDpuCZVDVjVAAW7Owj+1uTLxQ+BvpcdpGcD6E9o63EpYFnSa32QT\nHu5XfBU4BHJXAFfCkvGlB3nD3eea2f4Q2xtGV/iP3BcYlorx9vyE2b/l3f/U8lZ3X21mf0iQO+tM\nSFb6abAEGnPwboWbt4iZxVNwWzeYeAzU7kFp8x24+EX4vJmd4+4PdMS5yim14PhW6U8XUPVFGFch\nYEHpCtKRZra7uy/cliMTkW1LIUu2ubCcQn/CGtgSb386tXvlK/EgzNEkc5Ab2P5xEK726ws8BDSs\ngQ9q4PBk+c7i0x1iz8LSt+Gq82FEEvrEQ5uGt4EDKb/U2As4G/gDYdYsTenN1uDddIzbvlRD7vws\nLMvCZwmFYbnSH4DhsFt+47mnlvZiGHO6LSH/m6RZn5z7z9oc8EIDrJ4D3cptfLweeAPMQ1fUjywG\nX+8LJ18ENS2vbTweksMheSPcXmodUa5gbAdk/UOgryQB1Gbhwz6AQpbIDkyF7xI5M+tjZqea2elm\n8R9Bcimk34HUXEi9Zxb7Smm5qen4YWap35ulVkN2BNxNWHIr15x9LZBJQLEA77czigLh/WwUcCkQ\nq42RjcW4idZX7BcJS3yP1UHmX90zX4TcMHjjfXiGUMeVAo6m8kzFLoQtdFJtjhlGkQn0J1lzKuyR\ngOf7wD2D4JYkvFNl9jhQA/XtruEbdfSmyMUhVf4gFPw389D06Qt3QP0cWl9buRK4EeoMfunui9s7\nz+Yws1gC/vOUNgGryUBgJMTiYYpvJ1GcC8vb+RpmgPVpSleQisiOSzNZskml1gEnAv8Kqd2B9yD7\nV+DuUhPHSvfrAenfQ+IM+FgGVnWHXWNwErAb4e1/wUB46Kew8iSz1DLgXEh0h24OYy2EokWE/Xlf\nJ0z+zCPUYmWAdUWwh6E4Ed5Nhpqlco1AXyPMMjV1aD8w0Z1XGc4iXuHXxOgPVOMspEi+WCB3lbu/\nBuDuC82qDXYn1Gp1JxS8d6/wzFcT8k+532EOYh6PcXbpEdZBt9MIkex5OGYyjMqzPBlu3aXM/YvA\nq4ygSC9gOPjM0F7+Fy2PcveHzezs2+C6Wug+AOxDKC6DmMGPcvD/Kgx+Sw2JQ7dKmysDjISqt8Me\nQ9/voHN2cZlrYMopMKa2/I/YaQ6JZ9xzy7f50ERkm1LIknaZ2emQuAWoDvvxjQCKo2HaqbBitZld\nAPEjILEXFBZC/s/u/rqZVUPqWRixDxydhj9XhfBzPmEprEiYmRoCHFMLd50G4wowOh5KlN6zcIXf\ne8CngaGEvlbXEHpXHUgIMgtisHwi2LswaD/4Sww+WTo+Rogv0whd2M9v8cyGsYa3yVHP5eSZywfk\nCCNsgNjt8Dkzu7J5KdNWwughYUxG6HU1pMKr9hKhyXg5aWIkaaDAGOA3wKlsmBuLN0LvF/CZzh0H\nwAWx1jVfToyHSZJhJrAHMAiqZ4cvykbc/UEzG5SF41bDp9NwQAJWZ8KL0puQ5D6qmIEbzdc/Li89\nn6YuY6WouTPNmk+GxqfhlmPhzOrmpvRFwhWoT9RB7rJOHJ+IbCMKWVKWmR0E8R9C8pTw/vg5wr58\nTcbG4PU+cO/9cEAWuqVgYREWfN0s+QEwCXYbCqenQw1UHhhHaOA9hzA7BaGgPAOcCwxrUYi0H7A3\n4SrAx4DjCIXjh9B6qW5/4ONVcP0wmJeBA6vgUQuVR70Js0qDCJM9u7YYfxboz2vM5Thap5RS/BtC\nmKoqrSU2/B6m/xIuqYXnCX+eAcbTnB+cEOhmAF+q8MrWUyRHDc2LiVmay+oPh8QLFPaNs6xo/CpW\nYCzObsBaUrxAL9ZxNjn+WHpF1kOhEFYBKzkgCbf0h/SB0D0BvA3HzoYfxszOL7r/vZ37bo73cpB7\nmfCV7UZ4teuBxwmvaxwyufBF3Cm4u5vZGfDBNfDLz8PgHFQbLDDILYLcZ929cudYEdlhqIWDtFLa\nnPcuiI+HQ2pgPjAcOLLCPW4nXKWXJ8weFQiF4RBq2wEW0LzdS4wQkg4iRItlhLCyFLiQ5t/6XyMs\nEa4jRJECIZZ8mY23yIFQj3VTBpgNsaHg1XCihaBWrmv7zcB+JHmbk3iLMYSwM5MwE/M8FB3GufvU\n5tcl+RYcPQCOjIfs9XfC38NLjzmLUL9eDXyVcpM3xtMMZzLnkmcdYSbrClqXuf8QcmMhPgpiU4mz\nggQ1FDmY3Ib5uVsI8fIhqG+AI919+kbnMuubhHdOh54j2xSQLQZugIYsTHD358u8QJstZnZjCi44\nl9CJrEkjoZpuLhSzMNTd53+U82yPzGwX4ATCN8VM4OVNXOghIl2UWjjIZild3TeBMFuzEnjYN2yG\nm74dhn4czqoK78s/Jmx6XM5awnLekcBYmqNCjjBxMa3076b5jfmEmqqWM2IfIywHPgI8DJwFPEV4\nPzoZ2JMQK1YDk4AbgYvYuHfVQKAqDXU1KbLzc9hQZ0q6uVFoS68RQt3ZFFhCA/AoMJVQtdUd6A+x\n5fBcyuzlHEx097VmdgQ88yhMHQQH1sJw4sygwFTC4t2ZpXH8FbiH0E2+qWl8EXiNJJOZUCrgf5mw\nAU/LgJUDHGLVEOsPfJICIWC2lgZeglwRXigXsAiv2qXDId02YAEMAE6AqifgSkKR3FZLw6hTaB2w\nIHyFPg38CnLZcMr5H+U82yN3XwXc1tnjEJHOoZC1kzCzgcAZwOFx+ERvYADEPoTCYoinzH6Ug3ug\n6lg4oyq8secJfz9
FeJ8eQugr1RQLnifMpxze5mxJQkBaRghhtYS3YKd1wGrp48CvCEXtrxCW21q2\nx+pN6L5+L6G+qm0usNLxdUP7AUtwBvIhC/hNaXyDgQbCUt5CwvJnihjv8SzhgvtRhLL2BYSy/EGQ\nfA87PIYvMbMj3X2ame1nZK/vxzMX7IHFZ2Gs43Ra12B9FngA+CUwGCNNgnl0J8M55OlTGsXLwCVt\nnsWb4ZnMfRcGHNX6BdigqfYpC6/l2mlJn4KLDik/7Qfh+dpDcJyZpbd2s2gz2yMNw/evcHsCGAfJ\nyeEL+pFmzEREtjeRhSwzm0h414wD17t7R13NJJsQlrb4OOENdjakr4Dkp2F4DHqkjAWsYglFoDcx\njiPLy/Dfq+AiqKoOF541NdLsVnqYNOE98hHgM4SlwOmEdghlR0GoV7qTMNM1l7B0V0k1IdpMIfSf\nKpcvDDgKuI5QkdSyaUAGWIExgu68wWogRqH0+YXhZSBJWNo7nTDPspA8yzmPMA/VZCrwIAmM3Sky\nFKirhhkvm1U9BZyZhtrD8PhgnGlUE+ajWkoTZrXWYtxDFW/gOHsQ5vbeIdQsXUjr6weXEvbby8NX\nF8LfPyDMi7X1GpCH+Vn3MeVeySYO3WrbuT0NxKBYbC6M2xoDekA23k6Y6wOxWOWrBEREdliRhKzS\nctQ1wPGEd7ipZnavu5fby1U6iJklIPljSPwLfKy0XcvCWtjd4JwNV6qFxarFrOJmVrEf8/kAZ3Et\n2N6hfukEwjLbh4QNWF4lRIIjCFdH3QxcTHhfLtdmoElfwkxYX8JK0cbLXq0VCAXr49s5ZhdCPFjX\n5txTAcPZld7M5BM4vwcOpcB05pPjbMJsmhEC5BzgDsaTbxWw3gEeIY1zEb6hpgzgxBjcdxTMeiRP\n5uX1UFwLsRi9qHzhXA+cUQzifU4gyxzCLNQngZuAO2DdGOieCuetfwesABe7+6Nmdt5f4NZToXp/\nwn/URuBV8KdgfXszWE0M3l4YNkksq9Q/oJHWjcK21NK1kCpQuX3qqhDk2mtiJiKyQ4pqJmss8G5T\noauZ3UqYPlDIioiZJSH+NFSNDdu39SLMzywlLF+1bRU5gLB6+AAFcoTAMrJ029OlP+cQlv16E2qM\nLi4d8x5DR7T+AAAgAElEQVRhVitGeH8uV1gOoY4qSViE24uwQDa2wrFrCAX0u9L+pEqxdHvTW3qG\nELBeAJwEC+mN041QdDYTOItGHuI2Gkhh9MZZTYosBbJMaPPoD5MizxnQKmBROt9pKZg/Mk/mpqnQ\nsDfUOutovX1Oa8ZqulOgH9AUdhyIQXYlXPoUTIxBdQaed/izu38I4O73mtnEh+Cn98OB1ZCth1Qc\nnsjBf2zOLyyNcNVkGDMSupX7jz45vHi/d/fiph6rEnefW202exaMHlnm9gLwAjRkQgt8EZGdSlQh\na3da/+b6AWHHXCkj7FfHSELR0FPuvm4L798XUlNhlz3CbFMvNlwjRxWte363tBeh1HoMYXWxKSgc\nQ5i9+gvwBcLy3XOEENQfOBj4EyFkvUSYsCznBULh+suEoPcYoaFo2+W1AqHovRehtcMrhC5L5cwl\nBK3rCZVUKwhhciyhnuvdDVFxBKEy6jxgX7IsIst61tONEA1fonU0WgmsIdbOuWPAuFp4anyOzPSX\nYEw12XSWuYQrK9sqkGAqB7eZwXsvPNIy4PaMe8WiaHd/BhhnZv1zIQUvyYVC6s31wHp49mb4+Ceh\npmnerx54GrKzYEkefrIFj1dWI/zbvfBgN6jZo8XnM8DdIWC9SPgGEhHZqUQVsjbrEmUzu7LFh5Pc\nfVIko+miQrhK3wTV+8LAfFi5WZw0S/8Bspe7e7l9ZMpI3wsHDoaJNMeGwYSGnXcS+lSdXm4EhPfu\nAbSOG0boR7WQEJCOIQSZDwghq2nm6gvADaXP7d/iMYqE2qp5hGXGuwltGs4lbKw8j9DCoZbQTGAK\nIQxeSghmzwJv0dwaocl6Qhg7kTBHlQO+Qvg2vpY4OY6iuGEHw7bPqGVXcid04mw5B1UPxKkl327f\nzJ4GsQEZOHUWPOBkD4nx96oiF9HcTR4gR5w7GUi21XnzwCPgOfjp5l7KX9rzb4v3/XP3opmd9gH8\nz//Bv/SCYgJ8BaTjcF8OvuLuq7f0ccuc52kzO/tmuKkPJPeE6jrIzQq1WA9k4QK1LRCR7Y2ZHUN4\nA9xqUYWshYRr9psMIrxDt+LuV0Z0/i7PzPaB5BQ4sTscaM3LX2uBu74Ai4aY2Vkt35ys1KSjxceH\nJ+AXUH14CB5tl6zihAqgqwmLZ22LyYs0bxNTzhhCL6hjCLNNTeFjCaFnVQ/CVXo3EdorjCgdN4sQ\noIYTQtN5hLA3o/S5lYROT3mgD2H2bb/SfacRZoXuLP19cOlc8wmBbwwhAI4GfkeYj5pBjAaOp8C4\nFvn+bUJ8LKeKMG04nzDXRulVKLCuNK5K/zWWFaAwx93XmNlRwKFGww/h2uNgUAH2SMVYC7yO4exH\njmzp0eYCTwIr4H2H/6twgg7l7jngm2b2/RXhhUsAr+fc22tgujXnedDMdl0Cpy4JX+Q64F53f68j\nzyMisq2UJn4mNX1sZt/b0seIKmS9DOxtZnsQ1n/OJawXyQbpq+CobnBIm2TUA/hcDVx9ImQPN7OX\ngG9A6gvAHmbxPCSeKG1APDFP1kLgqFR2XE2IEXPYeKuXdwlF5G3rj5r0JESRbOn+JxDmfp4t3X4X\n4UrDLwC/JSyEDSb0utqdEMb+SghyRxNC2luEGahs6bimXuvLCO0ZwlWCoR7sdsLsXqE0xs/T3LU9\nARxCjEn0IrthPqtJI6HxRNOjtdzNcAVhIXQI8A9CF7D+hMXKXYGFvEYId21lgRczkL0OQmdvQso7\nMVzROe9smH9+d3z8eZBcS4iYD5VetX7ASsgW4GR339RVAB3K3Rto/sJFdY48oXjvnijPIyKyvYgk\nZLl73sy+SrjePw78UVcWbthouRfQGxLHw5gK61JJ4IgaeOp2sN3DjFOBEGAOTsGUk8PszgTC/Eil\ngNUkTlgMa2kBYbZoApWKtkNdVw9Cq86BhPmfBwiTFAVCfl5EaL1wJiEkLSaErRzh6sRehOL7h0of\nQ1+K9MB4jzsp0psiudJjxgjLkqeXju1DWG6spC/9MZYTSt8PKX32beAJQiQaTKge6xv+5FZA4yKo\n2Qdi54DNILQ37Vc6toos8GDpuY5o8dqsA+7AyFYZTK4yeyQD33X3aQDuXgfcaGZ/bYQH/gFHHge1\nny2N43XwJ0Nivdzd32znSYmIyA5C2+psA6VwdTGk/xsKu4f9dGuT0N4esXMItUznEmaFCoRZoAcJ\n8y6fI4SnmYSIcWGFxykSmmLWE/pU1RCCUT0hViQo3w3AgVsJM0wNhCC1iNCM9OTSY44pHXdi6T5N\ns1GL2HANHf2BEcRZjfEaB1LkFArECLHqjtIze58EYbJzBqGn1a6EmbDLqfy7
wIuM4HEayGWXQSID\nMS+N9IDS6OKE2axfQCYbBj3D4KYrIFFVGvENpWOThHm9LPA8KRpJU2SgQz3GB3YAcHrpWszp4E9A\nQw7OcPdHW44qtNLg4jT8Zwb2Miim4KkM/NDdn67wZEREpAvTtjpdUAhYqVuh5+kwMR2W7tYR6ona\n6y60ljB71NSOMkGoW3qUsGzXdL99CbNE82ndUrPJa4TYMJEQIz4k1EQtJ8wwrSEsC46nuc1DljAX\nNI9Q1XRs6bZBhLqupiW8pqsBm/QjzPwcCTzPSeR5nEUMZyl9KHAQTu+Wr02Lex9BgancS45/JoS2\nhYQFvzcJvdjbKpLieQ4lx1OQaQS+CqnebDwv93b43Bx3/y8z6xkDb9qU5zlCRdhRbe4zniwfkOUW\nZrEHcBrNG/nEgXFgA6DmJvi7me1aWo5rHhzcnAldU83BG1X4LSKy01HIipCZ1SbggTw1R8M/07yP\nXU/ClWhvEa7Ka8sJbQyOaPP5dYT375bl3AlCv6vbCV3QRxGWEhsIpXHPEArPmxpuFwj9q6oIdVKZ\n0jE/IwS2AmEWrWkz5paxqMkMQpuD5aXnQmlcjwPLSbOIz1BgT8L2NjnyfJzW32w5wixWVemeh+Ek\nWcezXA2MpsBgQqi7v3SOIa3uHecePkYd/YDFIQF+70/wnbOhZjAhaOUJDSMehLpc2PAQYJ1BwwpI\n9iHMA/57mWfY1Ovewc5g4y5jlEY0EJgfCsj+bGbD0/DdWCg2i8UgY/DHHPwvW3F1oIiIbN8UsrZA\naRnoEMKa22x3X9jOsckUPBknNSbPcTQHrCZHE8qu+9C68LwprOQJoafVo5Zub9v8ciihfPsZwkxX\nihBj+hLmXRYQlgdXE/pf9STkgnTpPElC/VHTdXZHEa7+m8fGIesDQqPSswj73o4tjXcatWSZQK5V\nGf4nCJVfvyoduQuh8PzV0iMXCF24rgfGUeQ8irzOqyzgdVZTpEj+Hrjpk3H6xAoMI856jFnsiXMW\nOe6FhjjcmnP/Sczsg5vh/6WhR3corISUwcwcfNndX4bQ1iBpdu2z8LVTIV0kXAdZzmJCzCsXsJoM\nh+6L4EgzeycJjxwB1YdAvBuwChLPw5emw2fNbGxTc14REdk5qCarHWZ2AKT+HeLjwHtCrhfUFqC2\nCCvSEHsWMl9y9zll7vuZAXDdCuLdcvw75ffie51QSL4boV6qgdDCIAv8Cxt3UnfCVXyfoPzSIIQL\nuxKEgvbrCcuDSXpTZB1Onp6EgNeTMLnyHCGAjSUs81WXzvMGcF/puINLj/kuoah9AgleYDCrWAn0\nx3kf51zKb1C3mFBcPpKwbc1AwuLkboTZrMGEqrOXCB1sjTDPNxc+yLgPMrP94zBlF6geAsn9Sq/U\nZFi/Ct7NhieaKz2ZWYQmXL2ABe7+btvxmFmfJEw/FHadAckLaN3hqslbhC6aF1R4pQGeBZ8Evzc4\n+2zoW66N6bNQmAxTG93b7qTdLjPra3BeAoYXYEURbnH3mVvyGCIi0jFUk9WBzFLfCxsrj0vC0MSG\na8R4BziJEBFeOg6enmpmh7j7vJb3r4Kvj4du925YuCrnAELkuIYwnzKbMEP0BKFeqm3IMkLj/IcI\nW9y0nR1bSIgGXyLEAwP+kxS/41OsZhowm5XU8zRhtqsnIbD1KZ3z6tI4MoQ5ppGE+qtJQAyjO3G6\nE+NhxlPkKIr8jbDguYQw93UmMIxQ8u6EubC7gVMIc2VvEqrDmp7ZAYRwdRitN0N+BgrzwpQZ7j7T\nzPZYCZesgy+8Dj0MFuahJgb7joSfxsKrZw6vZ+DT7v5KhRcdd19pZmNegevycMqzED+zzHGDCF3C\n1lM+IjvwKqzPw/IBkK7UJ34cxCfDaDMb7u5vVRpXS0mzyxPwg+FQ3B1q1kH+VbisymxSBs4uXc0o\nIiJdmEJWGWZ2FvT4T7i0unWjzmGEeqXbCZ3Gx8eg0AOmXE2ojd7AYciuhIW8WbyOb1Ra3eQdwrJg\nFXA4odapgbAFze6woX95k10Jy37/R5iR2osQAGcQ6rhGEuaHcoS+UsuJs543CQHnDIr8g9Vk+SQh\n4jR1kfgkMe6hN29zKgUWAjOYyWpgAhn6A3U0kCYsKiZLo3yfsAS4B2FG6s7SiPuURmmEULUfoWfU\nYFpHxyo2jqDrgSmQycBVG17P0Jn858DPzaxnEl47HHY7GhJN38QF4Dk4eHLYkHyku6+o8KLj7kuB\n08xsxCx45jHo9XFINMXWOuAhaIzB+geg9tNQ3fYShalQXA8rDapHVO7oSiK8ZoW3wsroJkNWwuzS\nWrjyYqjq1eLTx0HiLjj2XXjWzH5DyLBPf5S9B0VEJDoKWWVVfR9OqSn/vjmUEIpeJez3NzYOk08w\ns74t39QNPlwL/ceT5x0mk2MErVtiQoghTxH6Qr1I8zYyowjNBa4FxtG8x+B0YDpDyLM/WSbzEPVA\nASPEnv6EWajxhOXH9cBt5MiTJcxb9QIuIs/t3M9qHifOXsTI4MxhKHAmBdKlM84hw+rS/fZqM3In\nzH3tQ1hYvJBwbeEqQorYmzAztRshRL0ITGbjRhNzaH6VC4S5vIegrgC/cPdXy3wBiMEXh0K/CW2+\nf+PAxyGxEnq9Af8GfKfc/Vs9D/dZZnbAy3DjS3D07pApgC0OmzHfnoXL5sA9f4DRR0G3/oTrPqdC\n/RxYn4MTDC4utLdLdHhuXnqK7TKzeBL+9zNQ06vNbQngLKj6BRw4CK5ZDfn1UGdmF7v7Q5t6bBER\n2bZUk9WGmfWBxCL4VoqKe9jNI0SMfy59/Os1sOr4puJqgJjZ1/eFH30GaqYBD5CkyKEUN+zx9ybG\nSyTJUyTeolaq5ebJCwg9sBYDcYzVXECOPQhB5k7CDFJfYCFJ3iOOcTBODxK8j/M2B1NgAk6MMC30\nVTZcNcf/g2wSbDwkh9N8nSCExg+PEDpiPUCYHxtDmIVaQghM6wn93q8Drijdz2neDbiWMKvVdFnd\nhbSufVoP/AZyBVhvUF2EeBJmNcL33f2uCi8+VWYLPgeDB1e4fQnwJ1iRce9X6THKMbOBpadZAKZ4\naeuZ0gUPp1fBZQ57GHyYgWsdbixtr3N0T7j/69Ct3Dd0I/BzyORhL3dfVOaQlmM4qg/c/7WN14o3\neLI0wOMJ34m3QkM29Ot6ZEuer4iIbD7VZHWMNMQLlQMWhHqmpkkJBxoShP4KGzjcMAe++SJUjYXY\nIHI8z4vM4RVyQAN5n0jB3gNWUcRYySJewluFrCE0l5K/SzW3s4RQqLSCEHzeJixiziCXS5JjDM8n\nM8ToS4HRNF859xJhqa6ptqgRyEGxAHc9A6cXoXoQxEqNH+rnQDEG7/+9dInjdFg6PVT8JxOwJAtv\nxeGwX0OvGMRyhLk0I8y9HUqoEMuyYfHTU6WZniKhT/39UF+EX+bhu4RVx1zefc2mvkB56Nd2TrCl\nvuG8u2zqcdpy9w8ov8dmnpBp76x
w12caYNGLMHRcm8ZnDjwCmTg8mNtEwCrp02MTG6z3IPQXM8IM\n46eh+k74nZkN1UbMIiJdh0LWxpZBMQPLqstfcwZh/qBpD725QHEZYaVrg9IMxxFPwGMvQL+DoFsf\nCraMwrrFUEjB4p4w4kzCzM+LOGEO5kVCGXhLa4hzNx8jy3LCtX4jwkB5E3gKnm2EH6Tga0vxCWdS\nqGkKU0VCL6hJwPktHnEK5BPwUAbOy8GRk+DrsTCNVt8INzr82d0/LHWrJ1vhzdvMUil4ciYceWCL\nz8cJoQ7gTmiogxnXwKgqKObCzYtL29LcXDqsYv1UW3H48EOoblut1iRcT8nazX28j8rd3cwmPgHP\nL4DuY6GmJ+Hr8yzULYM52eY+XZsyfxkkilSO+UtpPeu4N5CCfpkwCzd1q5+IiIh0KC0XlmGW/BEM\nuwzOrdq4zKYe+D1hu5tuwPX1sO4Sd7+1/GOZAcfF4RMxSOXCJr13xeAb+8O3zwo9EygSmiPcQZIc\nfclyKFBFnDkYr3EMeca3meBYCNwE8xrc9yqdK5WCXxfhgkGQq4b0e5BKEzbOGUQo6J4C+ZdgVQ4O\nbq/X1+a/XnZcFdz3z1DTdoZpFnAnrM2HCbf1hFKtDPD+1s66JMyuHAlXnNHchL2VByE7Da7Jun9j\nax5/a5lZL4OL0vDlIvSOwcLGsJXPre6e2czHsDS89SnYZ0SZ2+uA3xDaxLYMWjfB2jlwUXvLrCIi\nsvW2JrcoZJVhZrWQeh6G7g0TqsICVNMi10OEj2sbQ+US33HP/XwrztEvAfPOh9qWtUV5wuzUgyQL\nGWLFEeSSJ1KkbRE0wKOQmwrXZt2/2uaxexP2pqkGBqbgohj0rwqF0qk43JOByzZVH7Ql4mYXx+C3\no4C9oSoHzID170EuBye2rFf7qMysXxJmHg99xkKs6ZvICXsKPggf5uCAjgiQncHMjk7Bg2dCzb40\nx/wVhJYSexO6oLX0G1i7Ek7T3ohdj5mNIJQ3pgmXAT/i7pu8CEJEuhaFrA4UglbiO8CXIRmHfBxi\nqyE/H5KrIPcKFK79KG/kZnZSEu46DJKjIVlFKHWfBHVr4cks3NMXrr4Uatt2xFoGXBc2KB7t7u9s\n4jxGuCyyFpjvm1H3tJXPZ/c4fCkVWsZnG0Kvi1ui6OlkZsNS8EAV7HYAVBvYG1BfD8uz8Al3f7Oj\nz7ktmdmxafhjCvoNAFsLtesI7WIPp/X86kLgRliRg/568+46zGyXdKjjO2wkxNKQmA31a6A+F3q5\nPdvZYxSRzaeQFQEzSxIaVmWBxR1dWGxmw5LwDYOzHNJxeKsxbCR4F6FY/PpqOPcYqB3Khv34Cs9B\nJgeXFt3/1pHj2Z6UwuORwDGE3DGZ0Ddqhyj+Lj2/scCeCfjaEDjoXKhOtThmFXAD1NfBVwvuN3TO\nSKWt0rZar46GfU6CVMvi19nAHSFoHeHuMzprjCKyZRSydkClN9pTquA/C3AgkDd4IAs/d/fXOnt8\nsm2YWToNNzl8chTEe0LyA6ibAzGHb+bdf93ZY5RmZnbOAPjjFyq09XgB/Cl4uNH9lG0+OBHZKgpZ\nIjs4M9sLOCcOfQphM8lbo1r+la1XYzbpZDh6VIXbG4GfQbYA/dx9m10JKyJbT32yRHZw7j4X+Eln\nj0Pa57Bbe43aqoA05OrDDlQKWSI7qPY6boqIyNZ5v73Gbw1AJvTv3ez+cCKy/VHIEhHpYI1wzRRY\nX2nn7lehmICH3H1dhUNEZAegkCUi0vHu+xDeuhcy2RafdMKG6pOgPgPf6pyhici2osJ3EZEImFn3\nNNxchBP2hWIaYu9CvgFWZOHsjmzQKyLR09WFIiJdjJkNASYSOr5PA57dUXq5iexMFLJEREREIrA1\nuUU1WSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIBhSwRERGRCChkiYiIiERAIUtE\nREQkAgpZIiIiIhFQyBIRERGJgEKWyE7OzHrHza6oMpuXMltTbTY7ZvY1M+vW2WMTEdmeae9CkZ2Y\nme2RhBf2gR6HQXUvYAUwBerfg8VZONzdl3f2OEVEOps2iBaRzWZmloKZx8A+R0C85W0OPAbZV+C5\nRvfjOmeEIiJdhzaIFpEtcXgaBo9rE7AADDgWUg6Hm9nQThibiMh2TyFLZOd17AFQVemHQBLYGwrA\nsdtwTCIiOwyFLJGdVywWJq0qiofb9XNCRGQr6IenyM7rhZlQX6kqswDMDiHr+W04JhGRHYZClsjO\n64k6WP16qHPfyItQcJjl7q9v64GJiOwIdHWhyE7MzEYlYPJYqD4Ukr2AlcDzkH0N1uTgMHef19nj\nFBHpbGrhICJbzMyGJOGbRfh8AWoTsBa4Lg8/c/elnT0+EZGuQCFLRD4SM4u7e6GzxyEi0tWoT5aI\nfCQKWCIiHUchS0RERCQCClkiIiIiEVDIEhEREYmAQpaIiIhIBBSyRERERCKgkCUiIiISAYUsERER\nkQgoZImIiIhEQCFLREREJAIKWSIiIiIRUMgSERERiYBCloiIiEgEFLJEREREIqCQJSIiIhIBhSwR\nERGRCChkiYiIiERAIUtEREQkAgpZIiIiIhFQyBIRERGJgEKWiIiISAQiCVlmdqWZfWBm00p/JkZx\nHhEREZGuKhHR4zpwlbtfFdHji4iIiHRpUS4XWoSPLSIiItKlRRmyvmZmM8zsj2bWK8LziIiIiHQ5\n5u5bd0ezx4D+ZW76b+AFYHnp4/8BBrj7JW3u7+6u2S4RERHp8rYmt2x1TZa7n7A5x5nZ9cB9FW67\nssWHk9x90taO5/+3d38xlt51Hcc/X7cuthUUQ9L/hmpKYrmwJbGQUGGioaleUHqBRRJFMaZJxQtj\njLYktldGYvRGgyERkQutqUZIm1rt1rAJGBAJTW0oTdtIdYulcgGJFHC37deLOZDpOrvb+fPtOWf2\n9Uomc57zzDzP9+Lk5J3nmfMbAID9UlUbSTb2dIzdXsk67UGrLurupxePfyPJT3T3u0/6GVeyAIC1\n8LJeyTqDD1TVVdn8lOGXktw8dB4AgJU0ciXrJZ3YlSwAYE3splus+A4AMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANEFgDA\nAJEFADBAZAEADBBZAAADRBYAwACRBQAwQGQBAAwQWQAAA0QWAMAAkQUAMEBkAQAMEFkAAANE
FgDA\nAJEFADBAZAEADNh1ZFXVO6vqC1X1fFW94aR9t1bV41X1aFVdt/cxAQDWyzl7+N2Hk9yY5ENbn6yq\nK5PclOTKJJckeaCqXtfdL+zhXAAAa2XXV7K6+9HufmybXTckubO7T3T3k0meSHLNbs8DALCOJv4m\n6+IkT23ZfiqbV7QAAM4ap71dWFVHkly4za7buvueHZynT3H8O7ZsHu3uozs4JgDAiKraSLKxl2Oc\nNrK6+227OOaXk1y2ZfvSxXPbHf+OXRwfAGDU4sLP0e9sV9XtOz3Gft0urC2P707yrqo6XFWXJ7ki\nyWf36TwAAGthL0s43FhVx5K8Kcm9VXVfknT3I0nuSvJIkvuS3NLd294uBAA4qGpZ/VNV3d115p8E\nAFiu3XSLFd8BAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsA\nYIDIAgAYILIAAAaILACAASILAGCAyAIAGCCyAAAG7DqyquqdVfWFqnq+qt6w5fnXVtW3qurBxdcH\n92dUAID1cc4efvfhJDcm+dA2+57o7qv3cGwAgLW268jq7keTpKr2bxoAgANi6m+yLl/cKjxaVdcO\nnQMAYGWd9kpWVR1JcuE2u27r7ntO8Wv/leSy7v7a4m+1Pl5Vr+/u/9nm+Hds2Tza3Udf2tgAAHOq\naiPJxp6O0d17HeITSX6zuz+/k/1V1d3tXiMAsPJ20y37dbvwuyetqtdU1aHF4x9JckWSf9+n8wAA\nrIW9LOFwY1UdS/KmJPdW1X2LXW9N8lBVPZjkb5Lc3N1f3/uoAADrY8+3C3d9YrcLAYA1sczbhQDA\nCquqc6vql8+r+sy5VY+dW3VvVV1XVVpgiCtZAHDAVdUPH04+dXHy6jcm3//KJF9J+p+TZ59NPnU8\nuaG7jy97zlW2m24RWQBwgFVVHU4efUvyo9cmh7buey7Jnck3jyUf+d/u9y1pxLXgdiEAcLKfOj+5\n+M0nBVayuVjmDcl5LyTvrapXLWG2A01kAcAB9r3JO65Ozj/VJZhXJbkwOZHkLS/jWGcFkQUAB1gl\n571iy3qW23nFi76xX0QWABxgx5PPPJY8e6r9J5IcSw4neejlm+rsILIA4GC78z+y+Y+Ft/OvyQvf\nk3y+u594OYc6G4gsADjAuvsbzyW/8BfJtx7M5pWrJPlGkgeS5z+RfP3byS8ub8KDyxIOAHAWqKpr\nvy/5vRPJGw8nJ44nhw4lf3s8eX93/+ey51t11skCAE6rql6dzQ8VfrW7v7nsedaFyAIAGGAxUgCA\nFSGyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaILACAASILAGCAyAIA\nGOkZzZ4AAARrSURBVCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABogsAIABIgsAYIDIAgAYILIAAAaI\nLACAASILAGCAyAIAGCCyAAAGiCwAgAEiCwBggMgCABggsgAABuw6sqrqD6rqi1X1UFX9XVX9wJZ9\nt1bV41X1aFVdtz+jcjarqo1lz8B68FphJ7xemLSXK1n3J3l9d/94kseS3JokVXVlkpuSXJnk+iQf\nrCpXzNirjWUPwNrYWPYArJWNZQ/AwbXr+OnuI939wmLzX5Jcunh8Q5I7u/tEdz+Z5Ikk1+xpSgCA\nNbNfV5jem+TvF48vTvLUln1PJblkn84DALAWzjndzqo6kuTCbXbd1t33LH7m/UmOd/dfneZQfYrj\nb/s8bKeqbl/2DKwHrxV2wuuFKaeNrO5+2+n2V9UvJfnZJD+95ekvJ7lsy/ali+dOPna95CkBANbM\nXj5deH2S30pyQ3d/e8uuu5O8q6oOV9XlSa5I8tm9jQkAsF5OeyXrDP44yeEkR6oqST7d3bd09yNV\ndVeSR5I8l+SW7nZbEAA4q5T+AQDYf0tdv6qq7qiqp6rqwcXX9cuch9VTVdcvFrV9vKp+e9nzsNqq\n6smq+rfF+4k/U+BFqurPq+qZqnp4y3M/VFVHquqxqrq/qn5wmTOyGk7xWtlxsyx7kdBO8kfdffXi\n6x+WPA8rpKoOJfmTbC5qe2WSn6+qH1vuVKy4TrKxeD+xPh8n+0g230+2+p0kR7r7dUn+abEN271W\ndtwsy46sJPEpQ07lmiRPdPeT3X0iyV9nc7FbOB3vKWyruz+Z5GsnPf32JB9dPP5okne8rEOxkk7x\nWkl2+P6yCpH164v/f/hhl2k5ySVJjm3ZtrAtZ9JJHqiqz1XVry57GNbCBd39zOLxM0kuWOYwrLwd\nNct4ZC3udT+8zdfbk/xpksuTXJXk6SR/OD0Pa8WnMtipN3f31Ul+JsmvVdVPLnsg1sfik/DedziV\nHTfLXpZweEnOtKDpd1TVnyW5Z3gc1svJC9telhf/yyZ4ke5+evH9q1X1sWzecv7kcqdixT1TVRd2\n91eq6qIk/73sgVhN3f3d18ZLbZZlf7rwoi2bNyZ5+FQ/y1npc0muqKrXVtXhJDdlc7Fb+H+q6ryq\neuXi8flJrov3FM7s7iTvWTx+T5KPL3EWVthummX8StYZfKCqrsrm5dkvJbl5yfOwQrr7uap6X5J/\nTHIoyYe7+4tLHovVdUGSjy0WRz4nyV929/3LHYlVUlV3JnlrktdU1bEkv5vk95PcVVW/kuTJJD+3\nvAlZFdu8Vm5PsrHTZrEYKQDAgFX4dCEAwIEjsgAABogsAIABIgsAYIDIAgAYILIAAAaILACAAf8H\nYwJ7l+X9lxkAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Take a look here: http://stackoverflow.com/questions/28160335/plot-a-document-tfidf-2d-graph\n", - "from sklearn.decomposition 
import PCA\n", - "import matplotlib.pyplot as plt\n", - " \n", - "pca = PCA(n_components=2).fit(train_vecs)\n", - "data2D = pca.transform(train_vecs)\n", - "plt.scatter(data2D[:,0], data2D[:,1], s=80, c=train_tags)" - ] - }, - { - "cell_type": "code", - "execution_count": 119, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "LogisticRegressionCV(Cs=10, class_weight=None, cv=None, dual=False,\n", - "           fit_intercept=True, intercept_scaling=1.0, max_iter=100,\n", - "           multi_class='ovr', n_jobs=1, penalty='l2', refit=True,\n", - "           scoring=None, solver='lbfgs', tol=0.0001, verbose=0)" - ] - }, - "execution_count": 119, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Fit a logistic regression classifier\n", - "clf = sklearn.linear_model.LogisticRegressionCV()\n", - "clf.fit(train_vecs, train_tags)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loss after iteration 0: 0.333610\n", - "Loss after iteration 1000: 0.000334\n" - ] - } - ], - "source": [ - "X = train_vecs\n", - "y = train_tags\n", - "y = y.astype(int)\n", - "num_examples = len(X) # training set size\n", - "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", - "nn_output_dim = 2 # output layer dimensionality\n", - "\n", - "# Gradient descent parameters (I picked these by hand)\n", - "epsilon = 0.01 # learning rate for gradient descent\n", - "reg_lambda = 0.01 # regularization strength \n", - "\n", - "\n", - "def forward(W1, b1, W2, b2, x):\n", - "    z1 = x.dot(W1) + b1\n", - "    a1 = np.tanh(z1)\n", - "    z2 = a1.dot(W2) + b2\n", - "    exp_scores = np.exp(z2)\n", - "    # softmax\n", - "    y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", - "    return y_hat, z1, a1, z2\n", - "\n", - "def predict(model, x):\n", - "    W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - "    y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", - "    return np.argmax(y_hat, axis=1)\n", - "\n", - "def calculate_loss(model):\n", - "    W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - "    y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", - "    correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", - "    data_loss = np.sum(correct_logprobs)\n", - "    return 1./num_examples * data_loss\n", - "\n", - "\n", - "# This function learns parameters for the neural network and returns the model.\n", - "# - nn_hdim: Number of nodes in the hidden layer\n", - "# - num_passes: Number of passes through the training data for gradient descent\n", - "# - print_loss: If True, print the loss every 1000 iterations\n", - "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", - "    \n", - "    # Initialize the parameters to random values. We need to learn these.\n", - "    np.random.seed(0)\n", - "    W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", - "    b1 = np.zeros((1, nn_hdim))\n", - "    W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", - "    b2 = np.zeros((1, nn_output_dim))\n", - "\n", - "    # This is what we return at the end\n", - "    model = {}\n", - "    \n", - "    # Gradient descent. 
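An aside on the backpropagation step in this cell: it can start from delta3 = y_hat with 1 subtracted at the true labels because, for a softmax output trained with cross-entropy, the gradient of the loss with respect to the pre-softmax scores z2 is y_hat minus the one-hot targets. A minimal standalone check of that identity (the toy arrays here are invented for illustration, not taken from the notebook):

    import numpy as np

    np.random.seed(1)
    z2 = np.random.randn(4, 2)   # hypothetical pre-softmax scores: 4 examples, 2 classes
    y = np.array([0, 1, 1, 0])   # hypothetical integer labels

    def loss(z):
        # averaged cross-entropy, same form as calculate_loss above
        p = np.exp(z) / np.sum(np.exp(z), axis=1, keepdims=True)
        return -np.log(p[range(4), y]).sum() / 4.

    p = np.exp(z2) / np.sum(np.exp(z2), axis=1, keepdims=True)
    delta3 = p.copy()
    delta3[range(4), y] -= 1     # the notebook's trick: gradient of the summed loss w.r.t. z2

    # finite-difference check on one entry; divide by 4 to match the averaged loss
    eps = 1e-6
    z_plus = z2.copy();  z_plus[0, 0] += eps
    z_minus = z2.copy(); z_minus[0, 0] -= eps
    num_grad = (loss(z_plus) - loss(z_minus)) / (2 * eps)
    print(abs(num_grad - delta3[0, 0] / 4.) < 1e-6)   # True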
For each batch...\n", - " for i in range(0, num_passes):\n", - " # feedforward\n", - " y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", - " \n", - " # Backpropagation\n", - " delta3 = y_hat\n", - " delta3[range(num_examples), y] -= 1\n", - " #print [range(num_examples), y]\n", - " dW2 = (a1.T).dot(delta3)\n", - " db2 = np.sum(delta3, axis=0, keepdims=True)\n", - " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", - " dW1 = np.dot(X.T, delta2)\n", - " db1 = np.sum(delta2, axis=0)\n", - "\n", - " # Gradient descent parameter update\n", - " W1 += -epsilon * dW1\n", - " b1 += -epsilon * db1\n", - " W2 += -epsilon * dW2\n", - " b2 += -epsilon * db2\n", - " \n", - " # Assign new parameters to the model\n", - " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", - " \n", - " # Optionally print the loss.\n", - " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", - " if print_loss and i % 1000 == 0:\n", - " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", - " #print y_hat[:2]\n", - " \n", - " \n", - " return model\n", - "\n", - "# Build a model with a 3-dimensional hidden layer\n", - "model = build_model(3, print_loss=True)\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - 
"collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 2", - "language": "python", - "name": "python2" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} From 609c506db054a4d3cfb6f4f1251a0bbc58816c06 Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 7 Feb 2016 10:01:04 -0500 Subject: [PATCH 21/36] cleaning and updating --- ...ogistic_regression_theano-checkpoint.ipynb | 2 +- logistic_regression_theano.ipynb | 2 +- theano_tutorial.ipynb | 2160 ----------------- 3 files changed, 2 insertions(+), 2162 deletions(-) delete mode 100644 theano_tutorial.ipynb diff --git a/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb b/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb index 3104e2f..28b194d 100644 --- a/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb +++ b/.ipynb_checkpoints/logistic_regression_theano-checkpoint.ipynb @@ -88,7 +88,7 @@ "import theano\n", "import theano.tensor as T\n", "rng = numpy.random\n", - "\n", + "##############\n", "N = 400 # training sample size\n", "feats = 784 # number of input variables\n", "\n", diff --git a/logistic_regression_theano.ipynb b/logistic_regression_theano.ipynb index 3104e2f..28b194d 100644 --- a/logistic_regression_theano.ipynb +++ b/logistic_regression_theano.ipynb @@ -88,7 +88,7 @@ "import theano\n", "import theano.tensor as T\n", "rng = numpy.random\n", - "\n", + "##############\n", "N = 400 # training sample size\n", "feats = 784 # number of input variables\n", "\n", diff --git a/theano_tutorial.ipynb b/theano_tutorial.ipynb deleted file mode 100644 index 3cb4eeb..0000000 --- a/theano_tutorial.ipynb +++ /dev/null @@ -1,2160 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#Theano Tutorial" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# The code is from Theano Tutorial: http://deeplearning.net/software/theano/tutorial/\n", - "# See this tutorial too" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from theano import *\n", - "import theano.tensor as T\n", - "from theano import function" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "5.0\n", - "28.4\n" - ] - } - ], - "source": [ - "# A function to add to scalars\n", - "x = T.dscalar('x')\n", - "y = T.dscalar('y')\n", - "z = x + y\n", - "f = function([x, y], z)\n", - "print f(2, 3)\n", - "print f(16.3, 12.1)\n", - "# T.dscalar is the type we assign to “0-dimensional arrays (scalar) of doubles (d)”" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "# x and y are instances of TensorVariable. 
\n", - "print type(x)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TensorType(float64, scalar)\n" - ] - } - ], - "source": [ - "# x and y are are assigned the theano Type dscalar in their type field:\n", - "print x.type" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TensorType(float64, scalar)\n" - ] - } - ], - "source": [ - "print z.type" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Adding two matrices:\n", - "x = T.dmatrix('x')\n", - "y = T.dmatrix('y')\n", - "z = x + y\n", - "f = function([x, y], z)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "# Again x and y are instances of TensorVariable, but \n", - "# dmatrix is the Type for matrices of doubles. \n", - "print type(x)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TensorType(float64, matrix)\n" - ] - } - ], - "source": [ - "print x.type" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TensorType(float64, matrix)\n" - ] - } - ], - "source": [ - "print z.type" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[ 11. 22.]\n", - " [ 33. 44.]]\n" - ] - } - ], - "source": [ - "# Then we can use our new function on 2D arrays:. \n", - "print f([[1, 2], [3, 4]], [[10, 20], [30, 40]])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[ 11., 22.],\n", - " [ 33., 44.]])" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# The variable is a NumPy array. 
We can also use NumPy arrays directly as inputs:\n", - "import numpy\n", - "f(numpy.array([[1, 2], [3, 4]]), numpy.array([[10, 20], [30, 40]]))\n" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Plural Constructors\n", - "from theano.tensor import *\n", - "x, y, z = dmatrices(3) # creates three matrix Variables with no names\n", - "x, y, z = dmatrices('x', 'y', 'z') # creates three matrix Variables named 'x', 'y' and 'z'" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TensorType(float64, matrix)\n" - ] - } - ], - "source": [ - "print x.type" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Random numbers, etc.: http://deeplearning.net/software/theano/tutorial/examples.html" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "gpu\n", - "float32\n" - ] - } - ], - "source": [ - "print(theano.config.device)\n", - "print(theano.config.floatX)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "# Logistic Regression Example" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Initial model:\n", - "Final model:\n", - "target values for D:\n", - "[0 1 1 1 0 0 1 1 1 0 1 1 0 1 1 1 0 0 1 1 1 0 0 1 0 1 1 1 0 0 1 0 1 1 0 0 1\n", - " 1 0 1 1 1 1 0 1 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 1 1 1 0 0 0 0 1 0 0 1 0 0\n", - " 0 0 1 0 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 0 1 0 1 0 0 1 0 0 0 0 0\n", - " 1 1 1 1 1 0 0 0 1 1 0 1 0 1 0 1 0 0 1 0 1 1 0 0 0 1 0 0 0 0 0 0 1 0 1 1 0\n", - " 0 0 0 1 0 1 0 1 1 1 0 1 1 1 1 1 1 1 1 1 0 1 0 1 0 0 1 1 1 1 0 0 0 1 0 0 0\n", - " 1 1 1 1 1 1 0 1 0 1 1 1 0 1 1 0 1 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 1 0 0 1 0\n", - " 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 1 0 0 1 1 0 1 0 1 1 1 0 1 1 0 0 1 1 0 1 0 1\n", - " 0 0 1 1 0 0 0 1 1 1 1 1 0 0 1 1 0 1 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0\n", - " 0 1 0 1 0 1 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 0 1 1 1 0 0 1 0 0 0 1 1\n", - " 1 0 1 1 0 0 0 0 0 1 1 1 1 0 1 0 0 1 0 1 1 0 0 1 0 1 1 0 1 1 1 1 0 0 1 1 1\n", - " 1 1 1 1 1 1 0 0 0 0 1 1 1 0 1 1 0 0 1 1 1 0 0 1 1 1 0 0 1 0]\n", - "prediction on D:\n", - "[1 1 0 1 1 1 1 1 0 1 0 1 0 1 1 0 0 0 1 1 0 1 0 1 0 1 1 1 0 1 0 0 0 1 1 0 1\n", - " 1 1 0 0 0 1 0 1 0 0 1 1 1 1 1 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 1 1 0 0 0 1\n", - " 1 1 1 0 1 0 1 0 0 0 1 1 0 1 0 0 1 1 0 0 1 0 0 0 0 0 1 0 1 1 1 0 1 0 1 1 0\n", - " 1 0 1 1 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 1\n", - " 0 1 0 0 0 0 1 1 1 0 0 1 0 0 1 0 0 1 0 1 1 1 0 0 1 1 0 0 0 0 1 0 0 1 1 0 1\n", - " 0 1 1 1 1 1 1 0 1 1 0 1 1 1 1 0 1 1 1 1 1 0 1 0 1 0 0 0 1 1 0 0 0 0 1 0 0\n", - " 1 1 1 1 1 0 1 0 1 0 1 1 0 0 1 0 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 1 0 1 0 1\n", - " 0 0 1 1 1 0 1 0 1 1 1 0 1 0 1 0 1 1 1 0 1 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0\n", - " 0 1 0 0 0 1 0 1 0 1 1 0 1 1 0 1 1 0 
0 0 1 0 0 1 0 1 0 1 0 0 1 0 1 0 0 1 1\n", - " 0 1 0 1 0 0 0 0 0 0 0 1 1 1 1 0 1 0 1 1 1 1 0 0 0 1 1 0 1 1 0 1 0 0 1 0 0\n", - " 0 0 0 1 1 1 0 1 1 0 1 1 0 1 1 1 0 1 0 0 0 1 1 1 0 1 1 1 1 0]\n" - ] - } - ], - "source": [ - "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", - "import numpy\n", - "import theano\n", - "import theano.tensor as T\n", - "rng = numpy.random\n", - "\n", - "N = 400 # training sample size\n", - "feats = 784 # number of input variables\n", - "\n", - "# generate a dataset: D = (input_values, target_class)\n", - "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", - "training_steps = 100\n", - "#np.asarray(your_data, dtype=theano.config.floatX)\n", - "\n", - "# Declare Theano symbolic variables\n", - "x = T.matrix(\"x\")\n", - "y = T.vector(\"y\")\n", - "\n", - "# initialize the weight vector w randomly\n", - "#\n", - "# this and the following bias variable b\n", - "# are shared so they keep their values\n", - "# between training iterations (updates)\n", - "w = theano.shared(rng.randn(feats), name=\"w\")\n", - "\n", - "# initialize the bias term\n", - "b = theano.shared(0., name=\"b\")\n", - "#print b.eval()\n", - "print(\"Initial model:\")\n", - "#print(w.get_value())\n", - "#print(b.get_value())\n", - "\n", - "# Construct Theano expression graph\n", - "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", - "prediction = p_1 > 0.5 # The prediction thresholded\n", - "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", - "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", - "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", - " # w.r.t weight vector w and\n", - " # bias term b\n", - " # (we shall return to this in a\n", - " # following section of this tutorial)\n", - "\n", - "# Compile\n", - "train = theano.function(\n", - " inputs=[x,y],\n", - " outputs=[prediction, xent],\n", - " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", - " allow_input_downcast=True) # added downcasting...\n", - "predict = theano.function(inputs=[x], outputs=prediction)\n", - "\n", - "# Train\n", - "for i in range(training_steps):\n", - " pred, err = train(D[0], D[1])\n", - "\n", - "print(\"Final model:\")\n", - "#print(w.get_value())\n", - "#print(b.get_value())\n", - "print(\"target values for D:\")\n", - "print(D[1])\n", - "print(\"prediction on D:\")\n", - "print(predict(D[0]))\n", - "#----------------------------------------------------------" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "25000\n", - "200\n", - "200\n", - "7142\n", - "6994\n", - "0\n", - "200\n", - "200\n", - "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 
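- "# Quick illustration (ours) of the DataDoc namedtuple used above: each record\n",
- "# pairs a tag with a token list, and the fields are accessed by name.\n",
- "# The values here are made up for the example:\n",
- "from collections import namedtuple\n",
- "DataDoc = namedtuple('DataDoc', 'tag words')\n",
- "doc = DataDoc('doc_0', ['i', 'love', 'python', '!'])\n",
- "print doc.tag        # -> doc_0\n",
- "print doc.words[:2]  # -> ['i', 'love']\n",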
0.]\n", - "200\n", - "200\n", - "(200, 7142)\n", - "\n", - "\n", - "\n", - "Done fitting classifier on training data...\n", - "\n", - "================================================== \n", - "\n", - "Results with 5-fold cross validation:\n", - "\n", - "================================================== \n", - "\n", - "********************\n", - "\t accuracy_score\t0.715\n", - "********************\n", - "precision_score\t0.765432098765\n", - "recall_score\t0.62\n", - "\n", - "classification_report:\n", - "\n", - " precision recall f1-score support\n", - "\n", - " 0.0 0.68 0.81 0.74 100\n", - " 1.0 0.77 0.62 0.69 100\n", - "\n", - "avg / total 0.72 0.71 0.71 200\n", - "\n", - "\n", - "confusion_matrix:\n", - "\n", - "[[81 19]\n", - " [38 62]]\n" - ] - } - ], - "source": [ - "# Get text data\n", - "#----------------\n", - "from collections import namedtuple\n", - "\n", - "all_data = [] \n", - "DataDoc= namedtuple('DataDoc', 'tag words')\n", - "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", - " for line_no, line in enumerate(alldata):\n", - " label=line.split()[0]\n", - " word_list=line.lower().split()[1:]\n", - " all_data.append(DataDoc(label, word_list))\n", - " #print my_data[line_no]\n", - " #break\n", - "train_data = all_data[:25000]\n", - "test_data = all_data[25000:50000]\n", - "print len(train_data)\n", - "\n", - "train_data=train_data[:100]+train_data[12500:12600]\n", - "test_data=test_data[:100]+test_data[12500:12600]\n", - "print len(train_data)\n", - "print len(test_data)\n", - "#--------------------\n", - "# Let's get a dictionary of all the words in training data\n", - "# These will be our bag-of-words features\n", - "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", - "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", - "from collections import defaultdict\n", - "def get_space(train_data):\n", - " \"\"\"\n", - " input is a list of namedtuples\n", - " get a dict of word space\n", - " key=word\n", - " value=len of the dict at that point \n", - " (that will be the index of the word and it is unique since the dict grows as we loop)\n", - " \"\"\"\n", - " word_space=defaultdict(int)\n", - " for doc in train_data:\n", - " for w in doc.words:\n", - " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", - " # but that doesn't matter.\n", - " word_space[w]=len(word_space)\n", - " return word_space\n", - "\n", - "word_space=get_space(train_data)\n", - "print len(word_space)\n", - "print word_space[\"love\"]\n", - "#-------------------------\n", - "import numpy as np\n", - "\n", - "def get_sparse_vec(data_point, space):\n", - " # create empty vector\n", - " sparse_vec = np.zeros((len(space)))\n", - " for w in set(data_point.words):\n", - " # use exception handling such that this function can also be used to vectorize \n", - " # data with words not in train (i.e., test and dev data)\n", - " try:\n", - " sparse_vec[space[w]]=1\n", - " except:\n", - " continue\n", - " return sparse_vec\n", - "\n", - " \n", - "\n", - "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - "#test_vecs= get_sparse_vectors(test_data, word_space)\n", - "\n", - "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", - "print len(train_vecs)\n", - "print 
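- "# Side note (our suggestion, not used in this notebook's pipeline):\n",
- "# scikit-learn's CountVectorizer can build an equivalent binary bag-of-words\n",
- "# matrix in a few lines, fitting the vocabulary on train and reusing it on test:\n",
- "from sklearn.feature_extraction.text import CountVectorizer\n",
- "vectorizer = CountVectorizer(binary=True)\n",
- "train_bow = vectorizer.fit_transform([\" \".join(d.words) for d in train_data])\n",
- "test_bow = vectorizer.transform([\" \".join(d.words) for d in test_data])\n",
- "print train_bow.shape   # (200, vocabulary size)\n",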
len(test_vecs)\n",
- "#-------------------------\n",
- "# We would usually extract the tags automatically from the input data file.\n",
- "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n",
- "# negative/0.0; then the next 12500 are positive and the fourth chunk is negative.\n",
- "# So basically the train_data has 25K (with the first half positive and the second half negative)\n",
- "# and test_data has the same setup for its class labels. \n",
- "# The rest of the data in the file is unlabeled and we don't use that part.\n",
- "# We could write code to extract the labels automatically, and we will do this later based on a\n",
- "# standardized format we will work with; for now we will hard-code the labels.\n",
- "\n",
- "from random import shuffle, randint\n",
- "\n",
- "\n",
- "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n",
- "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n",
- "\n",
- "\n",
- "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n",
- "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n",
- "# Side note: If the first token in each line were the tag, we could get tags as follows:\n",
- "# tags= [train_data[i].tag for i in range(len(train_data))]\n",
- "print train_tags[-1], train_vecs[-1][:10]\n",
- "print len(train_tags)\n",
- "print len(test_tags)\n",
- "#--------------------\n",
- "train_vecs=np.array(train_vecs)\n",
- "train_tags=np.array(train_tags)\n",
- "print train_vecs.shape\n",
- "#--------------------------------\n",
- "# Classification with scikit-learn\n",
- "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n",
- "# Let's use sklearn to train an svm classifier:\n",
- "# (a grab-bag of imports follows; only a few are used in this cell, the rest come up later)\n",
- "#-------------------------------------------------\n",
- "\n",
- "import argparse\n",
- "import codecs\n",
- "import time\n",
- "import sys\n",
- "import os, re, glob\n",
- "import nltk\n",
- "from collections import defaultdict\n",
- "from random import shuffle, randint\n",
- "import numpy as np\n",
- "from numpy import array, arange, zeros, hstack, argsort\n",
- "import unicodedata\n",
- "from scipy.sparse import csr_matrix\n",
- "from sklearn.svm import SVC, LinearSVC\n",
- "from sklearn import preprocessing\n",
- "from sklearn.cross_validation import StratifiedKFold\n",
- "from sklearn.grid_search import GridSearchCV\n",
- "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n",
- "from sklearn import metrics\n",
- "from sklearn.cross_validation import train_test_split\n",
- "from sklearn.decomposition import TruncatedSVD\n",
- "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n",
- "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n",
- "from sklearn.ensemble import RandomForestClassifier\n",
- "from sklearn.linear_model import LogisticRegression\n",
- "from sklearn import cross_validation\n",
- "import gensim\n",
- "n_jobs = 2\n",
- "\n",
- "#train_vecs=array(train_vecs)\n",
- "train_vecs=np.array(train_vecs)\n",
- "train_tags=np.array(train_tags)\n",
- "\n",
- "print type(train_tags)\n",
- "print type(train_vecs)\n",
- "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n",
- "clf.fit(train_vecs, train_tags)\n",
- "print \"\\nDone fitting classifier on training data...\\n\"\n",
- "\n",
- "#------------------------------------------------------------------------------------------\n",
- "print \"=\"*50, \"\\n\"\n",
- "print 
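- "# The evaluation below cross-validates on the 200 training vectors only; a\n",
- "# natural follow-up (our sketch, reusing the variables defined above) is to\n",
- "# score the fitted classifier on the held-out test sample as well:\n",
- "test_pred = clf.predict(np.array(test_vecs))\n",
- "print \"test accuracy_score\\t\", metrics.accuracy_score(np.array(test_tags), test_pred)\n",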
\"Results with 5-fold cross validation:\\n\"\n", - "print \"=\"*50, \"\\n\"\n", - "#------------------------------------------------------------------------------------------\n", - "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", - "print \"*\"*20\n", - "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", - "print \"*\"*20\n", - "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", - "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", - "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", - "#----------------------" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(200, 7142)\n", - "(200,)\n" - ] - } - ], - "source": [ - "# This creates an artficial dataset (code from the Theano tutorial):\n", - "D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", - "#print D\n", - "print D[0].shape\n", - "print D[1].shape" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(array([[ 0., 0., 0., ..., 1., 1., 1.],\n", - " [ 0., 0., 0., ..., 1., 1., 1.],\n", - " [ 0., 0., 0., ..., 1., 1., 1.],\n", - " ..., \n", - " [ 0., 0., 0., ..., 1., 0., 1.],\n", - " [ 0., 0., 0., ..., 1., 1., 1.],\n", - " [ 0., 0., 0., ..., 1., 1., 1.]], dtype=float32), array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", - " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", - " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", - " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", - " 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n", - "(200, 7142)\n", - "(200,)\n" - ] - } - ], - "source": [ - "#But let's use our data to construct D:\n", - "# Let's ensure our x is float32, for use with Theano:\n", - "x= train_vecs\n", - "x=x.astype(theano.config.floatX)\n", - "y=train_tags\n", - "y=y.astype(int)\n", - "# Now create the dataset, and check dimensions, etc.\n", - "D=(x,y)\n", - "print D\n", - "print D[0].shape\n", - "print D[1].shape" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Initial model:\n", - "Final model:\n", - "target values for D:\n", - "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", - " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", - " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - "prediction on D:\n", - "[1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", - " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n", - " 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", - " 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n" - ] - } - ], - "source": [ - "# Use with logistic regression:\n", - "# Logistic Regression: http://deeplearning.net/software/theano/tutorial/examples.html\n", - "import numpy\n", - "import theano\n", - "import theano.tensor as T\n", - "rng = numpy.random\n", - "#theano.config.optimizer='fast_compile'\n", - "\n", - "#N = 400 # training sample size\n", - "#feats = 784# \n", - "feats = 7142 # number of input variables\n", - "\n", - "# generate a dataset: D = (input_values, target_class)\n", - "#D = (rng.rand(N, feats).astype(theano.config.floatX), rng.randint(size=N, low=0, high=2))\n", - "training_steps = 1000\n", - "#np.asarray(your_data, dtype=theano.config.floatX)\n", - "\n", - "# Declare Theano symbolic variables\n", - "x = T.matrix(\"x\")\n", - "y = T.vector(\"y\")\n", - "\n", - "# initialize the weight vector w randomly\n", - "#\n", - "# this and the following bias variable b\n", - "# are shared so they keep their values\n", - "# between training iterations (updates)\n", - "w = theano.shared(rng.randn(feats), name=\"w\")\n", - "\n", - "# initialize the bias term\n", - "b = theano.shared(0., name=\"b\")\n", - "#print b.eval()\n", - "print(\"Initial model:\")\n", - "#print(w.get_value())\n", - "#print(b.get_value())\n", - "\n", - "# Construct Theano expression graph\n", - "p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1\n", - "prediction = p_1 > 0.5 # The prediction thresholded\n", - "xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function\n", - "cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize\n", - "gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost\n", - " # w.r.t weight vector w and\n", - " # bias term b\n", - " # (we shall return to this in a\n", - " # following section of this tutorial)\n", - "\n", - "# Compile\n", - "train = theano.function(\n", - " inputs=[x,y],\n", - " outputs=[prediction, xent],\n", - " updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)),\n", - " allow_input_downcast=True) # added downcasting...\n", - "predict = theano.function(inputs=[x], outputs=prediction)\n", - "\n", - "# Train\n", - "for i in range(training_steps):\n", - " pred, err = train(D[0], D[1])\n", - "\n", - "print(\"Final model:\")\n", - "#print(w.get_value())\n", - "#print(b.get_value())\n", - "print(\"target values for D:\")\n", - "print(D[1])\n", - "print(\"prediction on D:\")\n", - "print(predict(D[0]))\n", - "#----------------------------------------------------------" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Now try the code with different values of \"training_steps\" and see what you get.\n", - "# For example, you can try:\n", - "# training_steps= 100, training_steps=500, training_steps=10000" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# We need to create some functions to load the IBDB data:\n", - "from collections import namedtuple\n", - "from collections import defaultdict\n", - "\n", - "def get_space(train_data):\n", - " 
\"\"\"\n", - " input is a list of namedtuples\n", - " get a dict of word space\n", - " key=word\n", - " value=len of the dict at that point \n", - " (that will be the index of the word and it is unique since the dict grows as we loop)\n", - " \"\"\"\n", - " word_space=defaultdict(int)\n", - " for doc in train_data:\n", - " for w in doc.words:\n", - " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", - " # but that doesn't matter.\n", - " word_space[w]=len(word_space)\n", - " return word_space\n", - "\n", - "def get_sparse_vec(data_point, space):\n", - " # create empty vector\n", - " sparse_vec = np.zeros((len(space)))\n", - " for w in set(data_point.words):\n", - " # use exception handling such that this function can also be used to vectorize \n", - " # data with words not in train (i.e., test and dev data)\n", - " try:\n", - " sparse_vec[space[w]]=1\n", - " except:\n", - " continue\n", - " return sparse_vec\n", - "\n", - "def get_data():\n", - " '''\n", - " \n", - " '''\n", - " all_data = [] \n", - " DataDoc= namedtuple('DataDoc', 'tag words')\n", - " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", - " for line_no, line in enumerate(alldata):\n", - " label=line.split()[0]\n", - " word_list=line.lower().split()[1:]\n", - " all_data.append(DataDoc(label, word_list))\n", - " all_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - " all_tags+=all_tags\n", - " return all_data, all_tags\n", - " #--------------------------------------------------\n", - " \n", - "all_data, all_tags= get_data()\n", - "\n", - "#train_data=train_data[:100]+train_data[12500:12600]\n", - "#test_data=test_data[:100]+test_data[12500:12600]\n", - "print len(train_data)\n", - "print len(test_data)\n", - "\n", - "\n", - "\n", - "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - "\n", - "\n", - "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]+ [ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "\n", - "\n", - "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "\n", - "\n", - "\n", - "# Let's get a dictionary of all the words in training data\n", - "# These will be our bag-of-words features\n", - "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", - "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", - "from collections import defaultdict\n", - "\n", - "\n", - "word_space=get_space(train_data)\n", - "print len(word_space)\n", - "print word_space[\"love\"]\n", - "\n", - "\n", - "\n", - "\n", - "all_data = [] \n", - "DataDoc= namedtuple('DataDoc', 'tag words')\n", - "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", - " for line_no, line in enumerate(alldata):\n", - " label=line.split()[0]\n", - " word_list=line.lower().split()[1:]\n", - " all_data.append(DataDoc(label, word_list))\n", - " #print my_data[line_no]\n", - " #break\n", - "train_data = all_data[:25000]\n", - "test_data = all_data[25000:50000]\n", - "print len(train_data)\n", - "\n", - "train_data=train_data[:100]+train_data[12500:12600]\n", - "test_data=test_data[:100]+test_data[12500:12600]\n", - "print len(train_data)\n", - "print len(test_data)\n", - "#--------------------\n", - "# Let's get a 
dictionary of all the words in training data\n", - "# These will be our bag-of-words features\n", - "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", - "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", - "\n", - "def get_space(train_data):\n", - " \"\"\"\n", - " input is a list of namedtuples\n", - " get a dict of word space\n", - " key=word\n", - " value=len of the dict at that point \n", - " (that will be the index of the word and it is unique since the dict grows as we loop)\n", - " \"\"\"\n", - " word_space=defaultdict(int)\n", - " for doc in train_data:\n", - " for w in doc.words:\n", - " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", - " # but that doesn't matter.\n", - " word_space[w]=len(word_space)\n", - " return word_space\n", - "\n", - "word_space=get_space(train_data)\n", - "print len(word_space)\n", - "print word_space[\"love\"]\n", - "#-------------------------\n", - "import numpy as np\n", - "\n", - "def get_sparse_vec(data_point, space):\n", - " # create empty vector\n", - " sparse_vec = np.zeros((len(space)))\n", - " for w in set(data_point.words):\n", - " # use exception handling such that this function can also be used to vectorize \n", - " # data with words not in train (i.e., test and dev data)\n", - " try:\n", - " sparse_vec[space[w]]=1\n", - " except:\n", - " continue\n", - " return sparse_vec\n", - "\n", - " \n", - "\n", - "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - "#test_vecs= get_sparse_vectors(test_data, word_space)\n", - "\n", - "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", - "print len(train_vecs)\n", - "print len(test_vecs)\n", - "#-------------------------\n", - "# We should usually get tags automatically based on input data file.\n", - "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", - "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", - "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", - "# and test_data with the same setup for class label. 
\n", - "# The rest of the data in the file is unknown and we don't use that part.\n", - "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", - "# later, for now we will hard-code the labels.\n", - "\n", - "from random import shuffle, randint\n", - "\n", - "\n", - "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "\n", - "\n", - "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", - "# tags= [train_data[i].tag for i in range(len(train_data))]\n", - "print train_tags[-1], train_vecs[-1][:10]\n", - "print len(train_tags)\n", - "print len(test_tags)\n", - "#--------------------\n", - "train_vecs=np.array(train_vecs)\n", - "train_tags=np.array(train_tags)\n", - "print train_vecs.shape\n", - "#--------------------------------\n", - "# Classification with scikit-learn\n", - "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", - "# Let's use sklearn to train an svm classifier:\n", - "#-------------------------------------------------\n", - "\n", - "import argparse\n", - "import codecs\n", - "import time\n", - "import sys\n", - "import os, re, glob\n", - "import nltk\n", - "from collections import defaultdict\n", - "from random import shuffle, randint\n", - "import numpy as np\n", - "from numpy import array, arange, zeros, hstack, argsort\n", - "import unicodedata\n", - "from scipy.sparse import csr_matrix\n", - "from sklearn.svm import SVC, LinearSVC\n", - "from sklearn import preprocessing\n", - "from sklearn.cross_validation import StratifiedKFold\n", - "from sklearn.grid_search import GridSearchCV\n", - "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n", - "from sklearn import metrics\n", - "from sklearn.cross_validation import train_test_split\n", - "from sklearn.decomposition import TruncatedSVD\n", - "from sklearn.feature_selection import SelectKBest, f_classif, chi2\n", - "from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.linear_model import LogisticRegression\n", - "from sklearn import cross_validation\n", - "import gensim\n", - "n_jobs = 2\n", - "\n", - "#train_vecs=array(train_vecs)\n", - "train_vecs=np.array(train_vecs)\n", - "train_tags=np.array(train_tags)\n", - "\n", - "print type(train_tags)\n", - "print type(train_vecs)\n", - "clf = OneVsRestClassifier(SVC(C=1, kernel = 'linear', gamma=1, verbose= False, probability=False))\n", - "clf.fit(train_vecs, train_tags)\n", - "print \"\\nDone fitting classifier on training data...\\n\"\n", - "\n", - "#------------------------------------------------------------------------------------------\n", - "print \"=\"*50, \"\\n\"\n", - "print \"Results with 5-fold cross validation:\\n\"\n", - "print \"=\"*50, \"\\n\"\n", - "#------------------------------------------------------------------------------------------\n", - "predicted = cross_validation.cross_val_predict(clf, train_vecs, train_tags, cv=5)\n", - "print \"*\"*20\n", - "print \"\\t accuracy_score\\t\", metrics.accuracy_score(train_tags, predicted)\n", - "print \"*\"*20\n", - "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", - "print 
\"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", - "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", - "#----------------------" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "space_len: 975\n", - "train_vecs.shape: 25000, 975\n", - "dev_vecs.shape: 5000, 975\n", - "test_vecs.shape: 20000, 975\n", - "Subtensor{int64}.0\n", - "... building the model\n", - "... training the model\n", - "epoch 1, minibatch 41/41, validation error 52.083333 %\n", - " epoch 1, minibatch 41/41, test error of best model 50.505051 %\n", - "epoch 2, minibatch 41/41, validation error 52.083333 %\n", - "epoch 3, minibatch 41/41, validation error 52.083333 %\n", - "epoch 4, minibatch 41/41, validation error 52.083333 %\n", - "epoch 5, minibatch 41/41, validation error 52.083333 %\n", - "epoch 6, minibatch 41/41, validation error 52.062500 %\n", - " epoch 6, minibatch 41/41, test error of best model 50.484848 %\n", - "epoch 7, minibatch 41/41, validation error 51.895833 %\n", - " epoch 7, minibatch 41/41, test error of best model 50.328283 %\n", - "epoch 8, minibatch 41/41, validation error 51.687500 %\n", - " epoch 8, minibatch 41/41, test error of best model 50.111111 %\n", - "epoch 9, minibatch 41/41, validation error 51.354167 %\n", - " epoch 9, minibatch 41/41, test error of best model 49.944444 %\n", - "epoch 10, minibatch 41/41, validation error 51.020833 %\n", - " epoch 10, minibatch 41/41, test error of best model 49.641414 %\n", - "epoch 11, minibatch 41/41, validation error 50.500000 %\n", - " epoch 11, minibatch 41/41, test error of best model 49.363636 %\n", - "epoch 12, minibatch 41/41, validation error 50.125000 %\n", - " epoch 12, minibatch 41/41, test error of best model 49.000000 %\n", - "epoch 13, minibatch 41/41, validation error 49.687500 %\n", - " epoch 13, minibatch 41/41, test error of best model 48.661616 %\n", - "epoch 14, minibatch 41/41, validation error 49.125000 %\n", - " epoch 14, minibatch 41/41, test error of best model 48.257576 %\n", - "epoch 15, minibatch 41/41, validation error 48.645833 %\n", - " epoch 15, minibatch 41/41, test error of best model 47.853535 %\n", - "epoch 16, minibatch 41/41, validation error 48.291667 %\n", - " epoch 16, minibatch 41/41, test error of best model 47.585859 %\n", - "epoch 17, minibatch 41/41, validation error 47.979167 %\n", - " epoch 17, minibatch 41/41, test error of best model 47.217172 %\n", - "epoch 18, minibatch 41/41, validation error 47.812500 %\n", - " epoch 18, minibatch 41/41, test error of best model 47.000000 %\n", - "epoch 19, minibatch 41/41, validation error 47.604167 %\n", - " epoch 19, minibatch 41/41, test error of best model 46.737374 %\n", - "epoch 20, minibatch 41/41, validation error 47.395833 %\n", - " epoch 20, minibatch 41/41, test error of best model 46.520202 %\n", - "epoch 21, minibatch 41/41, validation error 47.041667 %\n", - " epoch 
21, minibatch 41/41, test error of best model 46.292929 %\n", - "epoch 22, minibatch 41/41, validation error 46.770833 %\n", - " epoch 22, minibatch 41/41, test error of best model 46.030303 %\n", - "epoch 23, minibatch 41/41, validation error 46.645833 %\n", - " epoch 23, minibatch 41/41, test error of best model 45.813131 %\n", - "epoch 24, minibatch 41/41, validation error 46.125000 %\n", - " epoch 24, minibatch 41/41, test error of best model 45.570707 %\n", - "epoch 25, minibatch 41/41, validation error 45.895833 %\n", - " epoch 25, minibatch 41/41, test error of best model 45.348485 %\n", - "epoch 26, minibatch 41/41, validation error 45.812500 %\n", - " epoch 26, minibatch 41/41, test error of best model 45.121212 %\n", - "epoch 27, minibatch 41/41, validation error 45.666667 %\n", - " epoch 27, minibatch 41/41, test error of best model 44.994949 %\n", - "epoch 28, minibatch 41/41, validation error 45.458333 %\n", - " epoch 28, minibatch 41/41, test error of best model 44.848485 %\n", - "epoch 29, minibatch 41/41, validation error 45.312500 %\n", - " epoch 29, minibatch 41/41, test error of best model 44.676768 %\n", - "epoch 30, minibatch 41/41, validation error 45.229167 %\n", - " epoch 30, minibatch 41/41, test error of best model 44.606061 %\n", - "epoch 31, minibatch 41/41, validation error 45.125000 %\n", - " epoch 31, minibatch 41/41, test error of best model 44.515152 %\n", - "epoch 32, minibatch 41/41, validation error 44.895833 %\n", - " epoch 32, minibatch 41/41, test error of best model 44.378788 %\n", - "epoch 33, minibatch 41/41, validation error 44.833333 %\n", - " epoch 33, minibatch 41/41, test error of best model 44.303030 %\n", - "epoch 34, minibatch 41/41, validation error 44.750000 %\n", - " epoch 34, minibatch 41/41, test error of best model 44.176768 %\n", - "epoch 35, minibatch 41/41, validation error 44.500000 %\n", - " epoch 35, minibatch 41/41, test error of best model 44.080808 %\n", - "epoch 36, minibatch 41/41, validation error 44.458333 %\n", - " epoch 36, minibatch 41/41, test error of best model 43.959596 %\n", - "epoch 37, minibatch 41/41, validation error 44.416667 %\n", - " epoch 37, minibatch 41/41, test error of best model 43.878788 %\n", - "epoch 38, minibatch 41/41, validation error 44.375000 %\n", - " epoch 38, minibatch 41/41, test error of best model 43.813131 %\n", - "epoch 39, minibatch 41/41, validation error 44.354167 %\n", - " epoch 39, minibatch 41/41, test error of best model 43.757576 %\n", - "epoch 40, minibatch 41/41, validation error 44.291667 %\n", - " epoch 40, minibatch 41/41, test error of best model 43.656566 %\n", - "epoch 41, minibatch 41/41, validation error 44.250000 %\n", - " epoch 41, minibatch 41/41, test error of best model 43.530303 %\n", - "epoch 42, minibatch 41/41, validation error 44.166667 %\n", - " epoch 42, minibatch 41/41, test error of best model 43.500000 %\n", - "epoch 43, minibatch 41/41, validation error 44.166667 %\n", - "epoch 44, minibatch 41/41, validation error 44.041667 %\n", - " epoch 44, minibatch 41/41, test error of best model 43.368687 %\n", - "epoch 45, minibatch 41/41, validation error 44.104167 %\n", - "epoch 46, minibatch 41/41, validation error 44.041667 %\n", - "epoch 47, minibatch 41/41, validation error 44.041667 %\n", - "epoch 48, minibatch 41/41, validation error 43.979167 %\n", - " epoch 48, minibatch 41/41, test error of best model 43.126263 %\n", - "epoch 49, minibatch 41/41, validation error 43.979167 %\n", - "epoch 50, minibatch 41/41, validation error 43.937500 %\n", - " epoch 
50, minibatch 41/41, test error of best model 43.035354 %\n", - "epoch 51, minibatch 41/41, validation error 43.875000 %\n", - " epoch 51, minibatch 41/41, test error of best model 43.015152 %\n", - "epoch 52, minibatch 41/41, validation error 43.833333 %\n", - " epoch 52, minibatch 41/41, test error of best model 42.984848 %\n", - "epoch 53, minibatch 41/41, validation error 43.708333 %\n", - " epoch 53, minibatch 41/41, test error of best model 42.868687 %\n", - "epoch 54, minibatch 41/41, validation error 43.729167 %\n", - "epoch 55, minibatch 41/41, validation error 43.708333 %\n", - "epoch 56, minibatch 41/41, validation error 43.645833 %\n", - " epoch 56, minibatch 41/41, test error of best model 42.772727 %\n", - "epoch 57, minibatch 41/41, validation error 43.541667 %\n", - " epoch 57, minibatch 41/41, test error of best model 42.727273 %\n", - "epoch 58, minibatch 41/41, validation error 43.541667 %\n", - "epoch 59, minibatch 41/41, validation error 43.520833 %\n", - " epoch 59, minibatch 41/41, test error of best model 42.691919 %\n", - "epoch 60, minibatch 41/41, validation error 43.520833 %\n", - "epoch 61, minibatch 41/41, validation error 43.500000 %\n", - " epoch 61, minibatch 41/41, test error of best model 42.616162 %\n", - "epoch 62, minibatch 41/41, validation error 43.520833 %\n", - "epoch 63, minibatch 41/41, validation error 43.500000 %\n", - "epoch 64, minibatch 41/41, validation error 43.520833 %\n", - "epoch 65, minibatch 41/41, validation error 43.437500 %\n", - " epoch 65, minibatch 41/41, test error of best model 42.515152 %\n", - "epoch 66, minibatch 41/41, validation error 43.437500 %\n", - "epoch 67, minibatch 41/41, validation error 43.416667 %\n", - " epoch 67, minibatch 41/41, test error of best model 42.474747 %\n", - "epoch 68, minibatch 41/41, validation error 43.395833 %\n", - " epoch 68, minibatch 41/41, test error of best model 42.429293 %\n", - "epoch 69, minibatch 41/41, validation error 43.395833 %\n", - "epoch 70, minibatch 41/41, validation error 43.375000 %\n", - " epoch 70, minibatch 41/41, test error of best model 42.373737 %\n", - "epoch 71, minibatch 41/41, validation error 43.354167 %\n", - " epoch 71, minibatch 41/41, test error of best model 42.368687 %\n", - "epoch 72, minibatch 41/41, validation error 43.312500 %\n", - " epoch 72, minibatch 41/41, test error of best model 42.328283 %\n", - "epoch 73, minibatch 41/41, validation error 43.270833 %\n", - " epoch 73, minibatch 41/41, test error of best model 42.313131 %\n", - "epoch 74, minibatch 41/41, validation error 43.229167 %\n", - " epoch 74, minibatch 41/41, test error of best model 42.282828 %\n", - "epoch 75, minibatch 41/41, validation error 43.229167 %\n", - "epoch 76, minibatch 41/41, validation error 43.229167 %\n", - "epoch 77, minibatch 41/41, validation error 43.208333 %\n", - " epoch 77, minibatch 41/41, test error of best model 42.202020 %\n", - "epoch 78, minibatch 41/41, validation error 43.187500 %\n", - " epoch 78, minibatch 41/41, test error of best model 42.186869 %\n", - "epoch 79, minibatch 41/41, validation error 43.166667 %\n", - " epoch 79, minibatch 41/41, test error of best model 42.166667 %\n", - "epoch 80, minibatch 41/41, validation error 43.166667 %\n", - "epoch 81, minibatch 41/41, validation error 43.145833 %\n", - " epoch 81, minibatch 41/41, test error of best model 42.151515 %\n", - "epoch 82, minibatch 41/41, validation error 43.125000 %\n", - " epoch 82, minibatch 41/41, test error of best model 42.146465 %\n", - "epoch 83, minibatch 41/41, 
validation error 43.083333 %\n", - " epoch 83, minibatch 41/41, test error of best model 42.126263 %\n", - "epoch 84, minibatch 41/41, validation error 43.020833 %\n", - " epoch 84, minibatch 41/41, test error of best model 42.116162 %\n", - "epoch 85, minibatch 41/41, validation error 43.020833 %\n", - "epoch 86, minibatch 41/41, validation error 43.020833 %\n", - "epoch 87, minibatch 41/41, validation error 42.979167 %\n", - " epoch 87, minibatch 41/41, test error of best model 42.070707 %\n", - "epoch 88, minibatch 41/41, validation error 42.958333 %\n", - " epoch 88, minibatch 41/41, test error of best model 42.045455 %\n", - "epoch 89, minibatch 41/41, validation error 42.937500 %\n", - " epoch 89, minibatch 41/41, test error of best model 42.030303 %\n", - "epoch 90, minibatch 41/41, validation error 42.916667 %\n", - " epoch 90, minibatch 41/41, test error of best model 42.035354 %\n", - "epoch 91, minibatch 41/41, validation error 42.895833 %\n", - " epoch 91, minibatch 41/41, test error of best model 42.030303 %\n", - "epoch 92, minibatch 41/41, validation error 42.875000 %\n", - " epoch 92, minibatch 41/41, test error of best model 42.030303 %\n", - "epoch 93, minibatch 41/41, validation error 42.875000 %\n", - "epoch 94, minibatch 41/41, validation error 42.854167 %\n", - " epoch 94, minibatch 41/41, test error of best model 42.010101 %\n", - "epoch 95, minibatch 41/41, validation error 42.854167 %\n", - "epoch 96, minibatch 41/41, validation error 42.833333 %\n", - " epoch 96, minibatch 41/41, test error of best model 42.005051 %\n", - "epoch 97, minibatch 41/41, validation error 42.791667 %\n", - " epoch 97, minibatch 41/41, test error of best model 41.984848 %\n", - "epoch 98, minibatch 41/41, validation error 42.770833 %\n", - " epoch 98, minibatch 41/41, test error of best model 41.979798 %\n", - "epoch 99, minibatch 41/41, validation error 42.770833 %\n", - "epoch 100, minibatch 41/41, validation error 42.770833 %\n", - "epoch 101, minibatch 41/41, validation error 42.770833 %\n", - "epoch 102, minibatch 41/41, validation error 42.770833 %\n", - "epoch 103, minibatch 41/41, validation error 42.770833 %\n", - "epoch 104, minibatch 41/41, validation error 42.770833 %\n", - "epoch 105, minibatch 41/41, validation error 42.791667 %\n", - "epoch 106, minibatch 41/41, validation error 42.791667 %\n", - "epoch 107, minibatch 41/41, validation error 42.791667 %\n", - "epoch 108, minibatch 41/41, validation error 42.791667 %\n", - "epoch 109, minibatch 41/41, validation error 42.770833 %\n", - "epoch 110, minibatch 41/41, validation error 42.770833 %\n", - "epoch 111, minibatch 41/41, validation error 42.770833 %\n", - "epoch 112, minibatch 41/41, validation error 42.770833 %\n", - "epoch 113, minibatch 41/41, validation error 42.770833 %\n", - "epoch 114, minibatch 41/41, validation error 42.750000 %\n", - " epoch 114, minibatch 41/41, test error of best model 41.868687 %\n", - "epoch 115, minibatch 41/41, validation error 42.750000 %\n", - "epoch 116, minibatch 41/41, validation error 42.729167 %\n", - " epoch 116, minibatch 41/41, test error of best model 41.863636 %\n", - "epoch 117, minibatch 41/41, validation error 42.750000 %\n", - "epoch 118, minibatch 41/41, validation error 42.750000 %\n", - "epoch 119, minibatch 41/41, validation error 42.750000 %\n", - "epoch 120, minibatch 41/41, validation error 42.729167 %\n", - " epoch 120, minibatch 41/41, test error of best model 41.853535 %\n", - "epoch 121, minibatch 41/41, validation error 42.729167 %\n", - "Optimization 
complete with best validation score of 42.729167 %,with test performance 41.853535 %\n", - "The code run for 122 epochs, with 10.362868 epochs/sec\n", - "The code for file best_model.pkl ran for 11.8s\n", - "Now predicting...\n", - "Predicted values for the first 10 examples in test set:\n", - "[0 0 0 0 0 0 1 0 0 0]\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "This tutorial introduces logistic regression using Theano and stochastic\n", - "gradient descent.\n", - "\n", - "Logistic regression is a probabilistic, linear classifier. It is parametrized\n", - "by a weight matrix :math:`W` and a bias vector :math:`b`. Classification is\n", - "done by projecting data points onto a set of hyperplanes, the distance to\n", - "which is used to determine a class membership probability.\n", - "\n", - "Mathematically, this can be written as:\n", - "\n", - ".. math::\n", - " P(Y=i|x, W,b) &= softmax_i(W x + b) \\\\\n", - " &= \\frac {e^{W_i x + b_i}} {\\sum_j e^{W_j x + b_j}}\n", - "\n", - "\n", - "The output of the model or prediction is then done by taking the argmax of\n", - "the vector whose i'th element is P(Y=i|x).\n", - "\n", - ".. math::\n", - "\n", - " y_{pred} = argmax_i P(Y=i|x,W,b)\n", - "\n", - "\n", - "This tutorial presents a stochastic gradient descent optimization method\n", - "suitable for large datasets.\n", - "\n", - "\n", - "References:\n", - "\n", - " - textbooks: \"Pattern Recognition and Machine Learning\" -\n", - " Christopher M. Bishop, section 4.3.2\n", - "\n", - "\"\"\"\n", - "from collections import namedtuple, defaultdict\n", - "from random import shuffle, randint\n", - "#----------------------------------------------------\n", - "__docformat__ = 'restructedtext en'\n", - "\n", - "import cPickle\n", - "import gzip\n", - "import os\n", - "import sys\n", - "import timeit\n", - "\n", - "import numpy\n", - "import numpy as np\n", - "import theano\n", - "import theano.tensor as T\n", - "#----------------------------------------------------\n", - "\n", - "def get_data():\n", - " '''\n", - " \n", - " '''\n", - " all_data = [] \n", - " DataDoc= namedtuple('DataDoc', 'tag words')\n", - " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", - " for line_no, line in enumerate(alldata):\n", - " label=line.split()[0]\n", - " word_list=line.lower().split()[1:]\n", - " all_data.append(DataDoc(label, word_list))\n", - " train_data = all_data[:25000]\n", - " dev_data = all_data[25000:27500]+all_data[47500:50000]\n", - " test_data=all_data[27500:47500]\n", - " # labels\n", - " train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - " dev_tags= [ 1.0 for i in range(2500)] + [ 0.0 for i in range(2500)]\n", - " test_tags= [ 1.0 for i in range(10000)] + [ 0.0 for i in range(10000)]\n", - " return train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", - " #--------------------------------------------------\n", - "#train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", - "########################\n", - "\n", - "\n", - "# Let's get a dictionary of all the words in training data\n", - "# These will be our bag-of-words features\n", - "# We won't need this function, since we will use gensim's built-in method \"Dictionary\" from the corpus module\n", - "# --> corpora.Dictionary, but we provide this so that you are clear on one way of how to do this.\n", - "def get_space(train_data):\n", - " \"\"\"\n", - " input is a list of namedtuples\n", - " get a dict of word space\n", - " key=word\n", - " 
value = the word's frequency count (note: unlike the earlier version, this\n",
- "    variant counts occurrences so that rare words can be filtered out below;\n",
- "    the __main__ block remaps the surviving words to contiguous indexes\n",
- "    before any vectorizing is done)\n",
- "    \"\"\"\n",
- "    word_space=defaultdict(int)\n",
- "    for doc in train_data:\n",
- "        for w in doc.words:\n",
- "            # count word frequencies here (rather than assigning indexes as\n",
- "            # before), so that we can keep only reasonably frequent words\n",
- "            word_space[w]+=1\n",
- "    return word_space\n",
- "\n",
- "# train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n",
- "# word_space=get_space(train_data)\n",
- "# word_space={w: word_space[w] for w in word_space if word_space[w] > 500}\n",
- "# space_len=len(word_space)\n",
- "# print \"space_len: \", space_len\n",
- "def get_sparse_vec(data_point, space):\n",
- "    # create empty vector\n",
- "    sparse_vec = np.zeros((len(space)))\n",
- "    for w in set(data_point.words):\n",
- "        # use exception handling such that this function can also be used to vectorize \n",
- "        # data with words not in train (i.e., test and dev data)\n",
- "        try:\n",
- "            sparse_vec[space[w]]=1\n",
- "        except:\n",
- "            continue\n",
- "    return sparse_vec\n",
- "\n",
- "    \n",
- "# train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n",
- "# test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n",
- "# dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n",
- "# #---------------------------\n",
- "# train_vecs=np.array(train_vecs)\n",
- "# train_tags=np.array(train_tags)\n",
- "# dev_vecs=np.array(dev_vecs)\n",
- "# dev_tags=np.array(dev_tags)\n",
- "# test_vecs=np.array(test_vecs)\n",
- "# test_tags=np.array(test_tags)\n",
- "# print train_vecs.shape\n",
- "# print dev_vecs.shape\n",
- "# print test_vecs.shape\n",
- "\n",
- "\n",
- "def load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags):\n",
- "    #------------------------------\n",
- "    # Modified from the Theano tutorial.\n",
- "    # I basically pass data_x, data_y instead of data_xy\n",
- "    def shared_dataset(data_x, data_y, borrow=True):\n",
- "        \"\"\" Function that loads the dataset into shared variables\n",
- "\n",
- "        The reason we store our dataset in shared variables is to allow\n",
- "        Theano to copy it into the GPU memory (when code is run on GPU).\n",
- "        Since copying data into the GPU is slow, copying a minibatch every\n",
- "        time one is needed (the default behaviour if the data is not in a\n",
- "        shared variable) would lead to a large decrease in performance.\n",
- "        \"\"\"\n",
- "        shared_x = theano.shared(numpy.asarray(data_x,\n",
- "                                               dtype=theano.config.floatX), borrow=borrow)\n",
- "        shared_y = theano.shared(numpy.asarray(data_y,\n",
- "                                               dtype=theano.config.floatX),\n",
- "                                 borrow=borrow)\n",
- "        # When storing data on the GPU it has to be stored as floats\n",
- "        # therefore we will store the labels as ``floatX`` as well\n",
- "        # (``shared_y`` does exactly that). But during our computations\n",
- "        # we need them as ints (we use labels as index, and if they are\n",
- "        # floats it doesn't make sense) therefore instead of returning\n",
- "        # ``shared_y`` we will have to cast it to int. 
This little hack\n", - " # lets ous get around this issue\n", - " return shared_x, T.cast(shared_y, 'int32')\n", - " #-----------------------------------------------------------------\n", - " train_set_x, train_set_y = shared_dataset(train_vecs, train_tags)\n", - " valid_set_x, valid_set_y = shared_dataset(dev_vecs, dev_tags)\n", - " test_set_x, test_set_y = shared_dataset(test_vecs, test_tags)\n", - "\n", - " rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),\n", - " (test_set_x, test_set_y)]\n", - " return rval\n", - "\n", - "#rval=load_data(train_vecs, train_tags)\n", - "#print rval\n", - "\n", - "\n", - "class LogisticRegression(object):\n", - " \"\"\"Multi-class Logistic Regression Class\n", - "\n", - " The logistic regression is fully described by a weight matrix :math:`W`\n", - " and bias vector :math:`b`. Classification is done by projecting data\n", - " points onto a set of hyperplanes, the distance to which is used to\n", - " determine a class membership probability.\n", - " \"\"\"\n", - "\n", - " def __init__(self, input, n_in, n_out):\n", - " \"\"\" Initialize the parameters of the logistic regression\n", - "\n", - " :type input: theano.tensor.TensorType\n", - " :param input: symbolic variable that describes the input of the\n", - " architecture (one minibatch)\n", - "\n", - " :type n_in: int\n", - " :param n_in: number of input units, the dimension of the space in\n", - " which the datapoints lie\n", - "\n", - " :type n_out: int\n", - " :param n_out: number of output units, the dimension of the space in\n", - " which the labels lie\n", - "\n", - " \"\"\"\n", - " # start-snippet-1\n", - " # initialize with 0 the weights W as a matrix of shape (n_in, n_out)\n", - " self.W = theano.shared(\n", - " value=numpy.zeros(\n", - " (n_in, n_out),\n", - " dtype=theano.config.floatX\n", - " ),\n", - " name='W',\n", - " borrow=True\n", - " )\n", - " # initialize the biases b as a vector of n_out 0s\n", - " self.b = theano.shared(\n", - " value=numpy.zeros(\n", - " (n_out,),\n", - " dtype=theano.config.floatX\n", - " ),\n", - " name='b',\n", - " borrow=True\n", - " )\n", - "\n", - " # symbolic expression for computing the matrix of class-membership\n", - " # probabilities\n", - " # Where:\n", - " # W is a matrix where column-k represent the separation hyperplane for\n", - " # class-k\n", - " # x is a matrix where row-j represents input training sample-j\n", - " # b is a vector where element-k represent the free parameter of\n", - " # hyperplane-k\n", - " self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)\n", - "\n", - " # symbolic description of how to compute prediction as class whose\n", - " # probability is maximal\n", - " self.y_pred = T.argmax(self.p_y_given_x, axis=1)\n", - " # end-snippet-1\n", - "\n", - " # parameters of the model\n", - " self.params = [self.W, self.b]\n", - "\n", - " # keep track of model input\n", - " self.input = input\n", - "\n", - " def negative_log_likelihood(self, y):\n", - " \"\"\"Return the mean of the negative log-likelihood of the prediction\n", - " of this model under a given target distribution.\n", - "\n", - " .. 
math::\n", - "\n", - " \\frac{1}{|\\mathcal{D}|} \\mathcal{L} (\\theta=\\{W,b\\}, \\mathcal{D}) =\n", - " \\frac{1}{|\\mathcal{D}|} \\sum_{i=0}^{|\\mathcal{D}|}\n", - " \\log(P(Y=y^{(i)}|x^{(i)}, W,b)) \\\\\n", - " \\ell (\\theta=\\{W,b\\}, \\mathcal{D})\n", - "\n", - " :type y: theano.tensor.TensorType\n", - " :param y: corresponds to a vector that gives for each example the\n", - " correct label\n", - "\n", - " Note: we use the mean instead of the sum so that\n", - " the learning rate is less dependent on the batch size\n", - " \"\"\"\n", - " # start-snippet-2\n", - " # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n", - " # number of examples (call it n) in the minibatch\n", - " # T.arange(y.shape[0]) is a symbolic vector which will contain\n", - " # [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of\n", - " # Log-Probabilities (call it LP) with one row per example and\n", - " # one column per class LP[T.arange(y.shape[0]),y] is a vector\n", - " # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n", - " # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n", - " # the mean (across minibatch examples) of the elements in v,\n", - " # i.e., the mean log-likelihood across the minibatch.\n", - " return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])\n", - " # end-snippet-2\n", - "\n", - " def errors(self, y):\n", - " \"\"\"Return a float representing the number of errors in the minibatch\n", - " over the total number of examples of the minibatch ; zero one\n", - " loss over the size of the minibatch\n", - "\n", - " :type y: theano.tensor.TensorType\n", - " :param y: corresponds to a vector that gives for each example the\n", - " correct label\n", - " \"\"\"\n", - "\n", - " # check if y has same dimension of y_pred\n", - " if y.ndim != self.y_pred.ndim:\n", - " raise TypeError(\n", - " 'y should have the same shape as self.y_pred',\n", - " ('y', y.type, 'y_pred', self.y_pred.type)\n", - " )\n", - " # check if y is of the correct datatype\n", - " if y.dtype.startswith('int'):\n", - " # the T.neq operator returns a vector of 0s and 1s, where 1\n", - " # represents a mistake in prediction\n", - " return T.mean(T.neq(self.y_pred, y))\n", - " else:\n", - " raise NotImplementedError()\n", - "\n", - "\n", - "def sgd_optimization(learning_rate=0.13, n_epochs=1000,\n", - " batch_size=600):\n", - " \"\"\"\n", - " Demonstrate stochastic gradient descent optimization of a log-linear\n", - " model\n", - "\n", - " This is demonstrated on MNIST.\n", - "\n", - " :type learning_rate: float\n", - " :param learning_rate: learning rate used (factor for the stochastic\n", - " gradient)\n", - "\n", - " :type n_epochs: int\n", - " :param n_epochs: maximal number of epochs to run the optimizer\n", - "\n", - " :type dataset: string\n", - " :param dataset: the path of the MNIST dataset file from\n", - " http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz\n", - "\n", - " \"\"\"\n", - " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", - " train_set_x, train_set_y = datasets[0]\n", - " valid_set_x, valid_set_y = datasets[1]\n", - " test_set_x, test_set_y = datasets[2]\n", - " print train_set_x.shape[0]\n", - " # compute number of minibatches for training, validation and testing\n", - " n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size\n", - " n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size\n", - " n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size\n", - 
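- "    # Sanity check on the integer divisions above (our note): with\n",
- "    # batch_size=600 and the splits built in __main__, 25000/600 = 41 train\n",
- "    # batches (hence \"minibatch 41/41\" in the log), 5000/600 = 8 validation\n",
- "    # batches and 20000/600 = 33 test batches; the leftover examples of each\n",
- "    # split simply never get visited.\n",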
"\n", - " ######################\n", - " # BUILD ACTUAL MODEL #\n", - " ######################\n", - " print '... building the model'\n", - "\n", - " # allocate symbolic variables for the data\n", - " index = T.lscalar() # index to a [mini]batch\n", - "\n", - " # generate symbolic variables for input (x and y represent a\n", - " # minibatch)\n", - " x = T.matrix('x') # data, presented as rasterized images\n", - " y = T.ivector('y') # labels, presented as 1D vector of [int] labels\n", - "\n", - " # construct the logistic regression class\n", - " # Each MNIST image has size 28*28\n", - " # MAM: We change size: n_in=space_len\n", - " classifier = LogisticRegression(input=x, n_in=space_len, n_out=2)\n", - "\n", - " # the cost we minimize during training is the negative log likelihood of\n", - " # the model in symbolic format\n", - " cost = classifier.negative_log_likelihood(y)\n", - "\n", - " # compiling a Theano function that computes the mistakes that are made by\n", - " # the model on a minibatch\n", - " test_model = theano.function(\n", - " inputs=[index],\n", - " outputs=classifier.errors(y),\n", - " givens={\n", - " x: test_set_x[index * batch_size: (index + 1) * batch_size],\n", - " y: test_set_y[index * batch_size: (index + 1) * batch_size]\n", - " }\n", - " )\n", - "\n", - " validate_model = theano.function(\n", - " inputs=[index],\n", - " outputs=classifier.errors(y),\n", - " givens={\n", - " x: valid_set_x[index * batch_size: (index + 1) * batch_size],\n", - " y: valid_set_y[index * batch_size: (index + 1) * batch_size]\n", - " }\n", - " )\n", - "\n", - " # compute the gradient of cost with respect to theta = (W,b)\n", - " g_W = T.grad(cost=cost, wrt=classifier.W)\n", - " g_b = T.grad(cost=cost, wrt=classifier.b)\n", - "\n", - " # start-snippet-3\n", - " # specify how to update the parameters of the model as a list of\n", - " # (variable, update expression) pairs.\n", - " updates = [(classifier.W, classifier.W - learning_rate * g_W),\n", - " (classifier.b, classifier.b - learning_rate * g_b)]\n", - "\n", - " # compiling a Theano function `train_model` that returns the cost, but in\n", - " # the same time updates the parameter of the model based on the rules\n", - " # defined in `updates`\n", - " train_model = theano.function(\n", - " inputs=[index],\n", - " outputs=cost,\n", - " updates=updates,\n", - " givens={\n", - " x: train_set_x[index * batch_size: (index + 1) * batch_size],\n", - " y: train_set_y[index * batch_size: (index + 1) * batch_size]\n", - " }\n", - " )\n", - " # end-snippet-3\n", - "\n", - " ###############\n", - " # TRAIN MODEL #\n", - " ###############\n", - " print '... 
training the model'\n", - " # early-stopping parameters\n", - " patience = 5000 # look as this many examples regardless\n", - " patience_increase = 2 # wait this much longer when a new best is\n", - " # found\n", - " improvement_threshold = 0.995 # a relative improvement of this much is\n", - " # considered significant\n", - " validation_frequency = min(n_train_batches, patience / 2)\n", - " # go through this many\n", - " # minibatche before checking the network\n", - " # on the validation set; in this case we\n", - " # check every epoch\n", - "\n", - " best_validation_loss = numpy.inf\n", - " test_score = 0.\n", - " start_time = timeit.default_timer()\n", - "\n", - " done_looping = False\n", - " epoch = 0\n", - " while (epoch < n_epochs) and (not done_looping):\n", - " epoch = epoch + 1\n", - " for minibatch_index in xrange(n_train_batches):\n", - "\n", - " minibatch_avg_cost = train_model(minibatch_index)\n", - " # iteration number\n", - " iter = (epoch - 1) * n_train_batches + minibatch_index\n", - "\n", - " if (iter + 1) % validation_frequency == 0:\n", - " # compute zero-one loss on validation set\n", - " validation_losses = [validate_model(i)\n", - " for i in xrange(n_valid_batches)]\n", - " this_validation_loss = numpy.mean(validation_losses)\n", - "\n", - " print(\n", - " 'epoch %i, minibatch %i/%i, validation error %f %%' %\n", - " (\n", - " epoch,\n", - " minibatch_index + 1,\n", - " n_train_batches,\n", - " this_validation_loss * 100.\n", - " )\n", - " )\n", - "\n", - " # if we got the best validation score until now\n", - " if this_validation_loss < best_validation_loss:\n", - " #improve patience if loss improvement is good enough\n", - " if this_validation_loss < best_validation_loss * \\\n", - " improvement_threshold:\n", - " patience = max(patience, iter * patience_increase)\n", - "\n", - " best_validation_loss = this_validation_loss\n", - " # test it on the test set\n", - "\n", - " test_losses = [test_model(i)\n", - " for i in xrange(n_test_batches)]\n", - " test_score = numpy.mean(test_losses)\n", - "\n", - " print(\n", - " (\n", - " ' epoch %i, minibatch %i/%i, test error of'\n", - " ' best model %f %%'\n", - " ) %\n", - " (\n", - " epoch,\n", - " minibatch_index + 1,\n", - " n_train_batches,\n", - " test_score * 100.\n", - " )\n", - " )\n", - "\n", - " # save the best model\n", - " with open('best_model.pkl', 'w') as f:\n", - " cPickle.dump(classifier, f)\n", - "\n", - " if patience <= iter:\n", - " done_looping = True\n", - " break\n", - "\n", - " end_time = timeit.default_timer()\n", - " print(\n", - " (\n", - " 'Optimization complete with best validation score of %f %%,'\n", - " 'with test performance %f %%'\n", - " )\n", - " % (best_validation_loss * 100., test_score * 100.)\n", - " )\n", - " print 'The code run for %d epochs, with %f epochs/sec' % (\n", - " epoch, 1. 
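- "# How the early stopping above plays out on this run (our reading of the\n",
- "# code): validation_frequency = min(41, 5000/2) = 41, so validation happens\n",
- "# once per epoch; each time the validation loss improves by more than the\n",
- "# 0.5% relative threshold, patience is extended to 2*iter, and the loop stops\n",
- "# at the first iteration with iter >= patience, here after 122 epochs.\n",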
* epoch / (end_time - start_time))\n", - " print ('The code for file ' +\n", - " 'best_model.pkl' +\n", - " ' ran for %.1fs' % ((end_time - start_time)))\n", - "\n", - "\n", - "def predict():\n", - " \"\"\"\n", - " An example of how to load a trained model and use it\n", - " to predict labels.\n", - " \"\"\"\n", - "\n", - " # load the saved model\n", - " classifier = cPickle.load(open('best_model.pkl'))\n", - "\n", - " # compile a predictor function\n", - " predict_model = theano.function(\n", - " inputs=[classifier.input],\n", - " outputs=classifier.y_pred)\n", - "\n", - " # We can test it on some examples from the test set\n", - " datasets=load_data(train_vecs, train_tags, dev_vecs, dev_tags, test_vecs, test_tags)\n", - " #train_set_x, train_set_y = datasets[0]\n", - " #valid_set_x, valid_set_y = datasets[1]\n", - " test_set_x, test_set_y = datasets[2]\n", - " test_set_x = test_set_x.get_value()\n", - " predicted_values = predict_model(test_set_x[:10])\n", - " print (\"Predicted values for the first 10 examples in test set:\")\n", - " print predicted_values\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - " train_data, train_tags, dev_data, dev_tags, test_data, test_tags=get_data()\n", - " word_space=get_space(train_data)\n", - " word_space={w: word_space[w] for w in word_space if word_space[w] > 600}\n", - " space_len=len(word_space)\n", - " print(\"space_len: %d\" % space_len)\n", - " train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - " test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - " dev_vecs= [get_sparse_vec(data_point, word_space) for data_point in dev_data]\n", - " #del word_space\n", - " #---------------------------\n", - " train_vecs=np.array(train_vecs)\n", - " train_tags=np.array(train_tags)\n", - " dev_vecs=np.array(dev_vecs)\n", - " dev_tags=np.array(dev_tags)\n", - " test_vecs=np.array(test_vecs)\n", - " test_tags=np.array(test_tags)\n", - " #del train_data, train_tags, dev_data, dev_tags, test_data, test_tags\n", - " print('train_vecs.shape: %d, %d' % train_vecs.shape)\n", - " print('dev_vecs.shape: %d, %d' % dev_vecs.shape)\n", - " print('test_vecs.shape: %d, %d' % test_vecs.shape)\n", - " sgd_optimization()\n", - " #------------------------------------------------------\n", - " print('Now predicting...')\n", - " predict()" - ] - }
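The Theano `updates` pairs in the training code above encode plain gradient descent on the negative log likelihood: `W <- W - learning_rate * g_W` and `b <- b - learning_rate * g_b`. For readers who find the symbolic graph opaque, here is a minimal NumPy sketch of the same update rule; the toy shapes and data are assumptions made purely for illustration, not the notebook's data:

```python
import numpy as np

# Toy stand-ins (assumed for illustration): 6 examples, 4 features, 2 classes.
rng = np.random.RandomState(0)
X = rng.randn(6, 4)
y = np.array([0, 1, 0, 1, 1, 0])
W = np.zeros((4, 2))           # plays the role of classifier.W
b = np.zeros(2)                # plays the role of classifier.b
learning_rate = 0.13

def softmax(z):
    e = np.exp(z - z.max(axis=1, keepdims=True))  # subtract max for stability
    return e / e.sum(axis=1, keepdims=True)

for step in range(100):
    p_y_given_x = softmax(X.dot(W) + b)
    # Gradient of the mean negative log likelihood w.r.t. the logits:
    grad = p_y_given_x.copy()
    grad[np.arange(len(y)), y] -= 1.0
    grad /= len(y)
    g_W = X.T.dot(grad)          # cf. T.grad(cost=cost, wrt=classifier.W)
    g_b = grad.sum(axis=0)       # cf. T.grad(cost=cost, wrt=classifier.b)
    W -= learning_rate * g_W     # the (variable, update expression) pairs
    b -= learning_rate * g_b

print(np.argmax(X.dot(W) + b, axis=1))  # predicted labels, cf. classifier.y_pred
```

On a tiny random problem like this the classes are usually linearly separable, so the predictions should typically recover `y`; the point is only to show the update rule, not the performance.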
 - ], - "metadata": { - "kernelspec": { - "display_name": "Python 2", - "language": "python", - "name": "python2" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} From ae12fb5e11fb7aae5d72d5859358212efb611678 Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 7 Feb 2016 10:02:21 -0500 Subject: [PATCH 22/36] cleaning and updating --- best_model.pkl => data/best_model.pkl | 0 hamlet.txt => data/hamlet.txt | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename best_model.pkl => data/best_model.pkl (100%) rename 
hamlet.txt => data/hamlet.txt (100%) diff --git a/best_model.pkl b/data/best_model.pkl similarity index 100% rename from best_model.pkl rename to data/best_model.pkl diff --git a/hamlet.txt b/data/hamlet.txt similarity index 100% rename from hamlet.txt rename to data/hamlet.txt From d0714c08e5c7bd643e52a88f518c3575b2ad2161 Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 7 Feb 2016 10:15:12 -0500 Subject: [PATCH 23/36] cleaning and updating --- ...orial_part_6_vector_space-checkpoint.ipynb | 229 +++++----------- python_tutorial_part_6_vector_space.ipynb | 247 +++++------------- 2 files changed, 142 insertions(+), 334 deletions(-) diff --git a/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb index d09e61f..21c002e 100644 --- a/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_6_vector_space-checkpoint.ipynb @@ -23,7 +23,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -33,8 +33,8 @@ "output_type": "stream", "text": [ "25000\n", - "25000\n", - "25000\n" + "200\n", + "200\n" ] } ], @@ -54,15 +54,15 @@ "test_data = all_data[25000:50000]\n", "print len(train_data)\n", "\n", - "#train_data=train_data[:100]+train_data[12500:12600]\n", - "#test_data=test_data[:100]+test_data[12500:12600]\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", "print len(train_data)\n", - "print len(test_data)\n" + "print len(test_data)" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -71,8 +71,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "113562\n", - "113538\n" + "7142\n", + "6994\n" ] } ], @@ -105,7 +105,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -114,9 +114,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "25000\n", - "25000\n", - "25000\n" + "0\n", + "200\n", + "200\n" ] } ], @@ -142,14 +142,14 @@ "#test_vecs= get_sparse_vectors(test_data, word_space)\n", "\n", "#print train_vecs, test_vecs[0]\n", - "print len(train_data)\n", + "print len(train_data[12500:12600])\n", "print len(train_vecs)\n", "print len(test_vecs)" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -159,8 +159,8 @@ "output_type": "stream", "text": [ "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0.]\n", - "25000\n", - "25000\n" + "200\n", + "200\n" ] } ], @@ -177,8 +177,8 @@ "from random import shuffle, randint\n", "\n", "\n", - "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", "\n", "\n", "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", @@ -192,16 +192,7 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 5, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -210,7 +201,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "(25000, 113562)\n" + "(200, 7142)\n" ] } ], @@ -222,20 +213,56 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], "source": [ "# Classification with scikit-learn\n", "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", @@ -268,7 +295,7 @@ "from sklearn.linear_model import LogisticRegression\n", "from sklearn import cross_validation\n", "import gensim\n", - "n_jobs = 4\n", + "n_jobs = 2\n", "\n", "#train_vecs=array(train_vecs)\n", "train_vecs=np.array(train_vecs)\n", @@ -292,123 +319,9 @@ "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)" + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + " \n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "X= train_vecs\n", - "y=train_tags\n", - "y=y.astype(int)\n", - "num_examples = len(X) # training set size\n", - "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", - "nn_output_dim = 2 # output layer dimensionality\n", - "\n", - "# Gradient descent parameters (I picked these by hand)\n", - "epsilon = 0.01 # learning rate 
for gradient descent\n", - "reg_lambda = 0.01 # regularization strength \n", - "\n", - "\n", - "def forward(W1, b1, W2, b2, x):\n", - " z1 = x.dot(W1) + b1\n", - " a1 = np.tanh(z1)\n", - " z2 = a1.dot(W2) + b2\n", - " exp_scores = np.exp(z2)\n", - " # softmax\n", - " y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", - " return y_hat, z1, a1, z2\n", - "\n", - "def predict(model, x):\n", - " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - " y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", - " return np.argmax(y_hat, axis=1)\n", - "\n", - "def calculate_loss(model):\n", - " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - " y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", - " correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", - " data_loss = np.sum(correct_logprobs)\n", - " return 1./num_examples * data_loss\n", - "\n", - "\n", - "# This function learns parameters for the neural network and returns the model.\n", - "# - nn_hdim: Number of nodes in the hidden layer\n", - "# - num_passes: Number of passes through the training data for gradient descent\n", - "# - print_loss: If True, print the loss every 1000 iterations\n", - "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", - " \n", - " # Initialize the parameters to random values. We need to learn these.\n", - " np.random.seed(0)\n", - " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", - " b1 = np.zeros((1, nn_hdim))\n", - " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", - " b2 = np.zeros((1, nn_output_dim))\n", - "\n", - " # This is what we return at the end\n", - " model = {}\n", - " \n", - " # Gradient descent. For each batch...\n", - " for i in range(0, num_passes):\n", - " # feedforward\n", - " y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", - " \n", - " # Backpropagation\n", - " delta3 = y_hat\n", - " delta3[range(num_examples), y] -= 1\n", - " #print [range(num_examples), y]\n", - " dW2 = (a1.T).dot(delta3)\n", - " db2 = np.sum(delta3, axis=0, keepdims=True)\n", - " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", - " dW1 = np.dot(X.T, delta2)\n", - " db1 = np.sum(delta2, axis=0)\n", - "\n", - " # Gradient descent parameter update\n", - " W1 += -epsilon * dW1\n", - " b1 += -epsilon * db1\n", - " W2 += -epsilon * dW2\n", - " b2 += -epsilon * db2\n", - " \n", - " # Assign new parameters to the model\n", - " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", - " \n", - " # Optionally print the loss.\n", - " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", - " if print_loss and i % 1000 == 0:\n", - " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", - " #print y_hat[:2]\n", - " \n", - " \n", - " return model\n", - "\n", - "# Build a model with a 3-dimensional hidden layer\n", - "model = build_model(3, print_loss=True)\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index 18126ee..21c002e 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -23,7 +23,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -33,8 +33,8 @@ "output_type": "stream", "text": [ "25000\n", - "25000\n", - "25000\n" + 
"200\n", + "200\n" ] } ], @@ -54,15 +54,15 @@ "test_data = all_data[25000:50000]\n", "print len(train_data)\n", "\n", - "#train_data=train_data[:100]+train_data[12500:12600]\n", - "#test_data=test_data[:100]+test_data[12500:12600]\n", + "train_data=train_data[:100]+train_data[12500:12600]\n", + "test_data=test_data[:100]+test_data[12500:12600]\n", "print len(train_data)\n", - "print len(test_data)\n" + "print len(test_data)" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -71,8 +71,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "113562\n", - "113538\n" + "7142\n", + "6994\n" ] } ], @@ -105,7 +105,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -114,9 +114,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "25000\n", - "25000\n", - "25000\n" + "0\n", + "200\n", + "200\n" ] } ], @@ -142,23 +142,14 @@ "#test_vecs= get_sparse_vectors(test_data, word_space)\n", "\n", "#print train_vecs, test_vecs[0]\n", - "print len(train_data)\n", + "print len(train_data[12500:12600])\n", "print len(train_vecs)\n", "print len(test_vecs)" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 4, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -168,8 +159,8 @@ "output_type": "stream", "text": [ "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", - "25000\n", - "25000\n" + "200\n", + "200\n" ] } ], @@ -186,8 +177,8 @@ "from random import shuffle, randint\n", "\n", "\n", - "train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", + "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", + "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", "\n", "\n", "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", @@ -201,16 +192,7 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 5, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -219,7 +201,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "(25000, 113562)\n" + "(200, 7142)\n" ] } ], @@ -231,20 +213,56 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "Done fitting classifier on training data...\n", + "\n", + "================================================== \n", + "\n", + "Results with 5-fold cross validation:\n", + "\n", + "================================================== \n", + "\n", + "********************\n", + "\t accuracy_score\t0.715\n", + "********************\n", + "precision_score\t0.765432098765\n", + "recall_score\t0.62\n", + "\n", + "classification_report:\n", + "\n", + " precision recall f1-score support\n", + "\n", + " 0.0 0.68 0.81 0.74 100\n", + " 1.0 0.77 0.62 0.69 100\n", + "\n", + "avg / total 0.72 0.71 0.71 200\n", + "\n", + "\n", + "confusion_matrix:\n", + "\n", + "[[81 19]\n", + " [38 62]]\n" + ] + }, + { + "name": "stderr", + "output_type": 
"stream", + "text": [ + "Using gpu device 0: GeForce GT 750M\n" + ] + } + ], "source": [ "# Classification with scikit-learn\n", "# Now we have: train_tags, train_vecs, test_tags, test_vecs\n", @@ -277,7 +295,7 @@ "from sklearn.linear_model import LogisticRegression\n", "from sklearn import cross_validation\n", "import gensim\n", - "n_jobs = 4\n", + "n_jobs = 2\n", "\n", "#train_vecs=array(train_vecs)\n", "train_vecs=np.array(train_vecs)\n", @@ -301,132 +319,9 @@ "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)" + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", + " \n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "X= train_vecs\n", - "y=train_tags\n", - "y=y.astype(int)\n", - "num_examples = len(X) # training set size\n", - "nn_input_dim = len(train_vecs[0]) # input layer dimensionality\n", - "nn_output_dim = 2 # output layer dimensionality\n", - "\n", - "# Gradient descent parameters (I picked these by hand)\n", - "epsilon = 0.01 # learning rate for gradient descent\n", - "reg_lambda = 0.01 # regularization strength \n", - "\n", - "\n", - "def forward(W1, b1, W2, b2, x):\n", - " z1 = x.dot(W1) + b1\n", - " a1 = np.tanh(z1)\n", - " z2 = a1.dot(W2) + b2\n", - " exp_scores = np.exp(z2)\n", - " # softmax\n", - " y_hat = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)\n", - " return y_hat, z1, a1, z2\n", - "\n", - "def predict(model, x):\n", - " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - " y_hat, _, _, _ = forward(W1, b1, W2, b2, x)\n", - " return np.argmax(y_hat, axis=1)\n", - "\n", - "def calculate_loss(model):\n", - " W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']\n", - " y_hat, _, _, _ = forward(W1, b1, W2, b2, X)\n", - " correct_logprobs = -np.log(y_hat[range(num_examples), y])\n", - " data_loss = np.sum(correct_logprobs)\n", - " return 1./num_examples * data_loss\n", - "\n", - "\n", - "# This function learns parameters for the neural network and returns the model.\n", - "# - nn_hdim: Number of nodes in the hidden layer\n", - "# - num_passes: Number of passes through the training data for gradient descent\n", - "# - print_loss: If True, print the loss every 1000 iterations\n", - "def build_model(nn_hdim, num_passes=2000, print_loss=False):\n", - " \n", - " # Initialize the parameters to random values. We need to learn these.\n", - " np.random.seed(0)\n", - " W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)\n", - " b1 = np.zeros((1, nn_hdim))\n", - " W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)\n", - " b2 = np.zeros((1, nn_output_dim))\n", - "\n", - " # This is what we return at the end\n", - " model = {}\n", - " \n", - " # Gradient descent. 
For each batch...\n", - " for i in range(0, num_passes):\n", - " # feedforward\n", - " y_hat, z1, a1, z2 = forward(W1, b1, W2, b2, X)\n", - " \n", - " # Backpropagation\n", - " delta3 = y_hat\n", - " delta3[range(num_examples), y] -= 1\n", - " #print [range(num_examples), y]\n", - " dW2 = (a1.T).dot(delta3)\n", - " db2 = np.sum(delta3, axis=0, keepdims=True)\n", - " delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))\n", - " dW1 = np.dot(X.T, delta2)\n", - " db1 = np.sum(delta2, axis=0)\n", - "\n", - " # Gradient descent parameter update\n", - " W1 += -epsilon * dW1\n", - " b1 += -epsilon * db1\n", - " W2 += -epsilon * dW2\n", - " b2 += -epsilon * db2\n", - " \n", - " # Assign new parameters to the model\n", - " model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}\n", - " \n", - " # Optionally print the loss.\n", - " # This is expensive because it uses the whole dataset, so we don't want to do it too often.\n", - " if print_loss and i % 1000 == 0:\n", - " print \"Loss after iteration %i: %f\" %(i, calculate_loss(model))\n", - " #print y_hat[:2]\n", - " \n", - " \n", - " return model\n", - "\n", - "# Build a model with a 3-dimensional hidden layer\n", - "model = build_model(3, print_loss=True)\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] } ], "metadata": { From a2ed3d3482b028615e600eb42374068c53c0f253 Mon Sep 17 00:00:00 2001 From: mageed Date: Sun, 7 Feb 2016 10:16:03 -0500 Subject: [PATCH 24/36] cleaning and updating --- ...rt_3_rule_based_classifier-checkpoint.ipynb | 18 +++--------------- ...tutorial_part_3_rule_based_classifier.ipynb | 1 + 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb index c9e87f6..5855f86 100644 --- a/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb +++ b/.ipynb_checkpoints/python_tutorial_part_3_rule_based_classifier-checkpoint.ipynb @@ -34,6 +34,7 @@ ], "source": [ "import re\n", + "#-------------------\n", "def clean_lexicon():\n", " positive_words= open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\", \"r\").readlines()\n", " new_pos_list=[]\n", @@ -420,12 +421,8 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], + "cell_type": "markdown", + "metadata": {}, "source": [ "# Miscellaneous code to loop over lines from a file, and do something (e.g., counting positive and negative words)" ] @@ -590,15 +587,6 @@ " print count_pos #entry, lines.index(l)\n", " count_pos=0\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/python_tutorial_part_3_rule_based_classifier.ipynb b/python_tutorial_part_3_rule_based_classifier.ipynb index 2f2ff62..5855f86 100644 --- a/python_tutorial_part_3_rule_based_classifier.ipynb +++ b/python_tutorial_part_3_rule_based_classifier.ipynb @@ -34,6 +34,7 @@ ], "source": [ "import re\n", + "#-------------------\n", "def clean_lexicon():\n", " positive_words= open(\"/Users/mam/CORE/TEACHING/smm/PROJECT-PROBLEMS/pos.swn.txt\", \"r\").readlines()\n", " new_pos_list=[]\n", From 97e4178da9de371ffc999f922c041bc25b9b14c2 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Thu, 7 Apr 2016 23:43:22 -0400 Subject: [PATCH 25/36] Added files 
via upload --- bag_of_words_svm_classifier.ipynb | 244 ++++++++++++++++++++++++++++++ 1 file changed, 244 insertions(+) create mode 100644 bag_of_words_svm_classifier.ipynb diff --git a/bag_of_words_svm_classifier.ipynb b/bag_of_words_svm_classifier.ipynb new file mode 100644 index 0000000..bccf57b --- /dev/null +++ b/bag_of_words_svm_classifier.ipynb @@ -0,0 +1,244 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# Simplest bag-of-words classifier, ever!" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['This is line 1', 'this is line 2', 'I am line 3']\n", + "[' POSITIVE', ' POSITIVE', ' NEGATIVE']\n" + ] + } + ], + "source": [ + "lines=[\"This is line 1\\t POSITIVE\", \\\n", + " \"this is line 2\\t POSITIVE\", \\\n", + " \"I am line 3\\t NEGATIVE\"]\n", + "data_x=[]\n", + "data_y=[]\n", + "for line in lines:\n", + " sent, tag = line.split(\"\\t\")\n", + " data_x.append(sent)\n", + " data_y.append(tag)\n", + "\n", + "print data_x\n", + "print data_y\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "this 0\n", + "is 1\n", + "am 6\n", + "1 3\n", + "i 5\n", + "3 7\n", + "2 4\n", + "line 2\n" + ] + } + ], + "source": [ + "from collections import defaultdict\n", + "\n", + "def get_word_space(data_x):\n", + " word_space=defaultdict(int)\n", + " for sent in data_x:\n", + " #lowercase all words\n", + " sent=sent.lower()\n", + " words=sent.split()\n", + " for w in words:\n", + " if w not in word_space:\n", + " word_space[w]=len(word_space)\n", + " return word_space\n", + "\n", + "#---------------------\n", + "space =get_word_space(data_x)\n", + "for w in space:\n", + " print w, space[w]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 1. 0. 0. 0. 0. 0. 0. 
0.]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "# print x\n", + "# print space[\"am\"]\n", + "#x[space[\"am\"]]=1\n", + "#print x\n", + "sent=\"I like this\"\n", + "def get_space(sent, space):\n", + " vector= np.zeros(len(space))\n", + " words=sent.lower().split()\n", + " for w in words:\n", + " if w in space:\n", + " vector[space[w]]=1\n", + " return vector\n", + "\n", + "v= get_space(\"Let's all go home this\", space)\n", + "print v\n", + "\n", + " \n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[array([ 0., 0., 1., 0., 0., 0., 0., 0.]), array([ 1., 0., 0., 0., 0., 0., 0., 0.])]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "sentences= [\"Mapping the geographical diffusion of new words line\",\\\n", + "\"Challenges of studying and processing This dialects in sm\"]\n", + "\n", + "def get_space_vec(sent, space):\n", + " vec= np.zeros(len(space))\n", + " #print vec\n", + " for w in sent.lower():\n", + " if w in space:\n", + " #print w, \"--->\" , space[w]\n", + " vec[space[w]]= 1\n", + " return vec\n", + "\n", + "data_vecs=[]\n", + "for sent in sentences:\n", + " #print sent\n", + " sent=sent.split()\n", + " vector= get_space_vec(sent, space)\n", + " #print \"New disney vector: \",\n", + " #print vector\n", + " data_vecs.append(vector)\n", + "\n", + "print data_vecs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['Hey', 'there,', 'people']\n" + ] + } + ], + "source": [ + "sent=\"Hey there, people\"\n", + "x=sent.split()\n", + "print x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.11" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 791494e985ba7d3c91f44a9a67593fcbfb497206 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sat, 11 Mar 2017 22:58:41 -0800 Subject: [PATCH 26/36] Adding regular expressions unit --- regular_expressions.ipynb | 719 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 719 insertions(+) create mode 100644 regular_expressions.ipynb diff --git a/regular_expressions.ipynb b/regular_expressions.ipynb new file mode 100644 index 0000000..5eaf5fa --- /dev/null +++ b/regular_expressions.ipynb @@ -0,0 +1,719 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Regular Expressions in Python" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "Python provides a powerful regular expression module (re).\n", + "A regular expression is a special sequence of 
characters that you can \n", + "think of as rules that help us match certain types of content \n", + "within string literals. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## The backslash \"\\\" & Raw Strings" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "From your knowledge of string literals, you already know that \n", + "a backslash \"\\\" is interpreted by the Python parser as an escape\n", + "character. For example, in the following string, in order to use internal quotes, we have to escape them with the backslash character \"\\\"." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is a string literal that has a quote \" character.\n" + ] + } + ], + "source": [ + "text= \"This is a string literal that has a quote \\\" character.\" \n", + "print(text)" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "The parser also interprets the backslash in specific ways when followed by \n", + "specific sequences of characters. For example, the parser replaces the \n", + "‘\\n’ escape sequence by a newline character." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This has a quote \" char followed by \n", + "\n", + "\n", + " three new lines!!.\n" + ] + } + ], + "source": [ + "text= \"This has a quote \\\" char followed by \\n\\n\\n three new lines!!.\" \n", + "print(text)" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "The re module itself also makes use of\n", + "backslash characters to escape special regex characters, which sometimes leaves \n", + "us needing to escape the escape character itself.\n", + "This results in unreadable code. A good solution to this problem is to use what is known as a \"raw string\", which is simply achieved by prefixing\n", + "a string literal with the ‘r’ character (right before the opening quote of the string). When we do this, the parser will treat the string literal as is, without attempting to make any internal substitutions. 
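To make the double-escaping problem concrete: matching one literal backslash takes two characters at the regex level, and without a raw string each of those needs escaping again at the string-literal level. A small sketch (the sample path string here is an invented illustration, not from the notebook):

```python
import re

path = "C:\\Users\\mam"            # the string itself holds single backslashes
print(re.findall("\\\\", path))    # regular string: four backslashes to match one
print(re.findall(r"\\", path))     # raw string: just two
```

Both calls find the same two backslashes; the raw-string version is simply readable.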
See the example below:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This has a quote \\\" char followed by \\n\\n\\n three new lines!!.\n" + ] + } + ], + "source": [ + "raw_text= r\"This has a quote \\\" char followed by \\n\\n\\n three new lines!!.\" \n", + "print(raw_text)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## And Three for the Road: match(), search(), and findall()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "<_sre.SRE_Match at 0x105bacf38>" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# match() will only find matches if they occur at the beginning of \n", + "# the searched string:\n", + "import re\n", + "text=\"apple berry orange berry\"\n", + "re.match(r'apple',text)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'apple'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# The above means there is a match and Python is returning the matching\n", + "# match object. \n", + "# We can access the matched pattern with: group(0)\n", + "my_match=re.match(r'apple',text)\n", + "my_match.group(0)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "None\n" + ] + } + ], + "source": [ + "# Since \"berry\" is not at the beginning of the string, there will be\n", + "# no match.\n", + "print(re.match(r'berry',text))" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<_sre.SRE_Match object at 0x106d33780>\n", + "<_sre.SRE_Match object at 0x106d33780>\n", + "<_sre.SRE_Match object at 0x106d33780>\n" + ] + } + ], + "source": [ + "# search() is like match(), except that it is not restricted to finding a match\n", + "# at the beginning: It will find a match anywhere in the string:\n", + "print(re.search(r'berry',text))\n", + "print(re.search(r'apple',text))\n", + "print(re.search(r'orange',text))" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'berry'" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Note that search() stops looking after it finds the first match.\n", + "# As such, even though there are two examples of the string \"berry\",\n", + "# search() only returns one match (the first match)\n", + "my_berry_match=re.search(r'berry',text)\n", + "my_berry_match.group(0)" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Start index: 6\n", + "End index: 11\n" + ] + } + ], + "source": [ + "# We can actually access the indexes of the matched \"berry\" string:\n", + "start=my_berry_match.start()\n", + "end=my_berry_match.end()\n", + "print(\"Start index: %s\" % start)\n", + "print(\"End index: %s\" % end)" + ] + },
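A related convenience not shown in the cells above: the match object's span() method packs the same two indexes into one (start, end) tuple, which slices the string directly. A short sketch on the same example string:

```python
import re

text = "apple berry orange berry"
my_berry_match = re.search(r'berry', text)
print(my_berry_match.span())    # (6, 11), i.e. (start, end) in one call
start, end = my_berry_match.span()
print(text[start:end])          # 'berry'
```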
"code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "berry\n" + ] + } + ], + "source": [ + "print(text[6:11])" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['berry', 'berry']\n" + ] + } + ], + "source": [ + "# findall() is like search(), but is exhaustive: It finds all the matches\n", + "all_berry_matches=re.findall(r'berry',text)\n", + "print(all_berry_matches)" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'berry'" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Since it returns a list of what matched, findall() does not work with\n", + "# grouping. Instead, just access each item in the returned list as \n", + "# what would have been a group \n", + "all_berry_matches[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### More on Grouping" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#hashtag1\n", + "#hashtag2\n" + ] + } + ], + "source": [ + "# We can surround certain surround certain parts of the regex in paranthese\n", + "# and access them later on via group numbers\n", + "tweet=\"This is a tweet with #hashtag1 and #hashtag2 https://cnn.com\"\n", + "my_hashtags=re.search(r'(#\\S+)\\s+\\S+\\s+(#\\S+)', tweet)\n", + "print(my_hashtags.group(1)) # whatever is in the first ()\n", + "print(my_hashtags.group(2)) # whatever is in the second ()" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<_sre.SRE_Match object at 0x106be5d78>\n" + ] + } + ], + "source": [ + "print(my_hashtags)" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " and \n" + ] + } + ], + "source": [ + "# We can surround certain surround certain parts of the regex in paranthese\n", + "# and access them later on via group numbers\n", + "tweet=\"This is a tweet with #hashtag1 and #hashtag2 https://cnn.com\"\n", + "my_hashtags=re.search(r'(#\\S+)(?P\\s+\\S+\\s+)(#\\S+)', tweet)\n", + "print(my_hashtags.group(\"my_and_group\")) " + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "None\n" + ] + } + ], + "source": [ + "#tweet=\"This is a tweet with #hashtag1 and #hashtag2 https://cnn.com\"\n", + "tweet_modified=\"This is a tweet with #hashtag1 #hashtag2 https://cnn.com\"\n", + "\n", + "my_hashtags=re.search(r'#\\S+\\s+\\S+\\s+#\\S+', tweet_modified)\n", + "print(my_hashtags)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# (#\\S+) matches a hashtag \"#\", followed by one or more non-whitespaces\n", + "#----------------------------------------\n", + "# \\s+ matches one or more whitespaces\n", + "#----------------------------------------\n", + "# \\s+\\S+\\s+: Basically matches 
the \" and \" in the tweet, \n", + "# (note the preceding and following spaces)." + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('#hashtag1', '#hashtag2')\n" + ] + } + ], + "source": [ + "# groups() will return all matched groups as a tuple:\n", + "print(my_hashtags.groups())" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['#hashtag1', '#hashtag2']\n" + ] + } + ], + "source": [ + "# The pattern with search() above is useful if you specifically wanted\n", + "# a pattern that has \"hashtag+space(s)+and+space(s)+hashtag\"\n", + "# If you want just to get all hashtags in a tweet, just use \"findall\"\n", + "my_hashtags=re.findall(r'(#\\S+)', tweet)\n", + "print(my_hashtags)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['https://cnn.com']\n" + ] + } + ], + "source": [ + "my_url=re.findall(r'(https://\\S+.\\S+)', tweet)\n", + "print(my_url)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Compiling for Re-Use" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['#hashtag1', '#hashtag2', 'https://cnn.com']\n" + ] + } + ], + "source": [ + "# Compile a pattern for reuse.\n", + "#------------------------------\n", + "# The \"|\" helps us match a hashtag or an URL (so if both exist,\n", + "# we capture BOTH)\n", + "p=re.compile(r'(#\\S+|https://\\S+.\\S+)')\n", + "matches=re.findall(p, tweet) # \n", + "print(matches)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Sidenote: re.sub with grouping" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is a tweet with and \n" + ] + } + ], + "source": [ + "# Using the paranthes to capture a group is useful\n", + "# if you wanted to substitute\n", + "new_tweet=re.sub(r'(#\\S+)', '', tweet)\n", + "print(new_tweet)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "This is a tweet with and \n" + ] + } + ], + "source": [ + "new_tweet=re.sub(r'(#\\S+)', '', tweet)\n", + "print(new_tweet)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Grouping by name" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#hashtag1\n", + "#hashtag2\n" + ] + } + ], + "source": [ + "# Add ?P before a pattern to group by name\n", + "my_hashtags=re.search(r'(?P#\\S+)\\s+\\S+\\s+(?P#\\S+)', tweet)\n", + "print(my_hashtags.group(\"first\")) # whatever is in the first ()\n", + "print(my_hashtags.group(\"second\")) # whatever is in the second ()" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['interesting', 'last']" + ] + }, + "execution_count": 28, + 
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Find all words with the character \"s\"\n", + "story=\"Samy told me an interesting story was airing on CBC last night...\"\n", + "re.findall(r'\\w+s\\w+', story)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['interesting', 'story', 'was', 'last']" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Since \\w* matches zero or more characters, we can get all words\n", + "# with \"s\" as follows:\n", + "re.findall(r'\\w*s\\w*', story)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['Samy', 'interesting', 'story', 'was', 'last']" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Well, almost! Let's ignore case with \"re.I\" to catch \"Samy\" as well.\n", + "re.findall(r'\\w*s\\w*', story, re.I)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From fc50c105ec63358cbd1e434d94f7ade73da9dd79 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed Date: Sat, 11 Mar 2017 22:59:20 -0800 Subject: [PATCH 27/36] Adding processing raw text unit --- processing_raw_text.ipynb | 1010 +++++++++++++++++++++++++++++++++++++ 1 file changed, 1010 insertions(+) create mode 100644 processing_raw_text.ipynb diff --git a/processing_raw_text.ipynb b/processing_raw_text.ipynb new file mode 100644 index 0000000..1bb913c --- /dev/null +++ b/processing_raw_text.ipynb @@ -0,0 +1,1010 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Processing Raw Text" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Also see: \n", + "## http://www.nltk.org/book/ch03.html, https://docs.python.org/2/howto/urllib2.html" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download a book from Project Gutenberg with Python:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Type of 'response' is :\n", + "Type of 'raw' is :\n" + ] + } + ], + "source": [ + "from urllib2 import Request, urlopen\n", + "\n", + "url=\"http://www.gutenberg.org/files/54255/54255-0.txt\"\n", + "response = urlopen(url)\n", + "raw = response.read().decode('utf8')\n", + "#--------------------------------------------------\n", + "# Check types...\n", + "print(\"Type of \\'response\\' is %s:\")% type(response)\n", + "print(\"Type of \\'raw\\' is %s:\")% type(raw)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The Project Gutenberg EBook of Narrative of Travels in Europe, Asia, and\r\n", + "Africa, in the Seventeenth Century, Volum, by Evliya Çelebi and Joseph 
Hammer-Purgstall\r\n", + "\r\n" + ] + } + ], + "source": [ + "print(raw[:165])" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['hey', ',', 'guys', ',', 'how', 'is', 'life', '?', '?', '?', '!']\n" + ] + } + ], + "source": [ + "from nltk import word_tokenize\n", + "t=\"hey, guys, how is life???!\"\n", + "tt =word_tokenize(t)\n", + "print(tt)" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('hey', 'NN'), (',', ','), ('guys', 'NNS'), (',', ','), ('how', 'WRB'), ('is', 'VBZ'), ('life', 'NN'), ('?', '.'), ('?', '.'), ('?', '.'), ('!', '.')]\n" + ] + } + ], + "source": [ + "ttt = pos_tag(tt)\n", + "print(ttt)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Tokenize and pos-tag the text:" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "144822\n", + "144822\n" + ] + } + ], + "source": [ + "from nltk import word_tokenize, pos_tag\n", + "#------------------------------\n", + "tokens = word_tokenize(raw)\n", + "print(len(tokens))\n", + "tagged=pos_tag(tokens)\n", + "print(len(tagged))" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[u'\\ufeffThe', u'Project', u'Gutenberg', u'EBook', u'of', u'Narrative', u'of', u'Travels', u'in', u'Europe', u',', u'Asia', u',', u'and', u'Africa', u',', u'in', u'the', u'Seventeenth', u'Century', u',', u'Volum', u',', u'by', u'Evliya', u'\\xc7elebi', u'and', u'Joseph', u'Hammer-Purgstall', u'This', u'eBook', u'is', u'for', u'the', u'use', u'of', u'anyone', u'anywhere', u'in', u'the', u'United', u'States', u'and', u'most', u'other', u'parts', u'of', u'the', u'world', u'at']\n" + ] + } + ], + "source": [ + "print(tokens[:50]) # list of unicode items" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false, + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(u'\\ufeffThe', 'NN'), (u'Project', 'NNP'), (u'Gutenberg', 'NNP'), (u'EBook', 'NNP'), (u'of', 'IN'), (u'Narrative', 'NNP'), (u'of', 'IN'), (u'Travels', 'NNP'), (u'in', 'IN'), (u'Europe', 'NNP')]\n" + ] + } + ], + "source": [ + "print(tagged[:10]) # list of tuples (word,pos_tag pairs)" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['hello', 'hi']\n" + ] + } + ], + "source": [ + "wds=[\"hello\", \"hi\", \"life\"]\n", + "h_wds= [w for w in wds if w.startswith(\"h\")]\n", + "\n", + "\n", + "new_words=[]\n", + "for w in wds:\n", + " if w.startswith(\"h\"):\n", + " new_words.append(w)\n", + "print(new_words)" + ] + }, + { + "cell_type": "code", + "execution_count": 87, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['plays']\n" + ] + } + ], + "source": [ + "pairs=[ (\"Alex\", \"NN\"), (\"plays\", \"VBZ\") ]\n", + "verbs=[ x[0] for x in pairs if x[1]==\"VBZ\"]\n", + "print(verbs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + 
"##### Note: The pos tagger of course makes mistakes, but it performs reasonably well." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## List comprehension on \"tagged\"" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Project\n", + "Gutenberg\n", + "EBook\n", + "Narrative\n", + "Travels\n", + "Europe\n", + "Asia\n", + "Africa\n", + "Seventeenth\n", + "Century\n", + "Volum\n", + "Evliya\n", + "Çelebi\n", + "Joseph\n", + "Hammer-Purgstall\n", + "United\n", + "Project\n", + "Gutenberg\n", + "License\n", + "United\n", + "Europe\n", + "Asia\n", + "Africa\n", + "Seventeenth\n", + "Century\n", + "II\n", + "Evliya\n", + "Çelebi\n", + "Evliya\n", + "Çelebi\n", + "Joseph\n", + "Hammer-Purgstall\n", + "Release\n", + "Date\n", + "February\n", + "[\n", + "EBook\n", + "Character\n", + "***\n", + "START\n", + "THIS\n", + "PROJECT\n", + "GUTENBERG\n", + "EBOOK\n", + "NARRATIVE\n", + "OF\n", + "TRAVELS\n", + "***\n", + "Produced\n", + "Turgut\n" + ] + } + ], + "source": [ + "# Named enitities:\n", + "ne=[pair[0] for pair in tagged if pair[-1]==\"NNP\"]\n", + "for e in ne[:50]:\n", + " print(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "breadth\n", + "remarkable\n", + "ruby-coloured\n", + "particular\n", + "tombs\n", + "gun-shot’s\n", + "yellow\n", + "rapid\n", + "mild\n", + "mile\n", + "sleep\n", + "legal\n", + "forty-six\n", + "Elephant\n", + "dish\n", + "follow\n", + "abundant\n", + "religious\n", + "washing-tubs\n", + "dreadful\n", + "seventy-seven\n", + "pardon\n", + "hunting\n", + "swam\n", + "outdated\n", + "becas\n", + "mosque\n", + "young\n", + "“Mevlúd-námeh\n", + "underwent\n", + "answered\n", + "tail\n", + "foster\n", + "obstinate\n", + "stable\n", + "suite\n", + "Precious\n", + "farsang’s\n", + "worth\n", + "orderly\n", + "virtuous\n", + "Sheikh-ul-islám\n", + "amorous\n", + "exempt\n", + "www.gutenberg.org\n", + "perishable\n", + "navigable\n", + "limpid\n", + "fat\n", + "father’s\n" + ] + } + ], + "source": [ + "# Adjectives\n", + "adjs= set([pair[0] for pair in tagged if pair[-1]==\"JJ\"]) # we pass the list to set to uniqify\n", + "adjs= list(adjs) #Cast to list again so that we access only few in print\n", + "# Note: 'set' object has no attribute '__getitem__' and so we cannot do adjs[:15] on a set\n", + "for a in adjs[:50]:\n", + " print(a)" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "15238\n" + ] + } + ], + "source": [ + "# How many ne?; note these are not uniqified\n", + "print(len(ne))" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1263\n" + ] + } + ], + "source": [ + "# How many uniqe adjs?\n", + "print(len(adjs))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Get collocations" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Project Gutenberg-tm; three hundred; hundred houses; Black Sea;\n", + "thousand men; two hundred; one hundred; great 
number; fifty aspers;\n", + "next day; Project Gutenberg; Uzún Hassan; three days; thousand houses;\n", + "five hours; Sultán Murad; Ahmed Páshá; Kizil Irmák; five hundred;\n", + "Mustafa Páshá\n" + ] + } + ], + "source": [ + "from nltk import Text\n", + "text=Text(tokens)\n", + "#print(type(text))\n", + "text.collocations()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Accessing webpages/html" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + " \n", + " \n", + " Sh\n" + ] + } + ], + "source": [ + "from bs4 import BeautifulSoup\n", + "url=\"http://www.bbc.com/news/technology-38892383\"\n", + "response = urlopen(url)\n", + "html = response.read().decode('utf8')\n", + "print(html[:200])" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Shopping\n", + "robots\n", + "on\n", + "the\n", + "march\n", + "in\n", + "Ocado\n", + "-\n", + "BBC\n", + "News\n" + ] + } + ], + "source": [ + "raw = BeautifulSoup(html, \"lxml\").get_text()\n", + "tokens = word_tokenize(raw)\n", + "tok=tokens[:10]\n", + "for t in tok:\n", + " print(t)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Working with unicode" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2167789138\tمعَ فجر العام الجديد : رجوتُ إلهيَ أن يجعلني ويجعلكمِ من أسعدِ خلقهِ ، و يرزقني ويرزقكم أضعاافَ أمنيآتِكم حتَى ترضون ...صباحكم رضى||$||\"@jumana_sj2: ولا اقول عاادي كمان يدخلو اغاني كوريه خنضحك #عشاق_كوريا_يطالبون_قناة_mbc_بفتح_قناه_mbc_korea_مترجمه_بالعربيه_للمعجبين_العرب\"||$||\"@s_h_osho: نبي قناه كوريه ليش فيه قناه هنديه ومافيه كوريه؟ #عشاق_كوريا_يطالبون_قناة_mbc_بفتح_قناه_mbc_korea_مترجمه_بالعربيه_للمعجبين_العرب\"||$||\"@LINA_ALADEEB: بعيداً عن خيالات الحب احياناً السعاده تكون عباره ع\n" + ] + } + ], + "source": [ + "import codecs\n", + "ara_text=codecs.open(\"sample_concat.tsv\", \"r\", \"utf-8\").readlines()[0]\n", + "print(ara_text[:500])" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2167789138\tمع فجر العام الجديد : رجوت إلهي أن يجعلني ويجعلكم من أسعد خلقه ، و يرزقني ويرزقكم أضعااف أمنيآتكم حتى ترضون ...صباحكم رضى||$||\"@jumana_sj2: ولا اقول عاادي كمان يدخلو اغاني كوريه خنضحك #عشاق_كوريا_يطالبون_قناة_mbc_بفتح_قناه_mbc_korea_مترجمه_بالعربيه_للمعجبين_العرب\"||$||\"@s_h_osho: نبي قناه كوريه ليش فيه قناه هنديه ومافيه كوريه؟ #عشاق_كوريا_يطالبون_قناة_mbc_بفتح_قناه_mbc_korea_مترجمه_بالعربيه_للمعجبين_العرب\"||$||\"@LINA_ALADEEB: بعيدا عن خيالات الحب احيانا السعاده تكون عباره عن - برنامج \n" + ] + } + ], + "source": [ + "def remove_unicode_diac(text):\n", + " \"\"\"Takes Arabic in utf-8 and returns same text without diac\"\"\"\n", + " # Replace diacritics with nothing \n", + " text = text.replace(u\"\\u064B\", \"\")# fatHatayn\n", + " text = text.replace(u\"\\u064C\", \"\") # Dammatayn\n", + " text = text.replace(u\"\\u064D\", \"\")# kasratayn\n", + " text = text.replace(u\"\\u064E\", \"\")# fatHa\n", + " text = text.replace(u\"\\u064F\", \"\") # Damma\n", + " text = text.replace(u\"\\u0650\", \"\")# kasra\n", + " text = 
text.replace(u\"\\u0651\", \"\")# shaddah\n", + " text = text.replace(u\"\\u0652\", \"\")# sukuun\n", + " text = text.replace(u\"\\u0670\", \"`\") # dagger 'alif\n", + " return text\n", + "\n", + "ara_text_no_diac =remove_unicode_diac(ara_text)\n", + "print(ara_text_no_diac[:500])" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<type 'unicode'>\n" + ] + } + ], + "source": [ + "print(type(ara_text_no_diac))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Regular expressions preview!" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hey there, take a look: <URL> #love_robots!\n" + ] + } + ], + "source": [ + "import re\n", + "# This will replace the URL \"http://www.bbc.com/news/technology-38892383\" with a string token \"<URL>\"\n", + "tweet=\"Hey there, take a look: http://www.bbc.com/news #love_robots!\"\n", + "tweet = re.sub(r'https?://[^\\s<>\"]+|www\\.[^\\s<>\"]+', '<URL>',tweet)\n", + "print(tweet)" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['take']\n" + ] + } + ], + "source": [ + "e_ending=[w for w in tweet.split() if re.search('e$', w)]\n", + "print(e_ending) # Note that \"there,\" ends in \",\"" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['there', 'take']\n" + ] + } + ], + "source": [ + "import string\n", + "punc = [char for char in string.punctuation]\n", + "def clean_punc(punc, text):\n", + " for i in punc:\n", + " text=text.replace(i, \"\")\n", + " return text\n", + "\n", + "tweet=clean_punc(punc, tweet)\n", + "e_ending=[w for w in tweet.split() if re.search('e$', w)]\n", + "print(e_ending) # Note that \"there,\" ends in \",\"" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['!', '\"', '#', '$', '%', '&', \"'\", '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '[', '\\\\', ']', '^', '_', '`', '{', '|', '}', '~']\n" + ] + } + ], + "source": [ + "print(punc)" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0, 'hey people')\n", + "(1, 'how are you?')\n", + "(2, 'life is good!')\n" + ] + } + ], + "source": [ + "alldata=[\"hey people\", \"how are you?\", \"life is good!\"]\n", + "for line_no, line in enumerate(alldata):\n", + " print(line_no, line)" + ] + }, + { + "cell_type": "code", + "execution_count": 99, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['bromwell', 'high', 'is', 'a', 'cartoon', 'comedy', '.', 'it', 'ran', 'at', 'the', 'same', 'time', 'as', 'some', 'other', 'programs', 'about', 'school', 'life', ',', 'such', 'as', '\"', 'teachers', '\"', '.', 'my', '35', 'years', 'in', 'the', 'teaching', 'profession', 'lead', 'me', 'to', 'believe', 'that', 'bromwell', \"high's\", 'satire', 'is', 'much', 'closer', 'to', 'reality', 'than', 
'is', '\"', 'teachers', '\"', '.', 'the', 'scramble', 'to', 'survive', 'financially', ',', 'the', 'insightful', 'students', 'who', 'can', 'see', 'right', 'through', 'their', 'pathetic', \"teachers'\", 'pomp', ',', 'the', 'pettiness', 'of', 'the', 'whole', 'situation', ',', 'all', 'remind', 'me', 'of', 'the', 'schools', 'i', 'knew', 'and', 'their', 'students', '.', 'when', 'i', 'saw', 'the', 'episode', 'in', 'which', 'a', 'student', 'repeatedly', 'tried', 'to', 'burn', 'down', 'the', 'school', ',', 'i', 'immediately', 'recalled', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'at', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'high', '.', 'a', 'classic', 'line', ':', 'inspector', ':', \"i'm\", 'here', 'to', 'sack', 'one', 'of', 'your', 'teachers', '.', 'student', ':', 'welcome', 'to', 'bromwell', 'high', '.', 'i', 'expect', 'that', 'many', 'adults', 'of', 'my', 'age', 'think', 'that', 'bromwell', 'high', 'is', 'far', 'fetched', '.', 'what', 'a', 'pity', 'that', 'it', \"isn't\", '!']\n" + ] + } + ], + "source": [ + "line=\"\"\"_*0 bromwell high is a cartoon comedy . it ran at the same time as some other programs about school life , such as \" teachers \" . my 35 years in the teaching profession lead me to believe that bromwell high's satire is much closer to reality than is \" teachers \" . the scramble to survive financially , the insightful students who can see right through their pathetic teachers' pomp , the pettiness of the whole situation , all remind me of the schools i knew and their students . when i saw the episode in which a student repeatedly tried to burn down the school , i immediately recalled . . . . . . . . . at . . . . . . . . . . high . a classic line : inspector : i'm here to sack one of your teachers . student : welcome to bromwell high . i expect that many adults of my age think that bromwell high is far fetched . what a pity that it isn't ! 
\"\"\"\n", + "line.split()[0]\n", + "words=line.split()[1:]\n", + "print(words)" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "space=defaultdict(int)\n", + "for w in words:\n", + " space[w]=len(space)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "defaultdict(int,\n", + " {'!': 96,\n", + " '\"': 41,\n", + " ',': 74,\n", + " '.': 93,\n", + " '35': 25,\n", + " ':': 85,\n", + " 'a': 94,\n", + " 'about': 17,\n", + " 'adults': 88,\n", + " 'age': 89,\n", + " 'all': 59,\n", + " 'and': 64,\n", + " 'as': 22,\n", + " 'at': 76,\n", + " 'believe': 33,\n", + " 'bromwell': 91,\n", + " 'burn': 72,\n", + " 'can': 47,\n", + " 'cartoon': 4,\n", + " 'classic': 76,\n", + " 'closer': 38,\n", + " 'comedy': 5,\n", + " 'down': 73,\n", + " 'episode': 67,\n", + " 'expect': 86,\n", + " 'far': 91,\n", + " 'fetched': 92,\n", + " 'financially': 43,\n", + " 'here': 81,\n", + " 'high': 91,\n", + " \"high's\": 35,\n", + " 'i': 86,\n", + " \"i'm\": 80,\n", + " 'immediately': 74,\n", + " 'in': 68,\n", + " 'insightful': 44,\n", + " 'inspector': 79,\n", + " 'is': 91,\n", + " \"isn't\": 95,\n", + " 'it': 95,\n", + " 'knew': 63,\n", + " 'lead': 30,\n", + " 'life': 19,\n", + " 'line': 77,\n", + " 'many': 87,\n", + " 'me': 61,\n", + " 'much': 37,\n", + " 'my': 89,\n", + " 'of': 89,\n", + " 'one': 83,\n", + " 'other': 15,\n", + " 'pathetic': 52,\n", + " 'pettiness': 55,\n", + " 'pity': 94,\n", + " 'pomp': 54,\n", + " 'profession': 29,\n", + " 'programs': 16,\n", + " 'ran': 8,\n", + " 'reality': 39,\n", + " 'recalled': 75,\n", + " 'remind': 60,\n", + " 'repeatedly': 70,\n", + " 'right': 49,\n", + " 'sack': 82,\n", + " 'same': 11,\n", + " 'satire': 36,\n", + " 'saw': 66,\n", + " 'school': 74,\n", + " 'schools': 61,\n", + " 'scramble': 41,\n", + " 'see': 48,\n", + " 'situation': 58,\n", + " 'some': 14,\n", + " 'student': 85,\n", + " 'students': 65,\n", + " 'such': 21,\n", + " 'survive': 42,\n", + " 'teachers': 85,\n", + " \"teachers'\": 53,\n", + " 'teaching': 28,\n", + " 'than': 40,\n", + " 'that': 95,\n", + " 'the': 74,\n", + " 'their': 65,\n", + " 'think': 90,\n", + " 'through': 50,\n", + " 'time': 12,\n", + " 'to': 86,\n", + " 'tried': 71,\n", + " 'welcome': 85,\n", + " 'what': 93,\n", + " 'when': 65,\n", + " 'which': 68,\n", + " 'who': 46,\n", + " 'whole': 57,\n", + " 'years': 26,\n", + " 'your': 84})" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "space" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0.]\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "vec = np.zeros(len(space))\n", + "print(vec)" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0. 0. 0. 0. 1. 1. 0. 0. 1. 0. 0. 1. 1. 0. 1. 1. 1. 1.\n", + " 0. 1. 0. 
1. 1. 0. 0. 1. 1. 0. 1. 1. 1. 0. 0. 1. 0. 1.\n", + " 1. 1. 1. 1. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 0. 1. 1.\n", + " 1. 1. 0. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 1. 0. 1. 1.\n", + " 1. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.\n", + " 1. 1. 1. 1. 1. 1. 1.]\n" + ] + } + ], + "source": [ + "for w in words:\n", + " vec[space[w]]=1\n", + "print(vec)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "abc\n", + "cd\n" + ] + } + ], + "source": [ + "x=[\"a\", \"ab\", \"abc\", \"cd\", \"xxx\"]\n", + "for i in x:\n", + " if \"c\" in i:\n", + " print(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 109, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['abc', 'cd']\n" + ] + } + ], + "source": [ + "c_list=[i for i in x if \"c\" in i]\n", + "print(c_list)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From c4ee27b90609780fb3b5359e9d53870ea4f287f2 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Sat, 11 Mar 2017 23:05:06 -0800 Subject: [PATCH 28/36] Update README.md Just updates README some! --- README.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index a97e86f..44fea55 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,18 @@ # A Python Tutorial: -This is code I wrote for courses I teach at Indiana University. -The first parts of the code in this tutorial are meant for Python beginners, and the code grows more advanced as you advance through the later parts. +This is code I wrote for courses I taught at Indiana University and then University of British Columbia. +The first parts of the code in this tutorial are meant for Python beginners, and the code grows more advanced as in later parts. -In the context of this tutorial, I plan to include sections covering the Natural Language Toolkit (NLTK), gensim, scikit-learn, visualization, numpy, etc. +In the context of this tutorial, I have added sections covering processing text, use of the Natural Language Toolkit (NLTK), gensim, scikit-learn. I plan to add parts on visualization, numpy, etc. In addition, I plan to add more advanced code covering practical machine learning issues like vector space models to perform certain tasks like sentiment analysis. Finally, I also plan to introduce some deep learning tools and provide some relevant code. -The courses teach skills for at the intersection of fields like natural language processing, machine learning, social media mining, text mining, data science, etc. +The courses teach skills data science skills (i.e, skills at the intersection of natural language processing, applied machine learning, and social media mining). The code is written primarily in Python 2.7. A migration to Python 3 shoul be straightforward. + Some of the code is written and run during class sessions and so it is shared without much polishing. +In some places, you may find some repetition (primarily for pedagogical purposes inside class). 
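+
+For readers attempting that Python 3 migration, the most visible change is that Python 2's `print` statement becomes a function in Python 3. A minimal sketch (not taken from the notebooks themselves):
+
+```python
+# Python 2 style, used throughout these notebooks
+print "I am an int:", 22
+
+# Python 3 style; with a single argument the parenthesized form
+# also runs unchanged under Python 2
+print("I am an int: {}".format(22))
+```
+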
I provide some comments, before I push here, as much as I can. From a0942281d8ea29850e387bc045eae296b8b9c0c4 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Sun, 19 Mar 2017 10:20:07 -0700 Subject: [PATCH 29/36] Add files via upload Simplifying the vector_space tutorial. --- python_tutorial_part_6_vector_space.ipynb | 370 ++++++++++++++++------ 1 file changed, 272 insertions(+), 98 deletions(-) diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index 21c002e..325da56 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -21,9 +21,173 @@ "# distribution of movie review sentiment data." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## namedtuple" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "female\n", + "Visual Arts\n" + ] + } + ], + "source": [ + "# While Python tuples is indexed numerically (like a list), a named tuple assigns names to fields and \n", + "# is also indexed numerically. This makes it possible to access the fields in a named tuple using these names\n", + "# as if they were attributes of an object (via dotting into the namedtuple)\n", + "# See also here: https://docs.python.org/2/library/collections.html\n", + "from collections import namedtuple\n", + "Student = namedtuple(\"Student\", [\"name\", \"age\", \"gender\", \"course\"])\n", + "#--------------------------------------------------------------------\n", + "# Note: You can also provide field names as a space-delimited string, rather than a list.\n", + "#Student = namedtuple(\"Student\", \"name age gender course\")\n", + "#--------------------------------------------------------------------\n", + "\n", + "angela=Student(name=\"Angela\", age=45, gender=\"female\", course=\"Python\")\n", + "soha=Student(name=\"Soha\", age=25, gender=\"female\", course=\"Visual Arts\")\n", + "print(angela.gender)\n", + "print(soha.course)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Soha\n", + "25\n", + "female\n", + "Visual Arts\n" + ] + } + ], + "source": [ + "# A namedtuple is also iterable like a tuple\n", + "for i in soha:\n", + " print(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'Visual Arts'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# You can access a namedtuple the same way you access a tuple or a list:\n", + "soha[-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Student(name='Angela', age=45, gender='female', course='Python')\n", + "Student(name='Soha', age=25, gender='female', course='Visual Arts')\n" + ] + } + ], + "source": [ + "# We can now create a list where we append the two namedtuples above.\n", + "# i.e., a list of namedtuples\n", + "all_students=[]\n", + "all_students.append(angela)\n", + "all_students.append(soha)\n", + "for s in all_students:\n", + " print(s)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false + }, + "outputs": 
[ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Angela is 45 years old.\n", + "- Soha is 25 years old.\n" + ] + } + ], + "source": [ + "for s in all_students:\n", + " print(\"- {} is {} years old.\").format(s.name, s.age)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# We should usually get tags automatically based on input data file.\n", + "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", + "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", + "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", + "# and test_data with the same setup for class label. \n", + "# The rest of the data in the file is unknown/neutral/-1 and we don't use that part.\n", + "\n", + "def map_tags(post_index):\n", + " # if post is positive, tag=1, if it is negative tag=0, if it is neutral, tag=-1\n", + " tag=-1\n", + " if post_index < 12500:\n", + " tag=1\n", + " elif post_index < 25000:\n", + " tag=0\n", + " elif post_index < 37500:\n", + " tag=1\n", + " elif post_index < 50000:\n", + " tag=0\n", + " return tag" + ] + }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 79, "metadata": { "collapsed": false }, @@ -32,37 +196,52 @@ "name": "stdout", "output_type": "stream", "text": [ - "25000\n", - "200\n", - "200\n" + "50000\n", + "**************************************************\n", + "DataDoc(tag=1, words=['bromwell', 'high', 'is', 'a', 'cartoon', 'comedy', '.', 'it', 'ran', 'at', 'the', 'same', 'time', 'as', 'some', 'other', 'programs', 'about', 'school', 'life', ',', 'such', 'as', '\"', 'teachers', '\"', '.', 'my', '35', 'years', 'in', 'the', 'teaching', 'profession', 'lead', 'me', 'to', 'believe', 'that', 'bromwell', \"high's\", 'satire', 'is', 'much', 'closer', 'to', 'reality', 'than', 'is', '\"', 'teachers', '\"', '.', 'the', 'scramble', 'to', 'survive', 'financially', ',', 'the', 'insightful', 'students', 'who', 'can', 'see', 'right', 'through', 'their', 'pathetic', \"teachers'\", 'pomp', ',', 'the', 'pettiness', 'of', 'the', 'whole', 'situation', ',', 'all', 'remind', 'me', 'of', 'the', 'schools', 'i', 'knew', 'and', 'their', 'students', '.', 'when', 'i', 'saw', 'the', 'episode', 'in', 'which', 'a', 'student', 'repeatedly', 'tried', 'to', 'burn', 'down', 'the', 'school', ',', 'i', 'immediately', 'recalled', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'at', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', 'high', '.', 'a', 'classic', 'line', ':', 'inspector', ':', \"i'm\", 'here', 'to', 'sack', 'one', 'of', 'your', 'teachers', '.', 'student', ':', 'welcome', 'to', 'bromwell', 'high', '.', 'i', 'expect', 'that', 'many', 'adults', 'of', 'my', 'age', 'think', 'that', 'bromwell', 'high', 'is', 'far', 'fetched', '.', 'what', 'a', 'pity', 'that', 'it', \"isn't\", '!'])\n", + "**************************************************\n" ] } ], "source": [ "from collections import namedtuple\n", "\n", - "all_data = [] \n", - "DataDoc= namedtuple('DataDoc', 'tag words')\n", - "with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", - " for line_no, line in enumerate(alldata):\n", - " label=line.split()[0]\n", - " word_list=line.lower().split()[1:]\n", - " all_data.append(DataDoc(label, word_list))\n", - " #print my_data[line_no]\n", - " #break\n", - "train_data = all_data[:25000]\n", - "test_data 
= all_data[25000:50000]\n", - "print len(train_data)\n", + "def get_all_data():\n", + " \"\"\"\n", + " Returns a list of namedtuples from the IMDB file.\n", + " Each namedtuple has two named fields:\n", + " tag= class label (0 for \"negative\" and 1 for \"positive\")\n", + " word_list the list of words in the review\n", + " \"\"\"\n", + " # a list to house all the data\n", + " all_data = [] \n", + " \n", + " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", + " for line_no, line in enumerate(alldata):\n", + " post_index=int(line.split()[0].split(\"*\")[-1])\n", + " label=map_tags(post_index)\n", + " word_list=line.lower().split()[1:]\n", + " all_data.append(DataDoc(label, word_list))\n", + " return all_data\n", "\n", - "train_data=train_data[:100]+train_data[12500:12600]\n", - "test_data=test_data[:100]+test_data[12500:12600]\n", - "print len(train_data)\n", - "print len(test_data)" + "# Call the function to get the data\n", + "all_data= get_all_data()\n", + "# The data are 100K reviews as explained earlier\n", + "# Since the last 50K are unknown, let's throw them away\n", + "all_data=all_data[:50000]\n", + "print(len(all_data))\n", + "print(\"*\"*50)\n", + "# print the first namedtuple\n", + "print(all_data[0])\n", + "print(\"*\"*50)\n", + "# print the last namedtuple\n", + "#print(all_data[-1])" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 80, "metadata": { "collapsed": false }, @@ -71,8 +250,35 @@ "name": "stdout", "output_type": "stream", "text": [ - "7142\n", - "6994\n" + "500\n" + ] + } + ], + "source": [ + "# The data set is big, and we want to only work with a very small sample of it.\n", + "# Let's randomize the reviews and then take only 500 of them and call them train_data.\n", + "# We will then do cross-validation on these later.\n", + "from random import shuffle\n", + "shuffle(all_data)\n", + "#-------------------------\n", + "train_data = all_data[:500]\n", + "#------------------------\n", + "print len(train_data)" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "13848\n", + "13828\n" ] } ], @@ -105,7 +311,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 82, "metadata": { "collapsed": false }, @@ -114,9 +320,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "0\n", - "200\n", - "200\n" + "0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n" ] } ], @@ -138,18 +342,15 @@ " \n", "\n", "train_vecs= [get_sparse_vec(data_point, word_space) for data_point in train_data]\n", - "test_vecs= [get_sparse_vec(data_point, word_space) for data_point in test_data]\n", - "#test_vecs= get_sparse_vectors(test_data, word_space)\n", - "\n", - "#print train_vecs, test_vecs[0]\n", - "print len(train_data[12500:12600])\n", - "print len(train_vecs)\n", - "print len(test_vecs)" + "# Get class labels\n", + "train_tags=[train_data[i].tag for i in range(len(train_data))]\n", + "# Let's look at the last training data point\n", + "print train_tags[-1], train_vecs[-1][:10]" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 83, "metadata": { "collapsed": false }, @@ -158,54 +359,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0.]\n", - "200\n", - "200\n" - ] - } - ], - "source": [ - "# We should usually get tags automatically based on input data file.\n", - "# In the input data file we have, we know that the first 12500 data points are positive/1.0 and the next 12500 are\n", - "# negative/0.0 then the next 12500 is poitive and the fourth chunk is negative.\n", - "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", - "# and test_data with the same setup for class label. \n", - "# The rest of the data in the file is unknown and we don't use that part.\n", - "# We could write code to extract label automatically and we will do this based on a standardized format we will work with\n", - "# later, for now we will hard-code the labels.\n", - "\n", - "from random import shuffle, randint\n", - "\n", - "\n", - "train_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "test_tags=[ 1.0 for i in range(100)] + [ 0.0 for i in range(100)]\n", - "\n", - "\n", - "#train_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "#test_tags=[ 1.0 for i in range(12500)] + [ 0.0 for i in range(12500)]\n", - "# Side note: If the first token in each line were the tag, we could get tags as follows:\n", - "# tags= [train_data[i].tag for i in range(len(train_data))]\n", - "print train_tags[-1], train_vecs[-1][:10]\n", - "print len(train_tags)\n", - "print len(test_tags)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(200, 7142)\n" + "(500, 13848)\n" ] } ], "source": [ + "# scikit-learn likes to take data as numpy arrays. So, let's change our data accordingly:\n", "train_vecs=np.array(train_vecs)\n", "train_tags=np.array(train_tags)\n", "print train_vecs.shape" @@ -213,7 +372,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 84, "metadata": { "collapsed": false }, @@ -234,32 +393,25 @@ "================================================== \n", "\n", "********************\n", - "\t accuracy_score\t0.715\n", + "\t accuracy_score\t0.644\n", "********************\n", - "precision_score\t0.765432098765\n", - "recall_score\t0.62\n", + "precision_score\t0.657692307692\n", + "recall_score\t0.657692307692\n", "\n", "classification_report:\n", "\n", " precision recall f1-score support\n", "\n", - " 0.0 0.68 0.81 0.74 100\n", - " 1.0 0.77 0.62 0.69 100\n", + " 0 0.63 0.63 0.63 240\n", + " 1 0.66 0.66 0.66 260\n", "\n", - "avg / total 0.72 0.71 0.71 200\n", + "avg / total 0.64 0.64 0.64 500\n", "\n", "\n", "confusion_matrix:\n", "\n", - "[[81 19]\n", - " [38 62]]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using gpu device 0: GeForce GT 750M\n" + "[[151 89]\n", + " [ 89 171]]\n" ] } ], @@ -319,8 +471,30 @@ "print \"precision_score\\t\", metrics.precision_score(train_tags, predicted)\n", "print \"recall_score\\t\", metrics.recall_score(train_tags, predicted)\n", "print \"\\nclassification_report:\\n\\n\", metrics.classification_report(train_tags, predicted)\n", - "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)\n", - " \n" + "print \"\\nconfusion_matrix:\\n\\n\", metrics.confusion_matrix(train_tags, predicted)" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.52\n" + ] + } + ], + "source": [ + "# Usually, we 
calculate a basline as the majority class in training data.\n", + "# Here, to simplify, we just get the majority class in all the data (see support, which is the number of data points in each\n", + "# class, in the classification report above)\n", + "majority_class=260/500.0\n", + "print(majority_class)" ] } ], @@ -340,7 +514,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", - "version": "2.7.10" + "version": "2.7.12" } }, "nbformat": 4, From 84dd3d3773a94b432987508b2dc714566a4eda75 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Sun, 19 Mar 2017 10:26:05 -0700 Subject: [PATCH 30/36] Add files via upload Adding some explanations to the tutorial... --- python_tutorial_part_6_vector_space.ipynb | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index 325da56..03f48e1 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -18,7 +18,8 @@ "outputs": [], "source": [ "# This is code to build a vector space model, with SVMs on Andrew Mass' \n", - "# distribution of movie review sentiment data." + "# distribution of movie review sentiment data.\n", + "# Since we use Python's namedtuple on the code, let's take a look at what a namedtuple is first" ] }, { @@ -170,6 +171,20 @@ "# So basically the train_data has 25K (with the first half positive and the second half negative)\n", "# and test_data with the same setup for class label. \n", "# The rest of the data in the file is unknown/neutral/-1 and we don't use that part.\n", + "#------------------------------------------\n", + "# Format of the data is as below, with each line starting with an index.\n", + "# For example, \"_*0\" is the index in the first line. We will ignore the \"_*\" part and cast the index into\n", + "# an int\n", + "#------------------------------------------\n", + "\"\"\"\n", + "_*0 bromwell high is a cartoon comedy ....\n", + "_*1 homelessness ( or houselessness as george carlin stated )...\n", + "_*2 brilliant over-acting by lesley ann warren .\n", + "\"\"\"\n", + "#------------------------------------------\n", + "# Let's build a function that takes the index in the file and returns a numerical index that can be seen \n", + "# by the classifier we will use later\n", + "#------------------------------------------\n", "\n", "def map_tags(post_index):\n", " # if post is positive, tag=1, if it is negative tag=0, if it is neutral, tag=-1\n", From 5594236ab7cd506bcbd5d70c17ec3a66fdbc0c69 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Mon, 20 Mar 2017 13:13:23 -0700 Subject: [PATCH 31/36] Add files via upload Further cleaning... 
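
A caution before this cleanup: the pattern `word_space[w] = len(word_space)` (used in part 1 as `space[w] = len(space)` and again here) re-assigns an index every time a word repeats, so two different words can end up sharing a position; the dictionary dump in part 1 shows several words all mapped to index 91, for example. Note also that the `len(word_space+1)` variant introduced in the diff below raises a TypeError (a dict cannot be added to an int); `len(word_space)+1` was presumably intended, though starting indexes at 0 is the usual convention. A minimal collision-free sketch, assuming each `doc` is a namedtuple with a `words` field as in the notebook:

```python
def get_space(docs):
    """Map each distinct word to a stable, collision-free index."""
    word_space = {}
    for doc in docs:
        for w in doc.words:
            # Assign an index only the first time a word is seen;
            # re-assigning on repeats is what causes the collisions.
            if w not in word_space:
                word_space[w] = len(word_space)
    return word_space
```

With this version every word gets a unique index in 0..V-1, so `np.zeros(len(word_space))` is always large enough when the one-hot vectors are built.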
--- python_tutorial_part_6_vector_space.ipynb | 176 ++++++++++++++++++++-- 1 file changed, 165 insertions(+), 11 deletions(-) diff --git a/python_tutorial_part_6_vector_space.ipynb b/python_tutorial_part_6_vector_space.ipynb index 03f48e1..7bb6c5a 100644 --- a/python_tutorial_part_6_vector_space.ipynb +++ b/python_tutorial_part_6_vector_space.ipynb @@ -195,11 +195,58 @@ " tag=0\n", " elif post_index < 37500:\n", " tag=1\n", - " elif post_index < 50000:\n", - " tag=0\n", + " else:\n", + " pass\n", " return tag" ] }, + { + "cell_type": "code", + "execution_count": 87, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0, 'bromwell high is a cartoon comedy ....')\n", + "(1, 'homelessness ( or houselessness as george carlin stated )')\n", + "(2, 'brilliant over-acting by lesley ann warren')\n" + ] + } + ], + "source": [ + "l=[\"bromwell high is a cartoon comedy ....\", \\\n", + " \"homelessness ( or houselessness as george carlin stated )\",\\\n", + " \"brilliant over-acting by lesley ann warren\"]\n", + "\n", + "for no, post in enumerate(l):\n", + " print(no, post)" + ] + }, + { + "cell_type": "code", + "execution_count": 98, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<type 'int'>\n" + ] + } + ], + "source": [ + "line=\"_*0 bromwell high is a cartoon comedy ....\"\n", + "label= int(line.split()[0].split(\"*\")[-1])\n", + "print(type(label))" + ] + }, { "cell_type": "code", "execution_count": 79, @@ -231,11 +278,11 @@ " # a list to house all the data\n", " all_data = [] \n", " \n", - " DataDoc= namedtuple('DataDoc', 'tag words')\n", + " DataDoc= namedtuple('DataDoc', ['tag', 'words'])\n", " with open('/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata-id.txt') as alldata:\n", " for line_no, line in enumerate(alldata):\n", - " post_index=int(line.split()[0].split(\"*\")[-1])\n", - " label=map_tags(post_index)\n", + " #post_index=int(line.split()[0].split(\"*\")[-1])\n", + " label=map_tags(line_no)\n", " word_list=line.lower().split()[1:]\n", " all_data.append(DataDoc(label, word_list))\n", " return all_data\n", @@ -316,7 +363,7 @@ " for w in doc.words:\n", " # indexes of words won't be in sequential order as they occur in data (can you tell why?), \n", " # but that doesn't matter.\n", - " word_space[w]=len(word_space)\n", + " word_space[w]=len(word_space+1)\n", " return word_space\n", "\n", "word_space=get_space(train_data)\n", @@ -326,7 +373,7 @@ }, { "cell_type": "code", - "execution_count": 82, + "execution_count": 103, "metadata": { "collapsed": false }, @@ -335,13 +382,109 @@ "name": "stdout", "output_type": "stream", "text": [ - "0 [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n" + "[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + "[ 0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]\n" ] } ], "source": [ "import numpy as np\n", - "\n", + "x=np.zeros(10)\n", + "print(x)\n", + "x[3]=1\n", + "print(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0. 0. 0. ..., 0. 0. 
0.]\n" + ] + } + ], + "source": [ + "big=np.zeros(len(word_space))\n", + "print(big)" + ] + }, + { + "cell_type": "code", + "execution_count": 106, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "13828\n" + ] + } + ], + "source": [ + "w=\"love\"\n", + "word_index=word_space[w]\n", + "print(word_index)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "big[13828]=1" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[10, 20, 33, 44, 2, 6]\n" + ] + } + ], + "source": [ + "numbers=[10, 20, 33, 44, 50, 2, 6, 77]\n", + "less_than_fifty= [i for i in numbers if i < 50]\n", + "print(less_than_fifty)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0\n", + "[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n" + ] + } + ], + "source": [ "def get_sparse_vec(data_point, space):\n", " # create empty vector\n", " sparse_vec = np.zeros((len(space)))\n", @@ -349,7 +492,8 @@ " # use exception handling such that this function can also be used to vectorize \n", " # data with words not in train (i.e., test and dev data)\n", " try:\n", - " sparse_vec[space[w]]=1\n", + " word_index= space[w]\n", + " sparse_vec[word_index]=1\n", " except:\n", " continue\n", " return sparse_vec\n", @@ -360,9 +504,19 @@ "# Get class labels\n", "train_tags=[train_data[i].tag for i in range(len(train_data))]\n", "# Let's look at the last training data point\n", - "print train_tags[-1], train_vecs[-1][:10]" + "print(train_tags[-1])\n", + "print(train_vecs[-1][:10])" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": 83, From 71f94fced32bc1edb535fa4872d97dcddd9945e0 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Fri, 24 Mar 2017 10:05:18 -0700 Subject: [PATCH 32/36] Adding a unit on classes. 
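
One detail the unit leaves implicit: because `Book` and its subclasses inherit from `object` (new-style classes), the parent initializer can also be invoked through `super` instead of naming the parent class directly. A minimal sketch, assuming the `Book` class defined in the notebook below:

```python
class Magazine(Book):
    """Subclass of Book that adds an issue field."""
    def __init__(self, title, b_id, price, issue):
        # Equivalent to Book.__init__(self, title, b_id, price),
        # but keeps working if the parent class is renamed.
        super(Magazine, self).__init__(title, b_id, price)
        self.issue = issue

time_mag = Magazine("Time", "000-22-4444", 10, "12-2")
print(time_mag.price)  # attributes set by the parent __init__ are available
```

With single inheritance the two forms behave identically; `super` mainly pays off under cooperative multiple inheritance.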
--- classes.ipynb | 317 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 317 insertions(+) create mode 100644 classes.ipynb diff --git a/classes.ipynb b/classes.ipynb new file mode 100644 index 0000000..5440c5d --- /dev/null +++ b/classes.ipynb @@ -0,0 +1,317 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# A Quick Look at Python Classes" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "678678\n", + "Alex\n", + "['Python', 'Soical Media Intelligence']\n", + "Alex has uid 678678, and is taking: ['Python', 'Soical Media Intelligence']\n", + "\n", + "\n", + " A student class, holding name, id, and courses taken...\n", + " \n" + ] + } + ], + "source": [ + "class Student(object):\n", + " \"\"\"\n", + " A student class, holding name, id, and courses taken...\n", + " \"\"\"\n", + " def __init__(self, name, student_id, courses):\n", + " self.name=name\n", + " self.student_id = student_id\n", + " self.courses = courses\n", + "\n", + " def get_id(self):\n", + " return self.student_id\n", + " \n", + " def get_name(self):\n", + " return self.name\n", + "\n", + " def get_courses(self):\n", + " return self.courses\n", + "\n", + " def __str__(self):\n", + " return \"%s has uid %s, and is taking: %s\\n\" % (self.name, self.student_id, self.courses)\n", + "\n", + "#--------------------------------------------------------------------\n", + "alex=Student(\"Alex\", 678678, [\"Python\", \"Soical Media Intelligence\"])\n", + "print(alex.get_id())\n", + "print(alex.get_name())\n", + "print(alex.get_courses())\n", + "#----------\n", + "print(alex)\n", + "print(Student.__doc__)\n", + "#---------------------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "7675456353\n", + "Sara\n", + "Deep Learning\n", + "Sara has uid 7675456353, and is taking: Deep Learning\n", + "\n" + ] + } + ], + "source": [ + "#--------------------------------------------------------------------\n", + "sara=Student(\"Sara\", 7675456353, \"Deep Learning\")\n", + "print(sara.get_id())\n", + "print(sara.get_name())\n", + "print(sara.get_courses())\n", + "#----------\n", + "#print(sara)\n", + "#---------------------------------------------------------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Title of the Book: Deep Learning for NLP\n", + "- Price of the Book: 100\n", + "- New price of the Book: 180\n" + ] + } + ], + "source": [ + "class Book(object):\n", + " \"\"\"\n", + " A Book class with some getters! 
(Bad doc!)\n", + " \"\"\"\n", + " def __init__(self, title, b_id, price):\n", + " self.title = title\n", + " self.b_id = b_id\n", + " self.price = price\n", + "\n", + " def get_id(self):\n", + " return self.b_id\n", + " \n", + " def get_title(self):\n", + " return self.title\n", + "\n", + " def get_price(self):\n", + " return self.price\n", + " \n", + " def update_price(self, price):\n", + " self.price =price\n", + " \n", + "deep_learning=Book(\"Deep Learning for NLP\", \"888-22-33308\", 100)\n", + "\n", + "print('- Title of the Book: {}').format(deep_learning.get_title())\n", + "print('- Price of the Book: {}').format(deep_learning.get_price())\n", + "# Update the price\n", + "deep_learning.update_price(180)\n", + "print('- New price of the Book: {}').format(deep_learning.get_price())" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Title of the Book: Deep Learning for NLP\n", + "- Price of the Book: 100\n", + "- New price of the Book: 180\n" + ] + } + ], + "source": [ + "class Book(object):\n", + " \"\"\"\n", + " A Book class with some getters! (Bad doc!)\n", + " \"\"\"\n", + " def __init__(self, title, b_id, price):\n", + " self.title = title\n", + " self.b_id = b_id\n", + " self.price = price\n", + "\n", + "# def get_id(self):\n", + "# return self.b_id\n", + " \n", + "# def get_title(self):\n", + "# return self.title\n", + "\n", + "# def get_price(self):\n", + "# return self.price\n", + " \n", + " def update_price(self, new_price):\n", + " self.price = new_price\n", + " \n", + "deep_learning=Book(\"Deep Learning for NLP\", \"888-22-33308\", 100)\n", + "\n", + "print('- Title of the Book: {}').format(deep_learning.title)\n", + "print('- Price of the Book: {}').format(deep_learning.price)\n", + "# Update the price\n", + "deep_learning.update_price(180)\n", + "print('- New price of the Book: {}').format(deep_learning.price)" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " A Book class with some getters! 
(Bad doc!)\n", + " \n" + ] + } + ], + "source": [ + "print(deep_learning.__doc__)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Subclassing" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Issue of the Magazine: 12-2\n", + "- Title of the Magazine: Time\n", + "- Price of the Magazine: 10\n", + "- New price of the Magazine: 15\n" + ] + } + ], + "source": [ + "class Magazine(Book):\n", + " \"\"\"\n", + " Subclass of the Book class...\n", + " Only adds the method to get issue info.\n", + " \"\"\"\n", + " def __init__(self, title, b_id, price, issue):\n", + " # Just invoke the __init__ for the parent class\n", + " Book.__init__(self, title, b_id, price)\n", + " self.issue = issue\n", + "\n", + " def get_issue(self):\n", + " return self.issue\n", + " \n", + "time=Magazine(\"Time\", \"000-22-4444\", 10, \"12-2\")\n", + "\n", + "print('- Issue of the Magazine: {}').format(time.get_issue())\n", + "#-----------------------------------------------------------\n", + "# Everything else works like it should with the parent class\n", + "print('- Title of the Magazine: {}').format(time.get_title())\n", + "print('- Price of the Magazine: {}').format(time.get_price())\n", + "# Update the price\n", + "time.update_price(15)\n", + "print('- New price of the Magazine: {}').format(time.get_price())" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " Subclass of the Book class...\n", + " Only adds the method to get issue info.\n", + " \n" + ] + } + ], + "source": [ + "# Note: Subclass does not inherit doc from parent class:\n", + "print(time.__doc__)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 1189f8366fae05a24b5a5b2cb3746f6455b0eec1 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Mon, 27 Mar 2017 15:19:30 -0700 Subject: [PATCH 33/36] Add files via upload Text classification with Naive Bayes, using sklearn... --- text_classification_Naive_Bayes.ipynb | 556 ++++++++++++++++++++++++++ 1 file changed, 556 insertions(+) create mode 100644 text_classification_Naive_Bayes.ipynb diff --git a/text_classification_Naive_Bayes.ipynb b/text_classification_Naive_Bayes.ipynb new file mode 100644 index 0000000..50ecb25 --- /dev/null +++ b/text_classification_Naive_Bayes.ipynb @@ -0,0 +1,556 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# A Vector Space Model, with scikit-learn Naive Bayes" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/mam/anaconda/lib/python2.7/site-packages/matplotlib/font_manager.py:273: UserWarning: Matplotlib is building the font cache using fc-list. This may take a moment.\n", + " warnings.warn('Matplotlib is building the font cache using fc-list. 
This may take a moment.')\n" + ] + } + ], + "source": [ + "%matplotlib inline\n", + "import csv\n", + "import pandas\n", + "import sklearn\n", + "import numpy as np\n", + "from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n", + "from sklearn.naive_bayes import MultinomialNB\n", + "from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix\n", + "from sklearn.pipeline import Pipeline\n", + "from sklearn.cross_validation import StratifiedKFold, cross_val_score " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "50001\n" + ] + } + ], + "source": [ + "# Read the data\n", + "reviews = [line.rstrip() for line in open(\"/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata_2column.txt\")]\n", + "print(len(reviews))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\"label\",\"message\"\n", + "\"1\",\"bromwell high is a cartoon comedy . it ran at the same time as some other programs about school life such as \"\" teachers \"\" . my 35 years in the teaching profession lead me to believe that bromwell high's satire is much closer to reality than is \"\" teachers \"\" . the scramble to survive financially the insightful students who can see right through their pathetic teachers' pomp the pettiness of the whole situation all remind me of the schools i knew and their students . when i saw the episode in which a student repeatedly tried to burn down the school i immediately recalled . . . . . . . . . at . . . . . . . . . . high . a classic line : inspector : i'm here to sack one of your teachers . student : welcome to bromwell high . i expect that many adults of my age think that bromwell high is far fetched . what a pity that it isn't !\"\n" + ] + } + ], + "source": [ + "# The data have a header and we print it\n", + "print(reviews[0])\n", + "# print first data point.\n", + "# data format is each review as a line, csv\n", + "# clomun one is the sentiment tag --> 1=positive sentiment, 0=negative sentiment\n", + "# column 2 is the review\n", + "print(reviews[1])" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "<div>\n", + "<table border=\"1\" class=\"dataframe\">\n", + " <thead>\n", + " <tr style=\"text-align: right;\">\n", + " <th></th>\n", + " <th>label</th>\n", + " <th>message</th>\n", + " </tr>\n", + " </thead>\n", + " <tbody>\n", + " <tr>\n", + " <th>0</th>\n", + " <td>\"label\"</td>\n", + " <td>\"message\"</td>\n", + " </tr>\n", + " <tr>\n", + " <th>1</th>\n", + " <td>\"1\"</td>\n", + " <td>\"bromwell high is a cartoon comedy . it ran at...</td>\n", + " </tr>\n", + " <tr>\n", + " <th>2</th>\n", + " <td>\"1\"</td>\n", + " <td>\"homelessness ( or houselessness as george car...</td>\n", + " </tr>\n", + " <tr>\n", + " <th>3</th>\n", + " <td>\"1\"</td>\n", + " <td>\"brilliant over-acting by lesley ann warren . ...</td>\n", + " </tr>\n", + " <tr>\n", + " <th>4</th>\n", + " <td>\"1\"</td>\n", + " <td>\"this is easily the most underrated film inn t...</td>\n", + " </tr>\n", + " </tbody>\n", + "</table>\n", + "</div>" + ], + "text/plain": [ + " label message\n", + "0 \"label\" \"message\"\n", + "1 \"1\" \"bromwell high is a cartoon comedy . 
it ran at...\n", + "2 \"1\" \"homelessness ( or houselessness as george car...\n", + "3 \"1\" \"brilliant over-acting by lesley ann warren . ...\n", + "4 \"1\" \"this is easily the most underrated film inn t..." + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Let's actually read the file again with pandas\n", + "import csv\n", + "import pandas as pd\n", + "reviews = pd.read_csv(\"/Users/mam/CORE/RESEARCH/DEEPLEARNING/Doc2Vec/data/aclImdb/alldata_2column.txt\",\\\n", + " sep=',', quoting=csv.QUOTE_NONE, names=[\"label\", \"message\"])\n", + "\n", + "# Let's print a preview with the \"head\" command\n", + "reviews.head(n=5)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/mam/anaconda/lib/python2.7/site-packages/sklearn/cross_validation.py:516: Warning: The least populated class in y has only 1 members, which is too few. The minimum number of labels for any class cannot be less than n_folds=10.\n", + " % (min_labels, self.n_folds)), Warning)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0.82083583 0.8122 0.8234 0.8028 0.8204 0.834 0.8218\n", + " 0.8372 0.8034 0.8082 ]\n" + ] + } + ], + "source": [ + "reviews_data=reviews[\"message\"]\n", + "reviews_tags=reviews[\"label\"]\n", + "\n", + "pipeline = Pipeline([\n", + " ('bow', CountVectorizer(analyzer='word')), # get counts of tokens\n", + " ('tfidf', TfidfTransformer()), # get tf-idf scores\n", + " ('classifier', MultinomialNB()), # train on tf-idf vectors with the Naive Bayes classifier\n", + "])\n", + "\n", + "# Do 10-fold cross validation\n", + "scores = cross_val_score(pipeline, \n", + " reviews_data, \n", + " reviews_tags, \n", + " cv=10, \n", + " scoring='accuracy',\n", + " n_jobs=-1, # use all machine cores\n", + " )\n", + "print(scores)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.818423583283\n" + ] + } + ], + "source": [ + "avg= sum(scores/10.0)\n", + "print(avg)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Code fr " + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'last_letter': 'x'}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import nltk\n", + "def gender_features(word):\n", + " return {'last_letter': word[-1]}\n", + "gender_features('Alex')" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'last_letter': 'e'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "gender_features('Nicole')" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from nltk.corpus import names\n", + "labeled_names = ([(name, 'male') for name in names.words('male.txt')] +\\\n", + " [(name, 'female') for name in names.words('female.txt')])\n", + "import random\n", + "random.shuffle(labeled_names)" + ] + }, 
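+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Side note: `random.shuffle` shuffles the list *in place* and returns `None`, so writing `labeled_names = random.shuffle(labeled_names)` would silently replace the data with `None`. A quick sanity check (the exact pair printed varies from shuffle to shuffle):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "print(len(labeled_names))\n",
+    "print(labeled_names[0]) # a (name, gender) pair; value depends on the shuffle"
+   ]
+  },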
+ { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "featuresets = [(gender_features(n), gender) for (n, gender) in labeled_names]\n", + "train_set, test_set = featuresets[500:], featuresets[:500]\n", + "classifier = nltk.NaiveBayesClassifier.train(train_set)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "({'last_letter': u'i'}, 'female')" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "featuresets[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[({'last_letter': u'i'}, 'female'),\n", + " ({'last_letter': u'b'}, 'male'),\n", + " ({'last_letter': u't'}, 'male'),\n", + " ({'last_letter': u'e'}, 'female'),\n", + " ({'last_letter': u'n'}, 'male'),\n", + " ({'last_letter': u'y'}, 'female'),\n", + " ({'last_letter': u'e'}, 'female'),\n", + " ({'last_letter': u'a'}, 'female'),\n", + " ({'last_letter': u'e'}, 'female'),\n", + " ({'last_letter': u'a'}, 'female')]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train_set[0:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'female'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "classifier.classify(gender_features('Rebecca'))" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'male'" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "classifier.classify(gender_features('Jordon'))" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'female'" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "classifier.classify(gender_features('Vivienne'))" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.746\n" + ] + } + ], + "source": [ + "print(nltk.classify.accuracy(classifier, test_set))" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Most Informative Features\n", + " last_letter = u'a' female : male = 34.4 : 1.0\n", + " last_letter = u'k' male : female = 32.7 : 1.0\n", + " last_letter = u'f' male : female = 16.6 : 1.0\n", + " last_letter = u'p' male : female = 11.9 : 1.0\n", + " last_letter = u'v' male : female = 11.2 : 1.0\n" + ] + } + ], + "source": [ + "classifier.show_most_informative_features(5)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from nltk.classify import apply_features\n", + "train_set = apply_features(gender_features, labeled_names[500:])\n", + "test_set = apply_features(gender_features, labeled_names[:500])" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + 
"metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "nltk.util.LazyMap" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(train_set)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "({'last_letter': u'i'}, 'female')\n" + ] + } + ], + "source": [ + "print(train_set[0])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From 095f9cc74969fd94da20b594e158d1c61b7ac656 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Mon, 27 Mar 2017 15:24:43 -0700 Subject: [PATCH 34/36] Add files via upload --- text_classification_Naive_Bayes.ipynb | 308 +------------------------- 1 file changed, 1 insertion(+), 307 deletions(-) diff --git a/text_classification_Naive_Bayes.ipynb b/text_classification_Naive_Bayes.ipynb index 50ecb25..461e416 100644 --- a/text_classification_Naive_Bayes.ipynb +++ b/text_classification_Naive_Bayes.ipynb @@ -220,316 +220,10 @@ } ], "source": [ + "# Let's get average accuracy...\n", "avg= sum(scores/10.0)\n", "print(avg)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Code fr " - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'last_letter': 'x'}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import nltk\n", - "def gender_features(word):\n", - " return {'last_letter': word[-1]}\n", - "gender_features('Alex')" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'last_letter': 'e'}" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "gender_features('Nicole')" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from nltk.corpus import names\n", - "labeled_names = ([(name, 'male') for name in names.words('male.txt')] +\\\n", - " [(name, 'female') for name in names.words('female.txt')])\n", - "import random\n", - "random.shuffle(labeled_names)" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "featuresets = [(gender_features(n), gender) for (n, gender) in labeled_names]\n", - "train_set, test_set = featuresets[500:], featuresets[:500]\n", - "classifier = nltk.NaiveBayesClassifier.train(train_set)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "({'last_letter': u'i'}, 'female')" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - 
"featuresets[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[({'last_letter': u'i'}, 'female'),\n", - " ({'last_letter': u'b'}, 'male'),\n", - " ({'last_letter': u't'}, 'male'),\n", - " ({'last_letter': u'e'}, 'female'),\n", - " ({'last_letter': u'n'}, 'male'),\n", - " ({'last_letter': u'y'}, 'female'),\n", - " ({'last_letter': u'e'}, 'female'),\n", - " ({'last_letter': u'a'}, 'female'),\n", - " ({'last_letter': u'e'}, 'female'),\n", - " ({'last_letter': u'a'}, 'female')]" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "train_set[0:10]" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "'female'" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "classifier.classify(gender_features('Rebecca'))" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "'male'" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "classifier.classify(gender_features('Jordon'))" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "'female'" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "classifier.classify(gender_features('Vivienne'))" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.746\n" - ] - } - ], - "source": [ - "print(nltk.classify.accuracy(classifier, test_set))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Most Informative Features\n", - " last_letter = u'a' female : male = 34.4 : 1.0\n", - " last_letter = u'k' male : female = 32.7 : 1.0\n", - " last_letter = u'f' male : female = 16.6 : 1.0\n", - " last_letter = u'p' male : female = 11.9 : 1.0\n", - " last_letter = u'v' male : female = 11.2 : 1.0\n" - ] - } - ], - "source": [ - "classifier.show_most_informative_features(5)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from nltk.classify import apply_features\n", - "train_set = apply_features(gender_features, labeled_names[500:])\n", - "test_set = apply_features(gender_features, labeled_names[:500])" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "nltk.util.LazyMap" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(train_set)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "({'last_letter': u'i'}, 'female')\n" - ] - } - ], - "source": [ - "print(train_set[0])" - ] } ], "metadata": { From c53817fbf1aeefd5da864ca0afda038398ff99b5 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed 
<mumageed@gmail.com> Date: Fri, 31 Mar 2017 12:35:34 -0700 Subject: [PATCH 35/36] Adding a first pandas tutorial... --- pandas_tutorial_1.ipynb | 727 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 727 insertions(+) create mode 100644 pandas_tutorial_1.ipynb diff --git a/pandas_tutorial_1.ipynb b/pandas_tutorial_1.ipynb new file mode 100644 index 0000000..8364d0e --- /dev/null +++ b/pandas_tutorial_1.ipynb @@ -0,0 +1,727 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Pandas:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Pandas has two core data structures: Series & DataFrame" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Series" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 2\n", + "1 4\n", + "2 6\n", + "3 8\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "from pandas import Series, DataFrame\n", + "counts= Series([2, 4, 6, 8])\n", + "print(counts)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2 4 6 8]\n" + ] + } + ], + "source": [ + "print(counts.values)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Int64Index([0, 1, 2, 3], dtype='int64')\n" + ] + } + ], + "source": [ + "print(counts.index)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 2\n", + "b 4\n", + "c 6\n", + "d 8\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "# We can create customized indexes:\n", + "counts= Series([2, 4, 6, 8], index=[\"a\", \"b\", \"c\", \"d\"])\n", + "print(counts)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2\n" + ] + } + ], + "source": [ + "# We can use the indexes to access values:\n", + "# Note: We need to use quotes around an index:\n", + "print(counts[\"a\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 2\n", + "b 4\n", + "c 22\n", + "d 8\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "# Reassign:\n", + "counts[\"c\"]=22\n", + "print(counts)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 2\n", + "b 4\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "print(counts[[\"a\", \"b\"]]) # Note the double square brackets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 7.389056e+00\n", + "b 5.459815e+01\n", + "c 3.584913e+09\n", + "d 
2.980958e+03\n", + "dtype: float64\n" + ] + } + ], + "source": [ + "# We can perform operations, similar to Numpy, while preserving the index values\n", + "print(np.exp(counts))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 0.6\n", + "b 1.2\n", + "c 6.6\n", + "d 2.4\n", + "dtype: float64\n" + ] + } + ], + "source": [ + "print(counts*0.3)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a 2\n", + "b 4\n", + "c 22\n", + "d 8\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "print(counts)" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Alex 10\n", + "Evan 20\n", + "Gabi 15\n", + "John 12\n", + "Juan 20\n", + "Mary 13\n", + "Noha 9\n", + "dtype: int64\n" + ] + } + ], + "source": [ + "# We can create a Series from a Python dictionary:\n", + "d={\"Alex\": 10, \"John\": 12, \"Mary\": 13, \"Gabi\": 15, \"Noha\": 9,\\\n", + " \"Juan\": 20, \"Evan\": 20}\n", + "grades=Series(d)\n", + "print(grades)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Alex False\n", + "Evan True\n", + "Gabi True\n", + "John False\n", + "Juan True\n", + "Mary True\n", + "Noha False\n", + "dtype: bool\n" + ] + } + ], + "source": [ + "print(grades > 12)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Alex 12.0\n", + "Evan 24.0\n", + "Gabi 18.0\n", + "John 14.4\n", + "Juan 24.0\n", + "Mary 15.6\n", + "Noha 10.8\n", + "dtype: float64\n" + ] + } + ], + "source": [ + "raised= grades * 1.2\n", + "print(raised)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## DataFrame" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " age courses names\n", + "0 25 Python Alex\n", + "1 27 Perl John\n", + "2 32 Deep Learning Mary\n", + "3 19 Pattern Recognition Gabi\n", + "4 23 Data Mining Noha\n", + "5 20 Computational Archives Juan\n", + "6 21 Health Informatics Evan\n" + ] + } + ], + "source": [ + "# The DataFrame is a (possibly heterogeneous) spreadsheet-like (think Excel) data structure\n", + "# that enables both row and column indexing. 
Intuitively, we can think about a DataFrame as \n",
+ "# a dict of Series\n",
+ "\n",
+ "data= {\"courses\": [\"Python\", \"Perl\", \"Deep Learning\", \"Pattern Recognition\", \"Data Mining\",\\\n",
+ "                    \"Computational Archives\", \"Health Informatics\"],\n",
+ "       \"age\": [25, 27, 32, 19, 23, 20, 21],\n",
+ "       \"names\": [\"Alex\", \"John\", \"Mary\", \"Gabi\", \"Noha\", \"Juan\", \"Evan\"]}\n",
+ "\n",
+ "       \n",
+ "frame=DataFrame(data)\n",
+ "print(frame)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 39,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "    age                 courses names\n",
+ "s1   25                  Python  Alex\n",
+ "s2   27                    Perl  John\n",
+ "s3   32           Deep Learning  Mary\n",
+ "s4   19     Pattern Recognition  Gabi\n",
+ "s5   23             Data Mining  Noha\n",
+ "s6   20  Computational Archives  Juan\n",
+ "s7   21      Health Informatics  Evan\n"
+ ]
+ }
+ ],
+ "source": [
+ "frame=DataFrame(data, index=[\"s1\", \"s2\", \"s3\", \"s4\", \"s5\", \"s6\", \"s7\" ])\n",
+ "print(frame)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 40,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "s1    Alex\n",
+ "s2    John\n",
+ "s3    Mary\n",
+ "s4    Gabi\n",
+ "s5    Noha\n",
+ "s6    Juan\n",
+ "s7    Evan\n",
+ "Name: names, dtype: object\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(frame[\"names\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 41,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "s1    Alex\n",
+ "s2    John\n",
+ "s3    Mary\n",
+ "s4    Gabi\n",
+ "s5    Noha\n",
+ "s6    Juan\n",
+ "s7    Evan\n",
+ "Name: names, dtype: object\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(frame.names)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 42,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "age            25\n",
+ "courses    Python\n",
+ "names        Alex\n",
+ "Name: s1, dtype: object\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Rows can be retrieved by label using, e.g., the \"ix\" indexing field:\n",
+ "print(frame.ix[\"s1\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 44,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "age          27\n",
+ "courses    Perl\n",
+ "names      John\n",
+ "Name: s2, dtype: object\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(frame.ix[\"s2\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 47,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "    age                 courses names School\n",
+ "s1   25                  Python  Alex    UBC\n",
+ "s2   27                    Perl  John    UBC\n",
+ "s3   32           Deep Learning  Mary    UBC\n",
+ "s4   19     Pattern Recognition  Gabi    UBC\n",
+ "s5   23             Data Mining  Noha    UBC\n",
+ "s6   20  Computational Archives  Juan    UBC\n",
+ "s7   21      Health Informatics  Evan    UBC\n"
+ ]
+ }
+ ],
+ "source": [
+ "# add a column\n",
+ "frame[\"School\"]=\"UBC\"\n",
+ "print(frame)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 48,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Index([u'age', u'courses', u'names', u'School'], dtype='object')\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(frame.columns)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 49,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Index([u's1', u's2',
u's3', u's4', u's5', u's6', u's7'], dtype='object')\n" + ] + } + ], + "source": [ + "print(frame.index)" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "amazement 544395\n", + "loathing 74923\n", + "admiration 65759\n", + "grief 42947\n", + "terror 35705\n", + "ecstasy 30206\n", + "rage 8738\n", + "vigilance 695\n", + "Name: label, dtype: int64\n" + ] + } + ], + "source": [ + "import statsmodels.api as sm\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "from patsy import dmatrices\n", + "from random import shuffle, randint, sample\n", + "import seaborn as sns\n", + "import numpy as np\n", + "%matplotlib inline\n", + "\n", + "emotion = pd.read_csv('emotions_p1_extended_lang_id_noduplic_denoised.csv', delimiter=',', header=0)\n", + "#----------------------------------------\n", + "print(pd.value_counts(emotion[\"label\"]))" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "<div>\n", + "<table border=\"1\" class=\"dataframe\">\n", + " <thead>\n", + " <tr style=\"text-align: right;\">\n", + " <th></th>\n", + " <th>message_id</th>\n", + " <th>message</th>\n", + " <th>label</th>\n", + " <th>lang_id</th>\n", + " </tr>\n", + " </thead>\n", + " <tbody>\n", + " <tr>\n", + " <th>0</th>\n", + " <td>596908062054555648</td>\n", + " <td>Last week , Yuki Kawauchi ran 3 HM in 3 consec...</td>\n", + " <td>admiration</td>\n", + " <td>en</td>\n", + " </tr>\n", + " <tr>\n", + " <th>1</th>\n", + " <td>257202468386115584</td>\n", + " <td>Had a Turkish bath today . #amazing</td>\n", + " <td>amazement</td>\n", + " <td>en</td>\n", + " </tr>\n", + " <tr>\n", + " <th>2</th>\n", + " <td>223865330487930880</td>\n", + " <td>Taking my 6yo niece shopping #imintrouble #goi...</td>\n", + " <td>ecstasy</td>\n", + " <td>nl</td>\n", + " </tr>\n", + " <tr>\n", + " <th>3</th>\n", + " <td>411617825149566976</td>\n", + " <td><USER> <USER> <USER> Britt and I tried for so ...</td>\n", + " <td>grief</td>\n", + " <td>en</td>\n", + " </tr>\n", + " <tr>\n", + " <th>4</th>\n", + " <td>267380735453835264</td>\n", + " <td>I love this new song of one direction gotta ad...</td>\n", + " <td>amazement</td>\n", + " <td>en</td>\n", + " </tr>\n", + " </tbody>\n", + "</table>\n", + "</div>" + ], + "text/plain": [ + " message_id message \\\n", + "0 596908062054555648 Last week , Yuki Kawauchi ran 3 HM in 3 consec... \n", + "1 257202468386115584 Had a Turkish bath today . #amazing \n", + "2 223865330487930880 Taking my 6yo niece shopping #imintrouble #goi... \n", + "3 411617825149566976 <USER> <USER> <USER> Britt and I tried for so ... \n", + "4 267380735453835264 I love this new song of one direction gotta ad... 
\n", + "\n", + " label lang_id \n", + "0 admiration en \n", + "1 amazement en \n", + "2 ecstasy nl \n", + "3 grief en \n", + "4 amazement en " + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "emotion.head()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From c42eb163f18d8bf88f806262afcdfb58636e1021 Mon Sep 17 00:00:00 2001 From: Muhammad Abdul-Mageed <mumageed@gmail.com> Date: Fri, 31 Mar 2017 12:37:59 -0700 Subject: [PATCH 36/36] Updating numpy tutorial. --- python_tutorial_part_4_numpy.ipynb | 540 ++++++++++++++++++++++++++--- 1 file changed, 500 insertions(+), 40 deletions(-) diff --git a/python_tutorial_part_4_numpy.ipynb b/python_tutorial_part_4_numpy.ipynb index ad848ce..84940f2 100644 --- a/python_tutorial_part_4_numpy.ipynb +++ b/python_tutorial_part_4_numpy.ipynb @@ -11,7 +11,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 64, "metadata": { "collapsed": false }, @@ -20,7 +20,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "<type 'numpy.ndarray'>\n", + " <type 'numpy.ndarray'>\n", "a --> [2 3 4 5]\n", "b --> [5 6 7 8]\n", "a+b --> [ 7 9 11 13]\n" @@ -28,20 +28,304 @@ } ], "source": [ - "from numpy import *\n", - "#from numpy import array\n", + "# Import numpy, conventionally as \"np\"\n", "import numpy as np\n", - "a= array([2,3,4,5])\n", - "b=array((5,6,7,8))\n", + "# Numpy enables creation of N-dimensional arrays of data, or ndarrays\n", + "a=np.array([2,3,4,5])\n", + "b=np.array((5,6,7,8))\n", "print type(a)\n", "print \"a -->\", a\n", "print \"b -->\", b\n", - "print \"a+b -->\", a+b\n" + "print \"a+b -->\", a+b" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(4,)\n" + ] + } + ], + "source": [ + "# We can get the shape of the array, which is a tuple of the sizes of its dimensions\n", + "print(a.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n" + ] + } + ], + "source": [ + "z=np.zeros(10)\n", + "print(z)" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(10,)\n" + ] + } + ], + "source": [ + "print(z.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1\n" + ] + } + ], + "source": [ + "print(z.ndim)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0. 0. 0. 0. 0.]\n", + " [ 0. 0. 0. 0. 
0.]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# We can create a 2x5 ndarray and initialize it with \"zeros\" or \"ones\":\n",
+ "x=np.zeros([2, 5])\n",
+ "print(x)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(2, 5)\n",
+ "2\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(x.shape)\n",
+ "print(x.ndim)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[[ 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0. 0. 0. 0. 0.]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Or we can initialize with a shape of 4, 9:\n",
+ "x=np.zeros([4, 9])\n",
+ "print(x)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(4, 9)\n",
+ "2\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(x.shape)\n",
+ "print(x.ndim)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[[[ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]]\n",
+ "\n",
+ " [[ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]\n",
+ " [ 1. 1. 1.]]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# We can also create an array of > 2 dimensions\n",
+ "# Consider the following from the documentation of scipy: https://docs.scipy.org/doc/numpy-dev/user/quickstart.html:\n",
+ "\"\"\"\n",
+ "When you print an array, NumPy displays it in a similar way to nested lists, but with the following layout:\n",
+ "\n",
+ "    the last axis is printed from left to right,\n",
+ "    the second-to-last is printed from top to bottom,\n",
+ "    the rest are also printed from top to bottom, with each slice separated from the next by an empty line.\n",
+ "\n",
+ "One-dimensional arrays are then printed as rows, bidimensionals as matrices and tridimensionals as lists of matrices.\n",
+ "\"\"\"\n",
+ "x=np.ones([2, 4, 3])\n",
+ "print(x)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(2, 4, 3)\n",
+ "3\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(x.shape)\n",
+ "print(x.ndim)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 39,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[[[[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]]\n",
+ "\n",
+ " [[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]]\n",
+ "\n",
+ " [[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]]]\n",
+ "\n",
+ "\n",
+ " [[[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]]\n",
+ "\n",
+ " [[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]]\n",
+ "\n",
+ " [[ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0. 0.]\n",
+ " [ 0. 0. 0. 0.
0.]]]]\n" + ] + } + ], + "source": [ + "x=np.zeros([2, 3, 4, 5])\n", + "print(x)" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 40, "metadata": { "collapsed": false }, @@ -50,32 +334,168 @@ "name": "stdout", "output_type": "stream", "text": [ - "This will give an error!!!\n", - "a+c -->" + "(2, 3, 4, 5)\n", + "4\n" ] - }, + } + ], + "source": [ + "print(x.shape)\n", + "print(x.ndim)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ { - "ename": "ValueError", - "evalue": "operands could not be broadcast together with shapes (4,) (6,) ", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m<ipython-input-4-5f9c99476f2e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m9\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"This will give an error!!!\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"a+c -->\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mValueError\u001b[0m: operands could not be broadcast together with shapes (4,) (6,) " + "name": "stdout", + "output_type": "stream", + "text": [ + "float64\n" + ] + } + ], + "source": [ + "print(x.dtype)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[1 1 1]\n", + " [1 1 1]\n", + " [1 1 1]\n", + " [1 1 1]]\n", + "\n", + " [[1 1 1]\n", + " [1 1 1]\n", + " [1 1 1]\n", + " [1 1 1]]]\n", + "int32\n" + ] + } + ], + "source": [ + "# Note array data type...\n", + "x=np.ones([2, 4, 3], dtype=np.int32)\n", + "print(x)\n", + "print(x.dtype)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Operations on arrays:" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[5 5 5]\n", + " [5 5 5]\n", + " [5 5 5]\n", + " [5 5 5]]\n", + "\n", + " [[5 5 5]\n", + " [5 5 5]\n", + " [5 5 5]\n", + " [5 5 5]]]\n" ] } ], + "source": [ + "print(x*5)" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]]\n", + "\n", + " [[ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]\n", + " [ 0.2 0.2 0.2]]]\n" + ] + } + ], + "source": [ + "print(x/5.0)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[1 1 1]\n", + " [1 1 1]\n", + " [1 1 
1]\n",
+ " [1 1 1]]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# We can slice\n",
+ "my_slice=x[1:2]\n",
+ "print(my_slice)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
 "source": [
 "# You can only add arrays of the same shape / equal length:\n",
- "c=array([5,8,8,9,5,2])\n",
- "print \"This will give an error!!!\"\n",
- "print \"a+c -->\", a+c"
+ "c=np.array([5,8,8,9,5,2])\n",
+ "# print \"This will give an error if you print it!!!\"\n",
+ "# print \"a+c -->\", a+c"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 19,
+ "execution_count": 51,
 "metadata": {
 "collapsed": false
 },
@@ -84,20 +504,22 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
- "a+1 --> [3 4 5 6]\n"
+ "a --> [2 3 4 5]\n",
+ "a+2 --> [4 5 6 7]\n"
 ]
 }
 ],
 "source": [
 "# broadcasting\n",
 "# If you add an array to a scalar, the scalar gets broadcast across all the array elements\n",
- "print \"a+1 -->\", a+1\n",
+ "print \"a -->\", a\n",
+ "print \"a+2 -->\", a+2\n",
 "# Now you can broadcast arrays and so you can add arrays of different shapes..."
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 6,
+ "execution_count": 52,
 "metadata": {
 "collapsed": false
 },
@@ -116,16 +538,22 @@
 }
 ],
 "source": [
- "import numpy as np\n",
 "x= np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.float32)\n",
 "print \"Printing array x: \", x,\"\\n\"\n",
 "print \"\\\"Shape of array x is:\\\" \", x.shape,\"\\n\"\n",
 "print \"\\\"Value at x[0][1] is:\\\" \", x[0][1] # gives row0, c1 --> we start index from zero!"
 ]
 },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## More operations"
+ ]
+ },
 {
 "cell_type": "code",
- "execution_count": 7,
+ "execution_count": 53,
 "metadata": {
 "collapsed": false
 },
@@ -149,7 +577,43 @@
 },
 {
 "cell_type": "code",
- "execution_count": 8,
+ "execution_count": 55,
 "metadata": {
 "collapsed": false
 },
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
 "[ 1 6 15 6]\n"
 ]
 }
 ],
 "source": [
 "print(x*y)"
 ]
 },
 {
 "cell_type": "code",
 "execution_count": 57,
 "metadata": {
 "collapsed": false
 },
 "outputs": [],
 "source": [
 "# You cannot do the below:\n",
 "# You will get an error:\n",
 "# ValueError: operands could not be broadcast together with shapes (4,) (5,)\n",
 "x=np.array([1, 3, 5, 6])\n",
 "y=np.array([1,2,3,1, 9])\n",
 "d=y[1:]-y[:-1]\n",
 "print(x*y)"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 8,
+ "execution_count": 59,
 "metadata": {
 "collapsed": false
 },
@@ -166,12 +630,12 @@
 "source": [
 "print sum(a)\n",
 "# cumsum returns the running (cumulative) sum of the elements\n",
- "print cumsum(a)"
+ "print np.cumsum(a)"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 35,
+ "execution_count": 61,
 "metadata": {
 "collapsed": false
 },
@@ -186,14 +650,12 @@
 "-------------------\n",
 "[2 3 4 5 6]\n",
 "-------------------\n",
- "[2 4 6]\n",
- "-------------------\n",
- "[ 100. 215.443469 464.15888336 1000.
]\n" + "[ 2 7 12 17 22 27 32 37 42 47]\n", + "-------------------\n" ] } ], "source": [ - "import numpy as np\n", "#numpy.arange: http://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html\n", "\"\"\"\n", "numpy.arange([start, ]stop, [step, ]dtype=None)\n", @@ -208,7 +670,7 @@ "print \"-------------------\"\n", "print np.arange(2,7)\n", "print \"-------------------\"\n", - "print np.arange(2,7, 2)\n", + "print np.arange(2,50, 5)\n", "print \"-------------------\"" ] }, @@ -255,7 +717,6 @@ } ], "source": [ - "import numpy as np\n", "#------------------\n", "print \"numpy.zeros\"\n", "#------------------\n", @@ -336,12 +797,11 @@ } ], "source": [ - "import numpy as np\n", "#------------------\n", "print \"\\n numpy.linspace\"\n", "#------------------\n", "\"\"\"\n", - " numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)[source]¶\n", + " numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)[source]\n", " Return evenly spaced numbers over a specified interval.\n", " Returns num evenly spaced samples, calculated over the interval [start, stop].\n", " The endpoint of the interval can optionally be excluded.\n", @@ -392,7 +852,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", - "version": "2.7.10" + "version": "2.7.12" } }, "nbformat": 4,
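To round off the numpy patch, the three array constructors it exercises — np.arange, np.zeros, and np.linspace — can be compared side by side. A minimal sketch; the commented outputs assume NumPy's defaults (float64 dtype, and endpoint=True for linspace):

    # Recap of the constructors shown in the numpy tutorial above.
    import numpy as np
    print np.arange(2, 7)       # [2 3 4 5 6]  -- like range(): stop is exclusive
    print np.arange(2, 50, 5)   # [ 2  7 12 17 22 27 32 37 42 47]  -- with a step of 5
    print np.zeros(3)           # [ 0.  0.  0.]  -- filled with float64 zeros
    print np.linspace(0, 1, 5)  # [ 0.    0.25  0.5   0.75  1.  ]  -- endpoint included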