From 012c415e16ee637e904c14c0a87abd3a28e79d63 Mon Sep 17 00:00:00 2001 From: Samson <45101270+sammysamsamsama@users.noreply.github.com> Date: Fri, 7 Feb 2020 13:19:14 -0600 Subject: [PATCH 01/24] Added copy of lab 1 --- Labs/Lab-1/Copy_of_Lab_1.ipynb | 1138 ++++++++++++++++++++++++++++++++ 1 file changed, 1138 insertions(+) create mode 100644 Labs/Lab-1/Copy_of_Lab_1.ipynb diff --git a/Labs/Lab-1/Copy_of_Lab_1.ipynb b/Labs/Lab-1/Copy_of_Lab_1.ipynb new file mode 100644 index 0000000..cab1a58 --- /dev/null +++ b/Labs/Lab-1/Copy_of_Lab_1.ipynb @@ -0,0 +1,1138 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "colab": { + "name": "Copy of Lab-1.ipynb", + "provenance": [], + "collapsed_sections": [], + "toc_visible": true + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "O5vg8KKRq0sy", + "colab_type": "text" + }, + "source": [ + "# Lab 1\n", + "\n", + "## Python Notebooks on Google Colab\n", + "\n", + "Data 1401's Labs, Homework, and Exams will be all in form of iPython notebooks. You may already be familiar with python notebooks if you have used Jupyter before, for example in Data 1301. If so, you are welcome to use whatever means you have to run Jupyter notebooks for this course, though you may get limited support. Our primary means of running python notebooks will be through [Google Colab](https://colab.research.google.com) and we will be storing files on google drive.\n", + "\n", + "You will need a google account. If you do not have one or you wish to use a different account for this course, please follow [these instructions](https://edu.gcfglobal.org/en/googledriveanddocs/getting-started-with-google-drive/1/) to make an account.\n", + "\n", + "Once you are ready with your account, you can continue in Colab. 
Click on the following badge to open this notebook in Colab:\n",
+ "\n",
+ "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-1/Lab-1.ipynb)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "mm8NAwqDBt7L",
+ "colab_type": "code",
+ "outputId": "2f2e6a3a-5416-43d7-e26e-930e447467f0",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 122
+ }
+ },
+ "source": [
+ "from google.colab import drive\n",
+ "drive.mount('/content/drive')"
+ ],
+ "execution_count": 0,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n", 
+ "\n", 
+ "Enter your authorization code:\n", 
+ "··········\n", 
+ "Mounted at /content/drive\n" 
+ ], 
+ "name": "stdout" 
+ } 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "FVt_1hPt1dAK", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Notebooks in Colab\n", 
+ "\n", 
+ "You are now presumably in Colab. A word of caution: by default, Google Colab does not save your notebooks, so if you close your session, you will lose your work.\n", 
+ "\n", 
+ "So first thing: from the file menu above select \"Save a copy in Drive\"." 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "x0JBL_RFrDDj", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Storing Notebooks in Google Drive\n", 
+ "A better way to work is to save your notebooks directly into Google Drive and upload directly to Git (where you will be downloading and uploading your homework). In order to properly set up Git, we'll need to work more directly in your Google Drive.\n", 
+ "\n", 
+ "On the left sidebar, press the file icon to see a listing of files accessible to this Notebook. Then press \"Mount Drive\" and follow the instructions to mount your Google Drive in this notebook. A new cell will be inserted into this notebook, which after you run by pressing the play button will instruct you to follow a link to log into your Google Account and enable access to your Drive in another tab. Finally you will copy a link from the new tab back into the cell in this notebook. Once you are done, press refresh under files in the left sidebar and you should have \"drive/My Drive\" appear." 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "hwJ6wJk3tiLv", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Github\n", 
+ "All the class material will be stored on github. You will also submit your homework using github. To do so, you will need a github account.\n", 
+ "\n", 
+ "If you do not already have a github account or wish to create a new one for this course, create one:\n", 
+ "* Browse to [github.com](https://github.com).\n", 
+ "* Click the green “Sign up for GitHub” button.\n", 
+ "* Follow instructions for creating an account.\n", 
+ "* Make sure you remember your github username and password.\n", 
+ "\n", 
+ "Write an email to the course TA titled \"Data 1401: Github account\" with your github username (not your password) as the contents.\n", 
+ "\n", 
+ "## Google Groups\n", 
+ "\n", 
+ "Class announcements will be made via google groups. 
If you did not already receive an invite to the class google group, had trouble with the invite, or wish to use a different email address, write an email to the course TA titled \"Data 1401: Google Group\" with your preferred email.\n" 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "TjfIzdQZqvzk", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Introduction: Unix, Git, and Jupyter\n", 
+ "\n", 
+ "This lab aims to introduce you to basic Unix, familiarize you with iPython notebooks and get you set up to submit your homework.\n", 
+ "" 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "C_LmOgzFqvzp", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "\n", 
+ "\n", 
+ "### Terminal, Shell, and ssh\n", 
+ "\n", 
+ "\n", 
+ "The terminal is a simple program that generally runs another program, taking mostly keyboard input from you, passing it to this other program, and taking the output of the program and displaying it on the screen for you.\n", 
+ "\n", 
+ "The terminal usually runs a program called a shell. Shells present a command prompt where you can type in commands, which are then executed when you press enter. In most shells, there are some special commands which the shell will execute. Everything else you type in, the shell will assume is a name of a program you want to run and arguments you want to pass that program. So if the shell doesn't recognize something you type in, it'll try to find a program with a name that is the same as the first word you gave it. \n", 
+ "\n", 
+ "### Shell in Colab\n", 
+ "\n", 
+ "Unfortunately, Google Colab does not allow you to open a terminal window. Jupyter does, so if you are running in Jupyter (which most of you will not be), you may choose to open a terminal window by returning to the jupyter file list tab and selecting new terminal from the top right.\n", 
+ "\n", 
+ "For Colab, we will have to do something non-ideal, but functional. 
There are several ways to execute shell commands from within a python notebook. For example, you can use any shell command by putting \"!\" in front of the command:\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "KJ5f-WO0wcAv", + "colab_type": "code", + "outputId": "2f31b640-40cf-44fb-f705-f0549d502518", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + } + }, + "source": [ + "!ls\n", + "!echo \"----------\"\n", + "!ls sample_data" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "drive sample_data\n", + "----------\n", + "anscombe.json\t\t mnist_test.csv\n", + "california_housing_test.csv mnist_train_small.csv\n", + "california_housing_train.csv README.md\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8f-n4AXFw-dD", + "colab_type": "text" + }, + "source": [ + "Unfortunately, every time you use \"!\" a new environment is created and the state reverted to the original state. 
Try to understand the difference between the following two sets of commands:\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "99nrBYTWxZJr", + "colab_type": "code", + "outputId": "26a2b96c-1381-4dca-ff10-b6d6ef3e1a82", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 68 + } + }, + "source": [ + "!echo \"Technique 1:\"\n", + "!ls\n", + "!cd sample_data\n", + "!ls" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Technique 1:\n", + "drive sample_data\n", + "drive sample_data\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "2-Znf97Lxl-Z", + "colab_type": "code", + "outputId": "7d739354-6bcd-4934-b3e9-8884feb28994", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + } + }, + "source": [ + "!echo \"Technique 2:\"\n", + "!ls ; cd sample_data ;ls" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Technique 2:\n", + "drive sample_data\n", + "anscombe.json\t\t mnist_test.csv\n", + "california_housing_test.csv mnist_train_small.csv\n", + "california_housing_train.csv README.md\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "4x9n1rAkxyYl", + "colab_type": "text" + }, + "source": [ + "Notebooks allow a bit of \"magic\" (using \"%\") to avoid some of these limitations:\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "vLBPTX4rx3gd", + "colab_type": "code", + "outputId": "c459c076-c2d7-4149-834a-b9e2903c2a1a", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + } + }, + "source": [ + "!echo \"Technique 3:\"\n", + "!ls \n", + "%cd sample_data \n", + "!ls" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Technique 3:\n", + "drive sample_data\n", + "/content/sample_data\n", + "anscombe.json\t\t mnist_test.csv\n", + "california_housing_test.csv mnist_train_small.csv\n", + 
"california_housing_train.csv README.md\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "U8XpvPjcyH0w", + "colab_type": "text" + }, + "source": [ + "For our purposes, we are just going to explicitly start a new shell and interact with it in the output cell. Execute the following cell. You will be able to type and execute commands. Look around a bit using \"ls\" and \"cd. You can stop the cell from running by typing \"exit\"." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "MIDFitLZyuZy", + "colab_type": "code", + "outputId": "5a308bc9-fa60-4821-8108-e8112b82c468", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + } + }, + "source": [ + "!/bin/bash --noediting" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "bash: cannot set terminal process group (120): Inappropriate ioctl for device\n", + "bash: no job control in this shell\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# pwd\n", + "/content/sample_data\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# which ls\n", + "/bin/ls\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# ls /bin\n", + "\u001b[0m\u001b[01;32mbash\u001b[0m \u001b[01;32mjournalctl\u001b[0m \u001b[01;32msync\u001b[0m\n", + "\u001b[01;32mbunzip2\u001b[0m \u001b[01;32mkill\u001b[0m \u001b[01;32msystemctl\u001b[0m\n", + "\u001b[01;32mbzcat\u001b[0m \u001b[01;32mkmod\u001b[0m \u001b[01;36msystemd\u001b[0m\n", + "\u001b[01;36mbzcmp\u001b[0m \u001b[01;32mless\u001b[0m \u001b[01;32msystemd-ask-password\u001b[0m\n", + "\u001b[01;32mbzdiff\u001b[0m \u001b[01;32mlessecho\u001b[0m \u001b[01;32msystemd-escape\u001b[0m\n", + 
"\u001b[01;36mbzegrep\u001b[0m \u001b[01;36mlessfile\u001b[0m \u001b[01;32msystemd-hwdb\u001b[0m\n", + "\u001b[01;32mbzexe\u001b[0m \u001b[01;32mlesskey\u001b[0m \u001b[01;32msystemd-inhibit\u001b[0m\n", + "\u001b[01;36mbzfgrep\u001b[0m \u001b[01;32mlesspipe\u001b[0m \u001b[01;32msystemd-machine-id-setup\u001b[0m\n", + "\u001b[01;32mbzgrep\u001b[0m \u001b[01;32mln\u001b[0m \u001b[01;32msystemd-notify\u001b[0m\n", + "\u001b[01;32mbzip2\u001b[0m \u001b[01;32mlogin\u001b[0m \u001b[01;32msystemd-sysusers\u001b[0m\n", + "\u001b[01;32mbzip2recover\u001b[0m \u001b[01;32mloginctl\u001b[0m \u001b[01;32msystemd-tmpfiles\u001b[0m\n", + "\u001b[01;36mbzless\u001b[0m \u001b[01;32mls\u001b[0m \u001b[01;32msystemd-tty-ask-password-agent\u001b[0m\n", + "\u001b[01;32mbzmore\u001b[0m \u001b[01;32mlsblk\u001b[0m \u001b[01;32mtar\u001b[0m\n", + "\u001b[01;32mcat\u001b[0m \u001b[01;36mlsmod\u001b[0m \u001b[01;32mtempfile\u001b[0m\n", + "\u001b[01;32mchgrp\u001b[0m \u001b[01;32mmkdir\u001b[0m \u001b[01;32mtouch\u001b[0m\n", + "\u001b[01;32mchmod\u001b[0m \u001b[01;32mmknod\u001b[0m \u001b[01;32mtrue\u001b[0m\n", + "\u001b[01;32mchown\u001b[0m \u001b[01;32mmktemp\u001b[0m \u001b[01;32mudevadm\u001b[0m\n", + "\u001b[01;32mcp\u001b[0m \u001b[01;32mmore\u001b[0m \u001b[01;32mulockmgr_server\u001b[0m\n", + "\u001b[01;32mdash\u001b[0m \u001b[37;41mmount\u001b[0m \u001b[37;41mumount\u001b[0m\n", + "\u001b[01;32mdate\u001b[0m \u001b[01;32mmountpoint\u001b[0m \u001b[01;32muname\u001b[0m\n", + "\u001b[01;32mdd\u001b[0m \u001b[01;32mmv\u001b[0m \u001b[01;32muncompress\u001b[0m\n", + "\u001b[01;32mdf\u001b[0m \u001b[01;32mnetworkctl\u001b[0m \u001b[01;32mvdir\u001b[0m\n", + "\u001b[01;32mdir\u001b[0m \u001b[01;36mnisdomainname\u001b[0m \u001b[01;32mwdctl\u001b[0m\n", + "\u001b[01;32mdmesg\u001b[0m \u001b[01;36mpidof\u001b[0m \u001b[01;32mwhich\u001b[0m\n", + "\u001b[01;36mdnsdomainname\u001b[0m \u001b[01;32mps\u001b[0m \u001b[01;36mypdomainname\u001b[0m\n", + "\u001b[01;36mdomainname\u001b[0m 
\u001b[01;32mpwd\u001b[0m \u001b[01;32mzcat\u001b[0m\n", + "\u001b[01;32mecho\u001b[0m \u001b[01;36mrbash\u001b[0m \u001b[01;32mzcmp\u001b[0m\n", + "\u001b[01;32megrep\u001b[0m \u001b[01;32mreadlink\u001b[0m \u001b[01;32mzdiff\u001b[0m\n", + "\u001b[01;32mfalse\u001b[0m \u001b[01;32mrm\u001b[0m \u001b[01;32mzegrep\u001b[0m\n", + "\u001b[01;32mfgrep\u001b[0m \u001b[01;32mrmdir\u001b[0m \u001b[01;32mzfgrep\u001b[0m\n", + "\u001b[01;32mfindmnt\u001b[0m \u001b[01;32mrun-parts\u001b[0m \u001b[01;32mzforce\u001b[0m\n", + "\u001b[37;41mfusermount\u001b[0m \u001b[01;32msed\u001b[0m \u001b[01;32mzgrep\u001b[0m\n", + "\u001b[01;32mgrep\u001b[0m \u001b[01;36msh\u001b[0m \u001b[01;32mzless\u001b[0m\n", + "\u001b[01;32mgunzip\u001b[0m \u001b[01;36msh.distrib\u001b[0m \u001b[01;32mzmore\u001b[0m\n", + "\u001b[01;32mgzexe\u001b[0m \u001b[01;32msleep\u001b[0m \u001b[01;32mznew\u001b[0m\n", + "\u001b[01;32mgzip\u001b[0m \u001b[01;32mstty\u001b[0m\n", + "\u001b[01;32mhostname\u001b[0m \u001b[37;41msu\u001b[0m\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# printenv\n", + "CUDNN_VERSION=7.6.5.32\n", + 
"LS_COLORS=rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=00:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.zst=01;31:*.tzst=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.wim=01;31:*.swm=01;31:*.dwm=01;31:*.esd=01;31:*.jpg=01;35:*.jpeg=01;35:*.mjpg=01;35:*.mjpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.m4a=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.oga=00;36:*.opus=00;36:*.spx=00;36:*.xspf=00;36:\n", + "LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64\n", + "LESSCLOSE=/usr/bin/lesspipe %s %s\n", + "LANG=en_US.UTF-8\n", + "HOSTNAME=a2026fa2008e\n", + "OLDPWD=/\n", + "CLOUDSDK_CONFIG=/content/.config\n", + "NVIDIA_VISIBLE_DEVICES=all\n", + "DATALAB_SETTINGS_OVERRIDES={\"kernelManagerProxyPort\":6000,\"kernelManagerProxyHost\":\"172.28.0.3\",\"jupyterArgs\":[\"--ip=\\\"172.28.0.2\\\"\"]}\n", + "ENV=/root/.bashrc\n", + "PAGER=cat\n", + "NCCL_VERSION=2.4.8\n", + 
"TF_FORCE_GPU_ALLOW_GROWTH=true\n", + "JPY_PARENT_PID=24\n", + "NO_GCE_CHECK=True\n", + "PWD=/content/sample_data\n", + "HOME=/root\n", + "LAST_FORCED_REBUILD=20191217\n", + "CLICOLOR=1\n", + "DEBIAN_FRONTEND=noninteractive\n", + "LIBRARY_PATH=/usr/local/cuda/lib64/stubs\n", + "GLIBCPP_FORCE_NEW=1\n", + "TBE_CREDS_ADDR=172.28.0.1:8008\n", + "SHELL=/bin/bash\n", + "TERM=xterm-color\n", + "GCS_READ_CACHE_BLOCK_SIZE_MB=16\n", + "PYTHONWARNINGS=ignore:::pip._internal.cli.base_command\n", + "MPLBACKEND=module://ipykernel.pylab.backend_inline\n", + "CUDA_PKG_VERSION=10-1=10.1.243-1\n", + "CUDA_VERSION=10.1.243\n", + "NVIDIA_DRIVER_CAPABILITIES=compute,utility\n", + "SHLVL=3\n", + "PYTHONPATH=/env/python\n", + "NVIDIA_REQUIRE_CUDA=cuda>=10.1 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=396,driver<397 brand=tesla,driver>=410,driver<411\n", + "COLAB_GPU=0\n", + "GLIBCXX_FORCE_NEW=1\n", + "PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/tools/node/bin:/tools/google-cloud-sdk/bin:/opt/bin\n", + "LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4\n", + "LESSOPEN=| /usr/bin/lesspipe %s\n", + "GIT_PAGER=cat\n", + "_=/usr/bin/printenv\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# echo $SHELL\n", + "/bin/bash\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# \n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# \n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# \n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# ^C\n" + ], + "name": 
"stdout" 
+ } 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "q-4hfZBywW25", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "While in this instance your shell is running in this notebook, you can also run terminals natively on your own computer. On Linux or MacOS, you just have to run a program called terminal. In Windows you can start a \"command prompt\". \n", 
+ "\n", 
+ "\n", 
+ "Type in \"ls\" into the terminal and press enter. The shell will find a program called \"ls\", a standard tool in Unix, and run it. \"ls\" lists the contents (files and directories) of your current directory. If you are just starting in this course, you probably only see the git repository you cloned. \n", 
+ "\n", 
+ "A subtle point to realize here is that while the terminal is running in the browser that is running on the computer in front of you, the shell is actually running on a machine on google hardware. The shell prompt typically displays the name of the machine you are using. What you are not seeing is that there is an intermediate program between the terminal running on your computer and the shell running on google. This intermediary program is taking your input from the terminal, sending it over the network to google, and bringing back the responses for your terminal to display.\n", 
+ "\n", 
+ "A bit of extra information. If you start a terminal on your own computer, the shell runs locally. The \"ls\" command would then list contents of a directory on your computer. You can typically connect to Unix computers by invoking a shell running on that machine over the network. In this case, you would have to initiate this intermediary program yourself. The program is called \"ssh\" (secure shell). You can \"ssh\" to another machine from your machine, by simply typing \"ssh\" followed by the machine name or IP address. Most likely you would be prompted for a password, after which you would be dropped into the prompt of a shell running on the remote machine. 
\n" 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "51Eya4LBqvzs", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Programs and Environment Variables\n", 
+ "\n", 
+ "You have a listing of your current directory, but you don't know where that directory resides. You can see what directory you are in using the command \"pwd\" (print working directory). Issue the command and look at the response. You'll get a slash (\"/\") separated list, known as the path, of the directory hierarchy of your current working directory. On Colab, this will start with \"/content\".\n", 
+ "\n", 
+ "Now back to thinking about the command prompt. Since \"ls\" is a program, it must be stored somewhere. It is clearly not in your working directory, because you didn't see it when you executed \"ls\". We can ask the shell to tell us where it found \"ls\" using the \"which ls\" command. Note that \"which\" is also a program. \"which ls\" comes back with \"/bin/ls\", telling you the \"ls\" program is sitting in the \"/bin\" directory of the system. \n", 
+ "\n", 
+ "Let's see what else is in there by issuing a \"ls /bin\" command. You will get a long list of programs. You can run any of these programs by just typing their names and pressing enter. You may be able to guess what some of these programs do, but if you want to know, most of them provide you help, using the \"--help\" or \"-h\" flag. For example execute \"ls --help\". For more information about a program or command, you can use Unix's manual pages using the \"man\" command. Try typing \"man ls\". Note that you will need to press space to scroll through lengthy manual pages and \"q\" to exit back to the shell prompt. \n", 
+ "\n", 
+ "Another interesting command is \"echo\". \"echo\" simply prints whatever you put after it to the screen. Try executing \"echo Hello World.\"\n", 
+ "\n", 
+ "At this point, you may wonder how was it that the shell knew to look for programs in \"/bin\"? 
The shell keeps a list of places to look for programs in an environment variable with the name \"PATH\". The shell keeps a table that maps string variable names to string expressions. When the shell starts, its configuration files set some environment variables that it uses. You can see the full list of defined environment variables using the command \"printenv\".\n", 
+ "\n", 
+ "You can use an environment variable in a shell by prepending the name of the variable with a dollar sign character (\"\\$\"). So you can print out the PATH environment variable using the command \"echo $PATH\". What you will see is a colon (\":\") separated list of directories that the shell will search (in order) whenever you type in anything.\n", 
+ "\n", 
+ "You can set your own environment variables. Different shells have different syntax. Let's first figure out what shell we are running. \n", 
+ "\n", 
+ "*Exercise 1:* Use the \"echo\" command to print out the value of the \"SHELL\" environment variable:" 
+ ] 
+ }, 
+ { 
+ "cell_type": "code", 
+ "metadata": { 
+ "id": "QtAUpDAWNjyj", 
+ "colab_type": "code", 
+ "colab": {} 
+ }, 
+ "source": [ 
+ "!/bin/bash -noediting" 
+ ], 
+ "execution_count": 0, 
+ "outputs": [] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "YoEgruUhqvzw", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Navigating Directories\n", 
+ "\n", 
+ "You can change your current directory using the \"cd\" shell command. Note that \"cd\" is not a Unix program. Once in a directory, you can use the \"ls\" command to list the contents or \"pwd\" to remind yourself of your current working directory. You can move back one level in your current directory hierarchy using \"cd ..\". In general \"..\" represents the path to a directory one level above your current directory, \"../..\" represents two levels up, and so on. 
\".\" represents the current directory. If you look at the PATH environment variable, you'll notice that the last item is \".\", telling the shell to look into your current directory for commands. Finally the \"~\" character always refers to your home directory.\n", + "\n", + "Some other file manipulation commands:\n", + "\n", + " - The \"mkdir\" command creates new directories. \n", + " - \"cp\" and \"mv\" allow you to copy and move (or rename) files, taking 2 arguments: the original path/filename and the target path/filename. \n", + " - The \"rm\" and \"rmdir\" commands remove (delete) files and directories.\n", + "\n", + "\n", + "*Exercise 2:* Using the \"cd\" command, navigate into \"drive/My\\ Drive\" directory. Create a new directory called \"Data-1441\", and another directory inside \"Data-1441\" called \"Lab-1-Solutions\". Perform the rest of the lab in this directory." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "A16VzZ3G0J8x", + "colab_type": "code", + "outputId": "d599ec86-6d8a-49cd-af4b-cc6721b318a6", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + } + }, + "source": [ + "!/bin/bash --noediting" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "bash: cannot set terminal process group (120): Inappropriate ioctl for device\n", + "bash: no job control in this shell\n", + "\u001b]0;root@a2026fa2008e: /content/sample_data\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/sample_data\u001b[00m# cd ../drive/My\\ Drive\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive\u001b[00m# mkdir Data-1441\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive\u001b[00m# cd Data-1441\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My 
Drive/Data-1441\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441\u001b[00m# ls\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441\u001b[00m# mkdir Lab-1-Solutions\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441\u001b[00m# exit\n", + "exit\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "o38c4lbsqvzy", + "colab_type": "text" + }, + "source": [ + "## Exploring Unix Filesystem\n", + "\n", + "You can look at the root directory of the system by issuing \"ls /\". As explained in lecture, Unix uses the file system to communicate with devices and between processes. \"/etc\" keeps the configuration files of the system. \"/bin\" and \"/sbin\" store most of the standard Unix programs. \"/usr\" stores installes programs and their associate files, with \"/usr/bin\" usually storing the commands you can run. \n", + "\n", + "*Exercise 3:* List the \"/dev\" directory. How many SSD storage devices do you see? How many partitions does each device have? 
(Answer in box below)" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "yNj2LXzP2ksl", + "colab_type": "code", + "outputId": "3b32b8ca-0a31-4e55-ecab-1ea3b0e9aa44", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 425 + } + }, + "source": [ + "!/bin/bash --noediting\n", + "# 1 storage device, 12 partitons" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "bash: cannot set terminal process group (120): Inappropriate ioctl for device\n", + "bash: no job control in this shell\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# lssda\n", + "bash: lssda: command not found\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# lsda\n", + "bash: lsda: command not found\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# lsblk\n", + "NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT\n", + "loop0 7:0 0 110G 0 loop \n", + "sda 8:0 0 120G 0 disk \n", + "├─sda1 8:1 115.9G 0 part /etc/hosts\n", + "├─sda2 8:2 0 16M 0 part \n", + "├─sda3 8:3 0 2G 0 part \n", + "├─sda4 8:4 0 16M 0 part \n", + "├─sda5 8:5 0 2G 0 part \n", + "├─sda6 8:6 512B 0 part \n", + "├─sda7 8:7 0 512B 0 part \n", + "├─sda8 8:8 16M 0 part \n", + "├─sda9 8:9 0 512B 0 part \n", + "├─sda10 8:10 0 512B 0 part \n", + "├─sda11 8:11 8M 0 part \n", + "└─sda12 8:12 0 32M 0 part \n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# exit\n", + "exit\n" + ], + "name": 
"stdout" 
+ } 
+ ] 
+ }, 
+ { 
+ "cell_type": "markdown", 
+ "metadata": { 
+ "id": "7P9EG0KOqvz2", 
+ "colab_type": "text" 
+ }, 
+ "source": [ 
+ "## Text File Manipulation\n", 
+ "\n", 
+ "As explained in lecture, Unix stores most information in text files. For example, the list of all users and their home directories is stored in \"/etc/passwd\". Let's get some familiarity with the most commonly used commands to manipulate files.\n", 
+ "\n", 
+ " - You can see the contents of a file using the \"cat\" (concatenate) command. Try executing \"cat /etc/passwd\". You'll get a huge list that will go by your screen quickly. \n", 
+ " \n", 
+ " - To go through the file page by page, you can use the \"less\" or \"more\" commands. \n", 
+ " \n", 
+ " - You can see the first or last N (N=10 by default) lines of a file using \"head\" or \"tail\" commands. For example \"tail -20 /etc/passwd\" will list the last 20 lines. \n", 
+ " \n", 
+ " - You can search a text file using the \"grep\" command, which takes a string keyword as the first argument and a filename as the second, and by default prints out every line in the file that contains the string. So for example you can do \"grep \\$USER /etc/passwd\" to find the line corresponding to your account. Some useful flags: \n", 
+ " \n", 
+ " - \"-i\" ignores the case of the keyword\n", 
+ " - \"-v\" displays those lines that do NOT match \n", 
+ " - \"-n\" precedes each matching line with the line number \n", 
+ " - \"-c\" prints only the total count of matched lines \n", 
+ " \n", 
+ " For example \"grep -c \\$USER /etc/passwd\" should show that you are in the password file just once. \n", 
+ " \n", 
+ " - The \"wc\" (word count) command counts the number of lines, words, and characters in a file. By default \"wc\" gives you all three numbers, but the \"-w\", \"-l\", or \"-c\" flags restrict the output to just one of them. \n", 
+ "\n", 
+ "*Exercise 4:* Count how many lines in the password file contain the letter \"w\". 
" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "UlsANMuf2qMs", + "colab_type": "code", + "outputId": "15574af9-3dd4-4634-efe4-9804c090b6ac", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + } + }, + "source": [ + "!/bin/bash --noediting\n", + "# 3" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "bash: cannot set terminal process group (120): Inappropriate ioctl for device\n", + "bash: no job control in this shell\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# grep -ic w /etc/passwd\n", + "3\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# exit\n", + "exit\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SZuhLbD8qvz5", + "colab_type": "text" + }, + "source": [ + "## Redirection\n", + "\n", + "Unix provides programs \"pipes\" for input and output. Most of what you see on the screen when you run a program was written to the \"stdout\" (standard output) pipe. Other pipes are \"stdin\" (standard input) and \"stderr\" (standard error), where error messages are written.\n", + "\n", + "As discussed in lecture, the basic commands of are simple, but you can chain them to do complicated things. Redirection is how you chain these commands, directing the output of one command to the input of the next.\n", + "\n", + "As an example, consider the \"cat\" command. Cat takes stdin and outputs it to stdout. Type \"cat\" and press enter and confirm. You can get back to the command prompt by pressing \"control-c\" (sends terminate singal) or \"control-d\" (end of file character). 
Note that from now on we will use the convention: \"control-d\" = \"^D\"\n", + "\n", + "*Exercise 5a:* Using \"cat\" and indirection you can write things into a file. The \">\" symbol directs stdout into a file. Try \"cat > favorite-colors-list.txt\" and then type in your 3 favorite colors, each on it's own line. Use \"^D\" to end your input." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "H5vxtcXnqvz6", + "colab_type": "text" + }, + "source": [ + "Use \"cat\", \"more\", or \"less\" to confirm that you file is as you expect it. \">>\" allows you to append to the file. \n", + "\n", + "*Exercise 5b:* Append 2 more colors to your file." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "twRKNaGy3XGw", + "colab_type": "code", + "outputId": "a1ddc514-a73c-4686-e2b1-4f3777fe3000", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 221 + } + }, + "source": [ + "!/bin/bash --noediting\n", + "# white\n", + "# black\n", + "# red\n", + "# blue\n", + "# orange" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "bash: cannot set terminal process group (120): Inappropriate ioctl for device\n", + "bash: no job control in this shell\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# cat > favorite-colors-list.txt\n", + "white\n", + "black\n", + "red\n", + "\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# cat >> favorite-colors-list.txt\n", + "blue\n", + "orange\n", + "\n", + "\u001b]0;root@a2026fa2008e: /content/drive/My Drive/Data-1441/Lab-1-Solutions\u0007\u001b[01;32mroot@a2026fa2008e\u001b[00m:\u001b[01;34m/content/drive/My Drive/Data-1441/Lab-1-Solutions\u001b[00m# ^C\n" + ], + "name": 
"stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DZODNKiAqvz8", + "colab_type": "text" + }, + "source": [ + "The \"sort\" command sorts what it sees on stdin. Instead of taking input from the terminal, you can direct the shell to take stdin from a file using \"<\". Try \"sort < favorite-color-list.txt\" and \"sort < favorite-color-list.txt > sorted-favorite-color-list.txt\".\n", + "\n", + "Finally, instead of piping input / output into files, you can directly chain one program into another using \"|\". So for example, you can do \"cat /etc/passwd | grep -i \\$USER | wc -l\". \n", + "\n", + "*Exercise 5c:* Use indirection to count the number of users on TACC with your first name. Copy the command you used into box below." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "oP9XlZl_3iZD", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!/bin/bash --noediting" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "v5IaZXNyqvz_", + "colab_type": "text" + }, + "source": [ + "## Git\n", + "\n", + "`git` is a Version Control System (VCS), typically used to organize the source code of software project but also good source of documents or web-pages. An instance of `git` server stores repositories, each typically containing the code relevant to a specific project. Users create local `clones` of repositories, change and develop the local copies of the code, `commit` the changes to their local repository, `push` to the server as a contribution, \n", + "`pull` updates from the server, and `merge` changes between local and remote versions. \n", + "\n", + "Besides cloning, repositories can be branched or forked. A repository generally starts with a `master` branch that evolves as push requests are merged in. Creating a new branch from an existing branch creates a snapshot of the which can evolve independently or be merged in later. 
Branches are easy to make and delete, and can serve various purposes. They can represent a stable version of a software package, or a parallel development for a different operating system. A fork of a repository is a standalone instance of the repository which can be stored and managed independently from the original, where you can work independently without constraints or interference. \n", + "\n", + "[GitHub](github.com) provides a massive publicly accessible instance of a `git` system. Besides sharing code, projects can be developed by the open source community. It provides tools for managing your repository and a wiki for documentation. Contributions to public software on GitHub generally require making a merge request which would be judged by the managers of the repository. That's why most software packages encourage you to create a new fork, so you can work independently.\n", + "\n", + "Let's take a look at some repositories:\n", + "\n", + "* [This class](https://github.com/afarbin/DATA1401-Spring-2020)\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "J_R64sQDqv0A", + "colab_type": "text" + }, + "source": [ + "## Plan\n", + "\n", + "You made a clone of the class repository at the start of this lab. We will create a new fork where you can keep track of and submit your work, following [these instructions](https://help.github.com/articles/fork-a-repo/).\n", + "\n", + "Go to github.com and log in.\n", + "\n", + "Next, let's create a fork of the [class repository](https://github.com/afarbin/DATA1401-Spring-2019). Click the link and press the \"Fork\" button on the top right. Select your repository as where you want to place the fork.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "edTvE6rOqv0C", + "colab_type": "text" + }, + "source": [ + "Now we will check out your fork in your Google Drive / Colab.\n", + "\n", + "Note: Jupyter allows you to run shell directly in a notebook. 
We will use `!` and `%` to call shell commands directly in this notebook. Follow along yourself. Either create a new notebook or open a terminal. \n", + "\n", + "Start by listing the contents of your current directory." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "e5tXg0f8qv0D", + "colab_type": "code", + "outputId": "6fa0b41a-aeb3-4de6-bd5d-511ca8afd957", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 442 + } + }, + "source": [ + "%cd /content/drive/My\\ Drive\n", + "!ls" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/drive/My Drive\n", + " 0E84CCE9-ED53-4F31-B77C-6496E957ED04.jpeg\n", + " 51FD2468-94A1-4F13-974E-508BDD804C73.jpeg\n", + " A6A4BE08-543B-4088-941D-B6EF032E84E3.jpeg\n", + "'All Files'\n", + " Biothing.gdoc\n", + "'Colab Notebooks'\n", + " Conquest_1.8.zip\n", + "'Conquest Models.zip'\n", + "'Copy of The One Page Novel Scene Spreadsheet.gsheet'\n", + "'Copy of Vicentio.gslides'\n", + " Data-1441\n", + " Factorio.zip\n", + "'Gears v27.dwg'\n", + "'Gears v31.dwg'\n", + "'HWN Proposed Silent Auction Basket Themes.gdoc'\n", + "'May 2016 Newsletter.pdf'\n", + " MCArch\n", + "'[MV] IU(아이유) _ Friday(금요일에 만나요) (Feat. 
Jang Yi-jeong(장이정) of HISTORY(히스토리)).mp3'\n", + "'Provide Access to Clean Water.gslides'\n", + " resources.zip\n", + "\"Samson's Reflection.gdoc\"\n", + "'Text File (1).txt'\n", + "'Text File.txt'\n", + " TIG-1_zpse93244e6.JPG.crdownload\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "WYsyYcg1qv0J", + "colab_type": "text" + }, + "source": [ + "Make a new directory:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Z7noY1hMqv0L", + "colab_type": "code", + "outputId": "c215b7ad-276d-4e34-baba-66b394c877ab", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "!mkdir Data-1401-Repo\n", + "%cd Data-1401-Repo" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/drive/My Drive/Data-1401-Repo\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fwsBdTnYqv0Q", + "colab_type": "text" + }, + "source": [ + "From the github page for your fork, press the green \"Clone or download\" button and copy the URL.\n", + "\n", + "Goto to your notebook and use the following command to clone the repository, pasting the URL you just copied:\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "8w42MH6Jqv0S", + "colab_type": "code", + "outputId": "e274141d-df09-4a6a-8599-e8d9867e6320", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + } + }, + "source": [ + "# What you past here should look like:\n", + "!git clone https://github.com/sammysamsamsama/DATA1401-Spring-2020.git" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'DATA1401-Spring-2020'...\n", + "remote: Enumerating objects: 24, done.\u001b[K\n", + "remote: Counting objects: 100% (24/24), done.\u001b[K\n", + "remote: Compressing objects: 100% (16/16), done.\u001b[K\n", + "remote: Total 24 (delta 3), reused 23 (delta 2), pack-reused 0\u001b[K\n", + "Unpacking 
objects: 100% (24/24), done.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cOAuqTVUqv0V", + "colab_type": "text" + }, + "source": [ + "Go into the directory:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "b1Ew4tEZqv0X", + "colab_type": "code", + "outputId": "dd96ce57-cc07-472a-81c2-8e99bceb9276", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + } + }, + "source": [ + "%cd DATA1401-Spring-2020\n", + "!ls" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/drive/My Drive/Data-1401-Repo/DATA1401-Spring-2020\n", + "Labs Lectures\tREADME.md syllabus.pdf\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IrhWToc-qv0a", + "colab_type": "text" + }, + "source": [ + "We will now connect your fork to the original so you can pull changes from there. \n", + "\n", + "Check remote status:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "JxtMYR-9qv0c", + "colab_type": "code", + "outputId": "94194753-9622-4048-c4e2-e5a7ef408b4e", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + } + }, + "source": [ + "!git remote -v" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "origin\thttps://github.com/sammysamsamsama/DATA1401-Spring-2020.git (fetch)\n", + "origin\thttps://github.com/sammysamsamsama/DATA1401-Spring-2020.git (push)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9ud3X0fBqv0f", + "colab_type": "text" + }, + "source": [ + "Now use the original class URL to set your upstream:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pgJlKxBqqv0h", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!git remote add upstream https://github.com/afarbin/DATA1401-Spring-2020.git" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + 
"id": "id2yUEt9qv0k", + "colab_type": "code", + "outputId": "8afb6720-d840-4dfa-f46a-fd14eba94241", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 85 + } + }, + "source": [ + "!git remote -v" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "origin\thttps://github.com/sammysamsamsama/DATA1401-Spring-2020.git (fetch)\n", + "origin\thttps://github.com/sammysamsamsama/DATA1401-Spring-2020.git (push)\n", + "upstream\thttps://github.com/afarbin/DATA1401-Spring-2020.git (fetch)\n", + "upstream\thttps://github.com/afarbin/DATA1401-Spring-2020.git (push)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "sAkgeJ6Iqv0n", + "colab_type": "text" + }, + "source": [ + "From now on, you can get the newest version of class material by using:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "AGDsfTFLqv0o", + "colab_type": "code", + "outputId": "517b1d5d-059d-46b0-b416-f4b0c3f5d17f", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "!git pull" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Already up to date.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "u9RAhs5b4vXY", + "colab_type": "text" + }, + "source": [ + "We will submit your Lab 1 using git at the next Lab." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "PPfGmFQI40HR", + "colab_type": "code", + "colab": {} + }, + "source": [ + "" + ], + "execution_count": 0, + "outputs": [] + } + ] +} \ No newline at end of file From aad2a05d98e6934ad58a04e46c26dcb4437a3244 Mon Sep 17 00:00:00 2001 From: Samson <45101270+sammysamsamsama@users.noreply.github.com> Date: Fri, 7 Feb 2020 14:56:04 -0600 Subject: [PATCH 02/24] Lab-2 solution --- Labs/Lab-2/Copy_of_Lab_2.ipynb | 1263 ++++++++++++++++++++++++++++++++ 1 file changed, 1263 insertions(+) create mode 100644 Labs/Lab-2/Copy_of_Lab_2.ipynb diff --git a/Labs/Lab-2/Copy_of_Lab_2.ipynb b/Labs/Lab-2/Copy_of_Lab_2.ipynb new file mode 100644 index 0000000..d09d7b5 --- /dev/null +++ b/Labs/Lab-2/Copy_of_Lab_2.ipynb @@ -0,0 +1,1263 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "colab": { + "name": "Copy of Lab-2.ipynb", + "provenance": [], + "collapsed_sections": [] + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "uk7yc0nadBGa", + "colab_type": "text" + }, + "source": [ + "# Lab 2\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github//afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-2/Lab-2.ipynb)\n", + "\n", + "## Submitting lab solutions\n", + "\n", + "At the end of the previous lab, you should have set up a \"Solutions\" directory in your Google Drive, with a fork of the class git repository that pull from Dr. Farbin's verison and pushes to your own fork. \n", + "\n", + "Unfortunately due to a typo in the previous lab, you probably forked the 2019 version of the gitlab repository for this course. Unless you noticed and corrected the error, you'll have to fork again.\n", + "\n", + "In addition, due to some problems with the setup in Google Colab, we will be submitting our solutions to your fork using the web interface. 
Instructions on how to use the command-line are in this notebook, but we suggest you do not follow them unless you are working in a jupyter notebook and not Google Colab." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "J4gOp2tXCSLG", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 122 + }, + "outputId": "f491886d-d46c-4e45-e2d5-b63435df0493" + }, + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n", + "\n", + "Enter your authorization code:\n", + "··········\n", + "Mounted at /content/drive\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "OMNaOnRksNK3", + "colab_type": "text" + }, + "source": [ + "You may also choose to delete the fork from your GitHub account. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "J_R64sQDqv0A" + }, + "source": [ + "## Repeating last steps of Lab 1\n", + "\n", + "### Create your own fork\n", + "We will create a new fork where you can keep track and submit your work, following [these instructions](https://help.github.com/articles/fork-a-repo/).\n", + "\n", + "Goto to github.com and log in.\n", + "\n", + "Next, create a fork of the [2020 class repository](https://github.com/afarbin/DATA1401-Spring-2020). Click the link and press the \"Fork\" button on the top right. 
Select your repository as where you want to place the fork.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "edTvE6rOqv0C" + }, + "source": [ + "### Make a local clone (Advanced)\n", + "\n", + "Before we get started, please mount your Google Drive by clicking the file icon on the left, then clicking \"Mount Drive\", and following the instructions as you did in the previous lab.\n", + "\n", + "If you did complete Lab 1 and therefore created a 2019 fork and a local clone in your Google Drive, delete the local clone:\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "2u6B-rfNr1wN", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!rm -rf drive/My\\ Drive/Data-1401-Repo" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BDVI5nu8-2RH", + "colab_type": "text" + }, + "source": [ + "Now we will check out your fork in your Google Drive / Colab. If you will be doing everything on your own computer instead of Google Colab/Drive, you are welcome to install Git on your computer and perform the following steps (appropriately modified) on your computer instead.\n", + "\n", + "Start by listing the contents of your current directory."
+ ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "e5tXg0f8qv0D", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 459 + }, + "outputId": "2800d429-d35d-4cad-a756-892511d54780" + }, + "source": [ + "%cd /content/drive/My\\ Drive\n", + "!ls" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/drive/My Drive\n", + " 0E84CCE9-ED53-4F31-B77C-6496E957ED04.jpeg\n", + " 51FD2468-94A1-4F13-974E-508BDD804C73.jpeg\n", + " A6A4BE08-543B-4088-941D-B6EF032E84E3.jpeg\n", + "'All Files'\n", + " Biothing.gdoc\n", + "'Colab Notebooks'\n", + " Conquest_1.8.zip\n", + "'Conquest Models.zip'\n", + "'Copy of The One Page Novel Scene Spreadsheet.gsheet'\n", + "'Copy of Vicentio.gslides'\n", + " Data-1401-Repo\n", + " Data-1441\n", + " Factorio.zip\n", + "'Gears v27.dwg'\n", + "'Gears v31.dwg'\n", + "'HWN Proposed Silent Auction Basket Themes.gdoc'\n", + "'May 2016 Newsletter.pdf'\n", + " MCArch\n", + "'[MV] IU(아이유) _ Friday(금요일에 만나요) (Feat. 
Jang Yi-jeong(장이정) of HISTORY(히스토리)).mp3'\n", + "'Provide Access to Clean Water.gslides'\n", + " resources.zip\n", + "\"Samson's Reflection.gdoc\"\n", + "'Text File (1).txt'\n", + "'Text File.txt'\n", + " TIG-1_zpse93244e6.JPG.crdownload\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "WYsyYcg1qv0J" + }, + "source": [ + "Make a new directory:" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "Z7noY1hMqv0L", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "97d59400-1725-49b2-d381-cb2749516a1b" + }, + "source": [ + "#!mkdir Data-1401-Repo\n", + "%cd Data-1401-Repo" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/drive/My Drive/Data-1401-Repo\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "fwsBdTnYqv0Q" + }, + "source": [ + "From the github page for your fork, press the green \"Clone or download\" button and copy the URL.\n", + "\n", + "Goto to your notebook and use the following command to clone the repository, pasting the URL you just copied:\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab_type": "code", + "id": "8w42MH6Jqv0S", + "colab": {} + }, + "source": [ + "# What you past here should look like:\n", + "#!git clone https://github.com/ origin/master\n", + "Updating 21f393b..012c415\n", + "Fast-forward\n", + " Labs/Lab-1/Copy_of_Lab_1.ipynb | 1138 \u001b[32m++++++++++++++++++++++++++++++++++++++++\u001b[m\n", + " 1 file changed, 1138 insertions(+)\n", + " create mode 100644 Labs/Lab-1/Copy_of_Lab_1.ipynb\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "u9RAhs5b4vXY" + }, + "source": [ + "You should be setup now.\n", + "\n", + "## Make your for Private\n", + "\n", + "As a final step, go back to your fork in GitHub 
and click the \"gear\" icon to change the settings. Select \"Options\" on the left and scroll all the way down. Then click on \"Make Private\" to make your repository private. \n", + "\n", + "Next select the collaborators on the left and add your Professor and TA as collaborators. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "i1q1wv0IqqBL", + "colab_type": "text" + }, + "source": [ + "## Working on and Submitting Labs\n", + "\n", + "As mentioned in class, you are welcome to not use Google Colab and instead install and use your own instance of Jupyter to work on the labs.\n", + "\n", + "No matter where you do you work, you will submit your work into your fork of the class repository in GitHub. \n", + "\n", + "### Updating your fork\n", + "\n", + "The class repository will be updated several times a week. But your fork will not be updated, unless you do so explicitly. There are two ways for you to update your fork:\n", + "\n", + "1. Use GitHub's web interface to make a pull request from the course base to your fork.\n", + " * Goto your fork in GitHub.\n", + " * Press \"New Pull Request\"\n", + " * You should see option to select base/head repository that allows you to select \"afarbin/Data1401-Spring-2020\" as the base. If not press \"compare accross forks\".\n", + " * Make sure you select your fork as the head.\n", + " * Press \"Create a pull request\".\n", + " * Press \"Merge pull request\".\n", + " * Press \"Confirm pull\".\n", + "\n", + "2. Go to your clone of your fork in Google Drive (or local on your computer) that setup above and do a `git pull`. (Advanced)\n", + "\n", + "### Working on your labs\n", + "\n", + "If you are working in Google Colab, you should immediately save a copy of you labs into your Google Drive as soon as you start working, and save frequently so you don't loose your work. These copies of the labs will appear in drive/My Drive/Colab Notebooks. 
\n", + "\n", + "### Submitting your labs\n", + "\n", + "Once you are done with a lab and are ready to submit, you have several options:\n", + "\n", + "1. You can download the lab to your local computer and them commit to your GitHub fork via GitHub's web interface. \n", + "\n", + " * Download by selecting \"Download .ipynb\" from the file menu.\n", + " * Appropriately rename the downloaded file (for example Lab-1-Solutions.ipynb).\n", + " * On github, navigate to the directory for the specific lab.\n", + " * Click on upload and upload your solutions.\n", + "\n", + "2. (Advanced) You can copy the lab into your fork and commit/push using the command-line. Here's how:\n", + "\n", + " * Using \"cd\" command navigate to the clone of your fork in Google Drive (or local) and do `git remote -v` to verify that things are correctly setup.\n", + "\n", + " * If you are working on Google Colab, your copy of lab with your solutions is stored in contents/drive/My Drive/Colab Notebooks. Locate the your lab 1 notebook and copy and rename it into the same directory in your fork. \n", + "\n", + "For example: (Note we are using the full paths here just to make sure everything works... but if you are already in the fork directory, you don't need the full path everywhere)." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "CtPQXoQMdBGg", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!mkdir /content/drive/My\\ Drive/Data-1401-Repo/DATA1401-Spring-2020/Labs/Lab-2/\n", + "!cp /content/drive/My\\ Drive/Colab\\ Notebooks/Copy\\ of\\ Lab-2.ipynb /content/drive/My\\ Drive/Data-1401-Repo/DATA1401-Spring-2020/Labs/Lab-2/Lab-2-Solution.ipynb" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "b04FrL7sdBGr", + "colab_type": "text" + }, + "source": [ + "The reason we are renaming the file is due to a complication that you may experience when pulling updates into your fork. 
If a file was updated after your last pull and modifications, your modifications will likely cause a merge conflict, which can be a headache to resolve. Creating a new file side-steps this problem.\n", + "\n", + "Now that you have a new file in your fork, add the file into the local repository:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "2o9JmQ1QdBGs", + "colab_type": "code", + "colab": {} + }, + "source": [ + "#!git add /content/drive/My\\ Drive/Data-1401-Repo/DATA1401-Spring-2020/Labs/Lab-2/Lab-2-Solution.ipynb\n", + "!git add Labs/Lab-2/Lab-2-Solution.ipynb" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HX4xOctmdBG0", + "colab_type": "text" + }, + "source": [ + "You only need to add a file once. Next, commit this file to your local copy of the repository. If this is the first time you are doing a commit, you will have to tell git your GitHub username." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Tn9jf5VXdBG1", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Uncomment and modify lines below if needed\n", + "!git config --global user.email \"sammyson79@gmail.com\"\n", + "!git config --global user.name \"Samson Nguyen\"\n", + "#!git commit -a -m \"My Lab 2 Solutions\"" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JfKXTlv1dBG5", + "colab_type": "text" + }, + "source": [ + "You are required to provide a text message when committing files, and the `-m` option is the nicest way to do it. If you do not supply a message, you will find yourself in a text editor (most likely vi), which is difficult to use, and be forced to enter a message.\n", + "\n", + "You will need to commit your changes every time you wish to submit any changes. 
So if you keep working or come back to a lab, make sure you commit your changes.\n", + "\n", + "Now that you committed your changes, you will need to push these changes to the fork of the package in your GitHub account:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "L7UZOLYAdBG6", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "220f9a52-9653-4398-aea7-dbebf928078e" + }, + "source": [ + "!git push" + ], + "execution_count": 28, + "outputs": [ + { + "output_type": "stream", + "text": [ + "fatal: could not read Username for 'https://github.com': No such device or address\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3dI_D9PAdBG-", + "colab_type": "text" + }, + "source": [ + "You will likely need to supply your git username and password.\n", + "\n", + "Your lab is now available for grading. Remember that unless you commit and push your work, it will not be seen.\n", + "\n", + "From now on, use this procedure to submit your solutions to labs, including the remainder of this lab. \n", + "\n", + "You can work in your Solutions directory if you like. But note that it may be a good practice to use the \"File\" menu to duplicate and rename labs when you first start. Besides letting you avoid having to do the copy later, you will have a copy of the original notebook, in case you delete something, and can pull updates in case of bug fixes." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "q-gMWiE4dBG_", + "colab_type": "text" + }, + "source": [ + "## Python Programming\n", + "\n", + "In the remainder of this lab you will practice Python by solving some simple exercises. \n", + "\n", + "*Exercise 1:* Write 2 functions `even(x)` and `odd(x)` that take an integer and return True if the input is even or odd, respectively, otherwise return False. Use the cell below for your solution. Use the subsequent cell to demonstrate that your solution works. 
Feel free to add additional cell as needed using the \"+\" button on the button bar above. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "jwN5jff1dBG_", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def even(x):\n", + " return x & 1 == 0\n", + "def odd(x):\n", + " return x & 1 == 1" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "xNJAcodhdBHB", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 187 + }, + "outputId": "0ba4291e-08cc-4bf4-b430-65e995ae24ae" + }, + "source": [ + "# Test your solution here\n", + "for i in range(10):\n", + " print(i, even(i), odd(i))" + ], + "execution_count": 37, + "outputs": [ + { + "output_type": "stream", + "text": [ + "0 True False\n", + "1 False True\n", + "2 True False\n", + "3 False True\n", + "4 True False\n", + "5 False True\n", + "6 True False\n", + "7 False True\n", + "8 True False\n", + "9 False True\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "KL_pyzG8dBHD", + "colab_type": "text" + }, + "source": [ + "*Exercise 2:* Write a function that takes a list of numbers as input and returns a list of the subset of elements that are less that 10. Test your solution." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "g8nt0wnldBHE", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def less_than_ten(nums):\n", + " return [num for num in nums if num < 10]" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "__HTUWA1dBHH", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + }, + "outputId": "73bb8c1d-8fa3-49a5-f4a3-5667c44e3667" + }, + "source": [ + "# Test your solution here\n", + "test_nums = [x**2 for x in range(11)]\n", + "print(test_nums)\n", + "print(less_than_ten(test_nums))" + ], + "execution_count": 86, + "outputs": [ + { + "output_type": "stream", + "text": [ + "[0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100]\n", + "[0, 1, 4, 9]\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "T0cx91JudBHK", + "colab_type": "text" + }, + "source": [ + "*Exercise 3:* Write a function that takes a number `x_max` as input and returns a function that performs the same task as exercise 2, but for `x_max` instead of 10." 
def f(x_max):
    """Return a function that filters a list to values strictly below x_max.

    Generalizes the fixed-threshold filter of the previous exercise:
    f(10) behaves exactly like less_than_ten.
    """
    return lambda nums: [value for value in nums if value < x_max]
def divisors(int_x):
    """Return all positive divisors of int_x in ascending order.

    Returns [] for int_x <= 0 (the candidate range is then empty).
    """
    result = []
    for candidate in range(1, int_x + 1):
        if int_x % candidate == 0:
            result.append(candidate)
    return result
def compare_lists(list_1, list_2):
    """Return the elements common to both lists, without duplicates.

    The exercise asks for the intersection *without duplicates*; the
    original returned one copy per occurrence in list_1. Result order
    follows the first appearance in list_1, and the two lists may have
    different sizes. Membership tests use ``in`` so unhashable elements
    (e.g. lists) still work.
    """
    common = []
    for element in list_1:
        # skip anything not shared, and anything already collected
        if element in list_2 and element not in common:
            common.append(element)
    return common
def is_palindrome(str_x):
    """Return True if str_x reads the same forwards and backwards.

    Comparison is case-sensitive, matching the original slice-based
    check (str_x == str_x[::-1]).
    """
    length = len(str_x)
    for i in range(length // 2):
        if str_x[i] != str_x[length - 1 - i]:
            return False
    return True
\").upper()\n", + " if p1 == p2:\n", + " print(\"It's a draw!\")\n", + " return 0\n", + " else:\n", + " p1_wins = {(\"R\", \"S\"), (\"P\", \"R\"), (\"S\", \"P\")}\n", + " if (p1, p2) in p1_wins:\n", + " print(\"Player 1 wins!!!\")\n", + " return 1\n", + " else:\n", + " print(\"Player 2 wins!!!\")\n", + " return 2\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "ooR2ldZBdBHf", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 221 + }, + "outputId": "e051d334-0b0e-4b62-d607-7bf0fee9f972" + }, + "source": [ + "# Test your solution here\n", + "we_need_to_keep_going = 0\n", + "while(we_need_to_keep_going == 0):\n", + " we_need_to_keep_going = rock_paper_scissors()" + ], + "execution_count": 145, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Player 1, R, P, or S? R\n", + "Player 2, R, P, or S? R\n", + "It's a draw!\n", + "Player 1, R, P, or S? P\n", + "Player 2, R, P, or S? P\n", + "It's a draw!\n", + "Player 1, R, P, or S? S\n", + "Player 2, R, P, or S? S\n", + "It's a draw!\n", + "Player 1, R, P, or S? R\n", + "Player 2, R, P, or S? P\n", + "Player 2 wins!!!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dJQzXNKzdBHh", + "colab_type": "text" + }, + "source": [ + "*Exercise 8:* Write a function that takes a integer `n` as input and \n", + "outputs a list of the first `n` Fibonnaci numbers.\n", + "\n", + "The Fibonnaci seqence is a sequence of numbers where the next number in the sequence is the sum of the previous two numbers in the sequence. 
def fib(n):
    """Return a list of the first n Fibonacci numbers: 0, 1, 1, 2, 3, ...

    Fixes the original's handling of negative n, which fell through the
    special cases and returned [0, 1]; any n <= 0 now returns [].
    """
    nums = []
    a, b = 0, 1
    while len(nums) < n:
        nums.append(a)
        a, b = b, a + b  # advance the pair (F(k), F(k+1))
    return nums
def reverse_sentence(sentence):
    """Return sentence with the order of its words reversed.

    Fixes the original's trailing-space artifact (it appended " " after
    every word, including the last) by joining with str.join. Like
    str.split, runs of whitespace collapse to a single separator.
    """
    return " ".join(reversed(sentence.split()))
"metadata": { + "id": "nQyhnLZ_dBHn", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "79995de8-f78f-42f4-dd52-46534cea69b1" + }, + "source": [ + "# Test your solution here\n", + "test_sentence = \"The quick brown fox jumped over the lazy dog\"\n", + "print(reverse_sentence(test_sentence))" + ], + "execution_count": 182, + "outputs": [ + { + "output_type": "stream", + "text": [ + "dog lazy the over jumped fox brown quick The \n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NFSmRaSydBHq", + "colab_type": "text" + }, + "source": [ + "*Exercise 10:* Write a guessing game program that will repeatedly guess a number that the users picks, with the user indicating higher or lower, until it correctly guesses the number." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Ie2E1JzCdBHr", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def guess():\n", + " input(\"Pick a number between 0 and 100 and I'll try to guess it.\\nPress \\\"enter\\\" when ready.\")\n", + " numbers = range(0, 101)\n", + " while True:\n", + " print(\"My guess is... \" + str(numbers[len(numbers)//2]) + \"!\")\n", + " if len(numbers) == 1:\n", + " print(\"Now I HAVE to be right! There's nothing else to guess!\")\n", + " hol = input(\"Was I right? Or did I guess too high or low (r/h/l)? \")\n", + " if hol == \"r\":\n", + " print(\"Woohoo! 
The number you picked was \" + str(numbers[len(numbers)//2]))\n", + " return\n", + " elif hol == \"h\" and len(numbers) > 1:\n", + " numbers = numbers[:len(numbers)//2]\n", + " elif hol == \"l\" and len(numbers) > 1:\n", + " numbers = numbers[len(numbers)//2 + 1:]\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "6T8YdWSMdBHs", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 323 + }, + "outputId": "ab7dcf1e-e752-4b6b-bd1f-37e930c0a890" + }, + "source": [ + "# Test your solution here\n", + "guess()" + ], + "execution_count": 193, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Pick a number between 0 and 100 and I'll try to guess it.\n", + "Press \"enter\" when ready.\n", + "My guess is... 50!\n", + "Was I right? Or did I guess too high or low (r/h/l)? h\n", + "My guess is... 25!\n", + "Was I right? Or did I guess too high or low (r/h/l)? h\n", + "My guess is... 12!\n", + "Was I right? Or did I guess too high or low (r/h/l)? l\n", + "My guess is... 19!\n", + "Was I right? Or did I guess too high or low (r/h/l)? h\n", + "My guess is... 16!\n", + "Was I right? Or did I guess too high or low (r/h/l)? h\n", + "My guess is... 14!\n", + "Was I right? Or did I guess too high or low (r/h/l)? h\n", + "My guess is... 13!\n", + "Now I HAVE to be right! There's nothing else to guess!\n", + "Was I right? Or did I guess too high or low (r/h/l)? r\n", + "Woohoo! 
The number you picked was 13\n" + ], + "name": "stdout" + } + ] + } + ] +} \ No newline at end of file From 61cefc919ff683760c5fdaf4e2b8e56278b500f3 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 21 Feb 2020 01:31:07 -0600 Subject: [PATCH 03/24] Lab 3 completed --- Labs/Lab-3/Copy_of_Lab_3.ipynb | 1222 ++++++++++++++++++++++++++++++++ 1 file changed, 1222 insertions(+) create mode 100644 Labs/Lab-3/Copy_of_Lab_3.ipynb diff --git a/Labs/Lab-3/Copy_of_Lab_3.ipynb b/Labs/Lab-3/Copy_of_Lab_3.ipynb new file mode 100644 index 0000000..5a5be94 --- /dev/null +++ b/Labs/Lab-3/Copy_of_Lab_3.ipynb @@ -0,0 +1,1222 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "colab": { + "name": "Copy of Lab-3.ipynb", + "provenance": [] + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "YuaEkVael6wx", + "colab_type": "text" + }, + "source": [ + "# Lab 3- Tic Tac Toe\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github//afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-3/Lab-3.ipynb)\n", + "\n", + "In this lab your will build a n x n Tic Tac Toe game. As you do the exercises, make sure your solutions work for any size Tic Tac Toe game. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7rZcHrSEl6w2", + "colab_type": "text" + }, + "source": [ + "*Exercise 1:* Write a function that creates an n by n matrix (of list of lists) which will represent the state of a Tie Tac Toe game. 
# Cell-state constants shared by the rest of the lab.
empty = 0
player_1 = 1
player_2 = 2
# Maps a cell state to the mark drawn on the board.
players = {empty: " ",
           player_1: "X",
           player_2: "O"}


def make_game_board(n=3):
    """Return an n x n matrix (list of lists) of empty cells.

    Each row is a freshly built list, so writing one cell never
    aliases into another row.
    """
    board = []
    for _ in range(n):
        board.append([empty for _ in range(n)])
    return board
# Example boards from the exercise prompt, used by the test cell below.
winner_is_2 = [[2, 2, 0],
               [2, 1, 0],
               [2, 1, 1]]

winner_is_1 = [[1, 2, 0],
               [2, 1, 0],
               [2, 1, 1]]

winner_is_also_1 = [[0, 1, 0],
                    [2, 1, 0],
                    [2, 1, 1]]

no_winner = [[1, 2, 0],
             [2, 1, 0],
             [2, 1, 2]]

also_no_winner = [[1, 2, 0],
                  [2, 1, 0],
                  [2, 1, 0]]


def check_game_finished(board):
    """Classify an n x n tic-tac-toe board.

    Returns:
        -1 -- game incomplete (empty cells remain, no winner yet)
         0 -- draw (board full, no winner)
         1 -- player 1 has a full row, column, or diagonal
         2 -- player 2 has a full row, column, or diagonal

    Fixes the original, whose empty-cell check tested ``0 in`` a list
    of *lists* (always False) and which swapped the -1/0 codes, so a
    finished draw was reported as -1 ("incomplete") and the game loop
    never terminated on a draw.
    """
    n = len(board)
    # Every line that could hold a win: rows, columns, both diagonals.
    lines = [list(row) for row in board]
    lines += [list(col) for col in zip(*board)]
    lines.append([board[i][i] for i in range(n)])
    lines.append([board[n - 1 - i][i] for i in range(n)])
    if [1] * n in lines:
        return 1
    if [2] * n in lines:
        return 2
    if any(0 in row for row in board):
        return -1  # still has empty cells: incomplete
    return 0  # full board, no winning line: draw
"print(check_game_finished(also_no_winner))" + ], + "execution_count": 66, + "outputs": [ + { + "output_type": "stream", + "text": [ + "1\n", + "2\n", + "1\n", + "-1\n", + "-1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "KJ67s2CZl6xb", + "colab_type": "text" + }, + "source": [ + "*Exercise 3:* Write a function that takes 2 integers `n` and `m` as input and draws a `n` by `m` game board. For example the following is a 3x3 board:\n", + "```\n", + " --- --- --- \n", + " | | | | \n", + " --- --- --- \n", + " | | | | \n", + " --- --- --- \n", + " | | | | \n", + " --- --- --- \n", + " ```" + ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "u3NzwB4Jl6xd", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def draw_game_board(n, m):\n", + " for row in range(n):\n", + " line_1 = \"\"\n", + " line_2 = \"\"\n", + " for col in range(m):\n", + " line_1 += \" ---\"\n", + " line_2 += \"| \"\n", + " line_2 += \"|\"\n", + " print(line_1)\n", + " print(line_2)\n", + " bottom = \"\"\n", + " for col in range(m):\n", + " bottom += \" ---\"\n", + " print(bottom)\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "wrLb0jtcl6xi", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 374 + }, + "outputId": "b276b3f8-be68-43d8-8226-42b080ed6071" + }, + "source": [ + "# Test your solution here\n", + "draw_game_board(3, 3)\n", + "draw_game_board(2, 4)\n", + "draw_game_board(4, 5)" + ], + "execution_count": 12, + "outputs": [ + { + "output_type": "stream", + "text": [ + " --- --- ---\n", + "| | | |\n", + " --- --- ---\n", + "| | | |\n", + " --- --- ---\n", + "| | | |\n", + " --- --- ---\n", + " --- --- --- ---\n", + "| | | | |\n", + " --- --- --- ---\n", + "| | | | |\n", + " --- --- 
def draw_game_board(board):
    """Print board as an ASCII grid with X/O marks.

    Cell states are rendered through the module-level ``players`` map
    (0 -> " ", 1 -> "X", 2 -> "O"). The bottom edge is sized from
    len(board), as in the original (identical for square boards).
    """
    for row in board:
        print(" ---" * len(row))
        print("".join("| " + players[cell] + " " for cell in row) + "|")
    print(" ---" * len(board))
--- ---\n", + "| O | X | X |\n", + " --- --- ---\n", + " --- --- ---\n", + "| | X | |\n", + " --- --- ---\n", + "| O | X | |\n", + " --- --- ---\n", + "| O | X | X |\n", + " --- --- ---\n", + " --- --- ---\n", + "| X | O | X |\n", + " --- --- ---\n", + "| O | X | |\n", + " --- --- ---\n", + "| O | X | O |\n", + " --- --- ---\n", + " --- --- ---\n", + "| X | O | |\n", + " --- --- ---\n", + "| O | X | |\n", + " --- --- ---\n", + "| O | X | X |\n", + " --- --- ---\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "ehxOT9Ehl6x4", + "colab_type": "text" + }, + "source": [ + "*Exercise 5:* Write a function that takes a game board, player number, and `(x,y)` coordinates and places \"X\" or \"O\" in the correct location of the game board. Make sure that you only allow filling previously empty locations. Return `True` or `False` to indicate successful placement of \"X\" or \"O\"." + ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "MfTefPdll6x5", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def _move(board, player, coordinates):\n", + " x, y = coordinates\n", + " if board[x][y] == 0:\n", + " board[x][y] = player\n", + " return True\n", + " else:\n", + " return False" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "wk7OoaNcl6yA", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 136 + }, + "outputId": "4ab40ac0-81ac-430d-cdd3-7be1834c4a69" + }, + "source": [ + "# Test your solution here\n", + "test_board = make_game_board()\n", + "_move(test_board, 1, (1, 1))\n", + "_move(test_board, 2, (2, 0))\n", + "draw_game_board(test_board)" + ], + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "text": [ + " --- --- ---\n", + "| | | |\n", + " --- --- 
---\n", + "| | X | |\n", + " --- --- ---\n", + "| O | | |\n", + " --- --- ---\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "OXmrF3VBl6yG", + "colab_type": "text" + }, + "source": [ + "*Exercise 6:* Modify Exercise 4 to show column and row labels so that players can specify location using \"A2\" or \"C1\"." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5luh37Ovl6yI", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "alphabet = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n", + "def draw_game_board(board):\n", + " top = \" \"\n", + " for col in range(1, len(board) + 1):\n", + " top += \" \" + str(col) + \" \"\n", + " print(top)\n", + " r = 0\n", + " for row in board:\n", + " line_1 = \" \"\n", + " line_2 = alphabet[r] + \" \"\n", + " r += 1\n", + " for col in row:\n", + " line_1 += \" ---\"\n", + " line_2 += \"| \" + players[col] + \" \"\n", + " line_2 += \"|\"\n", + " print(line_1)\n", + " print(line_2)\n", + " print(\" \" + \" ---\" * len(board))" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "Zo6zTO_ll6yO", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 697 + }, + "outputId": "c9d8347d-3da4-4d53-ff31-4a44c0eed36e" + }, + "source": [ + "# Test your solution here\n", + "draw_game_board(winner_is_1)\n", + "draw_game_board(winner_is_2)\n", + "draw_game_board(winner_is_also_1)\n", + "draw_game_board(no_winner)\n", + "draw_game_board(also_no_winner)" + ], + "execution_count": 34, + "outputs": [ + { + "output_type": "stream", + "text": [ + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | X |\n", + " --- --- ---\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | O | O | |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- 
def move(board, player, location):
    """Apply a move given a label like "A2" (row letter, 1-based column).

    Translates the label into (row, col) via the module-level
    ``alphabet`` string and delegates to _move. Returns True on
    success; prints a message and returns False if the cell is taken.
    """
    row = alphabet.find(location[0])
    col = int(location[1:]) - 1
    if board[row][col] != 0:
        print("Cannot put " + players[player] + " at location " + location)
        return False
    _move(board, player, (row, col))
    return True
"stream", + "text": [ + "Cannot put X at location A3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | X |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | O |\n", + " --- --- ---\n", + "Cannot put X at location C3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | X |\n", + " --- --- ---\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "spCblEgSl6yf", + "colab_type": "text" + }, + "source": [ + "*Exercise 8:* Write a function is called with a board and player number, takes input from the player using python's `input`, and modifies the board using your function from exercise 7. Note that you should keep asking for input until you have gotten a valid input that results in a valid move." + ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "R2_KLmnhl6yi", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def player_move(board, player):\n", + " location = \"A1\"\n", + " while True:\n", + " draw_game_board(board)\n", + " location = input(\"Place \" + players[player] + \" at: \").upper()\n", + " if location[0] in alphabet and 0 < int(location[1:]) <= len(board) <= 26:\n", + " break\n", + " else:\n", + " print(\"Invalid location. 
Try again.\")\n", + " if move(board, player, location):\n", + " return True\n", + " else:\n", + " return player_move(board, player)\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "5klPGJNVl6yp", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 595 + }, + "outputId": "bf4e8e13-ffe3-49f2-d6a4-3e537e607da1" + }, + "source": [ + "# Test your solution here\n", + "player_move(no_winner, 1)\n", + "draw_game_board(no_winner)\n", + "player_move(also_no_winner, 2)\n", + "draw_game_board(also_no_winner)" + ], + "execution_count": 47, + "outputs": [ + { + "output_type": "stream", + "text": [ + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | O |\n", + " --- --- ---\n", + "Place X at: A3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | X |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | O |\n", + " --- --- ---\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | |\n", + " --- --- ---\n", + "Place O at: A3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | X | O | O |\n", + " --- --- ---\n", + "B | O | X | |\n", + " --- --- ---\n", + "C | O | X | |\n", + " --- --- ---\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "RpFgUbL-l6yv", + "colab_type": "text" + }, + "source": [ + "*Exercise 9:* Use all of the previous exercises to implement a full tic-tac-toe game, where an appropriate board is drawn, 2 players are repeatedly asked for a location coordinates of where they wish to place a mark, and the game status is checked until a player wins or a draw occurs." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "cq9kSyn3l6yw", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here\n", + "def tic_tac_toe(n=3):\n", + " board = make_game_board(n)\n", + " current_player = True\n", + " while check_game_finished(board) == -1:\n", + " if current_player:\n", + " player_move(board, 1)\n", + " else:\n", + " player_move(board, 2)\n", + " current_player = not current_player\n", + " result = check_game_finished(board)\n", + " draw_game_board(board)\n", + " print(\"It's a draw!\" if result == 0 else (\"Player 1 wins!\" if result == 1 else \"Player 2 wins!\"))" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "aBSvAvKWl6yz", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 969 + }, + "outputId": "3094cb92-ced1-41a7-8477-08f6015ffeb8" + }, + "source": [ + "# Test your solution here\n", + "tic_tac_toe()" + ], + "execution_count": 55, + "outputs": [ + { + "output_type": "stream", + "text": [ + " 1 2 3 \n", + " --- --- ---\n", + "A | | | |\n", + " --- --- ---\n", + "B | | | |\n", + " --- --- ---\n", + "C | | | |\n", + " --- --- ---\n", + "Place X at: B2\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | | | |\n", + " --- --- ---\n", + "B | | X | |\n", + " --- --- ---\n", + "C | | | |\n", + " --- --- ---\n", + "Place O at: A3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | | | O |\n", + " --- --- ---\n", + "B | | X | |\n", + " --- --- ---\n", + "C | | | |\n", + " --- --- ---\n", + "Place X at: A3\n", + "Cannot put X at location A3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | | | O |\n", + " --- --- ---\n", + "B | | X | |\n", + " --- --- ---\n", + "C | | | |\n", + " --- --- ---\n", + "Place X at: C2\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | | | O |\n", + " --- --- ---\n", + "B | | X | |\n", + " --- --- ---\n", + "C | | X | |\n", + " 
--- --- ---\n", + "Place O at: B3\n", + " 1 2 3 \n", + " --- --- ---\n", + "A | | | O |\n", + " --- --- ---\n", + "B | | X | O |\n", + " --- --- ---\n", + "C | | X | |\n", + " --- --- ---\n", + "Place X at: a2\n", + "Player 1 wins!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "_ecYRctEl6y3", + "colab_type": "text" + }, + "source": [ + "*Exercise 10:* Test that your game works for 5x5 Tic Tac Toe. " + ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "SOV5nKS4l6y5", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "fca77933-3262-4cc3-fb85-7faa251f467f" + }, + "source": [ + "# Test your solution here\n", + "tic_tac_toe(5)" + ], + "execution_count": 70, + "outputs": [ + { + "output_type": "stream", + "text": [ + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | | | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | | | | | |\n", + " --- --- --- --- ---\n", + "Place X at: a1\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | | | | | |\n", + " --- --- --- --- ---\n", + "Place O at: e1\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | O | | | | |\n", + " --- --- --- --- ---\n", + "Place X at: 2\n", + "Invalid location. 
Try again.\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | O | | | | |\n", + " --- --- --- --- ---\n", + "Place X at: e2\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place O at: a2\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place X at: b2\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place O at: d2\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | | |\n", + " --- --- --- --- ---\n", + "C | | | | | |\n", + " --- --- --- --- ---\n", + "D | | O | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place X at: c4\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | | |\n", + " --- --- --- --- ---\n", + "C | | | | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place O at: c3\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", 
+ "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | | |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place X at: d4\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | | |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place O at: b4\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | |\n", + " --- --- --- --- ---\n", + "Place X at: e5\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Place O at: b5\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | O |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Place X at: b5\n", + "Cannot put X at location B5\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | O |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Place X at: b6\n", + 
"Invalid location. Try again.\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | O |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Place X at: d5\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | |\n", + " --- --- --- --- ---\n", + "B | | X | | O | O |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | X |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Place O at: a5\n", + " 1 2 3 4 5 \n", + " --- --- --- --- ---\n", + "A | X | O | | | O |\n", + " --- --- --- --- ---\n", + "B | | X | | O | O |\n", + " --- --- --- --- ---\n", + "C | | | O | X | |\n", + " --- --- --- --- ---\n", + "D | | O | | X | X |\n", + " --- --- --- --- ---\n", + "E | O | X | | | X |\n", + " --- --- --- --- ---\n", + "Player 2 wins!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "deletable": true, + "editable": true, + "id": "6W-bTUC9l6y8", + "colab_type": "text" + }, + "source": [ + "*Exercise 11: (Extra Credit)* Develop a version of the game where one player is the computer. Note that you don't need to do an extensive seach for the best move. You can have the computer simply protect against loosing and otherwise try to win with straight or diagonal patterns." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "TRfUlsytl6y-", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Write you solution here" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "deletable": true, + "editable": true, + "id": "EdxBYXsLl6zD", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Test your solution here" + ], + "execution_count": 0, + "outputs": [] + } + ] +} \ No newline at end of file From ff144d1792027b8c5c6f9b9154967ef28097e919 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Sat, 22 Feb 2020 01:43:15 -0600 Subject: [PATCH 04/24] added tic tac toe .py file --- Labs/Lab-3/TicTacToe.py | 106 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 Labs/Lab-3/TicTacToe.py diff --git a/Labs/Lab-3/TicTacToe.py b/Labs/Lab-3/TicTacToe.py new file mode 100644 index 0000000..bfb2fde --- /dev/null +++ b/Labs/Lab-3/TicTacToe.py @@ -0,0 +1,106 @@ +# Write you solution here +empty = 0 +player_1 = 1 +player_2 = 2 +players = {0: " ", + 1: "X", + 2: "O"} + + +def make_game_board(n=3): + return [[empty] * n for i in range(n)] + + +# return 1 if p1 wins +# return 2 if p2 wins +# return 0 if game not finished +# return -1 if draw +def check_game_finished(board): + board_wins = [row for row in board] + board_wins += [list(row) for row in zip(*board)] + board_wins += [[board[i][i] for i in range(len(board))]] + board_wins += [[board[len(board) - 1 - i][i] for i in range(len(board))]] + if [1] * len(board) in board_wins: + return 1 + elif [2] * len(board) in board_wins: + return 2 + elif True in [board[i][j] == 0 for i in range(len(board)) for j in range(len(board))]: + return 0 + else: + return -1 + + +alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + + +def draw_game_board(board): + top = " " + for col in range(1, len(board) + 1): + top += " " + str(col) + " " + print(top) + r = 0 + for row in board: + line_1 
= " " + line_2 = alphabet[r] + " " + r += 1 + for col in row: + line_1 += " ---" + line_2 += "| " + players[col] + " " + line_2 += "|" + print(line_1) + print(line_2) + print(" " + " ---" * len(board)) + + +def _move(board, player, coordinates): + x, y = coordinates + if board[x][y] == 0: + board[x][y] = player + return True + else: + return False + + +def move(board, player, location): + row = alphabet.find(location[0]) + col = int(location[1:]) - 1 + if board[row][col] == 0: + _move(board, player, (row, col)) + return True + else: + print("Cannot put " + players[player] + " at location " + location) + return False + + +def player_move(board, player): + location = "A1" + while True: + draw_game_board(board) + location = input("Place " + players[player] + " at: ").upper() + if location[0] in alphabet and alphabet.find(location[0]) < len(board) and location[1:].isnumeric() and 0 < int( + location[1:]) <= len(board) <= 26: + break + else: + print("Invalid location. Try again.") + if move(board, player, location): + return True + else: + return player_move(board, player) + + +def tic_tac_toe(): + while True: + board = make_game_board() + current_player = True + while check_game_finished(board) == 0: + if current_player: + player_move(board, 1) + else: + player_move(board, 2) + current_player = not current_player + result = check_game_finished(board) + draw_game_board(board) + print("It's a draw!" if result == -1 else ("Player 1 wins!" 
if result == 1 else "Player 2 wins!")) + + +tic_tac_toe() From 0de7f73f9501e7ed389d6f70a843f824cffccbf4 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 24 Feb 2020 14:53:53 -0600 Subject: [PATCH 05/24] updated lab 3 solution --- Labs/Lab-3/Copy_of_Lab_3.ipynb | 233 ++++++++++----------------------- 1 file changed, 69 insertions(+), 164 deletions(-) diff --git a/Labs/Lab-3/Copy_of_Lab_3.ipynb b/Labs/Lab-3/Copy_of_Lab_3.ipynb index 5a5be94..19790b3 100644 --- a/Labs/Lab-3/Copy_of_Lab_3.ipynb +++ b/Labs/Lab-3/Copy_of_Lab_3.ipynb @@ -151,13 +151,13 @@ " board_wins += [[board[i][i] for i in range(len(board))]]\n", " board_wins += [[board[len(board) - 1 - i][i] for i in range(len(board))]]\n", " if [1] * len(board) in board_wins:\n", - " return 1\n", + " return 1\n", " elif [2] * len(board) in board_wins:\n", - " return 2\n", - " elif 0 in [[row[col] for col in row] for row in board]:\n", - " return 0\n", + " return 2\n", + " elif True in [board[i][j] == 0 for i in range(len(board)) for j in range(len(board))]:\n", + " return -1\n", " else:\n", - " return -1\n" + " return 0\n" ], "execution_count": 0, "outputs": [] @@ -173,7 +173,7 @@ "base_uri": "https://localhost:8080/", "height": 102 }, - "outputId": "d97b7a37-3774-4628-970a-084f80c9c8b2" + "outputId": "fafafe7b-b6ef-413b-d8b5-5be4cef7ee40" }, "source": [ "# Test your solution here\n", @@ -183,7 +183,7 @@ "print(check_game_finished(no_winner))\n", "print(check_game_finished(also_no_winner))" ], - "execution_count": 66, + "execution_count": 20, "outputs": [ { "output_type": "stream", @@ -702,7 +702,7 @@ " while True:\n", " draw_game_board(board)\n", " location = input(\"Place \" + players[player] + \" at: \").upper()\n", - " if location[0] in alphabet and 0 < int(location[1:]) <= len(board) <= 26:\n", + " if location[0] in alphabet and alphabet.find(location[0]) < len(board) and location[1:].isnumeric() and 0 < int(location[1:]) <= len(board) <= 26:\n", " break\n", " else:\n", " print(\"Invalid location. 
Try again.\")\n", @@ -725,7 +725,7 @@ "base_uri": "https://localhost:8080/", "height": 595 }, - "outputId": "bf4e8e13-ffe3-49f2-d6a4-3e537e607da1" + "outputId": "2a010f05-694e-4a16-a5a8-1cb31f3c9036" }, "source": [ "# Test your solution here\n", @@ -734,7 +734,7 @@ "player_move(also_no_winner, 2)\n", "draw_game_board(also_no_winner)" ], - "execution_count": 47, + "execution_count": 13, "outputs": [ { "output_type": "stream", @@ -747,7 +747,7 @@ " --- --- ---\n", "C | O | X | O |\n", " --- --- ---\n", - "Place X at: A3\n", + "Place X at: a3\n", " 1 2 3 \n", " --- --- ---\n", "A | X | O | X |\n", @@ -764,7 +764,7 @@ " --- --- ---\n", "C | O | X | |\n", " --- --- ---\n", - "Place O at: A3\n", + "Place O at: a3\n", " 1 2 3 \n", " --- --- ---\n", "A | X | O | O |\n", @@ -826,15 +826,15 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 969 + "height": 935 }, - "outputId": "3094cb92-ced1-41a7-8477-08f6015ffeb8" + "outputId": "91f6a136-3a3c-4a5a-9738-e81dd6cce49d" }, "source": [ "# Test your solution here\n", "tic_tac_toe()" ], - "execution_count": 55, + "execution_count": 22, "outputs": [ { "output_type": "stream", @@ -847,7 +847,7 @@ " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place X at: B2\n", + "Place X at: b2\n", " 1 2 3 \n", " --- --- ---\n", "A | | | |\n", @@ -856,44 +856,42 @@ " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place O at: A3\n", + "Place O at: a1\n", " 1 2 3 \n", " --- --- ---\n", - "A | | | O |\n", + "A | O | | |\n", " --- --- ---\n", "B | | X | |\n", " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place X at: A3\n", - "Cannot put X at location A3\n", + "Place X at: a2\n", " 1 2 3 \n", " --- --- ---\n", - "A | | | O |\n", + "A | O | X | |\n", " --- --- ---\n", "B | | X | |\n", " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place X at: C2\n", + "Place O at: b1\n", " 1 2 3 \n", " --- --- ---\n", - "A | | | O |\n", + "A | O | X | |\n", " --- --- ---\n", - "B | | X | |\n", + "B | O | X | 
|\n", " --- --- ---\n", - "C | | X | |\n", + "C | | | |\n", " --- --- ---\n", - "Place O at: B3\n", + "Place X at: c2\n", " 1 2 3 \n", " --- --- ---\n", - "A | | | O |\n", + "A | O | X | |\n", " --- --- ---\n", - "B | | X | O |\n", + "B | O | X | |\n", " --- --- ---\n", "C | | X | |\n", " --- --- ---\n", - "Place X at: a2\n", "Player 1 wins!\n" ], "name": "stdout" @@ -923,13 +921,13 @@ "base_uri": "https://localhost:8080/", "height": 1000 }, - "outputId": "fca77933-3262-4cc3-fb85-7faa251f467f" + "outputId": "d591aecd-6091-478f-e273-b02f4df0db3f" }, "source": [ "# Test your solution here\n", "tic_tac_toe(5)" ], - "execution_count": 70, + "execution_count": 23, "outputs": [ { "output_type": "stream", @@ -959,46 +957,6 @@ " --- --- --- --- ---\n", "E | | | | | |\n", " --- --- --- --- ---\n", - "Place O at: e1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | O | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: 2\n", - "Invalid location. 
Try again.\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | O | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: e2\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | O | X | | | |\n", - " --- --- --- --- ---\n", "Place O at: a2\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", @@ -1010,167 +968,114 @@ " --- --- --- --- ---\n", "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place X at: b2\n", + "Place X at: b1\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | | |\n", + "B | X | | | | |\n", " --- --- --- --- ---\n", "C | | | | | |\n", " --- --- --- --- ---\n", "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place O at: d2\n", + "Place O at: b2\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | | |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", "C | | | | | |\n", " --- --- --- --- ---\n", - "D | | O | | | |\n", - " --- --- --- --- ---\n", - "E | O | X | | | |\n", - " --- --- --- --- ---\n", - "Place X at: c4\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | | X | | | |\n", - " --- --- --- --- ---\n", - "C | | | | X | |\n", - " --- --- --- --- ---\n", - "D | | O | | | |\n", - " --- --- --- --- ---\n", - "E | O | X | | | |\n", - " --- --- --- --- ---\n", - "Place O at: c3\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | 
O | | | |\n", - " --- --- --- --- ---\n", - "B | | X | | | |\n", - " --- --- --- --- ---\n", - "C | | | O | X | |\n", - " --- --- --- --- ---\n", - "D | | O | | | |\n", - " --- --- --- --- ---\n", - "E | O | X | | | |\n", - " --- --- --- --- ---\n", - "Place X at: d4\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | | X | | | |\n", - " --- --- --- --- ---\n", - "C | | | O | X | |\n", - " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place O at: b4\n", + "Place X at: c1\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place X at: e5\n", + "Place O at: 2\n", + "Invalid location. 
Try again.\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | X |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place O at: b5\n", + "Place O at: c2\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | O |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | O | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | X |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place X at: b5\n", - "Cannot put X at location B5\n", + "Place X at: d1\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | O |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | O | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | X | | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | X |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place X at: b6\n", - "Invalid location. 
Try again.\n", + "Place O at: d2\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | O |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | O | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | |\n", + "D | X | O | | | |\n", " --- --- --- --- ---\n", - "E | O | X | | | X |\n", + "E | | | | | |\n", " --- --- --- --- ---\n", - "Place X at: d5\n", + "Place X at: e1\n", " 1 2 3 4 5 \n", " --- --- --- --- ---\n", "A | X | O | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | O |\n", + "B | X | O | | | |\n", " --- --- --- --- ---\n", - "C | | | O | X | |\n", + "C | X | O | | | |\n", " --- --- --- --- ---\n", - "D | | O | | X | X |\n", - " --- --- --- --- ---\n", - "E | O | X | | | X |\n", - " --- --- --- --- ---\n", - "Place O at: a5\n", - " 1 2 3 4 5 \n", + "D | X | O | | | |\n", " --- --- --- --- ---\n", - "A | X | O | | | O |\n", + "E | X | | | | |\n", " --- --- --- --- ---\n", - "B | | X | | O | O |\n", - " --- --- --- --- ---\n", - "C | | | O | X | |\n", - " --- --- --- --- ---\n", - "D | | O | | X | X |\n", - " --- --- --- --- ---\n", - "E | O | X | | | X |\n", - " --- --- --- --- ---\n", - "Player 2 wins!\n" + "Player 1 wins!\n" ], "name": "stdout" } From 1d63da02cc30714c3c4fcb84251ce4d26f163371 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 28 Feb 2020 03:55:01 -0600 Subject: [PATCH 06/24] adding copy of lab-4 solution --- Labs/Lab-4/Copy_of_Lab_4.ipynb | 1058 ++++++++++++++++++++++++++++++++ 1 file changed, 1058 insertions(+) create mode 100644 Labs/Lab-4/Copy_of_Lab_4.ipynb diff --git a/Labs/Lab-4/Copy_of_Lab_4.ipynb b/Labs/Lab-4/Copy_of_Lab_4.ipynb new file mode 100644 index 0000000..e3d92de --- /dev/null +++ b/Labs/Lab-4/Copy_of_Lab_4.ipynb @@ -0,0 +1,1058 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + 
"codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.1" + }, + "colab": { + "name": "Copy of Lab-4.ipynb", + "provenance": [] + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "NoBj18wB_1fa", + "colab_type": "text" + }, + "source": [ + "## Lab 4\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github//afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-4/Lab-4.ipynb)\n", + "\n", + "In this lab we will become familiar with distributions, histograms, and functional programming. \n", + "\n", + "\n", + "### Uniform Distribution\n", + "Lets start with generating some fake random data. You can get a random number between 0 and 1 using the python random module as follow:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "TmrUVAv1_1ff", + "colab_type": "code", + "outputId": "588189e3-fb26-49e2-9163-1aa3ccf64718", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "import random\n", + "x=random.random()\n", + "print(\"The Value of x is\", x)" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "The Value of x is 0.04751343885168802\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NtxsuHTs_1fw", + "colab_type": "text" + }, + "source": [ + "Everytime you call random, you will get a new number.\n", + "\n", + "*Exercise 1:* Using random, write a function `generate_uniform(N, mymin, mymax)`, that returns a python list containing N random numbers between specified minimum and maximum value. Note that you may want to quickly work out on paper how to turn numbers between 0 and 1 to between other values. 
" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Z9GzQB02_1fy", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Skeleton\n", + "def generate_uniform(N,x_min,x_max):\n", + " out = []\n", + " ### BEGIN SOLUTION\n", + "\n", + " while len(out) < N:\n", + " out.append(random.uniform(x_min, x_max)) \n", + " \n", + " ### END SOLUTION\n", + " return out" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "eiWTH4-H_1f6", + "colab_type": "code", + "outputId": "1b0f6d0c-dfcc-4791-86d1-df408908062e", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + } + }, + "source": [ + "# Test your solution here\n", + "data=generate_uniform(1000,-10,10)\n", + "print(\"Data Type:\", type(data))\n", + "print(\"Data Length:\", len(data))\n", + "if len(data)>0: \n", + " print(\"Type of Data Contents:\", type(data[0]))\n", + " print(\"Data Minimum:\", min(data))\n", + " print(\"Data Maximum:\", max(data))" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Data Type: \n", + "Data Length: 1000\n", + "Type of Data Contents: \n", + "Data Minimum: -9.996467198995925\n", + "Data Maximum: 9.96441797265739\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VOUqBefH_1gA", + "colab_type": "text" + }, + "source": [ + "*Exercise 2a:* \n", + "Write a function that computes the mean of values in a list." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "6sDXXVHB_1gE", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Skeleton\n", + "def mean(Data):\n", + " m=0.\n", + " \n", + " ### BEGIN SOLUTION\n", + " \n", + " m = sum(Data) / len(Data)\n", + " \n", + " ### END SOLUTION\n", + " \n", + " return m" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "0Z8u7_Hq_1gK", + "colab_type": "code", + "outputId": "990a6dae-5b2c-4594-9e61-9f4e10cae468", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "# Test your solution here\n", + "print(\"Mean of Data:\", mean(data))" + ], + "execution_count": 161, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Mean of Data: -0.17178090426615691\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "b1CWdjfM_1gR", + "colab_type": "text" + }, + "source": [ + "*Exercise 2b:* \n", + "Write a function that computes the variance of values in a list." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "gfiiJxZl_1gT", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Skeleton\n", + "def variance(Data):\n", + " m=0.\n", + " \n", + " ### BEGIN SOLUTION\n", + "\n", + " m = mean(Data)\n", + " variance = sum((x - m) ** 2 for x in Data) / len(Data)\n", + " \n", + " ### END SOLUTION\n", + " \n", + " return variance" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "IbasE7ma_1gZ", + "colab_type": "code", + "outputId": "5eaada1f-e6ba-4ec2-b683-a561978bd8d9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "# Test your solution here\n", + "print(\"Variance of Data:\", variance(data))" + ], + "execution_count": 156, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Variance of Data: 31.828907048258063\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7qtsjJNA_1ge", + "colab_type": "text" + }, + "source": [ + "## Histogramming" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FJsWb1o9_1gf", + "colab_type": "text" + }, + "source": [ + "*Exercise 3:* Write a function that bins the data so that you can create a histogram. 
An example of how to implement histogramming is the following logic:\n", + "\n", + "* User inputs a list of values `x` and optionally `n_bins` which defaults to 10.\n", + "* If not supplied, find the minimum and maximum (`x_min`,`x_max`) of the values in x.\n", + "* Determine the bin size (`bin_size`) by dividing the range of the function by the number of bins.\n", + "* Create an empty list of zeros of size `n_bins`, call it `hist`.\n", + "* Loop over the values in `x`\n", + " * Loop over the values in `hist` with index `i`:\n", + " * If x is between `x_min+i*bin_size` and `x_min+i*2*bin_size`, increment `hist[i].` \n", + " * For efficiency, try to use continue to goto the next bin and data point.\n", + "* Return `hist` and the list corresponding of the bin edges (i.e. of `x_min+i*bin_size`). " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Yp85CdIF_1gh", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Solution\n", + "def histogram(x,n_bins=10,x_min=None,x_max=None):\n", + " ### BEGIN SOLUTION\n", + "\n", + " if x_min == None:\n", + " x_min = min(x)\n", + " if x_max == None:\n", + " x_max = max(x)\n", + " \n", + " bin_size = (x_max - x_min) / n_bins\n", + "\n", + " hist = [0] * n_bins\n", + " bin_edges = [x_min]\n", + " for i in range(1, n_bins + 1):\n", + " bin_edges.append(x_min + i * bin_size)\n", + " for value in x:\n", + " for i in range(n_bins):\n", + " if (x_min + (i * bin_size)) <= value <= (x_min + ((i + 1) * bin_size)):\n", + " hist[i] += 1\n", + " break\n", + " \n", + " ### END SOLUTION\n", + "\n", + " return hist,bin_edges" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "CQg5EFMg_1gn", + "colab_type": "code", + "outputId": "4b8ebd11-18fc-41b7-f4d0-30e2c80bcc27", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 54 + } + }, + "source": [ + "# Test your solution here\n", + "h,b=histogram(data,100)\n", + "print(len(b),h)" + ], + "execution_count": 9, + 
"outputs": [ + { + "output_type": "stream", + "text": [ + "101 [13, 13, 10, 12, 7, 9, 6, 8, 3, 5, 6, 11, 11, 7, 14, 19, 10, 10, 8, 15, 12, 9, 8, 6, 15, 10, 14, 9, 9, 12, 9, 14, 6, 16, 13, 14, 10, 5, 9, 17, 12, 10, 10, 13, 8, 10, 7, 10, 7, 17, 4, 14, 8, 11, 15, 9, 7, 13, 13, 8, 10, 4, 6, 14, 13, 10, 10, 13, 7, 8, 11, 18, 5, 6, 13, 13, 2, 9, 11, 9, 8, 11, 10, 9, 12, 8, 11, 10, 10, 12, 6, 11, 11, 10, 9, 9, 7, 10, 6, 8]\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "miaZ7qfS_1gs", + "colab_type": "text" + }, + "source": [ + "*Exercise 4:* Write a function that uses the histogram function in the previous exercise to create a text-based \"graph\". For example the output could look like the following:\n", + "```\n", + "[ 0, 1] : ######\n", + "[ 1, 2] : #####\n", + "[ 2, 3] : ######\n", + "[ 3, 4] : ####\n", + "[ 4, 5] : ####\n", + "[ 5, 6] : ######\n", + "[ 6, 7] : #####\n", + "[ 7, 8] : ######\n", + "[ 8, 9] : ####\n", + "[ 9, 10] : #####\n", + "```\n", + "\n", + "Where each line corresponds to a bin and the number of `#`'s are proportional to the value of the data in the bin. 
def draw_histogram(x,n_bins,x_min=None,x_max=None,character="#",max_character_per_line=20):
    """Print a text-mode histogram of ``x`` and return the binned data.

    Each output line shows a bin's [low, high] edges followed by a bar
    of ``character`` whose length is proportional to the bin count,
    scaled so the fullest bin spans ``max_character_per_line`` marks.

    Parameters
    ----------
    x : list of float
        Data to histogram.
    n_bins : int
        Number of bins (forwarded to ``histogram``).
    x_min, x_max : float or None
        Optional range (forwarded to ``histogram``).
    character : str
        Symbol used to draw the bars (default "#").
    max_character_per_line : int
        Bar length of the fullest bin (default 20).

    Returns
    -------
    (hist, bin_edges)
        Exactly what ``histogram`` returned, for further use.
    """
    # NOTE: relies on the histogram() function defined earlier in this
    # notebook for the actual binning.
    hist, bin_edges = histogram(x, n_bins, x_min, x_max)

    # Guard: the original divided by max(hist) unconditionally, which
    # raises ZeroDivisionError when every bin is empty (e.g. an empty
    # data list with an explicit range).  Draw empty bars instead.
    max_hist = max(hist) if hist else 0

    for i in range(len(bin_edges) - 1):
        if max_hist > 0:
            bar = str(character) * int(hist[i] / max_hist * max_character_per_line)
        else:
            bar = ""
        # Same fixed-width edge formatting as the original output.
        print("[" + "{:7.3f}".format(bin_edges[i]) + "," + "{:7.3f}".format(bin_edges[i + 1]) + "] : " + bar)

    return hist, bin_edges
def where(mylist,myfunc):
    """Return the indices of every element for which ``myfunc`` is truthy.

    Parameters
    ----------
    mylist : list
        Values to scan.
    myfunc : callable
        Predicate applied to each element.

    Returns
    -------
    list of int
        Indices ``i`` (in ascending order) where ``myfunc(mylist[i])``
        evaluated truthy.
    """
    # Single pass with enumerate: collect the position of each element
    # that satisfies the predicate.
    out = [index for index, item in enumerate(mylist) if myfunc(item)]
    return out
307, 309, 314, 317, 325, 326, 329, 331, 333, 334, 337, 338, 340, 341, 342, 343, 344, 351, 353, 355, 356, 359, 361, 362, 363, 364, 366, 369, 373, 374, 375, 394, 396, 398, 401, 402, 403, 405, 406, 411, 412, 413, 414, 415, 417, 419, 425, 429, 430, 432, 433, 434, 437, 438, 439, 440, 444, 446, 448, 450, 453, 454, 458, 459, 460, 461, 462, 463, 464, 465, 468, 469, 474, 475, 478, 479, 480, 487, 490, 491, 492, 494, 495, 496, 499, 500, 501, 502, 504, 506, 508, 509, 513, 515, 516, 517, 518, 521, 526, 529, 530, 532, 534, 539, 544, 545, 546, 549, 550, 552, 553, 555, 556, 558, 560, 561, 562, 563, 564, 565, 569, 572, 573, 575, 581, 583, 585, 586, 587, 588, 589, 592, 597, 599, 603, 607, 608, 609, 611, 612, 613, 616, 618, 619, 621, 623, 624, 626, 627, 631, 632, 635, 636, 639, 641, 645, 646, 648, 650, 653, 654, 655, 658, 660, 671, 673, 676, 685, 688, 689, 690, 694, 696, 697, 700, 702, 703, 706, 708, 709, 711, 713, 716, 719, 720, 721, 723, 725, 726, 729, 735, 736, 738, 739, 740, 741, 743, 747, 748, 751, 753, 758, 763, 764, 765, 766, 770, 773, 780, 783, 785, 786, 788, 790, 792, 795, 799, 801, 806, 809, 810, 811, 813, 815, 816, 817, 821, 822, 824, 826, 828, 830, 831, 833, 840, 841, 842, 844, 847, 850, 851, 855, 856, 857, 859, 860, 861, 862, 866, 867, 870, 872, 874, 875, 877, 878, 880, 882, 884, 891, 892, 896, 898, 902, 903, 904, 907, 909, 912, 913, 915, 917, 921, 923, 924, 925, 928, 930, 931, 933, 936, 938, 939, 940, 943, 944, 946, 947, 951, 952, 953, 954, 958, 960, 961, 963, 966, 968, 973, 974, 976, 978, 979, 981, 983, 985, 987, 989, 991, 992, 996, 999]\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jt4S-KN0_1hZ", + "colab_type": "text" + }, + "source": [ + "*Exercise 6:* The inrange(mymin,mymax) function below returns a function that tests if it's input is between the specified values. 
def in_range(mymin,mymax):
    """Return a predicate testing mymin <= x < mymax (half-open interval).

    The source line was garbled to the syntactically invalid
    ``return x=mymin`` by markup stripping of ``<mymax and x>``.
    The recorded cell output pins the intended comparison:
    F1(0)=True, F1(10)=False for in_range(0,10) and F2(10)=True for
    in_range(10,20), i.e. the lower bound is inclusive and the upper
    bound is exclusive.
    """
    def testrange(x):
        # Half-open interval [mymin, mymax), restored from the output.
        return x < mymax and x >= mymin
    return testrange

# Examples:
F1=in_range(0,10)
F2=in_range(10,20)

# Test of in_range
print(F1(0), F1(1), F1(10), F1(15), F1(20))
print(F2(0), F2(1), F2(10), F2(15), F2(20))

print("Number of Entries passing F1:", len(where(data,F1)))
print("Number of Entries passing F2:", len(where(data,F2)))

### BEGIN SOLUTION

def even():
    """Return a predicate that tests whether x is even."""
    def is_even(x):
        return x % 2 == 0
    return is_even

def odd():
    """Return a predicate that tests whether x is odd."""
    def is_odd(x):
        return x % 2 == 1
    return is_odd

def greater_than(n):
    """Return a predicate that tests x > n (strict)."""
    def is_greater_than(x):
        return x > n
    return is_greater_than

def less_than(n):
    """Return a predicate that tests x < n (strict)."""
    def is_less_than(x):
        return x < n
    return is_less_than

def equal_to(n):
    """Return a predicate that tests x == n."""
    def is_equal_to(x):
        return x == n
    return is_equal_to

def divisible_by(n):
    """Return a predicate that tests whether n divides x exactly."""
    def is_divisible_by(x):
        return x % n == 0
    return is_divisible_by

### END SOLUTION
0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "3AhyJZjf_1hj", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + }, + "outputId": "b6717252-32eb-4c61-ee9d-ed959ff40fee" + }, + "source": [ + "# Test your solution\n", + "E = even()\n", + "O = odd()\n", + "G = greater_than(0)\n", + "L = less_than(0)\n", + "EQ = equal_to(data[20])\n", + "D = divisible_by(data[50])\n", + "\n", + "print(E(2), O(2), G(2), L(2), EQ(2), D(2))\n", + "print(E(-1), O(-1), G(-1), L(-1), EQ(-1), D(-1))\n", + "\n", + "print(\"Number of Entries passing E:\", len(where(data,E)))\n", + "print(\"Number of Entries passing O:\", len(where(data,O)))\n", + "print(\"Number of Entries passing G:\", len(where(data,G)))\n", + "print(\"Number of Entries passing L:\", len(where(data,L)))\n", + "print(\"Number of Entries passing EQ:\", len(where(data,EQ)))\n", + "print(\"Number of Entries passing D:\", len(where(data,D)))" + ], + "execution_count": 16, + "outputs": [ + { + "output_type": "stream", + "text": [ + "True False True False False False\n", + "False True False True False False\n", + "Number of Entries passing E: 0\n", + "Number of Entries passing O: 0\n", + "Number of Entries passing G: 482\n", + "Number of Entries passing L: 518\n", + "Number of Entries passing EQ: 1\n", + "Number of Entries passing D: 1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tEhLwDyH_1hq", + "colab_type": "text" + }, + "source": [ + "*Exercise 7:* Repeat the previous exercise using `lambda` and the built-in python functions sum and map instead of your solution above. 
" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "VlWCyUXL_1hr", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + }, + "outputId": "c6ec9f60-850f-4397-ba0b-d568ef2af42d" + }, + "source": [ + "### BEGIN SOLUTION\n", + "\n", + "print(\"Number of Entries passing E:\", sum(map(lambda x: x % 2 == 0,data)))\n", + "print(\"Number of Entries passing O:\", sum(map(lambda x: x % 2 == 1, data)))\n", + "print(\"Number of Entries passing G:\", sum(map(lambda x: x > 0, data)))\n", + "print(\"Number of Entries passing L:\", sum(map(lambda x: x < 0, data)))\n", + "print(\"Number of Entries passing EQ:\", sum(map(lambda x: x == data[20], data)))\n", + "print(\"Number of Entries passing D:\", sum(map(lambda x: x % data[50] == 0, data)))\n", + "### END SOLUTION" + ], + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Number of Entries passing E: 0\n", + "Number of Entries passing O: 0\n", + "Number of Entries passing G: 482\n", + "Number of Entries passing L: 518\n", + "Number of Entries passing EQ: 1\n", + "Number of Entries passing D: 1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7IsTjLuQ_1hz", + "colab_type": "text" + }, + "source": [ + "## Monte Carlo\n", + "\n", + "*Exercise 7:* Write a \"generator\" function called `generate_function(func,x_min,x_max,N)`, that instead of generating a flat distribution, generates a distribution with functional form coded in `func`. Note that `func` will always be > 0. \n", + "\n", + "Use the test function below and your histogramming functions above to demonstrate that your generator is working properly.\n", + "\n", + "Hint: A simple, but slow, solution is to a draw random number test_x within the specified range and another number p between the min and max of the function (which you will have to determine). If p<=function(test_x), then place test_x on the output. 
def generate_function(func,x_min,x_max,N=1000):
    """Draw ``N`` samples distributed according to ``func`` on [x_min, x_max].

    Uses discrete inverse-CDF sampling: ``func`` is tabulated on a grid
    of ``N + 1`` evenly spaced points, a running (unnormalised)
    cumulative sum forms a discrete CDF, and each sample inverts a
    uniform draw against that table.  The exercise guarantees
    ``func(x) > 0``, so the cumulative table is non-decreasing.

    Parameters
    ----------
    func : callable
        Non-negative density (up to normalisation) to sample from.
    x_min, x_max : float
        Sampling range.
    N : int
        Number of samples to generate and grid resolution (default 1000).

    Returns
    -------
    list of float
        ``N`` grid points drawn with probability proportional to ``func``.
    """
    import bisect  # stdlib: binary search over the cumulative table
    import random  # stdlib: uniform draws

    out = list()

    # Tabulate func on N + 1 evenly spaced grid points.
    d = (x_max - x_min) / N
    numbers = [i * d + x_min for i in range(N + 1)]
    cdf = [func(p) for p in numbers]
    # In-place running sum -> discrete, unnormalised CDF.
    for i in range(1, N + 1):
        cdf[i] += cdf[i - 1]

    while len(out) < N:
        # Invert the CDF: pick a uniform height in [0, cdf[-1]] and find
        # the first grid point whose cumulative value reaches it.
        # bisect_left returns exactly the index the original linear scan
        # found (first i with cdf[i] >= u), but in O(log N) instead of
        # O(N) per sample — the original was accidentally O(N^2) overall.
        u = random.random() * cdf[-1]
        out.append(numbers[bisect.bisect_left(cdf, u)])

    return out
1.500, 2.000] : ######################\n", + "[ 2.000, 2.500] : ############################\n", + "[ 2.500, 3.000] : ####################################\n", + "[ 3.000, 3.500] : ###################################\n", + "[ 3.500, 4.000] : ##########################################\n", + "[ 4.000, 4.500] : ########################################\n", + "[ 4.500, 5.000] : ################################################\n", + "[ 5.000, 5.500] : ##################################################\n", + "[ 5.500, 6.000] : #########################################\n", + "[ 6.000, 6.500] : #######################################\n", + "[ 6.500, 7.000] : ###################################\n", + "[ 7.000, 7.500] : #################################\n", + "[ 7.500, 8.000] : ###############################\n", + "[ 8.000, 8.500] : #######################\n", + "[ 8.500, 9.000] : #################\n", + "[ 9.000, 9.500] : ###########\n", + "[ 9.500, 10.000] : ####\n", + "[ 0.000, 0.500] : ######\n", + "[ 0.500, 1.000] : ########\n", + "[ 1.000, 1.500] : ######\n", + "[ 1.500, 2.000] : ###########\n", + "[ 2.000, 2.500] : ##############\n", + "[ 2.500, 3.000] : #############\n", + "[ 3.000, 3.500] : #################\n", + "[ 3.500, 4.000] : ###################\n", + "[ 4.000, 4.500] : ####################\n", + "[ 4.500, 5.000] : ##########################\n", + "[ 5.000, 5.500] : ##################\n", + "[ 5.500, 6.000] : ################################\n", + "[ 6.000, 6.500] : ##########################\n", + "[ 6.500, 7.000] : ##################################\n", + "[ 7.000, 7.500] : #############################\n", + "[ 7.500, 8.000] : #######################################\n", + "[ 8.000, 8.500] : #################################\n", + "[ 8.500, 9.000] : ##############################################\n", + "[ 9.000, 9.500] : ##################################################\n", + "[ 9.500, 10.000] : ######################################\n" + ], + "name": 
"stdout" + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "([14,\n", + " 18,\n", + " 14,\n", + " 23,\n", + " 30,\n", + " 28,\n", + " 35,\n", + " 39,\n", + " 41,\n", + " 53,\n", + " 38,\n", + " 65,\n", + " 54,\n", + " 70,\n", + " 60,\n", + " 79,\n", + " 68,\n", + " 93,\n", + " 101,\n", + " 77],\n", + " [0,\n", + " 0.5,\n", + " 1.0,\n", + " 1.5,\n", + " 2.0,\n", + " 2.5,\n", + " 3.0,\n", + " 3.5,\n", + " 4.0,\n", + " 4.5,\n", + " 5.0,\n", + " 5.5,\n", + " 6.0,\n", + " 6.5,\n", + " 7.0,\n", + " 7.5,\n", + " 8.0,\n", + " 8.5,\n", + " 9.0,\n", + " 9.5,\n", + " 10.0])" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 242 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tEdnKUPz_1h-", + "colab_type": "text" + }, + "source": [ + "*Exercise 8:* Use your function to generate 1000 numbers that are normal distributed, using the `gaussian` function below. Confirm the mean and variance of the data is close to the mean and variance you specify when building the Gaussian. Histogram the data. 
import math

def gaussian(mean, sigma):
    """Return the normal (Gaussian) probability density function.

    Parameters
    ----------
    mean : float
        Centre of the distribution.
    sigma : float
        Standard deviation (must be > 0).

    Returns
    -------
    callable
        ``f(x)`` evaluating the normal PDF
        ``exp(-(x - mean)**2 / (2 sigma**2)) / (sigma * sqrt(2 pi))``.
    """
    def f(x):
        # Fixed normalisation: the correct constant is
        # sigma * sqrt(2 * pi).  The original divided by
        # sqrt(pi * sigma), which neither integrates to 1 nor has the
        # right sigma dependence.  (The notebook's rejection/CDF sampler
        # is normalisation-invariant, so its histograms are unaffected.)
        return math.exp(-((x - mean) ** 2) / (2 * sigma ** 2)) / (sigma * math.sqrt(2 * math.pi))
    return f
1.6559999999999997, -1.6880000000000002, -1.384, 0.7279999999999998, -1.616, -0.7119999999999997, 0.4240000000000004, -1.016, -0.08800000000000008, 0.28000000000000025, -0.984, -0.7759999999999998, 0.4560000000000004, -0.3839999999999999, 0.6159999999999997, -0.35199999999999987, 0.7199999999999998, 0.8479999999999999, 0.6799999999999997, 0.9199999999999999, -0.03200000000000003, -1.1680000000000001, 2.4480000000000004, -1.552, 0.6319999999999997, 1.1840000000000002, -0.9119999999999999, -1.2799999999999998, 1.8559999999999999, 0.29600000000000026, -0.32799999999999985, 1.3280000000000003, -0.040000000000000036, 1.2800000000000002, -1.056, 0.8879999999999999, 0.4240000000000004, -0.31199999999999983, 0.28000000000000025, 0.26400000000000023, -1.08, 1.104, -1.624, 2.808, 0.39200000000000035, -0.7119999999999997, -0.6000000000000001, 0.5520000000000005, 0.4320000000000004, 0.984, -0.02400000000000002, -1.064, 0.9119999999999999, 0.6399999999999997, -0.44799999999999995, -0.3759999999999999, -2.12, 0.7199999999999998, 0.4400000000000004, -1.1280000000000001, -0.8479999999999999, -1.6400000000000001, 1.3040000000000003, -0.2799999999999998, 0.8559999999999999, 0.3520000000000003, 1.2960000000000003, -0.8079999999999998, -0.22399999999999975, -0.6080000000000001, 1.944, -0.6240000000000001, 0.3440000000000003, -1.3279999999999998, -0.20799999999999974, -1.88, -0.7359999999999998, 0.16800000000000015, 2.6080000000000005, 0.992, -1.904, 0.3520000000000003, 0.5680000000000005, 1.7599999999999998, -0.24799999999999978, -1.448, -0.7519999999999998, 2.152, -1.888, -1.1280000000000001, 1.6959999999999997, 1.4640000000000004, 2.5760000000000005, 0.0, 0.16800000000000015, 1.1680000000000001, -0.1280000000000001, -2.344, -0.504, 0.7919999999999998, -0.8079999999999998, 0.3600000000000003, -0.3599999999999999, -0.7999999999999998, -0.7119999999999997, -1.2399999999999998, 0.03200000000000003, 1.008, -1.2719999999999998, -0.2639999999999998, -0.6480000000000001, 
-0.7119999999999997, 0.3440000000000003, -0.7759999999999998, 1.8479999999999999, 0.09600000000000009, 0.03200000000000003, 0.968, 0.08800000000000008, -0.3759999999999999, -1.456, -0.9359999999999999, 0.8479999999999999, 0.38400000000000034, 1.2400000000000002, -0.10400000000000009, -0.944, -0.07200000000000006, -0.2639999999999998, 0.4720000000000004, 0.9119999999999999, 0.05600000000000005, -1.3119999999999998, -1.48, 0.040000000000000036, -0.52, 0.5600000000000005, -0.016000000000000014, -1.8319999999999999, 0.19200000000000017, 0.7599999999999998, 0.09600000000000009, -2.176, -0.7199999999999998, 1.096, -0.10400000000000009, -1.7119999999999997, -0.5920000000000001, -0.08800000000000008, 0.02400000000000002, 1.5680000000000005, -0.46399999999999997, -0.15200000000000014, -0.1759999999999997, -0.19199999999999973, 1.1360000000000001, 0.8159999999999998, 1.1760000000000002, 0.5760000000000005, -1.6480000000000001, -0.31199999999999983, -0.45599999999999996, 0.5840000000000005, 0.8719999999999999, -0.52, 1.1520000000000001, 0.3520000000000003, 1.888, -0.43999999999999995, 0.28800000000000026, -1.2479999999999998, -0.016000000000000014, 1.6799999999999997, 0.976, -1.608, 1.064, -0.504, -0.7119999999999997, 2.936, 1.2080000000000002, -0.22399999999999975, 0.3280000000000003, -0.6560000000000001, 0.14400000000000013, -0.32799999999999985, -0.976, -0.5840000000000001, 0.7599999999999998, -1.2639999999999998, -0.19999999999999973, 0.5360000000000005, -1.6400000000000001, -0.1120000000000001, 0.06400000000000006, 1.6240000000000006, 1.056, -1.3439999999999999, 0.0, 0.2560000000000002, 0.4720000000000004, -0.496, 0.1120000000000001, -0.2559999999999998, -0.7599999999999998, 1.4640000000000004, -0.4079999999999999, 0.2160000000000002, 0.02400000000000002, 1.3280000000000003, 1.088, -0.3919999999999999, -0.4159999999999999, 0.008000000000000007, -0.952, 0.016000000000000014, -1.6880000000000002, -0.552, -0.4159999999999999, -0.20799999999999974, -0.23999999999999977, 
-0.6160000000000001, -0.20799999999999974, 1.1600000000000001, -0.7119999999999997, 2.216, -0.42399999999999993, -0.31999999999999984, -0.09600000000000009, 0.6080000000000005, -0.6480000000000001, 1.4880000000000004, 0.3200000000000003, 0.7039999999999997, -0.3599999999999999, -1.392, 0.4480000000000004, 1.6319999999999997, -1.6720000000000002, -1.2399999999999998, 0.4640000000000004, -1.6480000000000001, -0.528, 0.9199999999999999, -1.392, -0.552, -0.6400000000000001, 0.9039999999999999, 0.8559999999999999, -0.040000000000000036, -0.040000000000000036, -1.104, 0.20800000000000018, -1.3279999999999998, 1.08, -0.3679999999999999, -1.6880000000000002, 0.2320000000000002, -0.34399999999999986, 0.6639999999999997, 0.37600000000000033, -0.528, -0.7199999999999998, -0.472, -2.472, -0.07200000000000006, -0.7279999999999998, -0.1759999999999997, -0.7119999999999997, 1.6719999999999997, 0.4560000000000004, -2.152, 0.008000000000000007, 0.20000000000000018, 1.3920000000000003, -0.8159999999999998, 0.02400000000000002, 0.27200000000000024, -0.48, -0.9359999999999999, 0.7439999999999998, 0.1280000000000001, 0.9039999999999999, -1.96, -0.6320000000000001, -0.31999999999999984, 0.6639999999999997, -0.1200000000000001, -0.544, -0.31999999999999984, 0.07200000000000006, 1.112, -1.2159999999999997, 0.26400000000000023, -1.1919999999999997, -1.424, 0.1280000000000001, -0.31199999999999983, -1.7919999999999998, 0.6879999999999997, 0.13600000000000012, 0.6159999999999997, -0.6560000000000001, 0.016000000000000014, -0.6480000000000001, 1.2000000000000002, -1.6480000000000001, 0.4480000000000004, -0.552, -0.3999999999999999, 0.8959999999999999, -1.016, 1.2400000000000002, 0.4560000000000004, 1.4480000000000004, -0.3759999999999999, 0.1120000000000001, -0.1679999999999997, 0.37600000000000033, 1.112, -1.04, 0.6399999999999997, 0.6479999999999997, -0.10400000000000009, -0.5920000000000001, -0.8639999999999999, -0.5840000000000001, -0.6000000000000001, 0.05600000000000005, 
-1.3439999999999999, -0.34399999999999986, 0.8719999999999999, 1.04, -2.064, -0.6160000000000001, -0.8319999999999999, -1.04, -0.23999999999999977, -2.584, 1.3200000000000003, 0.3600000000000003, 1.3040000000000003, 0.5040000000000004, -1.1680000000000001, 0.09600000000000009, -0.7519999999999998, -0.24799999999999978, 0.8479999999999999, -2.6879999999999997, -1.2319999999999998, 0.7039999999999997, -1.1760000000000002, 0.3440000000000003, 0.04800000000000004, 1.5120000000000005, -0.16000000000000014, 0.37600000000000033, -2.2880000000000003, -1.1600000000000001, -0.34399999999999986, -0.3839999999999999, -0.7439999999999998, -0.22399999999999975, 0.2560000000000002, 1.096, -0.56, -0.10400000000000009, -0.472, 0.16800000000000015, 1.016, 1.1600000000000001, -1.104, 0.4800000000000004, 1.4880000000000004, -0.008000000000000007, 0.2560000000000002, 0.016000000000000014, -0.4159999999999999, -1.3599999999999999, -1.0, -0.9119999999999999, 0.5120000000000005, -2.048, 1.016, -2.088, 1.6000000000000005, 1.1440000000000001, 1.3920000000000003, -1.12, 0.29600000000000026, -0.992, -2.488, 0.26400000000000023, -0.7759999999999998, 0.5120000000000005, 0.40800000000000036, 0.8639999999999999, -1.048, -1.7679999999999998, 0.040000000000000036, -0.2559999999999998, -0.04800000000000004, -0.8719999999999999, 1.4800000000000004, -0.8959999999999999, -0.02400000000000002, -0.7679999999999998, -0.6480000000000001, 1.4240000000000004, -0.19999999999999973, 0.5920000000000005, 1.112, -0.016000000000000014, 0.7519999999999998, -0.2879999999999998, -1.6400000000000001, 0.7279999999999998, -0.6879999999999997, 0.5920000000000005, 0.6159999999999997, -3.448, 0.016000000000000014, -0.992, 0.040000000000000036, -0.8799999999999999, 0.952, 2.3920000000000003, 0.6399999999999997, 0.992, 0.40000000000000036, 0.5840000000000005, -1.032, 0.39200000000000035, 0.03200000000000003, 0.09600000000000009, -0.1200000000000001, 0.4400000000000004, 0.8159999999999998, -0.08800000000000008, 
-0.33599999999999985, 0.9279999999999999, 0.15200000000000014, -1.1919999999999997, -0.42399999999999993, 0.30400000000000027, -0.09600000000000009, 3.216, 0.17600000000000016, 1.4320000000000004, 0.6239999999999997, -0.22399999999999975, -1.7439999999999998, -1.008, 0.2400000000000002, -0.1280000000000001, 1.2000000000000002, -0.23999999999999977, 0.5600000000000005, -1.7679999999999998, 0.1200000000000001, -0.6719999999999997, 0.4720000000000004, 0.4400000000000004, -1.7999999999999998, 1.3200000000000003, 1.3440000000000003, 0.5360000000000005, 0.14400000000000013, 1.7119999999999997, -1.2879999999999998, 1.7759999999999998, 0.3280000000000003, -0.24799999999999978, 0.9279999999999999, -1.12, -0.496, -0.6799999999999997, 0.6639999999999997, 1.6879999999999997, -1.408, -0.488, -0.7919999999999998, 0.7439999999999998, 1.1280000000000001, -0.944, 0.9039999999999999, -2.232, -0.8399999999999999, -0.2799999999999998, 0.6479999999999997, -0.5760000000000001, 0.24800000000000022, -0.19999999999999973, 1.3440000000000003, -1.064, -1.016, -1.3359999999999999, -0.6640000000000001, -1.6400000000000001, 0.4800000000000004, -1.424, -1.2639999999999998, 1.032, -0.968, 1.2400000000000002, -0.06400000000000006, 0.04800000000000004, -1.12, 0.48800000000000043, -0.08000000000000007, 0.09600000000000009, 0.7919999999999998, 0.49600000000000044, -0.22399999999999975, -0.31199999999999983, 1.3200000000000003, -0.992, 0.3680000000000003, 1.4640000000000004, -0.4079999999999999, -0.976, -2.208, -0.3039999999999998, -1.512, -0.14400000000000013, 2.216, 0.30400000000000027, -0.7439999999999998, 0.9039999999999999, 0.5360000000000005, 0.5280000000000005, 0.7359999999999998, -0.488, -0.13600000000000012, -0.5680000000000001, -1.1999999999999997, -0.48, -1.472, -0.15200000000000014, -2.2, 0.96, -0.19199999999999973, 0.7439999999999998, 2.112, -1.1520000000000001, -0.6320000000000001, 0.040000000000000036, 0.07200000000000006, 2.016, 1.2800000000000002, 0.03200000000000003, 
0.3360000000000003, -0.944, -1.088, 0.2320000000000002, 2.096, 1.1840000000000002, 1.4240000000000004, 0.4480000000000004, -2.6639999999999997, -1.08, 0.07200000000000006, 1.08, -0.5840000000000001, -0.43199999999999994, -0.9039999999999999, -0.016000000000000014, -0.35199999999999987, 1.1440000000000001, -1.544, -1.1760000000000002, 1.12, 1.2480000000000002, -0.7759999999999998, 0.5760000000000005, 0.1120000000000001, 0.06400000000000006, -0.6719999999999997, -0.43999999999999995, 1.064, 0.7759999999999998, -0.43199999999999994, 0.17600000000000016, 0.4400000000000004, 0.6799999999999997, -0.6879999999999997, 1.88, -1.8399999999999999, -2.12, 0.08800000000000008, 0.4720000000000004, -1.2799999999999998, 0.5120000000000005, 0.2400000000000002, 0.05600000000000005, -1.048, 0.8159999999999998, 0.3440000000000003, 0.49600000000000044, 0.37600000000000033, 0.992, -0.22399999999999975, 0.984, 0.6639999999999997, 1.96, 0.7519999999999998, -0.2719999999999998, 0.8799999999999999, 0.5360000000000005, -0.21599999999999975, -0.06400000000000006, -0.21599999999999975, -0.992, -0.1200000000000001, 0.07200000000000006, -1.968, -0.7439999999999998, -0.05600000000000005, -0.3679999999999999, -1.384, -0.5760000000000001, -0.1200000000000001, -0.4159999999999999, -0.23199999999999976, -0.19199999999999973, -1.88, 1.6879999999999997, -0.10400000000000009, 1.7519999999999998, 1.1280000000000001, 2.4480000000000004, 0.20800000000000018, -0.2559999999999998, 0.2320000000000002, 0.4640000000000004, 0.7919999999999998, -0.512, -0.7279999999999998, 0.30400000000000027, 1.5040000000000004, -0.3039999999999998, -0.7279999999999998, 1.4480000000000004, 0.1200000000000001, -0.544, -0.976, -0.7439999999999998, 1.2320000000000002, 0.6879999999999997, -0.23999999999999977, -0.528, 0.3120000000000003, 1.8239999999999998, 0.8959999999999999, 0.37600000000000033, -0.45599999999999996, -1.2719999999999998, 0.6799999999999997, 1.3680000000000003, 1.1600000000000001, 0.1120000000000001, -1.624, 
0.6479999999999997, 0.04800000000000004, -0.1200000000000001, 0.07200000000000006, 0.6959999999999997, 0.2240000000000002, -1.2799999999999998, 0.8479999999999999, -1.2559999999999998, 1.2080000000000002, 0.1280000000000001, -0.552, -0.7279999999999998, 0.06400000000000006, -0.3039999999999998, -1.2799999999999998, 0.2160000000000002, 0.04800000000000004, 1.5760000000000005, -1.4, -1.2319999999999998, 1.8479999999999999, 0.7199999999999998, -0.5680000000000001, 1.5520000000000005, -0.2879999999999998, 0.4640000000000004, -1.8399999999999999, 0.7759999999999998, -0.528, 0.5680000000000005, -0.6320000000000001, -0.8479999999999999, -0.008000000000000007, -1.2319999999999998, 1.6319999999999997, -0.6080000000000001, -0.512, 0.2160000000000002, 1.7679999999999998, 0.16000000000000014, -0.52, -0.8239999999999998, 0.16000000000000014, -1.44, -1.0, -1.592, -0.02400000000000002, -1.592, -0.2959999999999998, 0.968, 0.8799999999999999, 0.17600000000000016, 1.2640000000000002, 0.5680000000000005, 1.1440000000000001, -0.3919999999999999, 0.2160000000000002, 0.40000000000000036, 0.07200000000000006, -0.8239999999999998, -1.1999999999999997, 0.8399999999999999, -1.1919999999999997, -1.528, -0.7119999999999997, 1.2000000000000002, 0.38400000000000034, -1.4, -1.576, 2.6480000000000006, -0.21599999999999975, 0.4320000000000004, 0.40800000000000036, -0.1280000000000001, 2.912, 1.5440000000000005, 0.09600000000000009, -1.528, -0.2559999999999998, 0.2160000000000002, 0.13600000000000012, -2.064, 0.6479999999999997, -0.9279999999999999, -0.6560000000000001, 0.5040000000000004, 0.2160000000000002, -0.9119999999999999, -0.19999999999999973, -0.23199999999999976, -0.016000000000000014, -0.6080000000000001, 2.4960000000000004, 1.5280000000000005, 0.2320000000000002, -2.808, 1.1360000000000001, -0.15200000000000014, -0.1280000000000001, 0.20000000000000018, -0.08000000000000007, -0.3759999999999999, 0.1120000000000001, 0.5600000000000005, -0.488, -0.43999999999999995, -0.2959999999999998, 
0.6639999999999997, 0.2240000000000002, 0.14400000000000013, -1.8159999999999998, 0.96, -1.2399999999999998, -0.2559999999999998, 0.3280000000000003, 1.2880000000000003, 2.4240000000000004, -1.104, -0.18399999999999972, -2.008, 0.16000000000000014, -0.5760000000000001, 0.37600000000000033, -0.43999999999999995, -0.19199999999999973, -0.16000000000000014, 0.6159999999999997, 0.24800000000000022, -1.52, 0.26400000000000023, -0.19199999999999973, 0.7519999999999998, -1.1360000000000001, 0.18400000000000016, 1.1920000000000002, -0.1200000000000001, -0.6080000000000001, -0.31199999999999983, 0.7279999999999998, 0.17600000000000016, -1.016, -1.032, 0.8559999999999999, 1.1520000000000001, 0.7519999999999998, 1.4240000000000004, -0.08800000000000008, 0.02400000000000002, 0.30400000000000027, 1.3200000000000003, 1.5120000000000005, 0.27200000000000024, 1.8239999999999998, -0.10400000000000009, 1.2080000000000002, 1.3520000000000003, -2.424, -0.1759999999999997, 0.5920000000000005, 1.4560000000000004, 1.1520000000000001, -0.5760000000000001, 0.19200000000000017, -1.032, -0.09600000000000009, -1.12, -1.8639999999999999, 1.6559999999999997, -0.32799999999999985, 0.6319999999999997, 0.16800000000000015, 0.8959999999999999, -1.416, 2.224, -0.9039999999999999, 1.1440000000000001, -0.8079999999999998, -0.16000000000000014, -0.504, -0.544, -0.43199999999999994, -0.04800000000000004, -0.7519999999999998, 0.4320000000000004, 0.40000000000000036, -0.9359999999999999, -1.2079999999999997, -0.45599999999999996, 0.3440000000000003, 1.3120000000000003, 0.20800000000000018, -2.464, -0.1120000000000001, -2.904, -0.8399999999999999, 0.7439999999999998, -2.592, 0.4480000000000004, -0.31999999999999984, 0.4720000000000004, -0.6160000000000001, -0.472, -0.24799999999999978, -0.44799999999999995, -1.032, 0.944, -0.7199999999999998, 1.912, -0.2879999999999998, 0.6639999999999997, 0.1120000000000001, -1.7679999999999998, -0.952, 0.2160000000000002, -1.7599999999999998, 0.2160000000000002, -1.112, 
-1.2559999999999998, 0.49600000000000044, -0.44799999999999995, -0.8399999999999999, -1.6480000000000001, 1.4720000000000004, 0.3360000000000003, 0.28000000000000025, 1.3360000000000003, 0.4560000000000004, 0.03200000000000003, 0.18400000000000016, -0.05600000000000005, 0.20000000000000018, 1.3840000000000003, -0.08000000000000007, 0.37600000000000033, 2.4720000000000004, 1.1920000000000002, -0.33599999999999985, 2.16, 1.3920000000000003, 0.3680000000000003, -1.6959999999999997, 0.976, 1.7199999999999998, -0.31999999999999984, 1.048, -0.3679999999999999, -0.7839999999999998, 0.8639999999999999, -0.504, 0.06400000000000006, -1.1919999999999997, -1.0, 0.8319999999999999, 0.2400000000000002, -1.48, -0.008000000000000007, 0.5040000000000004, 0.37600000000000033, 0.7279999999999998, -0.504, -0.552, -1.064, 0.7599999999999998, 0.3360000000000003, 2.184, 0.4240000000000004, 0.07200000000000006, -0.528, 0.5520000000000005, -0.56, 0.41600000000000037, -0.6560000000000001, 0.15200000000000014, -0.5840000000000001, 0.6239999999999997, 0.41600000000000037, 1.016, 0.28000000000000025, -1.8079999999999998, 1.4720000000000004, -0.06400000000000006, -1.016, 0.952, -0.5840000000000001, 1.08, -2.368]\n", + "[4.678, 10.158, 11.748000000000001, 17.282, 10.602, 6.242, 13.378, 9.016, 7.244, 10.58, 6.756, 10.318, 9.568, 13.392, 7.796, 8.59, 8.642, 6.876, 10.524000000000001, 9.558, 13.156, 6.796, 13.794, 11.572000000000001, 3.138, 10.120000000000001, 12.302, 14.126, 8.592, 6.28, 3.374, 3.484, 15.804, 9.586, 13.51, 12.876, 7.84, 9.782, 5.2780000000000005, 7.606, 12.276, 7.072, 8.522, 15.908, 12.394, 5.538, 10.836, 12.108, 6.678, 9.168000000000001, 11.212, 6.316, 4.554, 9.056000000000001, 8.752, 12.828, 8.578, 11.34, 10.602, 6.94, 13.558, 10.624, 11.364, 4.8260000000000005, 9.700000000000001, 7.418, 12.936, 6.066, 14.062000000000001, 9.244, 8.8, 7.898000000000001, 10.916, 7.768, 10.006, 13.14, 6.206, 8.004, 11.642, 10.94, 10.84, 9.104000000000001, 12.91, 10.352, 7.6240000000000006, 7.258, 
12.714, 9.788, 9.466000000000001, 6.024, 5.636, 12.540000000000001, 7.958, 12.836, 12.620000000000001, 8.69, 8.896, 9.132, 13.028, 6.628, 8.006, 10.142, 14.450000000000001, 8.438, 9.362, 6.872, 5.172, 10.874, 3.592, 13.692, 14.206, 10.552, 10.16, 12.394, 12.716000000000001, 6.498, 4.93, 6.2620000000000005, 10.058, 4.648, 13.92, 9.194, 10.648, 7.126, 8.056000000000001, 6.432, 5.308, 10.370000000000001, 7.726, 11.748000000000001, 11.648, 11.672, 5.368, 13.158, 6.602, 2.516, 10.304, 12.142, 10.886000000000001, 11.872, 9.996, 13.968, 9.450000000000001, 9.264, 6.878, 16.27, 13.666, 10.732000000000001, 9.068, 7.324, 11.68, 8.102, 11.904, 7.344, 8.006, 13.026, 8.620000000000001, 8.394, 13.126, 9.724, 8.782, 8.816, 10.94, 10.162, 10.612, 13.848, 11.116, 13.164, 11.128, 10.162, 7.816, 5.3660000000000005, 9.702, 11.886000000000001, 12.294, 10.41, 8.064, 2.572, 6.38, 6.106, 15.016, 11.562, 14.304, 9.15, 9.436, 5.45, 7.47, 8.218, 3.84, 9.92, 10.528, 4.22, 12.004, 7.946, 8.17, 8.196, 6.898000000000001, 12.978, 8.668000000000001, 5.18, 7.488, 10.21, 9.0, 12.356, 8.49, 8.016, 9.708, 14.912, 10.618, 11.276, 9.724, 7.694, 9.518, 11.568, 15.294, 12.944, 15.048, 8.814, 9.186, 12.516, 9.306000000000001, 8.286, 12.370000000000001, 9.732, 13.334, 11.236, 14.152000000000001, 3.928, 7.484, 11.794, 11.014, 8.984, 6.208, 6.788, 9.354000000000001, 14.21, 6.03, 11.156, 11.612, 9.524000000000001, 11.366, 11.218, 12.620000000000001, 6.692, 7.256, 16.576, 12.666, 8.58, 6.844, 11.742, 10.322000000000001, 9.542, 11.81, 12.158, 14.522, 15.132, 6.016, 8.628, 10.646, 11.82, 5.666, 10.216000000000001, 6.106, 7.86, 11.204, 8.768, 9.304, 6.988, 12.450000000000001, 6.798, 9.334, 8.098, 12.392, 11.446, 11.074, 8.23, 11.77, 7.332, 9.874, 10.69, 7.788, 10.498, 6.1000000000000005, 9.972, 3.158, 11.546, 15.226, 9.866, 10.458, 10.826, 13.488, 10.692, 11.154, 6.494, 14.69, 14.178, 9.722, 7.932, 9.212, 7.916, 5.13, 16.188, 3.5140000000000002, 9.832, 9.27, 13.002, 9.752, 8.224, 13.21, 17.21, 10.114, 9.646, 
7.7940000000000005, 12.162, 2.162, 7.37, 12.112, 9.912, 8.132, 8.718, 9.626, 13.146, 9.704, 10.446, 12.094, 8.122, 14.906, 7.498, 12.508000000000001, 13.72, 8.784, 7.798, 9.966000000000001, 6.7620000000000005, 3.462, 4.0040000000000004, 15.952, 11.822000000000001, 6.642, 6.732, 8.334, 7.532, 11.458, 5.7540000000000004, 15.9, 11.168000000000001, 12.684000000000001, 9.27, 9.052, 5.336, 9.568, 11.76, 9.978, 7.296, 11.258000000000001, 9.134, 8.404, 12.058, 8.654, 15.538, 12.504, 14.054, 9.828, 5.51, 13.530000000000001, 7.408, 12.128, 8.31, 13.162, 17.2, 9.714, 10.806000000000001, 10.492, 12.234, 9.024000000000001, 9.594, 9.16, 13.994, 9.784, 8.26, 3.8280000000000003, 11.694, 12.83, 9.24, 8.798, 9.424, 10.572000000000001, 11.4, 14.448, 13.222, 3.138, 9.794, 8.088000000000001, 9.028, 15.94, 16.48, 9.934000000000001, 8.376, 4.984, 8.46, 8.128, 10.950000000000001, 13.366, 11.566, 11.77, 9.72, 5.526, 2.62, 9.616, 7.0520000000000005, 13.97, 9.356, 8.982, 6.626, 7.51, 11.334, 5.8260000000000005, 11.308, 5.8740000000000006, 6.382000000000001, 5.248, 12.862, 8.352, 9.912, 7.08, 11.422, 5.96, 9.348, 14.644, 10.882, 8.598, 7.868, 16.65, 9.738, 8.83, 6.1080000000000005, 12.168000000000001, 8.008000000000001, 6.356, 10.646, 6.896, 7.354, 6.876, 12.004, 13.234, 10.28, 9.526, 9.536, 9.07, 6.312, 6.406, 7.632000000000001, 9.966000000000001, 11.638, 6.55, 13.97, 10.15, 6.838, 4.644, 9.766, 6.5040000000000004, 4.74, 13.484, 8.158, 8.104000000000001, 13.016, 7.558, 10.404, 9.038, 5.05, 10.9, 5.426, 12.21, 4.5, 12.498000000000001, 8.53, 3.66, 8.808, 4.09, 14.318, 9.66, 11.712, 4.508, 14.51, 14.01, 10.732000000000001, 9.172, 12.044, 12.02, 10.878, 13.084, 11.488, 13.084, 11.91, 7.914000000000001, 14.902000000000001, 5.21, 8.506, 8.218, 9.902000000000001, 10.334, 8.692, 4.7860000000000005, 12.544, 9.064, 9.332, 9.93, 11.924, 11.18, 7.6240000000000006, 11.768, 4.406, 12.13, 13.994, 12.620000000000001, 8.48, 8.45, 1.248, 12.876, 5.66, 4.224, 8.67, 12.666, 7.174, 14.334, 12.996, 
11.806000000000001, 7.404, 8.178, 13.258000000000001, 6.736, 12.052, 10.138, 5.698, 8.564, 11.056000000000001, 13.232000000000001, 11.99, 8.122, 8.508000000000001, 14.012, 7.04, 11.316, 10.612, 9.200000000000001, 7.622, 6.436, 7.746, 7.468, 12.94, 12.672, 4.972, 10.99, 15.85, 12.36, 5.868, 9.516, 8.082, 9.434000000000001, 8.938, 10.222, 8.112, 9.142, 8.126, 5.2700000000000005, 10.5, 14.07, 6.328, 14.946, 5.166, 11.394, 10.738, 8.014, 7.468, 9.682, 13.274000000000001, 12.682, 1.474, 8.518, 8.668000000000001, 11.082, 13.302, 10.318, 12.8, 7.696, 8.386000000000001, 8.388, 13.496, 7.3500000000000005, 10.292, 11.446, 9.816, 4.992, 11.022, 10.436, 7.08, 11.02, 7.83, 9.83, 13.858, 9.904, 13.948, 6.758, 10.224, 5.95, 9.01, 7.272, 10.064, 12.636000000000001, 9.506, 14.67, 9.648, 10.574, 7.34, 9.724, 11.048, 8.576, 4.0360000000000005, 8.036, 11.562, 6.442, 11.624, 6.3740000000000006, 12.716000000000001, 9.98, 10.284, 9.384, 8.924, 9.774000000000001, 10.03, 13.998000000000001, 11.742, 12.842, 11.262, 16.65, 6.996, 9.332, 7.288, 8.828, 7.364, 15.872, 7.728, 12.158, 5.796, 15.928, 6.848, 8.482, 10.054, 9.282, 11.106, 7.668, 11.398, 12.244, 5.8180000000000005, 17.112000000000002, 11.122, 13.870000000000001, 11.426, 13.026, 6.216, 14.164, 10.574, 13.656, 7.578, 6.922, 9.082, 14.378, 11.332, 8.972, 3.5420000000000003, 4.646, 11.558, 11.942, 11.508000000000001, 11.864, 12.842, 9.182, 9.954, 5.26, 7.7940000000000005, 7.854, 6.46, 6.88, 8.468, 6.142, 6.16, 6.51, 9.24, 16.674, 8.832, 12.532, 4.688, 3.08, 17.756, 13.154, 15.162, 10.256, 15.93, 11.824, 10.842, 10.744, 12.484, 10.76, 19.112000000000002, 10.53, 13.01, 9.684000000000001, 11.606, 14.46, 7.306, 12.424, 7.192, 3.128, 5.256, 12.686, 13.414, 13.656, 13.468, 9.446, 5.682, 9.704, 9.586, 12.562, 6.8420000000000005, 9.976, 9.89, 10.09, 11.296, 12.61, 13.418000000000001, 9.992, 5.482, 12.434000000000001, 9.984, 13.168000000000001, 1.05, 4.942, 10.396, 13.072000000000001, 12.956, 8.334, 10.294, 10.896, 10.074, 6.824, 8.784, 7.61, 
9.78, 14.708, 10.378, 10.870000000000001, 7.344, 3.182, 9.442, 10.738, 10.232000000000001, 10.702, 7.522, 12.352, 11.912, 10.386000000000001, 6.272, 11.69, 12.204, 8.18, 12.896, 12.822000000000001, 10.58, 14.314, 5.98, 9.522, 10.786, 6.714, 14.774000000000001, 10.364, 9.92, 7.682, 9.392, 5.774, 8.746, 17.44, 12.288, 10.948, 7.282, 12.728, 6.018, 6.016, 6.94, 10.672, 12.994, 10.012, 9.394, 13.46, 11.71, 12.222, 9.702, 7.642, 12.14, 8.97, 8.894, 11.966000000000001, 8.022, 13.58, 6.952, 4.904, 8.812, 15.996, 2.582, 12.65, 11.704, 11.838000000000001, 11.200000000000001, 12.086, 15.082, 7.746, 7.05, 5.684, 12.454, 6.732, 8.752, 13.436, 8.994, 5.804, 8.688, 9.348, 10.294, 5.996, 8.576, 11.422, 14.428, 11.886000000000001, 12.132, 12.3, 7.712, 11.302, 14.056000000000001, 12.026, 16.272000000000002, 11.898, 11.99, 14.214, 7.62, 11.412, 12.964, 14.874, 17.288, 13.302, 14.202, 8.45, 8.844, 4.87, 5.986, 9.778, 9.326, 11.31, 9.728, 5.486, 12.118, 8.742, 9.27, 8.61, 12.302, 12.75, 15.08, 7.8340000000000005, 10.994, 4.0840000000000005, 10.496, 10.11, 9.158, 10.766, 14.302, 7.984, 9.796, 11.202, 10.376, 4.962, 12.062, 7.72, 11.642, 10.386000000000001, 7.932, 11.93, 7.1160000000000005, 14.782, 5.76, 7.852, 13.996, 15.078000000000001, 9.828, 9.564, 11.424, 10.152000000000001, 9.968, 10.290000000000001, 15.63, 4.666, 16.316, 9.262, 12.672, 5.658, 7.838, 14.192, 12.116, 13.758000000000001, 14.57, 12.528, 14.22, 10.912, 13.244, 7.67, 6.412, 10.724, 5.508, 11.532, 10.048, 9.564, 7.178, 16.588, 9.82, 8.018, 13.706, 15.782, 10.144, 12.038, 7.328, 11.612, 14.94, 8.592, 12.6, 6.418, 15.358, 13.406, 12.466000000000001, 8.41, 5.5920000000000005, 14.354000000000001, 10.14, 9.34, 3.884, 9.294, 5.84, 6.472, 10.25, 9.540000000000001, 10.5, 9.972, 12.596, 11.398, 10.472, 10.582, 15.812000000000001, 6.94, 4.55, 6.0280000000000005, 16.990000000000002, 7.306, 10.982000000000001, 9.512, 8.468, 11.812, 8.592, 14.094, 8.202, 13.378, 15.38, 11.984, 9.904, 9.646, 10.93, 14.928, 9.196, 12.778, 9.022, 
7.194, 13.236, 4.884, 8.578, 6.058, 9.38, 6.806, 9.744, 6.05, 5.362, 15.620000000000001, 13.504, 9.48, 7.53, 10.24, 6.594, 9.14, 14.168000000000001, 3.856, 4.298, 11.424, 5.0280000000000005, 10.216000000000001, 6.8580000000000005, 7.878, 7.1240000000000006, 5.466, 8.96, 4.41, 18.924, 10.51, 11.772, 13.414, 7.646, 11.316, 12.754, 13.176, 9.86, 5.728, 4.23, 9.294, 9.124, 8.894, 11.23, 17.06, 7.998, 14.764000000000001, 8.806000000000001, 11.168000000000001, 12.762, 7.354, 10.862, 7.8260000000000005, 9.352, 10.428, 8.124, 13.638, 9.336, 8.09, 7.228, 7.6000000000000005, 12.282, 13.154, 10.882, 9.196, 9.466000000000001, 13.494, 6.138, 11.612, 12.122, 8.242, 14.040000000000001, 11.828, 5.3740000000000006, 9.292, 9.540000000000001, 9.274000000000001, 15.672, 6.76, 10.432, 4.848, 15.934000000000001, 5.99, 8.858, 11.886000000000001, 7.924, 9.906, 6.042, 12.828, 11.308, 10.568, 10.35, 6.344, 6.0920000000000005, 5.824, 10.448, 11.884, 16.2, 11.266, 11.098, 10.684000000000001, 13.448, 8.52, 5.0520000000000005, 7.962, 7.8660000000000005, 4.472, 8.24, 9.798, 6.22, 5.086, 9.948, 8.732, 11.486, 9.266, 10.626, 5.412, 11.268, 7.824, 12.968, 8.19, 9.540000000000001, 9.574, 16.378, 7.644, 8.816, 9.646, 9.412, 4.95, 15.082, 8.908, 11.868, 9.556000000000001, 6.908, 10.736, 11.134, 9.838000000000001, 6.878, 9.742, 10.57, 9.198, 11.056000000000001, 10.524000000000001, 3.306, 10.39, 9.040000000000001, 6.662, 11.724, 11.886000000000001, 12.790000000000001, 9.664, 10.156, 11.114, 14.876, 7.752, 9.468, 10.108, 9.762, 8.464, 7.128, 12.312, 12.002, 12.47, 10.168000000000001, 10.722, 9.936, 12.834, 14.306000000000001, 14.346, 7.414000000000001, 9.534, 8.334, 14.290000000000001, 13.362, 5.372, 8.67, 10.572000000000001, 13.794, 10.722, 13.922, 7.658, 12.066, 7.97, 4.532, 13.942, 4.82, 11.046, 12.666, 8.774000000000001, 7.1240000000000006, 9.174, 5.282, 10.200000000000001, 10.14, 12.284, 6.57, 6.42, 7.104, 7.356, 9.752, 13.27, 8.496, 14.64, 8.286, 9.386000000000001, 10.51, 4.154, 11.636000000000001, 
8.48, 11.32, 10.546, 10.446, 11.408, 6.666, 6.392, 8.31, 14.122, 6.142, 6.228, 13.166, 13.514000000000001, 8.64, 11.644, 7.71, 13.028, 11.58, 16.306, 10.738, 10.518, 8.856, 13.958, 3.052, 10.556000000000001, 7.704, 6.604, 5.266, 9.626, 10.448, 11.626, 12.834, 11.112, 9.492, 6.6240000000000006, 6.752, 7.23, 9.928, 8.016, 11.098, 13.42, 12.422, 12.14, 16.096, 12.280000000000001, 12.554, 14.774000000000001, 3.45, 9.370000000000001, 15.212, 5.166, 6.522, 8.148, 10.784, 9.96, 7.742, 6.936, 8.63, 10.898, 6.8260000000000005, 11.804, 6.974, 8.788, 8.522, 11.5, 9.084, 8.32, 9.06, 10.784, 6.758, 4.54, 12.886000000000001, 11.868, 12.408, 8.32, 10.388, 8.844, 10.81, 14.846, 7.654, 8.106, 11.336, 13.328, 7.74, 12.344, 7.746, 3.228, 12.634, 11.23, 10.626, 8.442, 15.518, 5.242, 9.278, 13.104000000000001, 7.824, 14.698, 8.796, 10.412, 6.456, 9.084, 10.694, 9.002, 9.158, 15.72, 9.734, 11.676, 15.174, 7.026, 15.834, 12.728, 12.272, 8.6, 16.41, 9.242, 4.498, 6.892, 5.126, 13.132, 11.56, 10.932, 11.354000000000001, 11.544, 13.232000000000001, 5.928, 10.142, 10.482000000000001, 10.408, 11.336, 4.234, 13.988, 4.312, 8.286, 6.868, 12.062, 7.978, 8.796, 15.83, 16.156, 13.668000000000001, 15.222, 13.838000000000001, 12.938, 14.576, 13.282, 7.494, 11.988, 12.142, 9.458, 8.322000000000001, 12.508000000000001, 10.958, 9.158, 11.832, 13.836, 8.274000000000001, 7.628, 13.638, 11.098, 6.138, 11.958, 9.496, 12.448, 14.442, 11.82, 11.004, 9.406, 7.0, 1.782, 4.014, 9.65, 10.992, 12.214, 9.738, 9.888, 12.352, 12.328, 14.418000000000001, 9.278, 11.16, 14.034, 9.894, 4.406, 5.882000000000001, 10.998000000000001, 8.808, 12.33, 11.214, 8.656, 12.898, 11.98, 9.518, 3.94, 12.65, 13.308, 8.19, 11.352, 10.086, 11.232000000000001, 16.168, 8.978, 12.362, 9.414, 10.428, 10.944, 11.906, 12.91, 9.18, 6.816, 11.016, 7.928, 5.958, 11.796, 10.676, 13.928, 11.536, 7.362, 8.28, 9.618, 8.818, 11.78, 8.736, 8.682, 13.97, 11.342, 5.516, 9.03, 9.562, 5.352, 6.832, 12.906, 6.766, 8.514, 4.346, 9.212, 12.086, 16.136, 
8.474, 10.948, 7.912, 12.91, 13.672, 10.782, 7.312, 10.716000000000001, 10.5, 6.95, 9.512, 11.994, 13.532, 17.916, 6.538, 6.978, 7.486, 12.462, 9.586, 6.122, 11.862, 7.0360000000000005, 17.904, 11.372, 8.504, 9.846, 10.196, 6.312, 9.462, 8.428, 13.748000000000001, 10.324, 8.92, 13.318, 12.838000000000001, 15.456, 7.214, 12.92, 9.388, 9.244, 8.492, 6.236, 10.636000000000001, 8.71, 14.214, 6.202, 11.616, 5.468, 12.206, 9.862, 11.326, 13.776, 7.23, 10.404, 11.156, 8.066, 5.482, 9.232, 11.78, 12.324, 8.858, 6.642, 3.888, 9.348, 12.536, 13.632, 10.404, 11.516, 8.59, 5.822, 12.672, 5.574, 13.236, 10.99, 7.872, 12.17, 17.228, 10.864, 8.022, 8.892, 8.56, 5.774, 11.606, 10.950000000000001, 8.896, 13.442, 9.852, 14.098, 10.23, 8.662, 12.946, 10.67, 9.464, 4.418, 9.218, 6.9, 13.84, 14.966000000000001, 8.866, 9.384, 10.642, 4.078, 12.446, 9.572000000000001, 8.158, 12.65, 9.912, 9.532, 13.746, 9.978, 8.49, 13.012, 14.594, 9.412, 13.554, 10.768, 10.182, 8.062, 11.488, 9.946, 8.52, 9.428, 9.288, 3.68, 12.052, 10.56, 9.684000000000001, 11.106, 7.098, 12.200000000000001, 8.188, 9.44, 8.64, 9.966000000000001, 9.388, 11.044, 11.004, 10.754, 10.742, 6.448, 9.334, 11.0, 6.458, 9.164, 11.538, 17.02, 10.898, 10.322000000000001, 12.868, 10.384, 16.296, 12.156, 11.57, 10.498, 6.096, 8.424, 9.57, 10.616, 10.984, 9.344, 6.688, 11.434000000000001, 11.726, 6.96, 11.536, 9.182, 11.694, 7.814, 13.132, 13.780000000000001, 9.124, 7.144, 11.466000000000001, 8.040000000000001, 8.466, 8.364, 6.07, 9.17, 8.892, 9.918000000000001, 13.404, 12.424, 11.4, 10.484, 14.47, 9.394, 11.158, 14.514000000000001, 6.602, 8.348, 8.324, 7.172, 8.914, 6.714, 8.708, 8.172, 9.798, 9.352, 10.816, 9.562, 13.944, 12.094, 10.950000000000001, 4.562, 9.816, 10.494, 8.34, 12.74, 8.934000000000001, 9.692, 7.84, 2.584, 8.894, 14.464, 9.72, 17.108, 10.89, 9.338000000000001, 9.376, 8.808, 3.922, 12.568, 8.484, 8.622, 11.276, 15.502, 6.188, 16.830000000000002, 6.712, 6.274, 13.686, 11.786, 8.84, 8.392, 7.132000000000001, 10.728, 
5.5360000000000005, 14.21, 6.5600000000000005, 10.08, 10.178, 14.72, 11.718, 10.478, 6.432, 11.886000000000001, 6.868, 12.102, 12.628, 11.076, 8.896, 7.554, 13.624, 5.812, 11.262, 7.784, 7.538, 10.052, 8.858, 10.93, 7.704, 5.6000000000000005, 6.424, 8.468, 10.994, 4.5120000000000005, 9.444, 5.3500000000000005, 10.802, 15.432, 8.248, 10.506, 4.558, 9.918000000000001, 11.188, 12.672, 9.452, 12.344, 9.82, 8.236, 7.684, 10.526, 7.684, 11.802, 5.7700000000000005, 11.354000000000001, 15.924, 7.972, 8.992, 10.07, 7.752, 7.524, 14.27, 4.14, 11.48, 10.478, 9.27, 9.858, 11.192, 12.878, 10.566, 7.5600000000000005, 13.348, 6.93, 5.468, 6.742, 10.868, 14.958, 11.352, 6.5920000000000005, 11.746, 9.414, 6.426, 10.742, 9.298, 15.604000000000001, 13.83, 9.602, 5.914, 9.504, 9.986, 7.572, 10.188, 14.442, 5.534, 5.338, 12.158, 11.362, 8.522, 15.348, 8.298, 11.712, 8.146, 7.0840000000000005, 11.948, 16.012, 13.542, 9.452, 10.818, 9.572000000000001, 8.842, 8.5, 9.15, 5.492, 8.724, 9.568, 13.226, 14.99, 5.904, 10.942, 9.752, 10.102, 2.782, 12.082, 9.478, 13.290000000000001, 6.134, 12.462, 7.618, 11.834, 15.058, 12.368, 11.540000000000001, 10.994, 11.788, 7.632000000000001, 4.782, 9.48, 8.362, 5.04, 8.286, 9.934000000000001, 16.144000000000002, 11.912, 6.798, 10.258000000000001, 11.378, 3.742, 13.406, 8.626, 13.9, 7.072, 7.736, 12.728, 9.702, 4.926, 7.756, 8.442, 12.394, 14.318, 11.714, 8.494, 8.55, 8.370000000000001, 9.124, 9.312, 13.148, 11.126, 4.434, 6.964, 8.118, 13.924, 13.688, 7.8, 11.382, 9.076, 9.196, 11.362, 11.15, 5.438, 7.496, 4.906, 14.938, 13.336, 14.292, 14.602, 10.058, 11.186, 9.712, 8.19, 8.22, 9.78, 9.494, 9.994, 13.498000000000001, 3.3280000000000003, 7.2700000000000005, 9.108, 9.796, 10.312, 9.586, 11.708, 7.788, 15.318, 9.07, 11.408, 6.8260000000000005, 8.446, 8.956, 11.592, 13.072000000000001, 9.542, 11.944, 10.048, 10.49, 10.718, 13.908, 8.256, 15.948, 7.046, 13.204, 14.676, 10.458, 11.678, 15.754, 9.540000000000001, 6.284, 9.296, 8.824, 11.344, 6.392, 1.672, 
9.884, 11.540000000000001, 11.88, 13.726, 11.082, 12.442, 9.366, 8.858, 11.17, 12.504, 14.922, 9.468, 11.246, 13.494, 13.686, 11.38, 7.206, 8.790000000000001, 7.0280000000000005, 11.084, 10.032, 8.108, 8.826, 10.388, 9.55, 8.21, 8.634, 10.290000000000001, 8.796, 11.132, 8.446, 8.03, 9.0, 14.928, 6.7940000000000005, 11.234, 9.768, 8.184000000000001, 6.96, 8.076, 8.24, 4.742, 10.496, 8.696, 9.186, 7.566, 11.644, 9.642, 13.546000000000001, 8.216, 6.944, 13.59, 8.612, 11.274000000000001, 11.468, 9.946, 11.452, 13.396, 11.952, 12.870000000000001, 7.058, 12.47, 12.864, 9.94, 13.398, 4.912, 11.912, 11.928, 9.23, 12.468, 15.598, 7.742, 6.144, 11.664, 10.048, 7.514, 10.828, 5.884, 10.448, 10.792, 12.504, 14.856, 6.524, 9.278, 13.700000000000001, 12.136000000000001, 11.77, 12.298, 8.58, 9.164, 8.642, 10.212, 12.370000000000001, 3.154, 12.898, 8.036, 14.72, 11.492, 5.946, 14.282, 9.578, 11.496, 8.954, 9.434000000000001, 5.88, 9.462, 7.416, 10.65, 11.768, 6.178, 8.176, 10.55, 9.498, 14.84, 9.472, 10.458, 7.71, 9.706, 18.242, 14.836, 7.396, 7.998, 7.474, 9.216000000000001, 4.242, 9.094, 11.01, 4.518, 8.56, 4.764, 9.822000000000001, 10.488, 12.272, 8.102, 13.8, 13.36, 10.358, 9.38, 8.106, 4.746, 8.57, 13.248000000000001, 7.782, 10.202, 6.07, 12.824, 12.322000000000001, 9.148, 7.0, 10.692, 9.19, 7.574, 11.17, 13.806000000000001, 10.838000000000001, 11.326, 12.448, 9.312, 5.3, 9.474, 9.298, 9.476, 11.226, 7.228, 10.334, 13.05, 7.046, 9.674, 9.352, 10.096, 18.696, 10.03, 6.026, 8.71, 8.208, 12.656, 12.008000000000001, 12.942, 8.238, 2.206, 10.620000000000001, 9.682, 5.976, 14.836, 10.39, 6.214, 6.3, 7.752, 7.5, 10.6, 6.332, 8.120000000000001, 8.31, 4.248, 13.194, 10.622, 13.534, 15.312000000000001, 8.736, 7.546, 14.622, 9.138, 7.914000000000001, 8.55, 10.216000000000001, 11.136000000000001, 11.222, 10.652000000000001, 15.524000000000001, 9.26, 7.95, 5.226, 12.506, 9.226, 10.422, 11.448, 5.3340000000000005, 11.028, 15.38, 14.272, 11.700000000000001, 9.542, 7.752, 6.890000000000001, 
6.804, 7.998, 10.752, 9.9, 5.672, 15.328000000000001, 11.666, 8.532, 17.852, 8.668000000000001, 5.924, 12.318, 11.888, 7.05, 11.474, 9.492, 6.6080000000000005, 6.266, 8.588000000000001, 7.49, 10.802, 7.806, 9.318, 9.896, 11.802, 9.972, 6.882000000000001, 13.868, 12.656, 8.032, 7.646, 9.378, 16.93, 8.304, 10.822000000000001, 7.716, 7.664000000000001, 13.878, 14.918000000000001, 6.392, 14.242, 4.804, 11.4, 6.026, 11.620000000000001, 6.508, 11.394, 7.606, 7.824, 6.926, 13.348, 15.632, 3.398, 7.132000000000001, 9.55, 10.06, 9.67, 6.164, 11.488, 9.422, 5.694, 10.23, 7.158, 13.334, 14.108, 10.638, 15.312000000000001, 7.732, 9.382, 10.948, 1.3940000000000001, 12.72, 12.172, 12.362, 8.58, 10.14, 6.768, 8.52, 8.776, 7.554, 9.998, 9.546, 9.904, 7.5520000000000005, 10.508000000000001, 7.558, 6.22, 11.832, 13.32, 2.992, 14.068, 8.456, 11.706, 9.76, 10.568, 12.416, 11.556000000000001, 12.394, 4.12, 9.59, 7.468, 7.792, 9.068, 6.936, 8.46, 11.25, 12.018, 10.98, 8.294, 7.456, 13.316, 12.558, 8.700000000000001, 8.418000000000001, 16.864, 4.0920000000000005, 6.654, 10.198, 13.61, 8.392, 5.97, 7.542, 13.696, 12.906, 8.198, 11.524000000000001, 5.472, 9.656, 9.13, 6.958, 11.588000000000001, 10.744, 7.0600000000000005, 10.222, 15.864, 10.894, 13.15, 15.434000000000001, 13.226, 11.658, 9.676, 3.606, 15.9, 8.884, 6.448, 9.55, 7.788, 18.076, 8.214, 15.184000000000001, 7.18, 11.812, 3.716, 10.986, 7.378, 8.442, 9.736, 11.496, 7.118, 11.274000000000001, 10.098, 10.082, 9.358, 12.848, 4.986, 6.692, 11.444, 9.64, 8.862, 8.352, 8.704, 12.486, 11.652000000000001, 11.156, 10.94, 15.754, 12.132, 9.75, 17.444, 9.844, 5.5680000000000005, 13.816, 12.376, 10.044, 7.328, 10.14, 3.914, 10.178, 16.992, 13.096, 10.322000000000001, 9.198, 13.756, 8.23, 7.12, 10.84, 12.11, 18.476, 6.01, 8.34, 13.122, 12.986, 9.742, 4.942, 10.870000000000001, 9.41, 12.290000000000001, 14.228, 8.700000000000001, 16.11, 8.118, 11.138, 7.33, 12.632, 9.708, 9.528, 11.084, 9.616, 9.908, 10.154, 7.464, 12.586, 13.034, 5.92, 
12.382, 13.532, 11.414, 13.062, 13.736, 7.5760000000000005, 14.716000000000001, 13.618, 13.530000000000001, 15.008000000000001, 9.692, 10.988, 7.994, 13.524000000000001, 14.012, 7.538, 13.638, 9.274000000000001, 2.5460000000000003, 9.224, 14.84, 7.298, 11.442, 5.382, 9.084, 10.57, 10.726, 6.242, 6.734, 11.968, 9.192, 10.65, 12.664, 11.718, 8.344, 9.806000000000001, 15.42, 9.132, 7.276, 10.078, 13.098, 4.952, 13.922, 17.176000000000002, 12.77, 9.426, 11.47, 10.102, 5.702, 10.248, 11.724, 7.5200000000000005, 9.862, 6.714, 5.588, 6.844, 8.626, 6.692, 14.726, 15.244, 8.312, 9.334, 12.414, 9.488, 7.78, 14.626, 11.784, 8.47, 15.256, 12.682, 11.742, 12.142, 11.290000000000001, 8.88, 10.918000000000001, 12.438, 10.076, 8.602, 13.206, 10.73, 8.358, 14.41, 8.138, 12.146, 3.612, 11.926, 12.530000000000001, 10.408, 8.566, 14.444, 12.252, 13.036, 7.916, 15.172, 10.766, 11.902000000000001, 7.064, 2.822, 12.466000000000001, 8.870000000000001, 7.604, 9.99, 11.41, 10.664, 7.034, 9.824, 11.342, 14.122, 12.370000000000001, 11.208, 9.996, 6.862, 10.182, 13.422, 11.858, 9.99, 4.662, 10.49, 2.728, 6.316, 9.216000000000001, 7.596, 7.862, 11.538, 8.768, 14.72, 10.700000000000001, 8.282, 13.0, 10.262, 11.214, 13.426, 12.66, 8.57, 7.422000000000001, 7.032, 8.028, 10.202, 11.072000000000001, 14.046000000000001, 4.15, 7.356, 12.242, 6.508, 8.862, 7.912, 4.66, 10.806000000000001, 9.322000000000001, 6.332, 8.162, 13.63, 9.566, 12.174, 14.112, 7.282, 9.388, 8.91, 10.452, 13.372, 16.678, 7.08, 9.13, 8.224, 14.712, 10.69, 5.798, 9.854000000000001, 8.872, 10.9, 10.536, 7.714, 8.802, 8.4, 2.834, 7.55, 13.68, 7.716, 12.66, 6.304, 10.624, 8.542, 9.528, 12.792, 5.658, 6.448, 13.218, 3.074, 10.38, 16.61, 10.838000000000001, 7.884, 10.478, 7.844, 6.054, 6.2, 9.488, 14.772, 11.778, 13.68, 5.498, 12.298, 9.08, 12.772, 7.734, 6.668, 11.482000000000001, 12.07, 9.736, 7.42, 11.934000000000001, 10.756, 14.374, 9.08, 10.312, 6.812, 14.536, 4.714, 12.372, 8.794, 5.55, 6.316, 10.16, 13.018, 7.48, 9.882, 10.842, 
6.3580000000000005, 12.426, 8.596, 10.116, 9.086, 11.94, 6.982, 11.708, 7.244, 9.15, 8.618, 4.522, 7.246, 7.2700000000000005, 5.444, 10.422, 12.244, 10.518, 9.668000000000001, 10.296, 9.426, 7.934, 11.06, 9.794, 11.314, 11.406, 3.9, 12.370000000000001, 13.934000000000001, 9.61, 9.86, 4.01, 7.054, 17.036, 10.996, 5.222, 5.514, 8.654, 8.486, 4.242, 4.636, 5.2540000000000004, 8.59, 10.122, 11.678, 7.46, 7.174, 8.372, 10.946, 9.618, 9.088000000000001, 10.322000000000001, 11.188, 9.994, 12.334, 7.644, 11.904, 15.016, 2.918, 11.952, 10.942, 10.534, 9.842, 12.536, 7.228, 7.082, 8.934000000000001, 10.518, 8.91, 12.248000000000001, 4.7780000000000005, 10.97, 13.14, 11.796, 6.41, 4.78, 9.974, 6.34, 9.082, 11.128, 9.368, 7.3420000000000005, 10.174, 14.168000000000001, 7.166, 17.122, 6.846, 11.32, 12.896, 6.76, 11.316, 8.218, 6.672, 13.43, 12.540000000000001, 8.144, 9.878, 6.398000000000001, 6.892, 6.722, 10.812, 14.924, 9.344, 7.708, 14.144, 2.162, 9.506, 8.788, 9.33, 11.006, 11.682, 11.624, 9.262, 11.518, 10.99, 15.734, 6.914, 10.65, 9.652000000000001, 14.298, 7.672000000000001, 12.584, 14.262, 11.616, 7.978, 10.356, 12.004, 12.66, 11.132, 13.522, 12.922, 10.540000000000001, 12.144, 10.182, 8.68, 11.252, 6.716, 8.978, 9.738, 3.218, 7.182, 13.51, 11.75, 10.92, 8.72, 11.864, 15.522, 12.196, 13.458, 12.682, 10.790000000000001, 8.906, 14.016, 8.238, 13.700000000000001, 7.704, 8.768, 6.388, 8.072000000000001, 7.758, 12.166, 5.906, 10.164, 8.112, 7.058, 7.158, 10.548, 8.3, 7.212, 9.188, 9.120000000000001, 6.5520000000000005, 12.192, 8.99, 12.078, 4.48, 8.694, 11.984, 8.482, 11.464, 11.742, 8.488, 12.1, 8.648, 10.8, 11.558, 4.21, 14.63, 11.508000000000001, 6.862, 8.276, 5.954, 13.024000000000001, 13.26, 14.864, 8.5, 10.106, 12.368, 9.276, 10.302, 9.968, 16.256, 10.084, 12.836, 11.096, 8.064, 10.668000000000001, 10.57, 10.288, 11.508000000000001, 10.402000000000001, 7.238, 15.126, 4.948, 4.422, 7.894, 8.69, 5.26, 8.682, 11.284, 12.6, 6.716, 10.804, 5.846, 13.462, 9.524000000000001, 
5.664, 9.144, 11.02, 8.802, 13.556000000000001, 7.472, 13.084, 4.5920000000000005, 8.516, 10.68, 16.794, 9.726, 8.264, 18.736, 10.804, 10.282, 10.262, 6.312, 10.33, 5.26, 6.458, 12.790000000000001, 10.448, 13.188, 8.81, 10.116, 8.976, 6.716, 6.878, 13.574, 11.084, 17.674, 11.128, 5.446, 7.332, 10.502, 12.314, 9.812, 9.316, 9.118, 12.502, 0.8180000000000001, 4.314, 10.72, 14.064, 12.178, 11.004, 6.3100000000000005, 9.268, 10.55, 7.016, 4.514, 5.34, 9.246, 4.0200000000000005, 11.758000000000001, 9.356, 7.8, 6.848, 12.97, 9.484, 10.542, 6.096, 5.622, 9.018, 11.540000000000001, 2.878, 11.508000000000001, 15.542, 12.728, 9.402000000000001, 12.098, 14.244, 9.282, 9.242, 11.962, 7.668, 13.018, 6.792, 12.07, 8.838000000000001, 4.212, 8.120000000000001, 10.67, 10.564, 6.764, 9.072000000000001, 10.778, 9.428, 9.036, 10.352, 7.694, 11.578, 7.656000000000001, 9.378, 15.894, 14.262, 5.934, 9.33, 12.966000000000001, 13.484, 3.314, 10.732000000000001, 13.23, 9.544, 8.496, 6.914, 12.004, 7.888, 14.854000000000001, 6.774, 12.186, 10.51, 13.768, 9.24, 16.438, 16.066, 13.816, 8.838000000000001, 6.37, 6.8660000000000005, 10.290000000000001, 9.748, 12.982000000000001, 12.388, 6.752, 9.312, 11.504, 7.44, 12.232000000000001, 3.77, 6.238, 9.26, 13.896, 7.164000000000001, 4.666, 11.266, 15.35, 6.892, 6.1080000000000005, 6.964, 14.868, 10.73, 13.766, 12.914, 13.982000000000001, 10.49, 10.24, 6.474, 15.044, 6.066, 16.94, 7.658, 9.544, 10.474, 10.894, 7.656000000000001, 5.888, 10.598, 8.162, 9.766, 9.974, 7.640000000000001, 14.536, 4.63, 9.862, 12.708, 11.414, 7.948, 12.902000000000001, 12.382, 12.224, 9.836, 6.772, 9.676, 14.572000000000001, 12.382, 11.450000000000001, 8.77, 15.108, 13.676, 9.832, 10.138, 7.474, 3.106, 7.484, 15.444, 9.402000000000001, 15.854000000000001, 16.002, 9.298, 8.386000000000001, 8.082, 9.884, 13.4, 9.432, 11.316, 11.75, 13.478, 12.608, 10.26, 8.154, 5.07, 15.5, 7.752, 12.072000000000001, 8.872, 6.12, 5.522, 12.238, 7.942, 10.796, 10.974, 9.014, 12.844, 9.558, 
9.668000000000001, 8.306000000000001, 11.054, 9.88, 11.754, 6.798, 6.634, 8.09, 13.686, 7.272, 11.372, 10.024000000000001, 9.168000000000001, 9.582, 11.008000000000001, 9.814, 5.398, 12.392, 7.946, 7.064, 3.956, 13.718, 16.056, 7.368, 11.17, 11.18, 6.072, 8.764, 6.382000000000001, 1.074, 11.348, 16.018, 11.38, 15.652000000000001, 6.0280000000000005, 12.124, 11.932, 8.09, 9.504, 8.762, 6.764, 9.97, 10.124, 4.68, 7.0680000000000005, 9.388, 12.316, 15.618, 4.328, 7.644, 11.992, 11.72, 6.176, 11.248000000000001, 10.382, 5.7, 6.836, 9.232, 10.224, 17.34, 7.918, 14.752, 13.31, 16.39, 10.084, 12.540000000000001, 5.946, 11.174, 4.522, 7.8660000000000005, 9.892, 12.324, 9.074, 7.632000000000001, 8.474, 10.864, 6.61, 9.65, 7.444, 9.552, 8.538, 9.404, 6.872, 15.062000000000001, 13.652000000000001, 10.292, 11.146, 8.26, 6.632000000000001, 7.0760000000000005, 8.084, 7.164000000000001, 11.766, 10.968, 14.016, 6.25, 8.206, 12.062, 14.874, 13.822000000000001, 8.876, 6.008, 11.524000000000001, 11.540000000000001, 9.522, 16.644000000000002, 6.404, 4.654, 8.052, 6.3660000000000005, 9.214, 8.328, 8.454, 7.908, 9.874, 15.444, 13.672, 8.166, 9.122, 11.484, 6.916, 9.436, 6.338, 10.518, 11.47, 9.408, 13.258000000000001, 8.752, 9.24, 10.088000000000001, 9.998, 7.7940000000000005, 15.982000000000001, 6.886, 8.25, 12.64, 8.476, 8.32, 7.912, 10.056000000000001, 10.064, 8.186, 10.392, 5.8180000000000005, 13.038, 4.992, 9.594, 9.35, 7.07, 11.03, 10.728, 14.068, 4.95, 11.374, 9.872, 12.264000000000001, 12.58, 11.17, 11.534, 3.5020000000000002, 10.456, 4.5, 3.124, 10.368, 12.71, 6.354, 7.812, 9.926, 10.194, 10.33, 13.032, 12.268, 12.432, 13.266, 12.394, 12.208, 4.356, 11.316, 9.416, 13.478, 15.082, 10.040000000000001, 15.784, 11.976, 10.972, 11.202, 12.366, 11.1, 11.6, 10.022, 13.376, 11.736, 12.134, 10.068, 14.88, 8.554, 7.8, 10.906, 7.2940000000000005, 6.902, 8.414, 10.082, 6.0, 11.536, 11.596, 10.338000000000001, 18.54, 15.89, 7.868, 6.41, 8.84, 5.174, 12.606, 9.43, 9.546, 6.178, 9.682, 
11.974, 10.192, 12.67, 7.248, 16.844, 6.782, 14.376, 8.972, 6.088, 8.286, 8.828, 12.76, 9.062, 5.892, 8.852, 15.716000000000001, 6.654, 13.842, 5.766, 7.434, 8.502, 8.64, 7.98, 12.99, 13.222, 11.16, 11.104000000000001, 6.596, 9.88, 16.816, 7.24, 8.92, 16.822, 10.766, 12.414, 7.488, 7.336, 10.246, 14.592, 8.602, 8.112, 14.296000000000001, 10.322000000000001, 9.91, 9.392, 13.21, 10.602, 11.796, 15.194, 7.058, 9.57, 6.54, 6.498, 6.852, 14.572000000000001, 13.946, 8.234, 7.768, 11.292, 9.772, 12.398, 10.61, 9.858, 13.57, 9.954, 8.002, 8.422, 8.332, 5.604, 13.152000000000001, 8.632, 10.342, 14.044, 9.018, 8.634, 9.482, 9.544, 8.956, 13.878, 7.688, 6.932, 5.618, 9.512, 9.714, 10.374, 13.096, 8.75, 9.696, 4.072, 7.8180000000000005, 6.61, 14.236, 14.016, 14.612, 8.55, 9.058, 8.986, 12.236, 6.314, 10.052, 5.2860000000000005, 7.244, 10.694, 11.834, 6.172, 9.518, 11.252, 9.812, 10.788, 10.106, 11.262, 9.508000000000001, 11.674, 13.472, 13.088000000000001, 7.966, 5.7, 8.876, 12.656, 8.83, 11.594, 11.92, 11.738, 11.496, 7.0200000000000005, 8.436, 9.208, 11.788, 10.966000000000001, 8.366, 9.976, 6.5280000000000005, 9.06, 12.994, 4.402, 5.958, 6.224, 4.934, 10.768, 8.564, 12.336, 9.228, 9.808, 12.488, 10.74, 12.428, 14.888, 7.462, 6.306, 10.058, 13.784, 14.262, 10.946, 9.504, 7.156000000000001, 10.02, 11.388, 6.752, 11.984, 15.17, 9.092, 8.608, 11.962, 13.592, 10.65, 6.232, 9.872, 6.704, 6.582, 7.95, 11.402000000000001, 7.38, 6.248, 7.752, 7.292, 7.144, 9.694, 8.598, 7.378, 9.06, 9.308, 12.316, 8.682, 8.75, 8.68, 12.658, 10.006, 10.588000000000001, 9.35, 2.222, 9.848, 8.896, 9.838000000000001, 14.142, 12.644, 17.14, 4.586, 11.34, 13.866, 10.81, 10.812, 5.758, 9.464, 16.07, 11.39, 9.31, 5.682, 9.712, 13.776, 3.0260000000000002, 9.75, 14.916, 8.58, 11.822000000000001, 12.484, 13.744, 6.6160000000000005, 8.31, 5.636, 11.988, 8.788, 9.232, 8.212, 8.15, 9.652000000000001, 10.73, 9.022, 9.824, 3.396, 9.578, 10.844, 8.458, 5.0920000000000005, 7.716, 11.15, 12.028, 13.58, 9.342, 16.232, 
11.200000000000001, 12.328, 5.08, 13.534, 10.540000000000001, 13.138, 12.97, 10.212, 7.5760000000000005, 5.932, 13.008000000000001, 8.622, 12.486, 11.316, 15.418000000000001, 11.544, 8.78, 9.186, 11.856, 8.154, 17.196, 12.636000000000001, 10.082, 8.904, 9.05, 9.526, 1.974, 8.714, 13.004, 9.788, 8.32, 7.888, 10.586, 8.452, 8.234, 10.386000000000001, 8.428, 10.678, 10.352, 14.534, 7.21, 11.22, 8.732, 11.816, 8.44, 11.938, 17.926000000000002, 5.848, 12.49, 4.956, 12.468, 6.904, 13.848, 13.956, 11.622, 11.82, 7.91, 12.968, 11.924, 8.208, 9.96, 8.536, 9.464, 10.958, 12.14, 10.412, 15.654, 7.292, 15.120000000000001, 9.182, 7.916, 13.496, 9.3, 9.916, 12.118, 5.8340000000000005, 14.258000000000001, 17.682, 10.554, 5.886, 12.106, 13.432, 13.77, 11.158, 12.596, 8.612, 7.11, 6.862, 6.708, 11.848, 12.732000000000001, 15.064, 11.576, 6.1000000000000005, 8.726, 6.756, 14.424, 9.118, 7.7860000000000005, 8.888, 6.458, 7.668, 8.416, 12.766, 11.524000000000001, 8.53, 18.988, 8.604000000000001, 9.120000000000001, 10.034, 10.978, 10.192, 12.246, 12.672, 7.198, 3.0660000000000003, 12.024000000000001, 11.546, 14.692, 13.352, 14.028, 14.752, 6.758, 6.63, 9.988, 14.994, 9.904, 10.318, 7.094, 7.464, 9.598, 5.5520000000000005, 7.956, 4.66, 11.174, 11.366, 10.726, 6.306, 12.136000000000001, 11.540000000000001, 14.886000000000001, 10.620000000000001, 12.102, 12.404, 6.376, 7.248, 10.28, 11.494, 10.18, 15.954, 9.038, 15.788, 7.618, 10.318, 7.0520000000000005, 11.774000000000001, 7.694, 14.41, 4.978, 8.69, 13.77, 10.688, 14.134, 2.864, 5.042, 10.322000000000001, 7.79, 9.188, 12.422, 10.512, 8.774000000000001, 11.578, 12.064, 6.3340000000000005, 17.364, 11.426, 9.71, 13.106, 10.51, 12.174, 10.638, 9.722, 4.442, 15.64, 5.5600000000000005, 6.804, 15.252, 9.8, 10.08, 8.382, 4.356, 12.412, 11.518, 11.978, 11.194, 10.376, 8.546, 12.012, 6.582, 10.856, 5.014, 6.638, 6.338, 5.476, 8.004, 6.974, 5.368, 6.534, 8.268, 11.394, 11.184000000000001, 8.012, 3.2840000000000003, 11.616, 11.116, 8.058, 5.478, 
13.816, 11.14, 6.398000000000001, 12.816, 13.200000000000001, 6.382000000000001, 9.494, 8.808, 6.16, 5.32, 7.24, 10.24, 10.618, 11.152000000000001, 9.69, 12.478, 10.31, 7.5520000000000005, 11.082, 7.634, 7.378, 13.030000000000001, 12.046, 8.998, 9.962, 14.976, 9.846, 12.832, 6.932, 15.592, 4.624, 5.104, 10.834, 12.552, 11.892, 6.468, 6.956, 11.286, 11.614, 6.964, 11.574, 15.610000000000001, 8.798, 9.794, 7.38, 13.476, 11.454, 10.968, 16.05, 7.478, 9.05, 11.268, 10.716000000000001, 9.72, 7.496, 9.094, 11.246, 11.986, 12.604000000000001, 11.13, 6.8, 10.784, 16.934, 11.9, 11.552, 10.526, 2.754, 5.756, 13.236, 14.616, 8.562, 10.454, 6.8260000000000005, 11.344, 7.346, 8.288, 10.932, 8.63, 9.524000000000001, 10.324, 5.868, 11.292, 5.65, 16.004, 5.62, 4.488, 10.692, 7.078, 7.258, 14.734, 11.018, 9.536, 8.21, 10.0, 10.19, 5.128, 6.622, 6.9, 12.348, 9.648, 13.042, 8.014, 12.234, 4.0120000000000005, 13.998000000000001, 11.788, 8.898, 7.0440000000000005, 7.994, 8.536, 3.648, 11.184000000000001, 12.074, 10.006, 6.494, 8.912, 10.83, 10.978, 8.91, 8.948, 8.678, 9.076, 14.574, 14.88, 14.620000000000001, 8.912, 9.592, 7.276, 9.58, 7.764, 12.07, 7.924, 8.006, 12.35, 6.946, 14.706, 15.302, 7.738, 11.108, 10.646, 15.996, 6.5760000000000005, 10.186, 9.826, 6.812, 6.242, 10.208, 12.89, 12.916, 8.442, 8.274000000000001, 5.088, 6.288, 10.49, 9.102, 14.462, 1.364, 9.042, 9.116, 13.866, 12.552, 17.456, 6.784, 11.138, 9.604000000000001, 5.538, 12.902000000000001, 8.11, 10.368, 8.022, 6.688, 10.328, 11.976, 7.45, 10.370000000000001, 12.314, 8.026, 8.232, 7.056, 9.43, 12.258000000000001, 10.498, 9.484, 17.3, 7.498, 10.268, 10.084, 12.374, 11.174, 10.906, 11.788, 5.558, 13.458, 9.386000000000001, 14.02, 10.212, 8.664, 14.272, 13.704, 7.0440000000000005, 10.456, 12.998000000000001, 6.298, 11.856, 9.526, 7.078, 10.450000000000001, 6.16, 9.346, 12.342, 7.102, 7.464, 8.174, 12.912, 13.824, 8.948, 14.030000000000001, 8.534, 7.83, 11.356, 13.276, 9.232, 13.132, 12.508000000000001, 12.784, 12.72, 
9.504, 9.888, 13.156, 14.926, 7.96, 8.266, 4.776, 9.134, 7.0040000000000004, 6.654, 9.18, 9.184000000000001, 9.24, 8.790000000000001, 14.122, 8.404, 9.932, 7.73, 4.188, 4.522, 11.886000000000001, 8.968, 7.166, 3.138, 14.076, 13.492, 7.8660000000000005, 13.204, 11.106, 10.188, 9.27, 11.96, 11.63, 9.184000000000001, 10.678, 3.228, 6.0680000000000005, 15.128, 12.328, 9.678, 10.702, 14.908, 10.41, 9.028, 13.084, 14.994, 9.950000000000001, 6.7, 7.72, 8.01, 12.51, 10.92, 10.852, 6.686, 12.226, 9.472, 8.544, 4.49, 7.474, 10.274000000000001, 13.558, 9.294, 6.76, 8.38, 6.0920000000000005, 12.540000000000001, 13.214, 8.864, 9.128, 5.736, 5.888, 9.5, 9.58, 9.664, 11.632, 9.578, 8.422, 7.074, 9.358, 10.656, 10.97, 11.322000000000001, 7.18, 9.818, 17.436, 11.028, 8.236, 8.998, 6.12, 10.9, 8.636000000000001, 4.434, 12.488, 11.066, 8.084, 11.01, 11.376, 7.564, 9.316, 11.638, 8.018, 10.038, 7.382000000000001, 15.572000000000001, 12.398, 9.622, 13.232000000000001, 10.48, 12.47, 15.008000000000001, 12.016, 8.684000000000001, 10.104000000000001, 8.674, 10.098, 6.816, 8.18, 6.372, 14.368, 8.468, 15.1, 2.62, 17.532, 8.922, 7.34, 12.992, 17.164, 7.516, 3.08, 9.548, 11.16, 7.68, 8.68, 13.358, 7.882000000000001, 7.16, 10.596, 13.248000000000001, 3.346, 14.91, 13.674, 12.33, 7.2700000000000005, 8.284, 6.708, 9.924, 9.424, 7.424, 12.8, 7.348, 9.046, 10.858, 15.094000000000001, 8.072000000000001, 4.828, 12.31, 6.606, 9.392, 8.898, 7.832, 8.462, 12.628, 8.746, 13.308, 12.856, 9.862, 7.492, 10.626, 11.128, 7.74, 7.942, 10.628, 10.152000000000001, 9.14, 8.784, 13.586, 11.19, 14.066, 14.882, 9.362, 12.232000000000001, 8.926, 9.726, 12.904, 11.92, 11.168000000000001, 6.196, 9.616, 9.166, 13.954, 10.732000000000001, 4.824, 8.672, 11.532, 7.952, 11.88, 12.738, 9.942, 7.426, 11.118, 5.0600000000000005, 5.416, 12.434000000000001, 11.404, 15.678, 6.774, 9.292, 6.392, 2.5420000000000003, 2.95, 9.91, 9.09, 6.982, 8.116, 7.088, 10.476, 7.954, 11.126, 7.876, 6.494, 9.642, 12.620000000000001, 13.44, 14.3, 
12.298, 9.292, 2.316, 13.682, 6.908, 14.154, 12.046, 9.226, 19.312, 8.552, 9.302, 10.548, 7.844, 10.618, 11.624, 8.384, 5.32, 12.1, 9.64, 12.592, 10.546, 8.214, 10.984, 5.5680000000000005, 12.378, 11.194, 8.13, 10.762, 16.56, 10.208, 7.86, 8.686, 10.08, 6.29, 8.262, 7.386, 11.532, 9.67, 14.974, 10.27, 13.05, 7.934, 11.466000000000001, 11.290000000000001, 9.574, 12.108, 12.702, 11.69, 17.088, 14.114, 10.674, 10.232000000000001, 15.902000000000001, 15.982000000000001, 10.108, 11.766, 12.694, 10.368, 9.164, 14.76, 7.664000000000001, 12.984, 11.374, 18.054000000000002, 10.55, 8.684000000000001, 8.342, 9.97, 5.404, 8.72, 6.78, 10.106, 9.512, 9.158, 15.378, 6.204, 8.074, 18.282, 16.092, 10.482000000000001, 11.586, 12.644, 7.938, 7.738, 14.824, 10.61, 8.874, 10.818, 12.262, 12.202, 15.118, 6.934, 11.088000000000001, 8.218, 8.064, 11.122, 7.16, 8.264, 14.52, 7.71, 5.244, 8.212, 10.472, 9.788, 13.134, 10.036, 11.64, 7.994, 7.32, 9.302, 8.592, 8.166, 9.004, 6.968, 14.86, 11.642, 11.962, 9.338000000000001, 10.018, 9.042, 8.228, 9.394, 7.92, 13.752, 8.51, 10.956, 13.258000000000001, 8.13, 10.256, 11.582, 9.798, 10.07, 9.852, 11.058, 10.68, 10.14, 12.46, 11.642, 10.08, 18.086000000000002, 12.5, 9.324, 7.59, 6.268, 15.754, 11.96, 4.654, 13.194, 13.264000000000001, 11.902000000000001, 4.932, 9.114, 11.174, 9.514, 10.43, 6.492, 12.106, 7.07, 6.3340000000000005, 10.966000000000001, 9.872, 11.348, 10.766, 10.450000000000001, 12.118, 7.416, 14.272, 10.418000000000001, 13.768, 6.356, 10.958, 13.544, 9.108, 3.348, 10.44, 12.97, 9.254, 3.398, 8.268, 8.906, 10.408, 7.7620000000000005, 6.8100000000000005, 10.154, 8.756, 7.988, 12.622, 16.974, 12.46, 9.368, 11.504, 11.674, 5.8, 17.544, 13.348, 7.538, 9.814, 5.166, 10.534, 9.002, 7.332, 10.088000000000001, 8.728, 11.264, 13.096, 12.038, 9.088000000000001, 9.842, 5.8340000000000005, 12.166, 11.24, 9.278, 5.212, 8.492, 8.32, 8.916, 9.796, 8.854000000000001, 7.582, 7.006, 11.592, 9.956, 8.774000000000001, 10.008000000000001, 
12.796000000000001, 11.044, 9.306000000000001, 6.948, 9.832, 5.014, 14.544, 15.222, 13.014000000000001, 4.062, 10.554, 9.462, 6.5680000000000005, 11.034, 7.364, 7.158, 11.08, 11.36, 10.002, 11.056000000000001, 11.282, 11.542, 9.374, 9.474, 13.004, 11.936, 9.452, 6.33, 11.98, 5.41, 7.3260000000000005, 12.536, 8.268, 3.482, 11.722, 9.602, 9.006, 10.688, 11.522, 8.808, 8.596, 8.602, 7.8100000000000005, 10.77, 5.71, 7.508, 11.174, 13.336, 5.21, 13.436, 10.708, 6.514, 12.138, 8.638, 5.144, 10.756, 12.818, 12.922, 10.446, 11.39, 8.178, 4.24, 6.564, 9.98, 11.806000000000001, 10.924, 12.274000000000001, 11.596, 11.35, 6.674, 12.32, 11.342, 6.2860000000000005, 6.04, 12.08, 13.040000000000001, 4.358, 9.392, 7.8580000000000005, 7.824, 11.896, 11.814, 19.21, 6.99, 9.450000000000001, 4.542, 10.256, 13.582, 6.582, 9.4, 12.780000000000001, 10.874, 10.424, 8.678, 9.81, 11.47, 6.79, 16.27, 13.524000000000001, 8.936, 9.27, 15.936, 7.526, 5.97, 13.398, 17.304000000000002, 11.478, 16.18, 9.22, 11.118, 9.66, 11.49, 11.348, 8.382, 12.372, 9.644, 9.348, 10.726, 10.82, 11.096, 11.074, 12.304, 11.068, 11.912, 8.904, 6.462, 16.456, 12.422, 11.24, 10.694, 8.168000000000001, 4.9, 7.176, 8.378, 16.42, 3.3160000000000003, 10.204, 8.99, 7.982, 16.428, 10.026, 8.514, 12.578, 10.236, 6.04, 8.042, 8.604000000000001, 7.908, 11.126, 7.038, 9.016, 8.978, 14.55, 13.67, 4.978, 8.248, 8.318, 7.768, 11.52, 7.3100000000000005, 7.518, 10.236, 7.516, 11.832, 8.74, 7.71, 9.98, 11.73, 13.034, 12.172, 11.732000000000001, 11.52, 9.066, 11.768, 14.418000000000001, 14.074, 10.356, 15.336, 8.974, 13.586, 12.812, 5.5920000000000005, 13.874, 6.162, 11.66, 8.022, 14.57, 10.348, 8.626, 9.838000000000001, 11.776, 9.120000000000001, 9.63, 10.48, 16.666, 15.176, 9.540000000000001, 8.552, 13.602, 9.178, 10.432, 4.934, 16.114, 9.26, 10.448, 9.266, 13.662, 4.168, 9.864, 4.808, 9.276, 8.02, 10.756, 9.244, 5.394, 5.916, 8.644, 6.672, 10.032, 5.812, 9.446, 8.008000000000001, 6.112, 10.026, 10.208, 7.966, 11.66, 14.142, 
10.950000000000001, 12.906, 10.372, 7.982, 8.888, 14.612, 7.062, 6.0840000000000005, 8.984, 10.308, 17.162, 3.926, 11.608, 6.984, 12.512, 6.784, 9.008000000000001, 13.05, 3.226, 7.466, 14.944, 13.842, 14.358, 11.018, 10.774000000000001, 11.536, 13.88, 10.728, 14.514000000000001, 15.13, 9.458, 5.104, 7.936, 7.694, 6.724, 12.336, 4.644, 12.678, 10.258000000000001, 11.700000000000001, 9.06, 11.814, 11.012, 8.526, 11.256, 12.528, 10.196, 9.77, 17.330000000000002, 10.168000000000001, 13.74, 8.034, 10.778, 15.216000000000001, 11.540000000000001, 10.66, 10.218, 9.032, 11.714, 8.790000000000001, 9.928, 8.044, 9.232, 10.608, 9.694, 10.398, 12.528, 8.738, 0.7000000000000001, 8.64, 12.074, 11.018, 9.232, 10.916, 14.566, 8.728, 13.182, 7.698, 8.014, 10.784, 7.922000000000001, 5.44, 3.672, 7.062, 11.784, 11.35, 8.904, 7.97, 6.946, 3.888, 15.902000000000001, 9.078, 11.978, 13.048, 11.138, 12.124, 11.398, 9.336, 7.71, 12.166, 11.952, 16.272000000000002, 9.508000000000001, 10.790000000000001, 8.942, 11.506, 9.788, 9.428, 8.352, 13.096, 10.128, 8.052, 8.816, 10.004, 13.846, 9.008000000000001, 11.794, 9.954, 4.792, 10.974, 16.22, 9.596, 8.86, 9.088000000000001, 12.026, 11.454, 6.018, 8.82, 11.776, 7.974, 13.894, 6.538, 11.28, 6.232, 14.962, 16.356, 13.948, 6.066, 5.458, 10.27, 8.51, 14.566, 8.35, 11.556000000000001, 10.39, 11.792, 6.96, 16.116, 8.536, 9.286, 14.71, 12.364, 14.952, 10.972, 13.542, 7.064, 7.444, 12.394, 19.262, 8.632, 14.790000000000001, 11.46, 12.832, 10.23, 5.216, 8.962, 9.958, 12.624, 14.370000000000001, 9.616, 7.452, 8.174, 8.244, 11.474, 8.244, 14.328000000000001, 4.5600000000000005, 13.098, 10.114, 14.566, 6.1000000000000005, 7.288, 11.892, 14.516, 11.876, 13.55, 5.938, 8.738, 5.558, 15.048, 6.05, 8.71, 9.152000000000001, 12.326, 7.972, 9.682, 14.44, 5.622, 10.538, 11.276, 13.104000000000001, 9.902000000000001, 5.166, 8.564, 17.898, 10.964, 14.148, 16.116, 6.432, 12.34, 3.43, 9.544, 13.398, 7.0760000000000005, 6.532, 8.294, 8.638, 10.062, 13.784, 6.216, 9.158, 
11.378, 9.258000000000001, 8.244, 10.36, 11.482000000000001, 9.67, 7.814, 13.564, 6.522, 6.938, 12.790000000000001, 10.008000000000001, 7.992, 11.282, 5.498, 7.5200000000000005, 10.102, 13.436, 12.452, 6.0120000000000005, 9.398, 6.872, 5.922, 12.938, 12.042, 10.238, 9.944, 7.026, 12.728, 15.36, 11.238, 13.282, 9.546, 13.026, 3.2600000000000002, 3.954, 10.154, 14.338000000000001, 14.09, 13.318, 8.86, 12.664, 9.8, 9.872, 9.944, 2.672, 9.042, 13.776, 6.72, 12.118, 6.964, 4.864, 7.732, 8.192, 14.116, 11.178, 9.062, 11.526, 9.294, 9.31, 14.874, 18.518, 9.966000000000001, 4.53, 7.448, 8.236, 11.896, 8.024000000000001, 10.732000000000001, 10.704, 10.48, 12.188, 14.752, 6.556, 6.338, 5.064, 11.19, 8.71, 4.494, 5.8020000000000005, 11.78, 5.572, 16.968, 7.1000000000000005, 10.05, 7.216, 10.748000000000001, 5.918, 11.216000000000001, 10.566, 7.884, 8.766, 9.988, 12.694, 3.0700000000000003, 8.076, 12.688, 11.574, 7.588, 13.458, 9.568, 6.746, 9.858, 7.966, 6.11, 9.438, 9.674, 8.696, 7.952, 10.754, 7.390000000000001, 11.292, 13.802, 12.870000000000001, 7.914000000000001, 6.6240000000000006, 7.516, 10.972, 7.894, 8.278, 11.098, 6.428, 10.754, 9.068, 15.696, 9.276, 10.134, 10.574, 10.638, 10.198, 9.798, 4.416, 9.88, 12.302, 10.624, 13.454, 12.736, 12.444, 5.906, 8.8, 8.11, 5.298, 11.572000000000001, 10.374, 11.212, 10.226, 8.756, 15.782, 11.4, 9.72, 11.008000000000001, 7.212, 15.17, 13.6, 8.496, 11.72, 10.22, 10.292, 10.522, 8.948, 9.448, 11.118, 12.56, 6.3500000000000005, 7.95, 9.178, 6.33, 4.876, 12.88, 8.65, 10.262, 9.418000000000001, 2.936, 9.916, 18.896, 9.05, 12.370000000000001, 7.574, 12.632, 4.354, 8.01, 12.338000000000001, 5.5920000000000005, 13.042, 10.290000000000001, 7.3660000000000005, 12.116, 4.892, 10.338000000000001, 13.016, 4.252, 12.678, 6.074, 17.43, 10.654, 6.216, 13.24, 13.802, 12.86, 5.216, 6.966, 8.16, 15.844000000000001, 8.438, 8.21, 10.884, 5.33, 7.7940000000000005, 11.84, 12.012, 9.64, 12.620000000000001, 10.256, 11.264, 6.146, 13.702, 9.608, 
7.8660000000000005, 11.492, 14.0, 11.17, 10.96, 12.186, 7.51, 10.692, 14.134, 9.426, 4.376, 5.0040000000000004, 9.958, 12.926, 10.91, 9.302, 13.674, 11.57, 8.382, 6.5120000000000005, 12.724, 11.072000000000001, 15.09, 10.71, 12.596, 7.2860000000000005, 10.48, 8.038, 11.28, 11.558, 7.878, 10.728, 10.242, 7.12, 6.6080000000000005, 12.19, 11.526, 8.648, 14.732000000000001, 8.648, 11.032, 10.384, 10.898, 13.078, 6.446, 13.414, 15.304, 5.428, 7.466, 10.620000000000001, 8.942, 10.89, 13.698, 7.408, 8.678, 5.5280000000000005, 8.97, 14.396, 8.866, 7.3740000000000006, 13.956, 8.322000000000001, 12.272, 8.954, 16.674, 5.99, 10.412, 10.848, 8.966, 4.678, 11.566, 8.176, 11.33, 12.442, 7.21, 9.35, 13.280000000000001, 8.834, 11.586, 8.186, 6.452, 5.666, 9.112, 4.158, 6.532, 10.626, 9.154, 6.996, 13.078, 13.116, 7.422000000000001, 14.398, 7.0, 8.934000000000001, 8.732, 7.844, 10.822000000000001, 12.76, 7.904, 6.684, 14.164, 9.004, 11.472, 9.774000000000001, 12.122, 9.328, 7.714, 8.65, 11.006, 11.97, 13.1, 8.728, 8.488, 13.948, 13.398, 10.316, 10.09, 8.734, 9.454, 11.508000000000001, 12.076, 17.248, 13.504, 10.43, 18.378, 12.394, 8.436, 10.582, 11.61, 7.618, 12.656, 10.788, 8.366, 11.99, 14.56, 1.6520000000000001, 13.85, 11.638, 11.624, 5.138, 16.754, 10.38, 4.178, 12.364, 11.71, 10.206, 7.58, 2.434, 13.43, 10.736, 10.272, 7.5680000000000005, 10.888, 9.736, 13.61, 7.506, 5.0440000000000005, 11.33, 13.344, 14.348, 8.644, 13.008000000000001, 5.578, 12.786, 10.392, 9.888, 16.838, 12.042, 10.068, 9.392, 10.198, 7.72, 15.478, 7.154, 11.59, 7.714, 10.338000000000001, 14.494, 8.328, 8.104000000000001, 8.964, 13.428, 15.406, 13.404, 8.502, 12.56, 2.988, 5.94, 14.546000000000001, 10.314, 5.836, 16.29, 8.950000000000001, 10.386000000000001, 9.722, 8.942, 13.620000000000001, 13.358, 15.538, 10.950000000000001, 10.166, 10.450000000000001, 9.1, 13.25, 6.066, 10.67, 8.984, 6.162, 9.266, 8.366, 10.372, 10.602, 5.182, 13.082, 15.136000000000001, 7.7620000000000005, 8.496, 15.370000000000001, 
9.89, 3.93, 11.618, 12.244, 11.774000000000001, 11.784, 13.164, 9.854000000000001, 11.046, 10.68, 8.3, 14.516, 5.66, 7.364, 5.376, 13.536, 14.242, 10.186, 5.352, 10.376, 12.530000000000001, 12.398, 9.002, 10.056000000000001, 11.292, 7.8420000000000005, 7.798, 9.922, 10.57, 11.024000000000001, 11.282, 9.966000000000001, 8.686, 9.938, 13.116, 8.632, 10.612, 8.674, 12.384, 7.112, 4.678, 8.596, 7.424, 9.894, 14.978, 12.338000000000001, 9.022, 15.372, 12.418000000000001, 9.02, 7.8500000000000005, 8.286, 14.558, 11.828, 9.702, 6.5, 9.364, 9.724, 5.6000000000000005, 9.186, 12.226, 2.358, 12.656, 10.044, 10.356, 3.222, 13.82, 9.552, 9.22, 10.624, 10.67, 8.758000000000001, 6.726, 15.128, 14.792, 10.654, 10.118, 10.338000000000001, 13.264000000000001, 15.882, 3.668, 16.854, 7.056, 18.830000000000002, 10.952, 10.352, 9.13, 14.304, 13.574, 7.438, 12.258000000000001, 5.438, 9.792, 4.146, 11.698, 9.808, 12.290000000000001, 4.3100000000000005, 9.188, 13.974, 13.856, 10.764, 10.66, 7.876, 8.218, 8.52, 18.276, 8.936, 6.956, 11.616, 5.268, 10.542, 9.348, 7.932, 9.058, 9.768, 9.274000000000001, 10.014, 7.132000000000001, 11.51, 5.132, 8.768, 9.36, 13.328, 8.45, 9.348, 11.976, 12.528, 8.246, 5.934, 6.806, 14.642, 9.956, 10.216000000000001, 5.524, 13.074, 12.586, 9.654, 13.004, 13.616, 11.516, 10.554, 9.016, 7.426, 6.642, 14.116, 11.156, 12.05, 10.312, 13.768, 6.0120000000000005, 8.066, 4.048, 12.58, 6.384, 6.5840000000000005, 12.338000000000001, 3.394, 7.908, 10.98, 9.228, 15.17, 11.648, 11.434000000000001, 11.804, 6.2, 9.842, 12.574, 10.63, 9.788, 9.674, 13.344, 11.026, 10.116, 9.944, 13.05, 15.626, 12.868, 12.23, 4.74, 11.024000000000001, 5.714, 4.0520000000000005, 15.486, 6.898000000000001, 9.568, 9.758000000000001, 12.664, 15.336, 5.112, 8.036, 9.44, 10.062, 13.634, 9.406, 10.014, 11.378, 10.264, 8.978, 11.57, 6.666, 10.968, 12.86, 7.166, 5.026, 10.852, 13.968, 9.588000000000001, 6.3340000000000005, 9.35, 4.894, 9.454, 8.406, 13.444, 9.694, 12.268, 10.022, 8.966, 7.976, 8.568, 
10.82, 2.79, 10.468, 6.816, 2.122, 6.182, 14.474, 7.856, 6.864, 10.478, 15.49, 10.35, 7.532, 7.42, 11.388, 8.612, 13.074, 10.112, 13.450000000000001, 8.374, 8.744, 7.912, 15.118, 11.99, 15.716000000000001, 7.256, 12.048, 7.388, 9.974, 13.218, 10.402000000000001, 13.744, 9.168000000000001, 10.1, 12.448, 12.118, 9.012, 11.006, 4.8420000000000005, 12.536, 11.336, 10.464, 9.566, 14.98, 7.162, 11.158, 14.484, 14.364, 9.548, 13.17, 10.578, 12.21, 8.608, 8.286, 12.772, 5.5840000000000005, 14.526, 12.922, 11.308, 13.036, 9.876, 12.612, 7.98, 5.4, 8.108, 10.138, 8.798, 9.692, 12.098, 6.824, 12.274000000000001, 12.618, 3.706, 7.434, 11.15, 7.398000000000001, 9.574, 6.884, 8.536, 10.418000000000001, 6.214, 12.732000000000001, 5.234, 10.268, 5.958, 10.870000000000001, 10.042, 12.334, 10.624, 14.348, 8.16, 9.612, 6.09, 10.17, 8.892, 10.464, 12.15, 7.68, 7.59, 11.368, 6.678, 8.836, 6.0760000000000005, 11.546, 13.314, 8.518, 9.982000000000001, 9.348, 6.652, 14.06, 14.700000000000001, 7.924, 9.628, 8.388, 9.046, 11.346, 8.654, 4.356, 11.024000000000001, 15.656, 9.044, 15.756, 9.664, 8.048, 8.222, 6.45, 6.446, 11.094, 11.352, 8.72, 12.462, 7.54, 5.84, 12.168000000000001, 9.1, 6.432, 12.790000000000001, 4.306, 10.964, 4.23, 15.122, 12.738, 11.472, 10.200000000000001, 11.364, 12.622, 6.944, 10.542, 10.512, 6.788, 10.81, 8.722, 10.552, 9.686, 10.192, 11.42, 10.818, 15.096, 16.904, 9.156, 5.54, 3.0700000000000003, 12.356, 11.686, 13.444, 7.6160000000000005, 10.17, 5.572, 15.96, 10.14, 9.194, 5.556, 7.064, 7.988, 9.21, 11.474, 12.604000000000001, 5.0280000000000005, 15.146, 11.01, 11.996, 10.478, 5.344, 15.0, 11.048, 11.296, 5.942, 9.622, 6.014, 16.852, 6.196, 9.46, 8.742, 8.31, 11.200000000000001, 11.526, 10.392, 7.0280000000000005, 10.592, 11.022, 13.938, 14.316, 4.484, 9.69, 7.65, 6.51, 9.086, 3.52, 8.074, 10.166, 7.442, 9.33, 11.044, 18.456, 9.598, 13.354000000000001, 16.732, 10.668000000000001, 7.618, 6.962, 10.472, 2.2760000000000002, 10.068, 9.706, 14.126, 10.344, 12.012, 9.83, 
11.404, 11.268, 8.462, 13.176, 9.362, 7.752, 8.738, 8.08, 5.6160000000000005, 7.114, 11.620000000000001, 9.738, 5.994, 8.06, 11.082, 10.572000000000001, 14.244, 13.540000000000001, 13.51, 11.138, 9.458, 14.85, 13.27, 7.23, 12.182, 12.684000000000001, 11.584, 5.564, 6.924, 11.076, 9.78, 7.958, 11.632, 3.892, 6.024, 7.354, 14.062000000000001, 13.43, 13.844, 11.578, 10.944, 14.756, 10.556000000000001, 13.052, 13.866, 7.906000000000001, 4.68, 10.168000000000001, 10.05, 11.908, 8.328, 9.452, 15.598, 11.164, 8.55, 13.98, 14.138, 6.806, 12.832, 13.288, 11.484, 7.658, 8.638, 10.844, 6.104, 17.136, 9.884, 6.372, 12.672, 15.4, 7.836, 14.31, 7.998, 9.042, 8.862, 14.492, 13.39, 10.03, 7.788, 10.652000000000001, 9.21, 6.11, 15.572000000000001, 6.134, 15.05, 13.18, 12.524000000000001, 11.636000000000001, 9.502, 7.640000000000001, 8.346, 17.858, 11.978, 5.21, 10.99, 9.01, 8.686, 8.572000000000001, 3.7840000000000003, 7.742, 10.162, 13.114, 15.146, 11.824, 9.618, 4.354, 10.594, 5.614, 11.708, 12.638, 8.31, 3.368, 9.636000000000001, 9.818, 9.438, 6.862, 7.876, 12.502, 7.164000000000001, 10.334, 13.058, 11.726, 14.49, 13.076, 9.36, 11.304, 10.068, 11.552, 9.064, 9.796, 9.356, 8.686, 12.618, 7.37, 13.092, 7.328, 9.868, 9.92, 14.216000000000001, 7.392, 12.01, 9.858, 14.652000000000001, 7.3020000000000005, 8.09, 5.276, 8.434000000000001, 14.282, 9.646, 18.982, 8.524000000000001, 9.02, 8.836, 9.682, 12.284, 12.782, 9.054, 12.294, 9.56, 7.026, 13.372, 6.074, 6.448, 14.682, 9.856, 7.356, 3.388, 7.212, 10.768, 3.436, 8.062, 10.134, 12.278, 12.536, 7.238, 8.436, 11.554, 8.934000000000001, 10.308, 4.14, 12.128, 8.286, 10.464, 11.632, 9.884, 11.586, 10.794, 12.662, 15.666, 10.602, 7.902, 9.76, 9.89, 8.76, 8.436, 9.668000000000001, 8.438, 15.280000000000001, 12.148, 6.352, 8.194, 8.424, 13.844, 12.856, 8.624, 5.816, 11.852, 8.49, 10.01, 5.94, 8.098, 13.09, 15.384, 16.63, 10.994, 9.814, 12.118, 11.908, 8.996, 7.3500000000000005, 3.116, 7.0680000000000005, 5.124, 11.512, 10.494, 8.352, 12.052, 
12.98, 10.126, 8.776, 8.994, 5.622, 10.878, 10.722, 12.578, 10.978, 13.918000000000001, 17.48, 11.196, 6.444, 10.57, 14.448, 7.906000000000001, 7.268, 15.598, 11.208, 11.838000000000001, 8.21, 10.804, 10.370000000000001, 10.678, 6.902, 7.006, 11.202, 6.104, 10.368, 8.1, 11.348, 10.254, 5.362, 9.424, 7.588, 9.236, 10.258000000000001, 13.776, 10.138, 12.068, 8.506, 9.068, 9.512, 5.344, 11.47, 10.620000000000001, 10.946, 9.836, 8.052, 14.34, 9.106, 9.516, 11.896, 10.258000000000001, 10.226, 10.126, 8.032, 9.818, 12.896, 12.322000000000001, 15.96, 11.472, 10.396, 5.918, 14.808, 9.842, 9.188, 14.620000000000001, 13.634, 3.984, 11.772, 12.904, 7.156000000000001, 10.762, 9.744, 7.904, 9.548, 8.488, 11.13, 10.544, 6.978, 5.32, 8.564, 12.418000000000001, 4.602, 10.58, 6.748, 13.444, 13.096, 7.654, 12.33, 11.024000000000001, 12.504, 9.478, 12.996, 11.142, 7.7860000000000005, 7.496, 13.022, 14.032, 12.950000000000001, 15.526, 7.7780000000000005, 12.528, 13.982000000000001, 11.482000000000001, 10.424, 9.69, 15.546000000000001, 9.89, 7.1000000000000005, 15.76, 14.062000000000001, 10.128, 12.48, 6.622, 7.482, 4.8500000000000005, 12.304, 8.714, 10.968, 11.076, 12.370000000000001, 12.814, 9.212, 10.916, 6.4, 5.484, 12.994, 6.04, 12.648, 7.848, 10.124, 13.93, 7.154, 4.864, 12.218, 7.66, 10.182, 6.24, 6.8100000000000005, 10.022, 8.382, 11.332, 5.984, 9.892, 8.314, 10.622, 5.62, 9.59, 11.754, 13.816, 8.756, 9.964, 13.98, 10.68, 13.176, 7.752, 8.84, 13.744, 13.216000000000001, 6.564, 7.43, 8.102, 7.216, 9.844, 8.264, 6.226, 10.344, 8.318, 11.704, 5.442, 8.966, 9.772, 13.208, 2.336, 8.106, 9.294, 11.454, 4.92, 11.432, 11.738, 9.928, 11.458, 6.648000000000001, 17.356, 9.626, 7.148000000000001, 5.99, 16.428, 6.734, 13.144, 8.07, 9.1, 10.646, 6.648000000000001, 11.226, 11.528, 11.276, 12.772, 5.0520000000000005, 7.088, 4.782, 12.024000000000001, 6.7620000000000005, 8.478, 15.762, 9.052, 7.564, 9.894, 8.73, 9.568, 16.078, 11.956, 11.216000000000001, 7.804, 12.192, 9.706, 
7.3260000000000005, 11.238, 9.91, 9.872, 13.696, 7.518, 9.284, 7.748, 12.27, 9.326, 14.3, 14.378, 11.244, 9.952, 9.502, 11.638, 8.624, 8.904, 8.924, 12.362, 10.576, 11.352, 12.694, 12.902000000000001, 6.382000000000001, 9.392, 12.808, 10.988, 9.452, 9.4, 14.13, 14.908, 3.866, 9.234, 11.124, 8.122, 9.428, 5.518, 9.32, 3.462, 6.238, 8.218, 12.136000000000001, 10.884, 3.948, 6.332, 8.24, 8.46, 7.566, 14.038, 11.450000000000001, 11.084, 6.714, 15.286, 12.044, 7.126, 8.662, 5.212, 8.044, 13.816, 10.056000000000001, 7.876, 13.952, 8.376, 8.652000000000001, 7.392, 13.290000000000001, 11.658, 10.908, 10.612, 6.518, 7.0200000000000005, 6.652, 6.298, 12.616, 11.356, 4.838, 10.47, 10.022, 3.636, 13.344, 8.024000000000001, 8.76, 9.848, 10.658, 10.784, 13.01, 9.052, 12.798, 13.3, 14.416, 11.78, 9.562, 10.4, 11.168000000000001, 11.956, 8.49, 10.876, 11.486, 8.18, 9.792, 9.042, 9.370000000000001, 11.188, 14.018, 19.87, 10.562, 8.346, 7.002, 8.202, 10.304, 6.952, 10.722, 8.278, 13.374, 11.092, 15.394, 14.226, 10.882, 13.524000000000001, 15.49, 14.368, 6.682, 12.852, 13.18, 11.824, 7.1000000000000005, 4.71, 15.688, 11.85, 9.356, 10.032, 9.73, 12.168000000000001, 11.158, 12.574, 14.094, 13.864, 13.18, 3.184, 6.916, 11.932, 10.262, 8.68, 7.246, 12.134, 12.084, 10.75, 5.6160000000000005, 3.198, 7.55, 13.554, 3.334, 14.31, 7.2780000000000005, 5.62, 9.126, 11.708, 15.082, 6.072, 9.842, 14.18, 6.1000000000000005, 12.996, 7.532, 12.158, 13.752, 10.632, 12.362, 8.654, 6.284, 16.528, 9.638, 6.7, 11.404, 18.336000000000002, 5.764, 9.88, 8.11, 6.662, 11.82, 13.008000000000001, 2.056, 12.32, 9.112, 11.372, 8.208, 14.008000000000001, 6.426, 9.73, 11.120000000000001, 9.284, 10.562, 5.978, 6.556, 10.346, 10.938, 11.426, 9.93, 11.012, 9.176, 7.5280000000000005, 10.086, 16.558, 14.256, 10.348, 9.116, 7.848, 12.612, 12.368, 12.216000000000001, 3.858, 18.608, 9.592, 10.57, 14.174, 9.512, 10.22, 12.040000000000001, 5.066, 9.156, 11.106, 15.378, 9.236, 13.362, 12.074, 9.83, 9.722, 7.992, 13.304, 
10.746, 12.242, 9.552, 7.234, 10.532, 13.608, 8.818, 11.054, 7.352, 4.714, 10.82, 14.408, 12.592, 11.062, 12.484, 11.486, 13.376, 9.316, 10.608, 11.646, 11.074, 7.972, 10.714, 7.0840000000000005, 9.132, 7.924, 8.664, 14.56, 7.516, 9.004, 14.458, 16.376, 9.728, 11.484, 9.3, 8.554, 13.012, 7.336, 9.544, 7.428, 15.568, 10.616, 6.63, 8.206, 10.724, 5.692, 9.836, 11.63, 10.96, 6.92, 10.024000000000001, 12.308, 15.424, 15.076, 11.0, 11.218, 10.994, 8.504, 11.068, 5.776, 10.384, 7.188, 12.142, 11.166, 10.19, 11.562, 15.862, 11.794, 11.374, 6.922, 16.918, 11.102, 11.564, 12.5, 4.744, 7.72, 12.556000000000001, 10.136000000000001, 8.382, 15.158, 11.128, 10.922, 6.8, 10.066, 10.0, 7.192, 13.472, 13.19, 11.1, 12.542, 13.014000000000001, 9.44, 6.276, 10.620000000000001, 11.156, 7.542, 3.332, 11.298, 15.092, 8.78, 11.702, 15.106, 7.36, 7.246, 11.018, 9.882, 13.506, 8.134, 5.41, 10.03, 8.69, 3.86, 9.058, 19.698, 6.37, 13.388, 16.976, 13.196, 8.786, 10.886000000000001, 9.802, 11.548, 7.894, 4.804, 8.454, 9.414, 13.278, 10.082, 6.514, 2.658, 13.55, 9.072000000000001, 10.6, 16.042, 12.81, 9.756, 7.94, 12.048, 7.926, 9.77, 5.9, 10.61, 9.774000000000001, 13.032, 9.076, 6.244, 4.566, 6.312, 10.586, 6.16, 12.196, 6.876, 12.682, 6.798, 13.944, 15.036, 11.258000000000001, 14.362, 5.3, 10.056000000000001, 12.816, 10.23, 12.386000000000001, 10.856, 8.326, 10.81, 6.474, 8.148, 11.354000000000001, 11.968, 10.148, 12.538, 11.48, 8.158, 11.178, 5.82, 9.888, 10.846, 9.612, 12.01, 7.962, 9.21, 6.522, 9.6, 11.364, 12.736, 11.652000000000001, 13.794, 12.062, 10.646, 7.71, 5.484, 9.062, 13.536, 13.894, 7.182, 11.01, 10.216000000000001, 11.634, 12.584, 8.63, 7.152, 9.772, 15.936, 14.066, 10.28, 8.984, 9.588000000000001, 10.162, 12.038, 8.898, 8.798, 9.128, 14.044, 6.5200000000000005, 12.722, 8.106, 14.268, 5.6080000000000005, 10.812, 10.156, 13.01, 8.594, 11.292, 13.786, 12.732000000000001, 11.864, 5.192, 14.696, 13.586, 9.452, 13.708, 5.868, 10.824, 13.48, 15.99, 10.296, 9.786, 8.724, 8.294, 10.452, 
10.374, 9.048, 7.384, 10.734, 6.862, 9.768, 5.146, 5.614, 11.302, 7.492, 14.134, 12.256, 5.148, 3.4, 9.8, 14.334, 14.064, 3.5660000000000003, 12.172, 6.29, 3.462, 9.24, 9.034, 13.634, 6.316, 4.438, 6.972, 7.554, 13.238, 10.792, 12.21, 3.426, 10.002, 10.376, 8.904, 11.468, 6.042, 5.422, 10.362, 10.322000000000001, 5.174, 13.544, 8.054, 14.68, 15.942, 9.208, 10.184000000000001, 13.608, 10.194, 15.164, 11.026, 7.6080000000000005, 10.596, 6.122, 9.458, 14.464, 12.782, 8.3, 9.578, 11.9, 6.284, 7.948, 19.82, 11.656, 7.906000000000001, 9.19, 11.716000000000001, 9.632, 6.032, 5.0, 7.0200000000000005, 9.936, 14.700000000000001, 5.718, 8.620000000000001, 12.262, 11.048, 8.888, 11.05, 11.426, 10.56, 9.806000000000001, 9.27, 9.968, 13.218, 14.21, 11.43, 11.52, 15.682, 6.882000000000001, 7.594, 13.126, 11.858, 11.540000000000001, 13.17, 12.238, 13.702, 16.684, 8.936, 10.898, 8.768, 12.956, 10.792, 10.146, 12.31, 14.534, 7.2780000000000005, 15.012, 13.67, 7.612, 10.212, 8.07, 9.292, 11.728, 13.948, 10.144, 8.19, 13.824, 9.082, 8.832, 9.376, 5.01, 11.588000000000001, 12.556000000000001, 7.892, 6.68, 6.0280000000000005, 10.146, 6.338, 6.894, 11.200000000000001, 7.8500000000000005, 8.05, 14.392, 9.764, 11.058, 9.966000000000001, 14.716000000000001, 10.882, 9.198, 7.05, 7.282, 16.352, 10.902000000000001, 12.552, 10.032, 11.48, 8.572000000000001, 9.426, 11.456, 10.676, 10.206, 12.438, 9.812, 12.238, 12.536, 6.348, 9.848, 15.4, 8.288, 11.024000000000001, 15.83, 13.632, 7.65, 9.178, 6.042, 10.61, 7.416, 12.776, 6.188, 11.946, 17.318, 6.41, 10.026, 10.25, 8.148, 5.844, 8.414, 11.902000000000001, 7.6160000000000005, 15.094000000000001, 12.106, 13.394, 5.8740000000000006, 12.39, 11.934000000000001, 9.224, 7.3100000000000005, 12.798, 9.21, 10.994, 7.1080000000000005, 6.972, 9.61, 14.544, 9.354000000000001, 7.494, 11.128, 10.436, 7.654, 16.934, 8.804, 4.236, 8.718, 7.88, 10.762, 11.304, 8.112, 13.046000000000001, 14.108, 14.086, 9.824, 7.312, 10.092, 13.014000000000001, 11.796, 
14.796000000000001, 11.388, 11.962, 14.278, 3.728, 15.986, 13.772, 9.994, 9.946, 12.858, 11.806000000000001, 13.108, 14.114, 8.614, 10.054, 9.118, 14.508000000000001, 6.3500000000000005, 9.702, 8.708, 9.214, 10.574, 14.290000000000001, 10.224, 13.546000000000001, 16.564, 6.416, 7.3180000000000005, 5.522, 9.478, 14.804, 5.692, 16.312, 2.196, 8.186, 16.25, 12.23, 7.378, 11.200000000000001, 11.918000000000001, 8.802, 12.038, 11.68, 11.656, 6.774, 9.138, 10.306000000000001, 9.822000000000001, 9.556000000000001, 9.08, 3.118, 14.518, 6.694, 9.628, 13.838000000000001, 7.692, 11.358, 8.096, 12.618, 14.524000000000001, 15.106, 8.558, 11.232000000000001, 6.914, 5.756, 12.724, 10.136000000000001, 8.742, 8.098, 11.112, 8.42, 9.268, 10.726, 8.686, 10.422, 8.586, 13.862, 14.642, 9.036, 7.59, 10.808, 6.902, 10.604000000000001, 14.434000000000001, 7.574, 14.096, 7.482, 5.992, 12.996, 8.946, 8.004, 7.362, 13.242, 13.346, 6.5760000000000005, 12.47, 7.466, 10.582, 8.69, 7.684, 17.28, 12.52, 9.33, 8.654, 9.792, 9.078, 9.73, 6.5280000000000005, 7.274, 6.088, 5.578, 13.756, 10.368, 12.874, 12.16, 12.754, 13.236, 4.054, 12.278, 5.054, 4.5200000000000005, 12.724, 7.574, 12.382, 5.704, 14.61, 4.224, 10.612, 10.758000000000001, 6.744, 9.822000000000001, 7.276, 6.376, 11.73, 7.71, 10.724, 10.524000000000001, 11.856, 11.392, 5.2, 13.976, 9.518, 8.344, 8.182, 11.196, 11.812, 7.514, 4.738, 9.99, 12.032, 13.17, 10.754, 13.288, 5.928, 11.46, 8.472, 10.426, 12.34, 11.306000000000001, 10.106, 6.664, 13.606, 10.336, 11.374, 8.76, 10.83, 14.126, 5.642, 11.376, 8.554, 12.298, 8.598, 16.952, 15.132, 16.058, 10.988, 13.722, 10.466000000000001, 8.832, 8.18, 7.554, 11.916, 11.836, 4.742, 8.622, 8.808, 8.222, 7.718, 9.696, 11.644, 10.976, 15.614, 7.574, 12.622, 9.08, 8.488, 13.14, 10.086, 10.268, 8.728, 13.224, 7.644, 8.424, 5.764, 6.828, 12.012, 12.664, 9.614, 6.928, 15.266, 11.118, 9.896, 12.034, 9.606, 14.25, 7.886, 11.548, 11.86, 9.596, 14.68, 12.596, 8.662, 14.51, 14.962, 19.794, 8.51, 9.856, 12.474, 
7.8340000000000005, 10.47, 9.5, 14.450000000000001, 13.166, 10.156, 11.686, 10.77, 6.538, 6.026, 9.118, 7.742, 9.076, 14.338000000000001, 12.376, 8.348, 11.48, 6.5200000000000005, 9.716000000000001, 11.266, 9.89, 11.238, 7.656000000000001, 11.394, 11.27, 7.3660000000000005, 8.254, 7.736, 8.162, 9.82, 12.542, 7.46, 7.87, 9.912, 10.024000000000001, 7.996, 7.518, 12.532, 10.96, 10.256, 9.986, 7.8340000000000005, 9.874, 8.638, 6.486, 10.07, 9.264, 11.124, 9.146, 13.790000000000001, 6.386, 12.056000000000001, 8.964, 9.678, 10.766, 8.856, 11.48, 4.104, 5.656, 1.3840000000000001, 2.63, 18.288, 9.468, 9.462, 7.0920000000000005, 14.02, 11.72, 8.342, 7.5680000000000005, 10.01, 7.8, 12.956, 12.094, 8.656, 14.414, 7.588, 11.288, 13.652000000000001, 9.448, 6.908, 9.038, 16.616, 10.46, 2.954, 7.36, 10.968, 8.376, 13.846, 9.0, 9.828, 7.516, 11.354000000000001, 8.166, 12.496, 13.91, 16.974, 9.006, 8.324, 12.238, 14.144, 13.17, 6.5440000000000005, 7.798, 5.414, 11.72, 8.834, 10.496, 7.8, 17.162, 11.406, 8.462, 12.854000000000001, 11.012, 9.15, 12.870000000000001, 5.94, 7.258, 6.01, 12.548, 10.482000000000001, 15.31, 14.18, 16.89, 11.5, 15.072000000000001, 8.062, 13.06, 6.0360000000000005, 1.412, 8.02, 8.174, 13.282, 7.8340000000000005, 10.554, 11.802, 13.77, 14.138, 11.888, 14.776, 13.624, 7.558, 8.846, 14.02, 12.536, 7.176, 8.766, 7.908, 8.394, 13.702, 16.018, 11.786, 13.278, 10.798, 6.142, 14.778, 9.384, 7.514, 11.518, 9.824, 15.276, 14.986, 4.654, 5.4, 9.472, 12.624, 8.908, 7.5440000000000005, 15.806000000000001, 7.702, 9.698, 7.002, 9.386000000000001, 10.262, 8.71, 6.5200000000000005, 13.626, 14.636000000000001, 14.412, 7.854, 11.77, 14.372, 14.104000000000001, 10.692, 8.264, 12.744, 9.304, 11.034, 13.86, 8.206, 8.496, 10.058, 15.308, 11.376, 9.618, 5.564, 10.868, 14.154, 9.700000000000001, 11.07, 10.796, 11.834, 13.018, 2.09, 5.712, 5.97, 9.876, 9.118, 14.23, 14.858, 13.906, 7.208, 13.154, 10.224, 11.13, 12.856, 5.172, 8.964, 9.84, 7.252, 7.244, 13.358, 10.724, 11.628, 10.83, 
11.598, 8.99, 12.488, 9.824, 8.744, 12.476, 8.14, 10.662, 7.390000000000001, 8.714, 9.002, 10.620000000000001, 8.32, 8.73, 9.67, 17.632, 12.232000000000001, 13.494, 6.8100000000000005, 12.254, 10.948, 10.238, 11.284, 6.964, 13.31, 7.1080000000000005, 12.574, 12.844, 9.178, 6.526, 8.934000000000001, 9.862, 10.614, 13.752, 7.642, 12.028, 7.822, 13.258000000000001, 10.73, 12.24, 5.824, 8.494, 8.89, 7.208, 8.134, 9.764, 9.428, 10.106, 8.418000000000001, 14.128, 10.316, 2.07, 11.054, 12.984, 11.892, 8.136000000000001, 7.412, 11.628, 11.27, 7.708, 5.634, 8.662, 12.258000000000001, 14.032, 13.858, 6.51, 14.866, 8.532, 14.016, 10.51, 4.046, 5.058, 12.016, 12.542, 9.666, 6.352, 14.02, 7.148000000000001, 7.348, 11.572000000000001, 9.728, 5.742, 5.39, 13.808, 4.518, 5.8, 10.392, 6.936, 15.014000000000001, 5.954, 6.868, 11.198, 6.796, 7.632000000000001, 11.866, 6.514, 12.288, 9.884, 9.354000000000001, 15.386000000000001, 7.972, 6.902, 13.91, 7.346, 11.838000000000001, 12.462, 6.312, 12.42, 11.646, 5.814, 15.75, 12.486, 7.306, 11.512, 8.838000000000001, 11.792, 10.528, 10.134, 5.7780000000000005, 4.428, 10.378, 13.502, 11.422, 8.69, 8.802, 8.068, 9.132, 11.200000000000001, 9.864, 10.878, 11.368, 7.992, 6.258, 10.702, 16.896, 4.846, 7.936, 11.144, 4.204, 14.472, 11.200000000000001, 10.808, 11.102, 5.514, 8.822000000000001, 2.376, 11.44, 10.662, 1.29, 8.584, 12.11, 6.6160000000000005, 9.382, 11.81, 4.042, 4.232, 6.964, 9.746, 6.8340000000000005, 14.502, 11.044, 14.118, 9.652000000000001, 11.954, 12.268, 6.390000000000001, 12.008000000000001, 11.19, 13.18, 11.018, 4.678, 7.8500000000000005, 7.388, 12.894, 9.354000000000001, 10.954, 9.986, 10.366, 12.124, 7.8, 16.052, 11.092, 10.406, 12.07, 6.3740000000000006, 5.706, 11.964, 5.862, 8.66, 16.22, 13.488, 7.42, 8.13, 15.88, 4.04, 12.226, 9.468, 10.072000000000001, 12.904, 7.656000000000001, 10.5, 10.514, 12.034, 9.356, 11.526, 9.556000000000001, 14.356, 12.474, 12.314, 8.682, 10.956, 11.950000000000001, 11.71, 12.08, 13.58, 4.46, 
12.748000000000001, 8.272, 8.654, 11.562, 5.862, 14.454, 13.246, 7.712, 10.578, 10.104000000000001, 12.892, 10.834, 6.0200000000000005, 12.112, 7.808, 4.722, 4.8260000000000005, 10.112, 8.112, 10.784, 10.186, 12.108, 12.88, 7.8180000000000005, 8.558, 9.118, 10.966000000000001, 11.262, 9.348, 7.862, 12.372, 6.5280000000000005, 4.406, 12.292, 10.306000000000001, 13.614, 8.290000000000001, 11.376, 7.416, 12.792, 6.678, 11.99, 8.324, 3.0620000000000003, 3.166, 6.2700000000000005, 9.182, 8.268, 10.268, 9.014, 8.724, 9.698, 4.0280000000000005, 4.248, 8.902000000000001, 9.11, 10.762, 10.118, 6.0840000000000005, 15.542, 6.894, 8.568, 7.332, 5.402, 4.0840000000000005, 8.128, 14.144, 16.45, 5.968, 13.11, 11.156, 7.132000000000001, 11.744, 10.398, 11.02, 14.732000000000001, 7.61, 13.916, 9.432, 11.702, 9.41, 9.82, 8.498, 9.378, 15.59, 5.932, 10.898, 13.552, 9.038, 9.99, 4.5680000000000005, 15.312000000000001, 11.466000000000001, 4.8020000000000005, 7.048, 9.386000000000001, 7.352, 14.14, 13.044, 5.422, 11.784, 9.402000000000001, 11.352, 14.284, 8.616, 6.996, 7.112, 10.74, 5.516, 11.744, 8.82, 12.342, 12.688, 14.63, 10.502, 9.3, 10.700000000000001, 11.758000000000001, 10.734, 9.422, 8.422, 11.316, 7.034, 8.244, 6.666, 7.9, 11.844, 9.02, 6.032, 11.726, 5.99, 6.974, 13.736, 7.5280000000000005, 10.052, 13.974, 10.858, 12.582, 9.176, 12.308, 12.68, 8.646, 11.816, 10.074, 10.950000000000001, 14.34, 9.424, 11.834, 15.414, 14.592, 8.11, 8.744, 10.618, 8.856, 14.49, 10.104000000000001, 1.948, 6.0440000000000005, 6.184, 11.284, 14.43, 12.26, 11.752, 7.184, 17.014, 6.248, 10.158, 6.37, 12.322000000000001, 15.782, 8.63, 9.040000000000001, 11.442, 9.642, 14.206, 12.162, 11.578, 11.164, 11.358, 9.924, 13.068, 11.666, 8.746, 16.056, 8.71, 7.542, 8.786, 6.08, 7.074, 9.164, 11.594, 13.658, 4.846, 9.504, 7.352, 4.308, 9.986, 9.778, 10.412, 9.35, 6.292, 9.044, 13.418000000000001, 10.358, 7.232, 13.854000000000001, 8.26, 9.308, 6.46, 15.948, 12.194, 8.242, 11.074, 13.818, 9.932, 12.006, 10.686, 
13.356, 9.106, 10.164, 13.562000000000001, 8.870000000000001, 13.25, 8.334, 8.476, 9.738, 12.872, 7.936, 16.842, 9.642, 1.462, 12.11, 7.598, 4.738, 7.936, 6.656000000000001, 8.696, 10.964, 13.648, 5.5520000000000005, 7.926, 9.776, 8.362, 7.856, 8.324, 5.436, 11.1, 4.3100000000000005, 10.46, 9.598, 8.192, 8.22, 16.254, 12.102, 8.498, 6.244, 9.186, 10.236, 3.532, 10.576, 7.324, 11.832, 12.786, 11.008000000000001, 14.224, 9.274000000000001, 9.078, 5.034, 9.138, 7.3500000000000005, 8.922, 7.152, 8.008000000000001, 8.396, 8.286, 4.782, 11.758000000000001, 10.022, 8.38, 7.432, 7.686, 13.46, 5.632, 11.352, 11.136000000000001, 2.578, 9.46, 6.668, 13.224, 8.862, 16.602, 12.656, 11.51, 5.994, 5.344, 8.97, 9.876, 5.954, 7.736, 10.290000000000001, 10.922, 7.8020000000000005, 7.132000000000001, 8.96, 7.694, 3.246, 12.464, 9.608, 2.266, 10.822000000000001, 12.788, 9.746, 11.182, 6.872, 11.594, 10.05, 13.744, 9.71, 9.24, 8.832, 7.386, 11.386000000000001, 12.25, 5.222, 6.86, 9.682, 13.176, 9.228, 16.104, 6.288, 14.232000000000001, 13.772, 8.694, 8.2, 9.804, 8.256, 10.700000000000001, 11.418000000000001, 12.762, 14.714, 5.44, 10.84, 11.688, 12.496, 10.382, 8.546, 10.518, 12.984, 13.266, 10.546, 11.158, 10.116, 7.582, 12.018, 11.924, 12.49, 15.708, 10.07, 7.12, 14.144, 14.75, 7.408, 10.31, 9.186, 8.906, 7.474, 12.844, 11.39, 5.372, 10.056000000000001, 8.714, 7.832, 8.462, 7.964, 14.154, 10.44, 10.48, 7.614, 14.096, 7.672000000000001, 9.318, 6.948, 17.978, 18.092, 14.036, 0.784, 5.214, 8.978, 9.488, 15.540000000000001, 10.22, 9.67, 11.57, 5.72, 10.044, 12.974, 6.332, 10.044, 12.646, 6.356, 10.49, 8.762, 9.878, 12.206, 3.624, 3.774, 9.144, 12.904, 8.07, 8.27, 8.792, 15.394, 7.476, 9.822000000000001, 14.762, 8.342, 7.37, 15.346, 12.662, 12.718, 8.874, 8.016, 13.126, 11.548, 10.262, 9.69, 7.84, 16.046, 16.61, 10.796, 8.72, 12.51, 8.354000000000001, 14.27, 12.592, 2.862, 14.188, 10.496, 13.3, 7.596, 9.244, 12.142, 12.794, 13.582, 6.554, 13.51, 7.8580000000000005, 8.858, 
9.636000000000001, 8.268, 6.996, 7.446, 9.608, 8.166, 14.936, 7.824, 15.736, 13.284, 6.324, 12.452, 9.888, 12.368, 10.05, 11.256, 11.19, 11.912, 11.902000000000001, 9.612, 6.298, 15.328000000000001, 12.5, 6.726, 8.198, 7.394, 6.314, 6.2, 9.098, 7.936, 14.06, 6.24, 9.984, 7.016, 7.9, 12.41, 13.208, 12.244, 10.794, 13.85, 13.914, 6.474, 9.516, 14.568, 9.61, 10.092, 6.95, 8.282, 8.994, 13.528, 9.584, 11.484, 5.128, 8.448, 9.436, 16.176000000000002, 5.5600000000000005, 8.782, 5.502, 7.658, 9.632, 10.362, 8.092, 10.676, 8.368, 13.172, 11.228, 7.284, 10.834, 8.124, 8.63, 12.234, 12.852, 5.796, 9.392, 7.388, 14.31, 8.586, 10.298, 17.604, 16.794, 12.502, 12.818, 10.076, 11.058, 11.532, 10.968, 9.686, 12.424, 13.386000000000001, 10.618, 13.56, 6.936, 9.044, 10.66, 10.222, 9.738, 9.862, 12.546, 8.278, 7.542, 11.874, 7.368, 11.134, 12.192, 4.83, 8.93, 5.14, 13.666, 12.646, 12.612, 13.734, 11.622, 10.116, 13.514000000000001, 11.056000000000001, 9.808, 10.426, 8.252, 11.658, 14.382, 5.966, 12.194, 8.562, 3.8320000000000003, 8.534, 1.006, 7.784, 7.054, 7.5200000000000005, 11.022, 11.124, 5.998, 9.778, 10.368, 12.01, 9.892, 13.1, 8.584, 9.976, 9.662, 10.56, 6.634, 11.776, 12.628, 9.200000000000001, 14.598, 13.870000000000001, 4.894, 4.772, 7.886, 10.306000000000001, 12.934000000000001, 9.282, 17.114, 6.8500000000000005, 12.06, 8.512, 8.276, 6.236, 8.8, 5.024, 7.882000000000001, 5.7, 8.158, 4.478, 10.584, 12.622, 5.352, 8.894, 15.148, 12.008000000000001, 9.99, 8.726, 11.040000000000001, 8.538, 7.572, 7.354, 12.588000000000001, 9.812, 10.58, 10.506, 9.200000000000001, 13.156, 4.678, 10.672, 12.128, 11.728, 7.848, 6.742, 9.336, 9.352, 8.224, 6.628, 9.706, 13.1, 9.442, 16.052, 7.392, 7.256, 9.448, 12.188, 12.486, 12.872, 11.26, 2.968, 12.422, 8.222, 12.988, 10.356, 12.71, 13.048, 13.05, 13.120000000000001, 9.22, 12.586, 7.324, 13.122, 7.92, 13.422, 11.122, 15.23, 6.976, 9.892, 7.988, 8.984, 8.828, 10.368, 7.006, 13.764000000000001, 14.664, 11.478, 14.002, 12.386000000000001, 11.94, 
6.2780000000000005, 10.044, 10.138, 6.142, 7.23, 9.016, 11.446, 13.994, 7.526, 7.844, 10.414, 7.622, 6.6160000000000005, 5.894, 10.64, 12.008000000000001, 12.646, 12.192, 11.09, 6.726, 9.198, 12.09, 11.85, 12.304, 10.61, 12.44, 15.030000000000001, 10.464, 14.542, 6.338, 9.518, 8.644, 12.934000000000001, 4.186, 11.468, 9.886000000000001, 16.194, 6.8420000000000005, 12.27, 10.132, 4.902, 6.086, 5.91, 12.586, 11.224, 7.182, 14.096, 11.412, 11.178, 9.398, 7.538, 9.556000000000001, 5.3180000000000005, 12.838000000000001, 10.774000000000001, 8.016, 13.69, 10.016, 15.966000000000001, 12.870000000000001, 7.016, 8.996, 8.782, 6.106, 10.984, 5.7940000000000005, 10.1, 7.976, 8.57, 13.716000000000001, 11.97, 9.446, 9.594, 6.554, 10.186, 9.304, 7.774, 11.406, 11.058, 8.846, 9.91, 6.816, 12.936, 8.620000000000001, 8.374, 7.956, 6.434, 10.494, 11.962, 9.126, 9.838000000000001, 13.244, 11.252, 7.71, 7.184, 8.42, 14.68, 12.982000000000001, 8.442, 7.390000000000001, 10.984, 3.536, 4.28, 9.03, 12.884, 11.348, 11.532, 6.236, 9.32, 6.776, 2.668, 12.726, 6.284, 4.178, 15.112, 6.138, 9.154, 14.23, 11.912, 11.86, 12.916, 11.676, 5.126, 8.804, 13.442, 9.126, 7.1000000000000005, 6.244, 12.97, 8.664, 9.404, 13.344, 9.592, 7.7940000000000005, 11.938, 12.674, 6.312, 8.07, 9.870000000000001, 13.974, 7.5200000000000005, 12.556000000000001, 7.176, 14.332, 7.196, 13.084, 11.948, 8.922, 8.016, 12.248000000000001, 8.638, 11.1, 8.702, 9.664, 8.988, 12.586, 9.484, 10.876, 14.32, 15.916, 8.346, 9.004, 2.072, 9.006, 12.202, 7.354, 6.532, 12.162, 8.612, 9.76, 17.118000000000002, 15.756, 8.436, 10.536, 14.062000000000001, 6.572, 13.318, 14.496, 11.596, 8.442, 10.046, 11.34, 11.972, 16.14, 9.296, 5.414, 8.69, 12.692, 11.254, 6.838, 9.96, 12.762, 11.858, 10.81, 4.722, 12.356, 4.664, 7.426, 15.984, 13.888, 8.232, 12.542, 4.706, 13.35, 9.398, 9.244, 4.486, 9.538, 11.542, 12.286, 11.73, 6.224, 7.242, 8.768, 10.558, 7.916, 12.776, 6.38, 11.154, 7.97, 9.286, 12.228, 11.338000000000001, 11.648, 10.468, 10.074, 
11.264, 9.648, 8.05, 7.5360000000000005, 12.768, 7.708, 8.506, 8.056000000000001, 10.86, 10.066, 9.232, 9.93, 10.034, 10.288, 14.826, 5.8, 13.972, 11.89, 10.476, 12.034, 11.238, 13.712, 10.446, 7.97, 11.956, 8.322000000000001, 12.638, 14.326, 13.544, 16.608, 16.676000000000002, 11.432, 9.998, 8.01, 12.11, 8.986, 13.744, 4.886, 6.234, 9.388, 7.006, 9.352, 14.594, 9.196, 10.172, 11.966000000000001, 7.704, 12.620000000000001, 5.08, 2.996, 8.582, 7.55, 7.728, 13.604000000000001, 11.006, 12.786, 5.34, 4.732, 9.088000000000001, 10.06, 6.308, 9.22, 12.358, 6.988, 14.488, 15.262, 14.07, 11.36, 9.21, 11.886000000000001, 9.416, 10.978, 13.582, 9.862, 11.5, 10.294, 11.346, 13.192, 12.968, 7.43, 15.056000000000001, 12.36, 11.342, 13.834, 6.93, 5.5680000000000005, 12.958, 7.890000000000001, 8.954, 7.92, 12.726, 8.262, 6.646, 10.620000000000001, 14.484, 8.528, 12.13, 9.026, 12.318, 10.36, 7.198, 8.676, 8.136000000000001, 6.496, 12.332, 14.280000000000001, 8.688, 10.606, 9.768, 10.788, 3.452, 9.918000000000001, 10.152000000000001, 9.732, 10.81, 7.7860000000000005, 5.514, 11.992, 9.546, 6.11, 16.532, 13.712, 14.42, 11.202, 6.38, 10.912, 12.048, 15.006, 12.362, 6.296, 11.27, 7.244, 9.506, 0.374, 15.878, 8.092, 7.5200000000000005, 8.502, 12.576, 12.146, 10.766, 10.502, 10.098, 8.194, 6.972, 8.588000000000001, 8.808, 13.738, 7.88, 10.772, 9.874, 16.832, 9.434000000000001, 10.71, 8.718, 12.446, 9.162, 9.868, 13.844, 8.48, 10.396, 7.7780000000000005, 12.01, 5.986, 8.638, 13.89, 6.916, 11.134, 8.612, 9.952, 8.044, 11.292, 11.91, 10.008000000000001, 10.914, 16.22, 11.572000000000001, 12.374, 10.766, 12.918000000000001, 11.904, 6.2860000000000005, 7.78, 11.49, 10.756, 7.986, 7.216, 9.258000000000001, 7.796, 10.422, 7.432, 4.712, 14.222, 7.016, 8.77, 6.942, 10.744, 11.144, 5.396, 11.89, 5.99, 8.942, 8.85, 7.258, 12.268, 10.252, 11.176, 7.390000000000001, 10.31, 11.124, 7.228, 13.686, 10.196, 14.438, 6.8420000000000005, 4.946, 12.226, 8.02, 11.126, 7.708, 12.426, 6.2, 10.812, 2.474, 9.858, 
11.55, 7.0280000000000005, 9.988, 5.3740000000000006, 3.954, 7.422000000000001, 8.25, 8.736, 10.678, 10.82, 13.836, 7.122, 14.652000000000001, 12.902000000000001, 14.126, 8.562, 9.568, 9.618, 8.17, 9.540000000000001, 11.364, 9.618, 7.096, 7.482, 11.096, 12.344, 9.028, 15.932, 15.834, 7.0120000000000005, 5.8180000000000005, 12.078, 9.934000000000001, 12.916, 7.518, 8.790000000000001, 12.424, 7.094, 9.046, 8.89, 8.794, 13.216000000000001, 8.502, 6.782, 11.1, 10.454, 6.736, 13.43, 10.972, 8.596, 5.632, 10.552, 0.87, 13.062, 8.13, 5.76, 10.848, 9.018, 14.22, 14.038, 9.434000000000001, 9.38, 7.288, 12.886000000000001, 10.118, 9.492, 9.488, 12.068, 19.394000000000002, 9.202, 12.898, 12.49, 6.916, 11.234, 5.5520000000000005, 12.956, 10.352, 4.662, 10.784, 9.222, 12.276, 9.92, 6.958, 10.932, 10.182, 5.522, 12.15, 7.3, 12.296, 4.21, 10.842, 10.024000000000001, 8.278, 14.26, 10.038, 12.988, 11.174, 14.65, 8.978, 6.932, 8.85, 14.676, 7.266, 15.860000000000001, 13.748000000000001, 6.898000000000001, 10.958, 12.966000000000001, 7.29, 12.198, 14.258000000000001, 6.6000000000000005, 8.436, 10.16, 9.858, 9.168000000000001, 10.484, 5.632, 13.298, 12.530000000000001, 4.0040000000000004, 2.81, 12.754, 10.904, 11.568, 9.522, 7.008, 10.266, 10.3, 8.336, 10.594, 10.488, 12.212, 10.782, 8.15, 9.496, 4.632, 12.296, 8.266, 11.166, 7.55, 10.086, 11.02, 13.314, 10.608, 10.158, 10.316, 10.392, 6.008, 11.98, 9.518, 10.076, 9.57, 9.084, 12.478, 7.208, 10.178, 13.276, 14.898, 10.416, 8.016, 7.508, 13.41, 13.65, 11.398, 6.664, 11.628, 12.798, 9.786, 11.59, 12.888, 8.782, 7.502, 10.346, 8.284, 11.040000000000001, 9.782, 10.116, 10.434000000000001, 8.648, 10.912, 15.16, 13.076, 6.598, 12.584, 5.652, 8.482, 9.784, 10.284, 10.048, 7.976, 6.164, 9.006, 12.462, 13.42, 10.136000000000001, 11.576, 9.266, 8.592, 16.306, 8.632, 3.198, 11.746, 8.51, 10.112, 9.762, 11.322000000000001, 9.492, 10.114, 11.872, 4.946, 8.658, 9.646, 7.75, 16.312, 8.888, 9.496, 6.51, 5.306, 4.978, 15.624, 5.4, 10.902000000000001, 
11.906, 8.896, 12.6, 12.040000000000001, 10.194, 10.784, 11.444, 11.352, 11.012, 10.74, 7.406000000000001, 12.56, 16.794, 12.842, 4.202, 12.028, 10.058, 11.594, 9.02, 13.718, 8.646, 11.186, 8.136000000000001, 10.126, 8.97, 9.126, 10.596, 5.944, 8.92, 3.614, 6.8740000000000006, 5.674, 11.35, 10.428, 12.706, 15.588000000000001, 10.552, 7.706, 8.27, 7.206, 8.83, 6.7940000000000005, 12.796000000000001, 11.432, 14.238, 6.968, 8.192, 8.18, 10.406, 10.714, 4.422, 8.642, 12.27, 9.572000000000001, 5.69, 10.484, 7.436, 7.394, 12.208, 11.028, 7.8, 10.986, 10.78, 8.492, 12.714, 4.682, 15.31, 8.434000000000001, 9.738, 15.328000000000001, 8.354000000000001, 13.126, 9.128, 6.7780000000000005, 0.858, 9.68, 8.47, 6.622, 11.522, 11.714, 7.558, 10.58, 9.91, 11.392, 9.892, 11.27, 13.72, 9.248, 5.198, 7.19, 11.534, 7.74, 14.256, 12.184000000000001, 9.486, 10.21, 14.74, 4.5360000000000005, 11.902000000000001, 11.762, 5.466, 8.266, 17.886, 13.22, 13.528, 8.396, 9.11, 5.414, 8.766, 11.788, 11.514000000000001, 11.014, 9.442, 4.896, 16.42, 12.426, 18.35, 7.314, 9.736, 11.062, 9.018, 6.434, 7.322, 14.098, 8.96, 7.098, 12.674, 8.454, 9.4, 12.522, 10.666, 9.092, 12.084, 13.754, 6.17, 6.612, 13.306000000000001, 9.986, 9.76, 10.540000000000001, 12.366, 8.84, 8.782, 9.736, 9.712, 12.844, 14.214, 12.408, 7.58, 14.058, 6.69, 12.284, 11.388, 12.07, 8.284, 9.064, 17.494, 8.838000000000001, 6.78, 9.298, 12.424, 10.904, 8.3, 6.93, 16.404, 11.706, 9.352, 8.41, 17.884, 9.620000000000001, 7.7620000000000005, 8.378, 7.214, 9.61, 11.59, 13.514000000000001, 8.088000000000001, 10.352, 11.086, 6.704, 7.618, 10.52, 6.276, 11.266, 6.128, 13.994, 7.63, 12.786, 7.256, 7.588, 9.61, 15.452, 8.016, 15.524000000000001, 10.658, 12.56, 4.96, 7.922000000000001, 11.836, 8.972, 6.462, 10.42, 14.004, 14.722, 7.182, 11.59, 14.692, 10.132, 11.486, 10.988, 8.528, 9.412, 11.724, 14.17, 9.874, 8.278, 12.992, 8.738, 9.768, 13.986, 8.886000000000001, 8.632, 12.534, 5.5, 7.44, 6.862, 9.958, 8.42, 12.026, 12.01, 9.59, 8.864, 7.684, 
9.346, 10.858, 11.858, 16.062, 7.546, 15.22, 10.856, 11.026, 15.552, 11.312, 10.702, 6.942, 14.794, 16.604, 7.684, 8.498, 5.244, 9.450000000000001, 11.094, 9.612, 8.952, 10.122, 11.11, 14.184000000000001, 8.06, 7.2940000000000005, 5.8100000000000005, 7.902, 9.332, 12.47, 9.532, 8.578, 9.396, 9.632, 11.664, 11.752, 11.208, 9.372, 5.442, 10.122, 11.506, 12.906, 14.344, 10.442, 11.708, 10.07, 8.546, 12.436, 9.056000000000001, 9.790000000000001, 8.594, 3.126, 9.202, 10.214, 12.958, 6.8, 13.636000000000001, 13.166, 8.34, 6.17, 10.734, 11.604000000000001, 13.408, 7.136, 13.442, 6.654, 13.142, 16.766000000000002, 12.620000000000001, 12.104000000000001, 11.308, 8.25, 11.1, 9.786, 10.654, 9.644, 7.36, 5.312, 8.476, 10.772, 8.598, 5.0680000000000005, 11.78, 5.88, 5.782, 13.758000000000001, 15.288, 13.538, 12.17, 12.616, 12.308, 11.922, 10.534, 7.67, 11.450000000000001, 8.462, 16.274, 13.194, 12.248000000000001, 9.456, 8.974, 13.568, 10.552, 11.19, 10.688, 15.516, 12.426, 11.584, 11.454, 9.646, 5.606, 9.370000000000001, 4.7780000000000005, 9.544, 11.092, 8.728, 10.344, 6.79, 18.304000000000002, 7.692, 13.358, 9.162, 8.540000000000001, 8.42, 10.208, 13.516, 5.712, 16.93, 6.922, 6.764, 7.204, 13.144, 8.026, 6.5, 7.898000000000001, 11.278, 11.764000000000001, 8.798, 3.7800000000000002, 10.598, 12.73, 13.062, 11.258000000000001, 6.348, 11.206, 7.046, 8.054, 4.19, 12.176, 12.386000000000001, 9.636000000000001, 12.316, 10.244, 14.078000000000001, 10.744, 7.698, 13.428, 11.856, 9.94, 12.52, 12.834, 2.548, 6.3180000000000005, 8.882, 7.972, 12.012, 6.8740000000000006, 13.192, 6.946, 7.612, 9.464, 10.066, 9.450000000000001, 6.684, 14.280000000000001, 6.458, 12.562, 3.71, 10.292, 9.822000000000001, 10.896, 10.756, 12.870000000000001, 7.784, 13.964, 5.664, 7.344, 12.636000000000001, 5.18, 12.736, 10.0, 10.63, 9.218, 10.05, 13.036, 6.33, 10.222, 11.974, 9.854000000000001, 13.924, 10.498, 7.672000000000001, 14.936, 7.008, 9.258000000000001, 9.454, 9.48, 13.248000000000001, 2.83, 8.982, 
9.262, 10.066, 13.738, 14.042, 11.254, 7.214, 11.272, 7.33, 11.288, 11.568, 10.286, 11.82, 7.048, 5.01, 11.218, 10.288, 10.438, 8.456, 14.198, 9.774000000000001, 8.326, 9.73, 8.354000000000001, 11.492, 10.03, 3.67, 14.69, 13.738, 11.266, 11.372, 12.834, 11.526, 11.342, 11.302, 17.650000000000002, 9.526, 10.266, 13.286, 12.858, 8.904, 12.714, 7.264, 6.684, 7.546, 16.46, 6.744, 7.84, 13.708, 16.136, 8.82, 15.116, 7.898000000000001, 13.926, 7.84, 14.832, 12.224, 11.282, 12.21, 12.98, 13.644, 16.916, 5.0920000000000005, 13.278, 9.65, 8.044, 12.632, 8.736, 12.556000000000001, 6.0680000000000005, 4.63, 9.120000000000001, 11.370000000000001, 10.338000000000001, 12.194, 9.824, 11.064, 9.306000000000001, 8.494, 7.34, 7.104, 11.09, 7.5120000000000005, 10.498, 8.236, 12.61, 12.658, 14.614, 15.71, 6.776, 10.950000000000001, 9.762, 4.972, 12.796000000000001, 13.586, 14.502, 9.11, 12.748000000000001, 11.256, 13.13, 16.126, 4.282, 4.426, 5.566, 7.532, 8.808, 8.996, 16.696, 8.206, 6.206, 11.604000000000001, 5.792, 12.912, 7.122, 10.518, 11.004, 12.682, 11.540000000000001, 12.200000000000001, 5.686, 9.13, 4.912, 9.074, 9.798, 8.15, 13.694, 11.77, 11.444, 17.338, 13.562000000000001, 12.492, 11.062, 8.372, 7.708, 8.754, 6.15, 7.606, 4.86, 8.57, 14.548, 12.244, 11.956, 5.846, 11.608, 13.376, 6.094, 15.138, 10.206, 8.672, 13.178, 10.338000000000001, 10.07, 10.44, 7.194, 13.556000000000001, 7.596, 13.374, 8.58, 10.858, 6.634, 7.394, 10.322000000000001, 12.09, 5.694, 8.156, 6.94, 8.454, 7.478, 8.6, 10.058, 13.694, 7.414000000000001, 9.458, 5.71, 9.592, 12.622, 12.722, 8.542, 10.964, 9.174, 4.082, 9.368, 8.546, 4.838, 13.638, 15.808, 13.578, 15.006, 9.846, 14.31, 8.55, 9.548, 6.82, 5.868, 12.33, 13.332, 9.764, 6.7780000000000005, 7.426, 6.022, 10.092, 6.218, 10.428, 11.048, 8.032, 6.5440000000000005, 9.258000000000001, 10.648, 7.008, 6.446, 10.438, 15.676, 9.02, 4.986, 10.03, 10.404, 8.366, 8.790000000000001, 14.786, 6.892, 6.8100000000000005, 11.72, 14.328000000000001, 11.364, 5.274, 
13.522, 8.964, 12.22, 11.628, 4.506, 13.976, 13.396, 13.812000000000001, 6.0760000000000005, 4.784, 11.962, 11.022, 6.25, 7.948, 8.75, 11.978, 9.462, 12.322000000000001, 7.272, 9.774000000000001, 7.822, 9.094, 9.574, 11.472, 7.856, 6.336, 11.06, 12.676, 5.728, 10.132, 9.796, 9.652000000000001, 9.756, 6.188, 8.716, 10.988, 5.016, 12.478, 8.076, 17.582, 7.902, 9.604000000000001, 9.932, 11.536, 11.362, 11.36, 11.452, 7.82, 8.544, 10.072000000000001, 8.24, 14.142, 8.884, 11.218, 5.208, 9.342, 11.85, 8.6, 5.892, 7.882000000000001, 12.16, 11.3, 9.61, 13.97, 8.464, 7.428, 10.338000000000001, 10.522, 11.082, 10.542, 5.748, 11.664, 16.07, 7.636, 6.822, 13.922, 6.98, 9.0, 8.14, 7.662, 10.602, 5.396, 10.678, 7.572, 11.024000000000001, 10.952, 13.200000000000001, 8.446, 14.368, 5.244, 10.106, 10.692, 11.634, 14.492, 5.434, 12.344, 5.524, 8.584, 10.042, 13.204, 11.578, 13.138, 13.174, 10.514, 11.03, 4.816, 6.894, 8.334, 14.934000000000001, 13.55, 15.138, 6.598, 7.556, 9.844, 8.59, 6.304, 13.958, 8.062, 8.476, 11.198, 8.688, 10.826, 11.852, 9.346, 11.22, 12.73, 13.528, 11.49, 8.326, 11.124, 8.606, 4.882, 16.242, 10.794, 6.388, 6.666, 16.256, 9.716000000000001, 8.262, 13.426, 13.538, 11.46, 14.804, 12.26, 13.11, 10.046, 9.13, 13.006, 9.766, 11.81, 11.1, 9.374, 8.68, 9.454, 7.51, 9.994, 16.528, 13.658, 12.392, 10.984, 5.55, 8.934000000000001, 5.022, 11.148, 4.616, 11.34, 10.632, 9.84, 7.8740000000000006, 10.938, 10.246, 4.078, 17.306, 9.526, 12.794, 14.108, 10.83, 10.594, 10.88, 12.046, 6.228, 11.89, 7.904, 6.398000000000001, 15.968, 8.734, 9.444, 7.45, 15.486, 6.438, 10.74, 12.624, 12.948, 12.448, 7.58, 12.018, 6.424, 5.978, 9.732, 10.26, 11.914, 8.404, 10.248, 8.58, 6.614, 4.166, 7.392, 8.644, 15.5, 10.914, 13.638, 11.582, 7.7860000000000005, 5.38, 11.968, 7.344, 12.734, 12.906, 11.354000000000001, 11.152000000000001, 13.918000000000001, 14.4, 8.946, 9.362, 13.002, 13.138, 12.448, 12.534, 7.498, 12.306000000000001, 11.408, 9.362, 11.828, 10.962, 8.386000000000001, 7.522, 11.19, 
9.792, 14.412, 8.136000000000001, 11.726, 13.492, 7.508, 11.094, 11.11, 8.744, 7.784, 11.574, 9.672, 9.282, 9.142, 11.966000000000001, 5.808, 8.53, 11.158, 12.426, 13.564, 8.474, 12.76, 8.298, 9.338000000000001, 11.23, 13.776, 10.302, 13.404, 6.942, 9.96, 10.612, 13.586, 9.018, 13.09, 6.008, 16.9, 5.548, 10.396, 4.532, 10.904, 8.332, 11.564, 12.282, 4.71, 11.896, 10.008000000000001, 6.0600000000000005, 10.646, 12.768, 11.18, 15.038, 8.65, 10.936, 9.486, 17.964, 10.852, 13.57, 10.596, 15.34, 5.63, 5.316, 12.812, 7.352, 9.282, 10.19, 9.65, 6.472, 5.488, 9.76, 11.778, 14.186, 7.156000000000001, 13.168000000000001, 7.828, 8.308, 12.808, 8.132, 10.68, 8.57, 8.736, 10.678, 6.974, 10.906, 13.018, 13.168000000000001, 14.348, 8.822000000000001, 7.96, 8.436, 13.4, 12.72, 8.058, 8.498, 12.540000000000001, 9.866, 8.742, 5.128, 13.58, 12.92, 10.422, 16.146, 8.288, 6.242, 11.33, 9.308, 7.412, 14.032, 10.826, 9.196, 9.916, 9.808, 7.194, 14.982000000000001, 8.466, 9.938, 11.902000000000001, 10.088000000000001, 13.384, 10.972, 13.23, 14.002, 10.452, 6.772, 7.812, 7.8020000000000005, 8.192, 11.454, 13.518, 10.728, 5.3260000000000005, 11.18, 9.796, 8.824, 10.9, 11.844, 11.376, 9.698, 8.28, 6.896, 9.05, 14.442, 10.414, 8.156, 18.046, 7.292, 9.518, 11.676, 9.734, 8.198, 2.77, 13.498000000000001, 11.562, 14.234, 4.892, 9.75, 7.218, 9.26, 12.348, 10.622, 12.164, 9.272, 9.792, 8.768, 12.866, 9.44, 10.772, 11.014, 8.856, 8.68, 7.578, 12.092, 6.488, 12.43, 7.666, 9.666, 9.336, 12.032, 9.666, 8.708, 10.344, 2.858, 6.96, 9.69, 12.452, 11.562, 10.608, 13.166, 12.994, 7.84, 10.316, 6.09, 11.354000000000001, 10.848, 9.954, 11.168000000000001, 7.868, 10.236, 7.564, 15.514000000000001, 4.25, 9.374, 13.054, 6.19, 9.262, 6.162, 6.5760000000000005, 7.194, 9.966000000000001, 11.032, 15.81, 9.544, 12.346, 8.22, 9.282, 8.778, 12.842, 4.812, 9.068, 5.908, 9.518, 6.642, 8.396, 12.484, 5.89, 14.33, 10.97, 13.636000000000001, 14.896, 7.094, 12.07, 5.89, 9.33, 5.67, 10.266, 13.514000000000001, 14.176, 
13.006, 11.838000000000001, 9.13, 11.884, 9.428, 8.092, 3.56, 7.0360000000000005, 7.638, 9.758000000000001, 9.704, 9.632, 12.814, 10.18, 11.578, 13.264000000000001, 16.11, 9.828, 8.204, 13.062, 17.236, 9.976, 13.88, 5.368, 10.564, 12.482000000000001]\n", + "[ -3.448, -3.115] : \n", + "[ -3.115, -2.782] : \n", + "[ -2.782, -2.448] : #\n", + "[ -2.448, -2.115] : #\n", + "[ -2.115, -1.782] : ##\n", + "[ -1.782, -1.449] : #####\n", + "[ -1.449, -1.116] : ########\n", + "[ -1.116, -0.782] : ##########\n", + "[ -0.782, -0.449] : ################\n", + "[ -0.449, -0.116] : ##################\n", + "[ -0.116, 0.217] : ####################\n", + "[ 0.217, 0.550] : #################\n", + "[ 0.550, 0.884] : #############\n", + "[ 0.884, 1.217] : ###########\n", + "[ 1.217, 1.550] : #######\n", + "[ 1.550, 1.883] : ###\n", + "[ 1.883, 2.216] : #\n", + "[ 2.216, 2.550] : #\n", + "[ 2.550, 2.883] : \n", + "[ 2.883, 3.216] : \n", + "g1 mean = -0.016807999999999896\n", + "g1 variance = 1.020684979135999\n", + "[ 0.374, 1.349] : \n", + "[ 1.349, 2.324] : \n", + "[ 2.324, 3.298] : #\n", + "[ 3.298, 4.273] : ##\n", + "[ 4.273, 5.248] : ####\n", + "[ 5.248, 6.223] : #######\n", + "[ 6.223, 7.198] : ###########\n", + "[ 7.198, 8.172] : ##############\n", + "[ 8.172, 9.147] : #################\n", + "[ 9.147, 10.122] : ####################\n", + "[ 10.122, 11.097] : #################\n", + "[ 11.097, 12.072] : ################\n", + "[ 12.072, 13.046] : #############\n", + "[ 13.046, 14.021] : #########\n", + "[ 14.021, 14.996] : ######\n", + "[ 14.996, 15.971] : ###\n", + "[ 15.971, 16.946] : ##\n", + "[ 16.946, 17.920] : \n", + "[ 17.920, 18.895] : \n", + "[ 18.895, 19.870] : \n", + "g2 mean = 10.014089200000036\n", + "g2 variance = 8.766713087243378\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0X96qgN8_1iD", + "colab_type": "text" + }, + "source": [ + "*Exercise 9:* Combine your `generate_function`, `where`, and `in_range` functions 
above to create an integrate function. Use your integrate function to show that approximately 68% of Normal distribution is within one variance." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "A51dSStW_1iF", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + }, + "outputId": "0ddd25e6-821c-4f09-daeb-12cca565bc7a" + }, + "source": [ + "def integrate(func, x_min, x_max, n_points=1000):\n", + " '''\n", + " points = generate_function(func, x_min, x_max, n_points)\n", + " m = mean(points)\n", + " #draw_histogram(points, 10, x_min, x_max, '#', 75)\n", + " dx = (x_max - x_min) / n_points\n", + " integral = m * sum(len(where(points, in_range(x_min + i * dx, x_min + (i + 1) * dx))) for i in range(n_points))\n", + " '''\n", + " i = x_min\n", + " integral = 0\n", + " dx = (x_max - x_min) / n_points\n", + " while i < x_max:\n", + " integral += func(i) * dx\n", + " i += dx\n", + " return integral\n", + "print(integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 1000))\n", + "print(integrate(g2, 7, 13, 1000)/integrate(g2, 0, 20, 1000))" + ], + "execution_count": 256, + "outputs": [ + { + "output_type": "stream", + "text": [ + "1333.3320000000258\n", + "0.6832686585265082\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "d87Vi_Zj_1iL", + "colab_type": "code", + "colab": {} + }, + "source": [ + "" + ], + "execution_count": 0, + "outputs": [] + } + ] +} \ No newline at end of file From 66a823453c64d8a734dda22395dd4130cd554700 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 2 Mar 2020 03:19:35 -0600 Subject: [PATCH 07/24] updating lab 3 --- Labs/Lab-3/Copy_of_Lab_3.ipynb | 321 +++++++++------------------------ 1 file changed, 82 insertions(+), 239 deletions(-) diff --git a/Labs/Lab-3/Copy_of_Lab_3.ipynb b/Labs/Lab-3/Copy_of_Lab_3.ipynb index 19790b3..02f9db2 100644 --- a/Labs/Lab-3/Copy_of_Lab_3.ipynb +++ b/Labs/Lab-3/Copy_of_Lab_3.ipynb @@ -68,17 +68,17 @@ "editable": true, 
"id": "xLZxreLTl6w_", "colab_type": "code", + "outputId": "fe454a38-9d17-4bb6-bb49-4ee1b2c8f4a6", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - }, - "outputId": "a2fb5d59-e002-4117-8109-ddbc752389df" + } }, "source": [ "# Test your solution here\n", "print(make_game_board())" ], - "execution_count": 2, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -146,10 +146,10 @@ "source": [ "# Write you solution here\n", "def check_game_finished(board):\n", - " board_wins = [row for row in board]\n", - " board_wins += [list(row) for row in zip(*board)]\n", - " board_wins += [[board[i][i] for i in range(len(board))]]\n", - " board_wins += [[board[len(board) - 1 - i][i] for i in range(len(board))]]\n", + " board_wins = [row for row in board] # rows\n", + " board_wins += [list(row) for row in zip(*board)] # columns\n", + " board_wins += [[board[i][i] for i in range(len(board))]] # right down diagonal\n", + " board_wins += [[board[len(board) - 1 - i][i] for i in range(len(board))]] # left down diagonal\n", " if [1] * len(board) in board_wins:\n", " return 1\n", " elif [2] * len(board) in board_wins:\n", @@ -169,28 +169,28 @@ "editable": true, "id": "jFvUimXMl6xS", "colab_type": "code", + "outputId": "ab2ed326-4253-42dc-d3ea-72e57487da20", "colab": { "base_uri": "https://localhost:8080/", "height": 102 - }, - "outputId": "fafafe7b-b6ef-413b-d8b5-5be4cef7ee40" + } }, "source": [ "# Test your solution here\n", - "print(check_game_finished(winner_is_1))\n", "print(check_game_finished(winner_is_2))\n", + "print(check_game_finished(winner_is_1))\n", "print(check_game_finished(winner_is_also_1))\n", "print(check_game_finished(no_winner))\n", "print(check_game_finished(also_no_winner))" ], - "execution_count": 20, + "execution_count": 0, "outputs": [ { "output_type": "stream", "text": [ - "1\n", "2\n", "1\n", + "1\n", "-1\n", "-1\n" ], @@ -255,11 +255,11 @@ "editable": true, "id": "wrLb0jtcl6xi", "colab_type": "code", + "outputId": 
"11643445-b2a6-4274-b9d4-983254111bef", "colab": { "base_uri": "https://localhost:8080/", "height": 374 - }, - "outputId": "b276b3f8-be68-43d8-8226-42b080ed6071" + } }, "source": [ "# Test your solution here\n", @@ -267,7 +267,7 @@ "draw_game_board(2, 4)\n", "draw_game_board(4, 5)" ], - "execution_count": 12, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -346,11 +346,11 @@ "editable": true, "id": "DNm_lodnl6xy", "colab_type": "code", + "outputId": "f39170d3-4772-4aa4-c143-57c5559e8ad7", "colab": { "base_uri": "https://localhost:8080/", "height": 612 - }, - "outputId": "2ebe7f2f-589a-4905-db1e-200ff4d0a522" + } }, "source": [ "# Test your solution here\n", @@ -360,7 +360,7 @@ "draw_game_board(no_winner)\n", "draw_game_board(also_no_winner)" ], - "execution_count": 28, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -387,7 +387,7 @@ "| O | X | X |\n", " --- --- ---\n", " --- --- ---\n", - "| X | O | X |\n", + "| X | O | |\n", " --- --- ---\n", "| O | X | |\n", " --- --- ---\n", @@ -398,7 +398,7 @@ " --- --- ---\n", "| O | X | |\n", " --- --- ---\n", - "| O | X | X |\n", + "| O | X | |\n", " --- --- ---\n" ], "name": "stdout" @@ -446,11 +446,11 @@ "editable": true, "id": "wk7OoaNcl6yA", "colab_type": "code", + "outputId": "fac2d9fd-42a5-43ce-a297-c62787bc7674", "colab": { "base_uri": "https://localhost:8080/", "height": 136 - }, - "outputId": "4ab40ac0-81ac-430d-cdd3-7be1834c4a69" + } }, "source": [ "# Test your solution here\n", @@ -459,7 +459,7 @@ "_move(test_board, 2, (2, 0))\n", "draw_game_board(test_board)" ], - "execution_count": 17, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -526,11 +526,11 @@ "editable": true, "id": "Zo6zTO_ll6yO", "colab_type": "code", + "outputId": "5935d556-6d31-4fc0-cfab-c4e741625ea5", "colab": { "base_uri": "https://localhost:8080/", "height": 697 - }, - "outputId": "c9d8347d-3da4-4d53-ff31-4a44c0eed36e" + } }, "source": [ "# Test your solution here\n", @@ -540,7 +540,7 @@ 
"draw_game_board(no_winner)\n", "draw_game_board(also_no_winner)" ], - "execution_count": 34, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -571,7 +571,7 @@ " --- --- ---\n", " 1 2 3 \n", " --- --- ---\n", - "A | X | O | X |\n", + "A | X | O | |\n", " --- --- ---\n", "B | O | X | |\n", " --- --- ---\n", @@ -583,7 +583,7 @@ " --- --- ---\n", "B | O | X | |\n", " --- --- ---\n", - "C | O | X | X |\n", + "C | O | X | |\n", " --- --- ---\n" ], "name": "stdout" @@ -633,11 +633,11 @@ "editable": true, "id": "on6HRvX7l6ya", "colab_type": "code", + "outputId": "5e9f3a17-275d-4c55-cd81-9e198ccb05dd", "colab": { "base_uri": "https://localhost:8080/", - "height": 323 - }, - "outputId": "8b84374d-8d5a-4ca3-c8d0-64385fd42216" + "height": 289 + } }, "source": [ "# Test your solution here\n", @@ -646,12 +646,11 @@ "move(also_no_winner, 1, \"C3\")\n", "draw_game_board(also_no_winner)" ], - "execution_count": 36, + "execution_count": 0, "outputs": [ { "output_type": "stream", "text": [ - "Cannot put X at location A3\n", " 1 2 3 \n", " --- --- ---\n", "A | X | O | X |\n", @@ -660,7 +659,6 @@ " --- --- ---\n", "C | O | X | O |\n", " --- --- ---\n", - "Cannot put X at location C3\n", " 1 2 3 \n", " --- --- ---\n", "A | X | O | |\n", @@ -721,11 +719,11 @@ "editable": true, "id": "5klPGJNVl6yp", "colab_type": "code", + "outputId": "031bd46e-7997-43e3-e391-7f23d78dd946", "colab": { "base_uri": "https://localhost:8080/", "height": 595 - }, - "outputId": "2a010f05-694e-4a16-a5a8-1cb31f3c9036" + } }, "source": [ "# Test your solution here\n", @@ -734,25 +732,25 @@ "player_move(also_no_winner, 2)\n", "draw_game_board(also_no_winner)" ], - "execution_count": 13, + "execution_count": 0, "outputs": [ { "output_type": "stream", "text": [ " 1 2 3 \n", " --- --- ---\n", - "A | X | O | |\n", + "A | X | O | X |\n", " --- --- ---\n", "B | O | X | |\n", " --- --- ---\n", "C | O | X | O |\n", " --- --- ---\n", - "Place X at: a3\n", + "Place X at: b3\n", " 1 2 3 \n", " --- --- 
---\n", "A | X | O | X |\n", " --- --- ---\n", - "B | O | X | |\n", + "B | O | X | X |\n", " --- --- ---\n", "C | O | X | O |\n", " --- --- ---\n", @@ -762,7 +760,7 @@ " --- --- ---\n", "B | O | X | |\n", " --- --- ---\n", - "C | O | X | |\n", + "C | O | X | X |\n", " --- --- ---\n", "Place O at: a3\n", " 1 2 3 \n", @@ -771,7 +769,7 @@ " --- --- ---\n", "B | O | X | |\n", " --- --- ---\n", - "C | O | X | |\n", + "C | O | X | X |\n", " --- --- ---\n" ], "name": "stdout" @@ -824,17 +822,17 @@ "editable": true, "id": "aBSvAvKWl6yz", "colab_type": "code", + "outputId": "868b6418-dbad-4a09-dd92-46829dcc78be", "colab": { "base_uri": "https://localhost:8080/", - "height": 935 - }, - "outputId": "91f6a136-3a3c-4a5a-9738-e81dd6cce49d" + "height": 765 + } }, "source": [ "# Test your solution here\n", "tic_tac_toe()" ], - "execution_count": 22, + "execution_count": 0, "outputs": [ { "output_type": "stream", @@ -847,54 +845,40 @@ " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place X at: b2\n", - " 1 2 3 \n", - " --- --- ---\n", - "A | | | |\n", - " --- --- ---\n", - "B | | X | |\n", - " --- --- ---\n", - "C | | | |\n", - " --- --- ---\n", - "Place O at: a1\n", - " 1 2 3 \n", - " --- --- ---\n", - "A | O | | |\n", - " --- --- ---\n", - "B | | X | |\n", - " --- --- ---\n", - "C | | | |\n", - " --- --- ---\n", - "Place X at: a2\n", + "Place X at: a1\n", " 1 2 3 \n", " --- --- ---\n", - "A | O | X | |\n", + "A | X | | |\n", " --- --- ---\n", - "B | | X | |\n", + "B | | | |\n", " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place O at: b1\n", + "Place O at: a2\n", " 1 2 3 \n", " --- --- ---\n", - "A | O | X | |\n", + "A | X | O | |\n", " --- --- ---\n", - "B | O | X | |\n", + "B | | | |\n", " --- --- ---\n", "C | | | |\n", " --- --- ---\n", - "Place X at: c2\n", - " 1 2 3 \n", - " --- --- ---\n", - "A | O | X | |\n", - " --- --- ---\n", - "B | O | X | |\n", - " --- --- ---\n", - "C | | X | |\n", - " --- --- ---\n", - "Player 1 wins!\n" + "Place X at: \n" ], "name": 
"stdout" + }, + { + "output_type": "error", + "ename": "IndexError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mtic_tac_toe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtic_tac_toe\u001b[0;34m(n)\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mwhile\u001b[0m \u001b[0mcheck_game_finished\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcurrent_player\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mplayer_move\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mplayer_move\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mplayer_move\u001b[0;34m(board, player)\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mdraw_game_board\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mlocation\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0minput\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Place \"\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mplayers\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mplayer\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m\" at: \"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32min\u001b[0m \u001b[0malphabet\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0malphabet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfind\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocation\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m<\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0misnumeric\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;36m0\u001b[0m \u001b[0;34m<\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocation\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m<=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mboard\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m<=\u001b[0m \u001b[0;36m26\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mIndexError\u001b[0m: string index out of range" + ] } ] }, @@ -917,169 
+901,14 @@ "editable": true, "id": "SOV5nKS4l6y5", "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - }, - "outputId": "d591aecd-6091-478f-e273-b02f4df0db3f" + "colab": {} }, "source": [ "# Test your solution here\n", "tic_tac_toe(5)" ], - "execution_count": 23, - "outputs": [ - { - "output_type": "stream", - "text": [ - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | | | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: a1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place O at: a2\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: b1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place O at: b2\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: c1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- 
---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place O at: 2\n", - "Invalid location. Try again.\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place O at: c2\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | O | | | |\n", - " --- --- --- --- ---\n", - "D | | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: d1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | O | | | |\n", - " --- --- --- --- ---\n", - "D | X | | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place O at: d2\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | O | | | |\n", - " --- --- --- --- ---\n", - "D | X | O | | | |\n", - " --- --- --- --- ---\n", - "E | | | | | |\n", - " --- --- --- --- ---\n", - "Place X at: e1\n", - " 1 2 3 4 5 \n", - " --- --- --- --- ---\n", - "A | X | O | | | |\n", - " --- --- --- --- ---\n", - "B | X | O | | | |\n", - " --- --- --- --- ---\n", - "C | X | O | | | |\n", - " --- --- --- --- ---\n", - "D | X | O | | | |\n", - " --- --- --- --- ---\n", - "E | X | | | | |\n", - " --- --- --- --- ---\n", - "Player 1 wins!\n" - ], - "name": "stdout" - } - ] + 
"execution_count": 0, + "outputs": [] }, { "cell_type": "markdown", @@ -1103,7 +932,21 @@ "colab": {} }, "source": [ - "# Write you solution here" + "# Write you solution here\n", + "\n", + "\n", + "def tic_tac_toe(n=3):\n", + " board = make_game_board(n)\n", + " current_player = True\n", + " while check_game_finished(board) == -1:\n", + " if current_player:\n", + " player_move(board, 1)\n", + " else:\n", + " player_move(board, 2)\n", + " current_player = not current_player\n", + " result = check_game_finished(board)\n", + " draw_game_board(board)\n", + " print(\"It's a draw!\" if result == 0 else (\"Player 1 wins!\" if result == 1 else \"Player 2 wins!\"))" ], "execution_count": 0, "outputs": [] From a7660b5e9376232a2579199f51a958d55b51faf1 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 2 Mar 2020 03:20:59 -0600 Subject: [PATCH 08/24] finished lab 4. added monte carlo integral and 'proper' integral --- Labs/Lab-4/Copy_of_Lab_4.ipynb | 388 +++++++++++++++++---------------- 1 file changed, 202 insertions(+), 186 deletions(-) diff --git a/Labs/Lab-4/Copy_of_Lab_4.ipynb b/Labs/Lab-4/Copy_of_Lab_4.ipynb index e3d92de..a8b285d 100644 --- a/Labs/Lab-4/Copy_of_Lab_4.ipynb +++ b/Labs/Lab-4/Copy_of_Lab_4.ipynb @@ -48,7 +48,7 @@ "metadata": { "id": "TmrUVAv1_1ff", "colab_type": "code", - "outputId": "588189e3-fb26-49e2-9163-1aa3ccf64718", + "outputId": "058f7f18-c5ce-4f3b-fae6-b60a75b39d15", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -59,12 +59,12 @@ "x=random.random()\n", "print(\"The Value of x is\", x)" ], - "execution_count": 1, + "execution_count": 125, "outputs": [ { "output_type": "stream", "text": [ - "The Value of x is 0.04751343885168802\n" + "The Value of x is 0.3386614209151734\n" ], "name": "stdout" } @@ -96,7 +96,7 @@ " ### BEGIN SOLUTION\n", "\n", " while len(out) < N:\n", - " out.append(random.uniform(x_min, x_max)) \n", + " out.append(x_min + random.random() * (x_max - x_min)) \n", " \n", " ### END SOLUTION\n", " return out" 
@@ -109,7 +109,7 @@ "metadata": { "id": "eiWTH4-H_1f6", "colab_type": "code", - "outputId": "1b0f6d0c-dfcc-4791-86d1-df408908062e", + "outputId": "0decf992-1467-425a-9f63-6ea6d5b5e9ff", "colab": { "base_uri": "https://localhost:8080/", "height": 102 @@ -125,7 +125,7 @@ " print(\"Data Minimum:\", min(data))\n", " print(\"Data Maximum:\", max(data))" ], - "execution_count": 3, + "execution_count": 127, "outputs": [ { "output_type": "stream", @@ -133,8 +133,8 @@ "Data Type: \n", "Data Length: 1000\n", "Type of Data Contents: \n", - "Data Minimum: -9.996467198995925\n", - "Data Maximum: 9.96441797265739\n" + "Data Minimum: -9.993645208468072\n", + "Data Maximum: 9.999142279088858\n" ], "name": "stdout" } @@ -179,7 +179,7 @@ "metadata": { "id": "0Z8u7_Hq_1gK", "colab_type": "code", - "outputId": "990a6dae-5b2c-4594-9e61-9f4e10cae468", + "outputId": "f48293be-eb50-4d9c-83ea-b80f755f8a79", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -189,12 +189,12 @@ "# Test your solution here\n", "print(\"Mean of Data:\", mean(data))" ], - "execution_count": 161, + "execution_count": 129, "outputs": [ { "output_type": "stream", "text": [ - "Mean of Data: -0.17178090426615691\n" + "Mean of Data: -0.43159352484900165\n" ], "name": "stdout" } @@ -240,7 +240,7 @@ "metadata": { "id": "IbasE7ma_1gZ", "colab_type": "code", - "outputId": "5eaada1f-e6ba-4ec2-b683-a561978bd8d9", + "outputId": "7fef8507-1e5f-440d-87c4-96c6190b5a1e", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -250,12 +250,12 @@ "# Test your solution here\n", "print(\"Variance of Data:\", variance(data))" ], - "execution_count": 156, + "execution_count": 131, "outputs": [ { "output_type": "stream", "text": [ - "Variance of Data: 31.828907048258063\n" + "Variance of Data: 33.533919118720135\n" ], "name": "stdout" } @@ -332,7 +332,7 @@ "metadata": { "id": "CQg5EFMg_1gn", "colab_type": "code", - "outputId": "4b8ebd11-18fc-41b7-f4d0-30e2c80bcc27", + "outputId": 
"e8539894-70b2-4a3c-d686-851a83c64c9c", "colab": { "base_uri": "https://localhost:8080/", "height": 54 @@ -343,12 +343,12 @@ "h,b=histogram(data,100)\n", "print(len(b),h)" ], - "execution_count": 9, + "execution_count": 133, "outputs": [ { "output_type": "stream", "text": [ - "101 [13, 13, 10, 12, 7, 9, 6, 8, 3, 5, 6, 11, 11, 7, 14, 19, 10, 10, 8, 15, 12, 9, 8, 6, 15, 10, 14, 9, 9, 12, 9, 14, 6, 16, 13, 14, 10, 5, 9, 17, 12, 10, 10, 13, 8, 10, 7, 10, 7, 17, 4, 14, 8, 11, 15, 9, 7, 13, 13, 8, 10, 4, 6, 14, 13, 10, 10, 13, 7, 8, 11, 18, 5, 6, 13, 13, 2, 9, 11, 9, 8, 11, 10, 9, 12, 8, 11, 10, 10, 12, 6, 11, 11, 10, 9, 9, 7, 10, 6, 8]\n" + "101 [11, 11, 11, 11, 12, 16, 11, 13, 7, 18, 11, 13, 10, 5, 6, 13, 7, 9, 11, 11, 5, 14, 10, 12, 11, 18, 7, 13, 10, 11, 11, 13, 5, 9, 8, 15, 8, 9, 8, 14, 16, 14, 8, 18, 8, 12, 4, 11, 10, 11, 9, 9, 11, 5, 8, 11, 9, 10, 10, 10, 5, 11, 15, 13, 11, 10, 8, 12, 6, 9, 5, 9, 8, 6, 8, 13, 10, 14, 10, 10, 6, 4, 7, 8, 10, 13, 9, 4, 10, 7, 9, 11, 4, 10, 11, 8, 16, 12, 6, 10]\n" ], "name": "stdout" } @@ -411,7 +411,7 @@ "metadata": { "id": "EsQxIs9U_1gy", "colab_type": "code", - "outputId": "638ca22a-a07b-4f19-dd39-36c4f786c0d8", + "outputId": "f48acce2-3a92-4e7d-b855-7e741433df87", "colab": { "base_uri": "https://localhost:8080/", "height": 357 @@ -421,31 +421,31 @@ "# Test your solution here\n", "h,b=draw_histogram(data,20)" ], - "execution_count": 107, + "execution_count": 135, "outputs": [ { "output_type": "stream", "text": [ - "[ -9.996, -8.998] : #################\n", - "[ -8.998, -8.000] : ##########\n", - "[ -8.000, -7.002] : ###############\n", - "[ -7.002, -6.004] : ####################\n", - "[ -6.004, -5.006] : ################\n", - "[ -5.006, -4.008] : #################\n", - "[ -4.008, -3.010] : ##################\n", - "[ -3.010, -2.012] : #################\n", - "[ -2.012, -1.014] : #################\n", - "[ -1.014, -0.016] : ################\n", - "[ -0.016, 0.982] : ################\n", - "[ 0.982, 1.980] : ################\n", 
- "[ 1.980, 2.978] : ###############\n", - "[ 2.978, 3.976] : ###############\n", - "[ 3.976, 4.974] : #################\n", - "[ 4.974, 5.972] : ##############\n", - "[ 5.972, 6.970] : ################\n", - "[ 6.970, 7.968] : ################\n", - "[ 7.968, 8.966] : ###############\n", - "[ 8.966, 9.964] : ############\n" + "[ -9.994, -8.994] : #################\n", + "[ -8.994, -7.994] : ####################\n", + "[ -7.994, -6.995] : #############\n", + "[ -6.995, -5.995] : ###############\n", + "[ -5.995, -4.995] : ################\n", + "[ -4.995, -3.996] : ##################\n", + "[ -3.996, -2.996] : ##############\n", + "[ -2.996, -1.997] : ################\n", + "[ -1.997, -0.997] : ###################\n", + "[ -0.997, 0.003] : ##############\n", + "[ 0.003, 1.002] : ############\n", + "[ 1.002, 2.002] : ###############\n", + "[ 2.002, 3.002] : ################\n", + "[ 3.002, 4.001] : #############\n", + "[ 4.001, 5.001] : ###########\n", + "[ 5.001, 6.001] : #################\n", + "[ 6.001, 7.000] : ##########\n", + "[ 7.000, 8.000] : #############\n", + "[ 8.000, 9.000] : #############\n", + "[ 9.000, 9.999] : ################\n" ], "name": "stdout" } @@ -491,11 +491,11 @@ "metadata": { "id": "ZyXbNZK7_1hS", "colab_type": "code", + "outputId": "b088491e-1fd0-461d-e7eb-de2985e7d13f", "colab": { "base_uri": "https://localhost:8080/", "height": 54 - }, - "outputId": "17c89f30-d78f-4176-9c26-1a641bd59952" + } }, "source": [ "# Test your solution here\n", @@ -504,12 +504,12 @@ "\n", "print(where(data, myfunc))" ], - "execution_count": 13, + "execution_count": 137, "outputs": [ { "output_type": "stream", "text": [ - "[0, 1, 2, 3, 7, 9, 10, 13, 16, 17, 18, 19, 21, 24, 25, 27, 30, 31, 33, 36, 37, 38, 39, 40, 41, 43, 44, 49, 51, 52, 54, 59, 60, 61, 62, 64, 65, 69, 70, 72, 73, 74, 75, 77, 78, 80, 82, 83, 85, 86, 88, 90, 91, 92, 95, 97, 98, 101, 104, 106, 108, 109, 111, 113, 114, 116, 118, 124, 126, 128, 131, 132, 134, 136, 140, 143, 145, 147, 155, 156, 160, 
162, 164, 166, 168, 170, 174, 178, 179, 180, 181, 183, 185, 186, 189, 190, 191, 193, 195, 197, 198, 201, 205, 206, 209, 213, 215, 217, 221, 222, 225, 231, 235, 239, 243, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 256, 261, 264, 266, 268, 270, 271, 272, 274, 278, 280, 284, 285, 286, 287, 294, 295, 299, 302, 303, 305, 306, 307, 309, 314, 317, 325, 326, 329, 331, 333, 334, 337, 338, 340, 341, 342, 343, 344, 351, 353, 355, 356, 359, 361, 362, 363, 364, 366, 369, 373, 374, 375, 394, 396, 398, 401, 402, 403, 405, 406, 411, 412, 413, 414, 415, 417, 419, 425, 429, 430, 432, 433, 434, 437, 438, 439, 440, 444, 446, 448, 450, 453, 454, 458, 459, 460, 461, 462, 463, 464, 465, 468, 469, 474, 475, 478, 479, 480, 487, 490, 491, 492, 494, 495, 496, 499, 500, 501, 502, 504, 506, 508, 509, 513, 515, 516, 517, 518, 521, 526, 529, 530, 532, 534, 539, 544, 545, 546, 549, 550, 552, 553, 555, 556, 558, 560, 561, 562, 563, 564, 565, 569, 572, 573, 575, 581, 583, 585, 586, 587, 588, 589, 592, 597, 599, 603, 607, 608, 609, 611, 612, 613, 616, 618, 619, 621, 623, 624, 626, 627, 631, 632, 635, 636, 639, 641, 645, 646, 648, 650, 653, 654, 655, 658, 660, 671, 673, 676, 685, 688, 689, 690, 694, 696, 697, 700, 702, 703, 706, 708, 709, 711, 713, 716, 719, 720, 721, 723, 725, 726, 729, 735, 736, 738, 739, 740, 741, 743, 747, 748, 751, 753, 758, 763, 764, 765, 766, 770, 773, 780, 783, 785, 786, 788, 790, 792, 795, 799, 801, 806, 809, 810, 811, 813, 815, 816, 817, 821, 822, 824, 826, 828, 830, 831, 833, 840, 841, 842, 844, 847, 850, 851, 855, 856, 857, 859, 860, 861, 862, 866, 867, 870, 872, 874, 875, 877, 878, 880, 882, 884, 891, 892, 896, 898, 902, 903, 904, 907, 909, 912, 913, 915, 917, 921, 923, 924, 925, 928, 930, 931, 933, 936, 938, 939, 940, 943, 944, 946, 947, 951, 952, 953, 954, 958, 960, 961, 963, 966, 968, 973, 974, 976, 978, 979, 981, 983, 985, 987, 989, 991, 992, 996, 999]\n" + "[2, 3, 4, 5, 7, 8, 10, 16, 18, 22, 24, 30, 32, 33, 37, 39, 42, 43, 45, 47, 48, 49, 50, 58, 61, 63, 66, 
70, 74, 75, 77, 80, 81, 82, 83, 85, 89, 93, 96, 97, 99, 101, 103, 105, 106, 107, 109, 112, 113, 115, 118, 119, 120, 123, 124, 126, 128, 129, 134, 135, 138, 140, 141, 142, 144, 146, 147, 149, 151, 152, 153, 155, 156, 159, 161, 170, 172, 173, 175, 176, 178, 181, 182, 186, 189, 191, 192, 193, 194, 195, 198, 199, 203, 204, 205, 207, 208, 212, 216, 221, 222, 223, 225, 227, 228, 231, 232, 233, 234, 235, 240, 241, 243, 247, 249, 251, 252, 253, 254, 255, 256, 257, 261, 262, 267, 269, 271, 274, 278, 280, 281, 282, 284, 287, 288, 292, 295, 296, 298, 305, 306, 311, 313, 316, 317, 318, 319, 320, 323, 325, 326, 327, 331, 333, 334, 339, 342, 346, 349, 350, 356, 359, 361, 362, 364, 366, 367, 369, 370, 372, 373, 374, 378, 380, 381, 382, 389, 394, 395, 399, 400, 401, 403, 406, 409, 416, 419, 421, 422, 423, 424, 425, 426, 429, 431, 432, 434, 437, 438, 439, 442, 444, 450, 451, 452, 455, 456, 458, 460, 461, 468, 470, 472, 473, 478, 479, 480, 488, 489, 491, 493, 494, 496, 497, 498, 502, 503, 504, 507, 511, 512, 515, 516, 517, 519, 521, 523, 525, 526, 527, 531, 532, 533, 534, 540, 541, 543, 546, 547, 549, 551, 553, 556, 559, 561, 566, 567, 568, 569, 570, 574, 576, 578, 581, 584, 585, 587, 588, 589, 594, 598, 601, 603, 604, 605, 606, 607, 611, 613, 614, 617, 622, 625, 626, 627, 629, 634, 635, 636, 638, 639, 640, 642, 645, 646, 648, 652, 654, 655, 656, 658, 659, 661, 671, 674, 680, 682, 683, 686, 687, 688, 692, 695, 696, 699, 701, 702, 703, 705, 706, 709, 711, 717, 718, 722, 726, 727, 728, 730, 731, 732, 735, 738, 739, 742, 747, 749, 750, 752, 753, 755, 759, 761, 762, 764, 769, 770, 773, 774, 778, 782, 783, 784, 785, 790, 801, 802, 804, 808, 810, 813, 814, 815, 820, 822, 826, 827, 828, 835, 839, 841, 844, 847, 850, 854, 855, 857, 860, 861, 862, 863, 864, 866, 867, 868, 871, 876, 880, 881, 887, 889, 891, 892, 893, 897, 898, 900, 902, 903, 909, 911, 912, 915, 919, 920, 926, 930, 932, 933, 936, 938, 941, 943, 944, 946, 948, 951, 952, 956, 957, 958, 959, 961, 962, 964, 967, 968, 975, 976, 
978, 979, 982, 983, 988, 990, 993, 997, 999]\n" ], "name": "stdout" } @@ -536,11 +536,11 @@ "metadata": { "id": "kkchtWxy_1hb", "colab_type": "code", + "outputId": "bc52f15f-2477-4d2b-f859-50ab6b3304d5", "colab": { "base_uri": "https://localhost:8080/", "height": 85 - }, - "outputId": "c92f822f-3b11-46d1-b2ce-3b491dd7b45e" + } }, "source": [ "def in_range(mymin,mymax):\n", @@ -559,14 +559,14 @@ "print(\"Number of Entries passing F1:\", len(where(data,F1)))\n", "print(\"Number of Entries passing F2:\", len(where(data,F2)))" ], - "execution_count": 14, + "execution_count": 138, "outputs": [ { "output_type": "stream", "text": [ "True True False False False\n", "False False True True False\n", - "Number of Entries passing F1: 482\n", + "Number of Entries passing F1: 461\n", "Number of Entries passing F2: 0\n" ], "name": "stdout" @@ -623,11 +623,11 @@ "metadata": { "id": "3AhyJZjf_1hj", "colab_type": "code", + "outputId": "d2d12fa2-9ecb-4f72-fae8-955c52fed6bd", "colab": { "base_uri": "https://localhost:8080/", "height": 153 - }, - "outputId": "b6717252-32eb-4c61-ee9d-ed959ff40fee" + } }, "source": [ "# Test your solution\n", @@ -648,7 +648,7 @@ "print(\"Number of Entries passing EQ:\", len(where(data,EQ)))\n", "print(\"Number of Entries passing D:\", len(where(data,D)))" ], - "execution_count": 16, + "execution_count": 140, "outputs": [ { "output_type": "stream", @@ -657,8 +657,8 @@ "False True False True False False\n", "Number of Entries passing E: 0\n", "Number of Entries passing O: 0\n", - "Number of Entries passing G: 482\n", - "Number of Entries passing L: 518\n", + "Number of Entries passing G: 461\n", + "Number of Entries passing L: 539\n", "Number of Entries passing EQ: 1\n", "Number of Entries passing D: 1\n" ], @@ -681,11 +681,11 @@ "metadata": { "id": "VlWCyUXL_1hr", "colab_type": "code", + "outputId": "b4fc8f30-8ccd-4c30-8363-5cf121cfc1ee", "colab": { "base_uri": "https://localhost:8080/", "height": 119 - }, - "outputId": 
"c6ec9f60-850f-4397-ba0b-d568ef2af42d" + } }, "source": [ "### BEGIN SOLUTION\n", @@ -698,15 +698,15 @@ "print(\"Number of Entries passing D:\", sum(map(lambda x: x % data[50] == 0, data)))\n", "### END SOLUTION" ], - "execution_count": 17, + "execution_count": 141, "outputs": [ { "output_type": "stream", "text": [ "Number of Entries passing E: 0\n", "Number of Entries passing O: 0\n", - "Number of Entries passing G: 482\n", - "Number of Entries passing L: 518\n", + "Number of Entries passing G: 461\n", + "Number of Entries passing L: 539\n", "Number of Entries passing EQ: 1\n", "Number of Entries passing D: 1\n" ], @@ -770,11 +770,11 @@ "metadata": { "id": "IOfuFLBX_1h6", "colab_type": "code", + "outputId": "2a9dd381-1f7f-4ae0-f0f4-13b584dbd89a", "colab": { "base_uri": "https://localhost:8080/", "height": 1000 - }, - "outputId": "7dfd5e18-c0d4-48ca-ad30-f379131d61cc" + } }, "source": [ "# A test function\n", @@ -787,51 +787,51 @@ "draw_histogram(generate_function(lambda x: -((x - 5)**2) + 5 ** 2, x_min, x_max, N), n_bins, x_min, x_max, \"#\", 50)\n", "draw_histogram(generate_function(test_func, x_min, x_max, N), n_bins, x_min, x_max, \"#\", 50)" ], - "execution_count": 242, + "execution_count": 143, "outputs": [ { "output_type": "stream", "text": [ - "[ 0.000, 0.500] : ###\n", - "[ 0.500, 1.000] : ###############\n", + "[ 0.000, 0.500] : ##\n", + "[ 0.500, 1.000] : #############\n", "[ 1.000, 1.500] : ###################\n", - "[ 1.500, 2.000] : ######################\n", - "[ 2.000, 2.500] : ############################\n", - "[ 2.500, 3.000] : ####################################\n", - "[ 3.000, 3.500] : ###################################\n", - "[ 3.500, 4.000] : ##########################################\n", - "[ 4.000, 4.500] : ########################################\n", - "[ 4.500, 5.000] : ################################################\n", - "[ 5.000, 5.500] : ##################################################\n", + "[ 1.500, 2.000] : 
##################\n", + "[ 2.000, 2.500] : ##########################\n", + "[ 2.500, 3.000] : #################################\n", + "[ 3.000, 3.500] : ########################################\n", + "[ 3.500, 4.000] : #########################################\n", + "[ 4.000, 4.500] : ###########################################\n", + "[ 4.500, 5.000] : ##################################################\n", + "[ 5.000, 5.500] : ###########################################\n", "[ 5.500, 6.000] : #########################################\n", - "[ 6.000, 6.500] : #######################################\n", + "[ 6.000, 6.500] : ########################################\n", "[ 6.500, 7.000] : ###################################\n", - "[ 7.000, 7.500] : #################################\n", - "[ 7.500, 8.000] : ###############################\n", - "[ 8.000, 8.500] : #######################\n", - "[ 8.500, 9.000] : #################\n", - "[ 9.000, 9.500] : ###########\n", - "[ 9.500, 10.000] : ####\n", + "[ 7.000, 7.500] : #######################################\n", + "[ 7.500, 8.000] : #########################\n", + "[ 8.000, 8.500] : ########################\n", + "[ 8.500, 9.000] : ##############\n", + "[ 9.000, 9.500] : #################\n", + "[ 9.500, 10.000] : ###\n", "[ 0.000, 0.500] : ######\n", - "[ 0.500, 1.000] : ########\n", - "[ 1.000, 1.500] : ######\n", - "[ 1.500, 2.000] : ###########\n", - "[ 2.000, 2.500] : ##############\n", - "[ 2.500, 3.000] : #############\n", - "[ 3.000, 3.500] : #################\n", - "[ 3.500, 4.000] : ###################\n", - "[ 4.000, 4.500] : ####################\n", - "[ 4.500, 5.000] : ##########################\n", - "[ 5.000, 5.500] : ##################\n", + "[ 0.500, 1.000] : ###########\n", + "[ 1.000, 1.500] : ############\n", + "[ 1.500, 2.000] : #########\n", + "[ 2.000, 2.500] : #############\n", + "[ 2.500, 3.000] : ################\n", + "[ 3.000, 3.500] : #######################\n", + "[ 3.500, 4.000] : 
##########################\n", + "[ 4.000, 4.500] : ##########################\n", + "[ 4.500, 5.000] : ################################\n", + "[ 5.000, 5.500] : ###########################\n", "[ 5.500, 6.000] : ################################\n", - "[ 6.000, 6.500] : ##########################\n", - "[ 6.500, 7.000] : ##################################\n", - "[ 7.000, 7.500] : #############################\n", - "[ 7.500, 8.000] : #######################################\n", - "[ 8.000, 8.500] : #################################\n", - "[ 8.500, 9.000] : ##############################################\n", - "[ 9.000, 9.500] : ##################################################\n", - "[ 9.500, 10.000] : ######################################\n" + "[ 6.000, 6.500] : ########################################\n", + "[ 6.500, 7.000] : ###########################################\n", + "[ 7.000, 7.500] : ##################################\n", + "[ 7.500, 8.000] : ###################################\n", + "[ 8.000, 8.500] : ############################################\n", + "[ 8.500, 9.000] : ###############################################\n", + "[ 9.000, 9.500] : ##############################################\n", + "[ 9.500, 10.000] : ##################################################\n" ], "name": "stdout" }, @@ -839,26 +839,26 @@ "output_type": "execute_result", "data": { "text/plain": [ - "([14,\n", - " 18,\n", - " 14,\n", + "([12,\n", + " 19,\n", + " 21,\n", + " 17,\n", " 23,\n", - " 30,\n", - " 28,\n", - " 35,\n", - " 39,\n", - " 41,\n", - " 53,\n", - " 38,\n", - " 65,\n", - " 54,\n", + " 29,\n", + " 40,\n", + " 45,\n", + " 45,\n", + " 56,\n", + " 48,\n", + " 56,\n", " 70,\n", - " 60,\n", - " 79,\n", - " 68,\n", - " 93,\n", - " 101,\n", - " 77],\n", + " 74,\n", + " 59,\n", + " 61,\n", + " 77,\n", + " 82,\n", + " 80,\n", + " 86],\n", " [0,\n", " 0.5,\n", " 1.0,\n", @@ -885,7 +885,7 @@ "metadata": { "tags": [] }, - "execution_count": 242 + "execution_count": 143 } ] }, 
@@ -904,11 +904,11 @@ "metadata": { "id": "wnZFkATK_1h_", "colab_type": "code", + "outputId": "befea955-38c3-416e-d872-97c4511b2a6a", "colab": { "base_uri": "https://localhost:8080/", "height": 819 - }, - "outputId": "8c649788-7438-4645-9e49-3df31267f58c" + } }, "source": [ "import math\n", @@ -923,7 +923,7 @@ "g2=gaussian(10,3)\n", "\n", "g1_data = generate_function(g1, -4, 4, 1000)\n", - "g2_data = generate_function(g2, 0, 20, 10000)\n", + "g2_data = generate_function(g2, 0, 20, 1000)\n", "print(g1_data)\n", "print(g2_data)\n", "draw_histogram(g1_data, 20)\n", @@ -933,57 +933,57 @@ "print(\"g2 mean = \" + str(mean(g2_data)))\n", "print(\"g2 variance = \" + str(variance(g2_data)))" ], - "execution_count": 224, + "execution_count": 144, "outputs": [ { "output_type": "stream", "text": [ - "[-0.4079999999999999, 0.6799999999999997, 0.08000000000000007, 0.6080000000000005, 0.9359999999999999, 0.24800000000000022, 0.16800000000000015, 1.056, 0.5840000000000005, 0.2240000000000002, 0.6479999999999997, -0.13600000000000012, 0.7439999999999998, -1.2319999999999998, 1.5200000000000005, 1.2320000000000002, 0.3280000000000003, -0.31199999999999983, -0.3599999999999999, -0.7999999999999998, 0.08000000000000007, -0.3919999999999999, 0.5120000000000005, 3.048, -0.9119999999999999, 1.7359999999999998, -1.7199999999999998, 0.8639999999999999, -0.944, -0.3919999999999999, 0.4480000000000004, 0.1280000000000001, -1.424, -0.7199999999999998, -0.7439999999999998, 1.056, 0.37600000000000033, 1.2960000000000003, 1.08, 1.2320000000000002, -1.6400000000000001, -1.488, -1.2319999999999998, 0.6399999999999997, 0.39200000000000035, 1.2000000000000002, -0.5680000000000001, 1.2160000000000002, -1.616, -0.31999999999999984, 0.27200000000000024, 1.04, 1.6559999999999997, -1.6880000000000002, -1.384, 0.7279999999999998, -1.616, -0.7119999999999997, 0.4240000000000004, -1.016, -0.08800000000000008, 0.28000000000000025, -0.984, -0.7759999999999998, 0.4560000000000004, -0.3839999999999999, 
0.6159999999999997, -0.35199999999999987, 0.7199999999999998, 0.8479999999999999, 0.6799999999999997, 0.9199999999999999, -0.03200000000000003, -1.1680000000000001, 2.4480000000000004, -1.552, 0.6319999999999997, 1.1840000000000002, -0.9119999999999999, -1.2799999999999998, 1.8559999999999999, 0.29600000000000026, -0.32799999999999985, 1.3280000000000003, -0.040000000000000036, 1.2800000000000002, -1.056, 0.8879999999999999, 0.4240000000000004, -0.31199999999999983, 0.28000000000000025, 0.26400000000000023, -1.08, 1.104, -1.624, 2.808, 0.39200000000000035, -0.7119999999999997, -0.6000000000000001, 0.5520000000000005, 0.4320000000000004, 0.984, -0.02400000000000002, -1.064, 0.9119999999999999, 0.6399999999999997, -0.44799999999999995, -0.3759999999999999, -2.12, 0.7199999999999998, 0.4400000000000004, -1.1280000000000001, -0.8479999999999999, -1.6400000000000001, 1.3040000000000003, -0.2799999999999998, 0.8559999999999999, 0.3520000000000003, 1.2960000000000003, -0.8079999999999998, -0.22399999999999975, -0.6080000000000001, 1.944, -0.6240000000000001, 0.3440000000000003, -1.3279999999999998, -0.20799999999999974, -1.88, -0.7359999999999998, 0.16800000000000015, 2.6080000000000005, 0.992, -1.904, 0.3520000000000003, 0.5680000000000005, 1.7599999999999998, -0.24799999999999978, -1.448, -0.7519999999999998, 2.152, -1.888, -1.1280000000000001, 1.6959999999999997, 1.4640000000000004, 2.5760000000000005, 0.0, 0.16800000000000015, 1.1680000000000001, -0.1280000000000001, -2.344, -0.504, 0.7919999999999998, -0.8079999999999998, 0.3600000000000003, -0.3599999999999999, -0.7999999999999998, -0.7119999999999997, -1.2399999999999998, 0.03200000000000003, 1.008, -1.2719999999999998, -0.2639999999999998, -0.6480000000000001, -0.7119999999999997, 0.3440000000000003, -0.7759999999999998, 1.8479999999999999, 0.09600000000000009, 0.03200000000000003, 0.968, 0.08800000000000008, -0.3759999999999999, -1.456, -0.9359999999999999, 0.8479999999999999, 0.38400000000000034, 
1.2400000000000002, -0.10400000000000009, -0.944, -0.07200000000000006, -0.2639999999999998, 0.4720000000000004, 0.9119999999999999, 0.05600000000000005, -1.3119999999999998, -1.48, 0.040000000000000036, -0.52, 0.5600000000000005, -0.016000000000000014, -1.8319999999999999, 0.19200000000000017, 0.7599999999999998, 0.09600000000000009, -2.176, -0.7199999999999998, 1.096, -0.10400000000000009, -1.7119999999999997, -0.5920000000000001, -0.08800000000000008, 0.02400000000000002, 1.5680000000000005, -0.46399999999999997, -0.15200000000000014, -0.1759999999999997, -0.19199999999999973, 1.1360000000000001, 0.8159999999999998, 1.1760000000000002, 0.5760000000000005, -1.6480000000000001, -0.31199999999999983, -0.45599999999999996, 0.5840000000000005, 0.8719999999999999, -0.52, 1.1520000000000001, 0.3520000000000003, 1.888, -0.43999999999999995, 0.28800000000000026, -1.2479999999999998, -0.016000000000000014, 1.6799999999999997, 0.976, -1.608, 1.064, -0.504, -0.7119999999999997, 2.936, 1.2080000000000002, -0.22399999999999975, 0.3280000000000003, -0.6560000000000001, 0.14400000000000013, -0.32799999999999985, -0.976, -0.5840000000000001, 0.7599999999999998, -1.2639999999999998, -0.19999999999999973, 0.5360000000000005, -1.6400000000000001, -0.1120000000000001, 0.06400000000000006, 1.6240000000000006, 1.056, -1.3439999999999999, 0.0, 0.2560000000000002, 0.4720000000000004, -0.496, 0.1120000000000001, -0.2559999999999998, -0.7599999999999998, 1.4640000000000004, -0.4079999999999999, 0.2160000000000002, 0.02400000000000002, 1.3280000000000003, 1.088, -0.3919999999999999, -0.4159999999999999, 0.008000000000000007, -0.952, 0.016000000000000014, -1.6880000000000002, -0.552, -0.4159999999999999, -0.20799999999999974, -0.23999999999999977, -0.6160000000000001, -0.20799999999999974, 1.1600000000000001, -0.7119999999999997, 2.216, -0.42399999999999993, -0.31999999999999984, -0.09600000000000009, 0.6080000000000005, -0.6480000000000001, 1.4880000000000004, 0.3200000000000003, 
0.7039999999999997, -0.3599999999999999, -1.392, 0.4480000000000004, 1.6319999999999997, -1.6720000000000002, -1.2399999999999998, 0.4640000000000004, -1.6480000000000001, -0.528, 0.9199999999999999, -1.392, -0.552, -0.6400000000000001, 0.9039999999999999, 0.8559999999999999, -0.040000000000000036, -0.040000000000000036, -1.104, 0.20800000000000018, -1.3279999999999998, 1.08, -0.3679999999999999, -1.6880000000000002, 0.2320000000000002, -0.34399999999999986, 0.6639999999999997, 0.37600000000000033, -0.528, -0.7199999999999998, -0.472, -2.472, -0.07200000000000006, -0.7279999999999998, -0.1759999999999997, -0.7119999999999997, 1.6719999999999997, 0.4560000000000004, -2.152, 0.008000000000000007, 0.20000000000000018, 1.3920000000000003, -0.8159999999999998, 0.02400000000000002, 0.27200000000000024, -0.48, -0.9359999999999999, 0.7439999999999998, 0.1280000000000001, 0.9039999999999999, -1.96, -0.6320000000000001, -0.31999999999999984, 0.6639999999999997, -0.1200000000000001, -0.544, -0.31999999999999984, 0.07200000000000006, 1.112, -1.2159999999999997, 0.26400000000000023, -1.1919999999999997, -1.424, 0.1280000000000001, -0.31199999999999983, -1.7919999999999998, 0.6879999999999997, 0.13600000000000012, 0.6159999999999997, -0.6560000000000001, 0.016000000000000014, -0.6480000000000001, 1.2000000000000002, -1.6480000000000001, 0.4480000000000004, -0.552, -0.3999999999999999, 0.8959999999999999, -1.016, 1.2400000000000002, 0.4560000000000004, 1.4480000000000004, -0.3759999999999999, 0.1120000000000001, -0.1679999999999997, 0.37600000000000033, 1.112, -1.04, 0.6399999999999997, 0.6479999999999997, -0.10400000000000009, -0.5920000000000001, -0.8639999999999999, -0.5840000000000001, -0.6000000000000001, 0.05600000000000005, -1.3439999999999999, -0.34399999999999986, 0.8719999999999999, 1.04, -2.064, -0.6160000000000001, -0.8319999999999999, -1.04, -0.23999999999999977, -2.584, 1.3200000000000003, 0.3600000000000003, 1.3040000000000003, 0.5040000000000004, 
-1.1680000000000001, 0.09600000000000009, -0.7519999999999998, -0.24799999999999978, 0.8479999999999999, -2.6879999999999997, -1.2319999999999998, 0.7039999999999997, -1.1760000000000002, 0.3440000000000003, 0.04800000000000004, 1.5120000000000005, -0.16000000000000014, 0.37600000000000033, -2.2880000000000003, -1.1600000000000001, -0.34399999999999986, -0.3839999999999999, -0.7439999999999998, -0.22399999999999975, 0.2560000000000002, 1.096, -0.56, -0.10400000000000009, -0.472, 0.16800000000000015, 1.016, 1.1600000000000001, -1.104, 0.4800000000000004, 1.4880000000000004, -0.008000000000000007, 0.2560000000000002, 0.016000000000000014, -0.4159999999999999, -1.3599999999999999, -1.0, -0.9119999999999999, 0.5120000000000005, -2.048, 1.016, -2.088, 1.6000000000000005, 1.1440000000000001, 1.3920000000000003, -1.12, 0.29600000000000026, -0.992, -2.488, 0.26400000000000023, -0.7759999999999998, 0.5120000000000005, 0.40800000000000036, 0.8639999999999999, -1.048, -1.7679999999999998, 0.040000000000000036, -0.2559999999999998, -0.04800000000000004, -0.8719999999999999, 1.4800000000000004, -0.8959999999999999, -0.02400000000000002, -0.7679999999999998, -0.6480000000000001, 1.4240000000000004, -0.19999999999999973, 0.5920000000000005, 1.112, -0.016000000000000014, 0.7519999999999998, -0.2879999999999998, -1.6400000000000001, 0.7279999999999998, -0.6879999999999997, 0.5920000000000005, 0.6159999999999997, -3.448, 0.016000000000000014, -0.992, 0.040000000000000036, -0.8799999999999999, 0.952, 2.3920000000000003, 0.6399999999999997, 0.992, 0.40000000000000036, 0.5840000000000005, -1.032, 0.39200000000000035, 0.03200000000000003, 0.09600000000000009, -0.1200000000000001, 0.4400000000000004, 0.8159999999999998, -0.08800000000000008, -0.33599999999999985, 0.9279999999999999, 0.15200000000000014, -1.1919999999999997, -0.42399999999999993, 0.30400000000000027, -0.09600000000000009, 3.216, 0.17600000000000016, 1.4320000000000004, 0.6239999999999997, -0.22399999999999975, 
-1.7439999999999998, -1.008, 0.2400000000000002, -0.1280000000000001, 1.2000000000000002, -0.23999999999999977, 0.5600000000000005, -1.7679999999999998, 0.1200000000000001, -0.6719999999999997, 0.4720000000000004, 0.4400000000000004, -1.7999999999999998, 1.3200000000000003, 1.3440000000000003, 0.5360000000000005, 0.14400000000000013, 1.7119999999999997, -1.2879999999999998, 1.7759999999999998, 0.3280000000000003, -0.24799999999999978, 0.9279999999999999, -1.12, -0.496, -0.6799999999999997, 0.6639999999999997, 1.6879999999999997, -1.408, -0.488, -0.7919999999999998, 0.7439999999999998, 1.1280000000000001, -0.944, 0.9039999999999999, -2.232, -0.8399999999999999, -0.2799999999999998, 0.6479999999999997, -0.5760000000000001, 0.24800000000000022, -0.19999999999999973, 1.3440000000000003, -1.064, -1.016, -1.3359999999999999, -0.6640000000000001, -1.6400000000000001, 0.4800000000000004, -1.424, -1.2639999999999998, 1.032, -0.968, 1.2400000000000002, -0.06400000000000006, 0.04800000000000004, -1.12, 0.48800000000000043, -0.08000000000000007, 0.09600000000000009, 0.7919999999999998, 0.49600000000000044, -0.22399999999999975, -0.31199999999999983, 1.3200000000000003, -0.992, 0.3680000000000003, 1.4640000000000004, -0.4079999999999999, -0.976, -2.208, -0.3039999999999998, -1.512, -0.14400000000000013, 2.216, 0.30400000000000027, -0.7439999999999998, 0.9039999999999999, 0.5360000000000005, 0.5280000000000005, 0.7359999999999998, -0.488, -0.13600000000000012, -0.5680000000000001, -1.1999999999999997, -0.48, -1.472, -0.15200000000000014, -2.2, 0.96, -0.19199999999999973, 0.7439999999999998, 2.112, -1.1520000000000001, -0.6320000000000001, 0.040000000000000036, 0.07200000000000006, 2.016, 1.2800000000000002, 0.03200000000000003, 0.3360000000000003, -0.944, -1.088, 0.2320000000000002, 2.096, 1.1840000000000002, 1.4240000000000004, 0.4480000000000004, -2.6639999999999997, -1.08, 0.07200000000000006, 1.08, -0.5840000000000001, -0.43199999999999994, -0.9039999999999999, 
-0.016000000000000014, -0.35199999999999987, 1.1440000000000001, -1.544, -1.1760000000000002, 1.12, 1.2480000000000002, -0.7759999999999998, 0.5760000000000005, 0.1120000000000001, 0.06400000000000006, -0.6719999999999997, -0.43999999999999995, 1.064, 0.7759999999999998, -0.43199999999999994, 0.17600000000000016, 0.4400000000000004, 0.6799999999999997, -0.6879999999999997, 1.88, -1.8399999999999999, -2.12, 0.08800000000000008, 0.4720000000000004, -1.2799999999999998, 0.5120000000000005, 0.2400000000000002, 0.05600000000000005, -1.048, 0.8159999999999998, 0.3440000000000003, 0.49600000000000044, 0.37600000000000033, 0.992, -0.22399999999999975, 0.984, 0.6639999999999997, 1.96, 0.7519999999999998, -0.2719999999999998, 0.8799999999999999, 0.5360000000000005, -0.21599999999999975, -0.06400000000000006, -0.21599999999999975, -0.992, -0.1200000000000001, 0.07200000000000006, -1.968, -0.7439999999999998, -0.05600000000000005, -0.3679999999999999, -1.384, -0.5760000000000001, -0.1200000000000001, -0.4159999999999999, -0.23199999999999976, -0.19199999999999973, -1.88, 1.6879999999999997, -0.10400000000000009, 1.7519999999999998, 1.1280000000000001, 2.4480000000000004, 0.20800000000000018, -0.2559999999999998, 0.2320000000000002, 0.4640000000000004, 0.7919999999999998, -0.512, -0.7279999999999998, 0.30400000000000027, 1.5040000000000004, -0.3039999999999998, -0.7279999999999998, 1.4480000000000004, 0.1200000000000001, -0.544, -0.976, -0.7439999999999998, 1.2320000000000002, 0.6879999999999997, -0.23999999999999977, -0.528, 0.3120000000000003, 1.8239999999999998, 0.8959999999999999, 0.37600000000000033, -0.45599999999999996, -1.2719999999999998, 0.6799999999999997, 1.3680000000000003, 1.1600000000000001, 0.1120000000000001, -1.624, 0.6479999999999997, 0.04800000000000004, -0.1200000000000001, 0.07200000000000006, 0.6959999999999997, 0.2240000000000002, -1.2799999999999998, 0.8479999999999999, -1.2559999999999998, 1.2080000000000002, 0.1280000000000001, -0.552, 
-0.7279999999999998, 0.06400000000000006, -0.3039999999999998, -1.2799999999999998, 0.2160000000000002, 0.04800000000000004, 1.5760000000000005, -1.4, -1.2319999999999998, 1.8479999999999999, 0.7199999999999998, -0.5680000000000001, 1.5520000000000005, -0.2879999999999998, 0.4640000000000004, -1.8399999999999999, 0.7759999999999998, -0.528, 0.5680000000000005, -0.6320000000000001, -0.8479999999999999, -0.008000000000000007, -1.2319999999999998, 1.6319999999999997, -0.6080000000000001, -0.512, 0.2160000000000002, 1.7679999999999998, 0.16000000000000014, -0.52, -0.8239999999999998, 0.16000000000000014, -1.44, -1.0, -1.592, -0.02400000000000002, -1.592, -0.2959999999999998, 0.968, 0.8799999999999999, 0.17600000000000016, 1.2640000000000002, 0.5680000000000005, 1.1440000000000001, -0.3919999999999999, 0.2160000000000002, 0.40000000000000036, 0.07200000000000006, -0.8239999999999998, -1.1999999999999997, 0.8399999999999999, -1.1919999999999997, -1.528, -0.7119999999999997, 1.2000000000000002, 0.38400000000000034, -1.4, -1.576, 2.6480000000000006, -0.21599999999999975, 0.4320000000000004, 0.40800000000000036, -0.1280000000000001, 2.912, 1.5440000000000005, 0.09600000000000009, -1.528, -0.2559999999999998, 0.2160000000000002, 0.13600000000000012, -2.064, 0.6479999999999997, -0.9279999999999999, -0.6560000000000001, 0.5040000000000004, 0.2160000000000002, -0.9119999999999999, -0.19999999999999973, -0.23199999999999976, -0.016000000000000014, -0.6080000000000001, 2.4960000000000004, 1.5280000000000005, 0.2320000000000002, -2.808, 1.1360000000000001, -0.15200000000000014, -0.1280000000000001, 0.20000000000000018, -0.08000000000000007, -0.3759999999999999, 0.1120000000000001, 0.5600000000000005, -0.488, -0.43999999999999995, -0.2959999999999998, 0.6639999999999997, 0.2240000000000002, 0.14400000000000013, -1.8159999999999998, 0.96, -1.2399999999999998, -0.2559999999999998, 0.3280000000000003, 1.2880000000000003, 2.4240000000000004, -1.104, -0.18399999999999972, -2.008, 
0.16000000000000014, -0.5760000000000001, 0.37600000000000033, -0.43999999999999995, -0.19199999999999973, -0.16000000000000014, 0.6159999999999997, 0.24800000000000022, -1.52, 0.26400000000000023, -0.19199999999999973, 0.7519999999999998, -1.1360000000000001, 0.18400000000000016, 1.1920000000000002, -0.1200000000000001, -0.6080000000000001, -0.31199999999999983, 0.7279999999999998, 0.17600000000000016, -1.016, -1.032, 0.8559999999999999, 1.1520000000000001, 0.7519999999999998, 1.4240000000000004, -0.08800000000000008, 0.02400000000000002, 0.30400000000000027, 1.3200000000000003, 1.5120000000000005, 0.27200000000000024, 1.8239999999999998, -0.10400000000000009, 1.2080000000000002, 1.3520000000000003, -2.424, -0.1759999999999997, 0.5920000000000005, 1.4560000000000004, 1.1520000000000001, -0.5760000000000001, 0.19200000000000017, -1.032, -0.09600000000000009, -1.12, -1.8639999999999999, 1.6559999999999997, -0.32799999999999985, 0.6319999999999997, 0.16800000000000015, 0.8959999999999999, -1.416, 2.224, -0.9039999999999999, 1.1440000000000001, -0.8079999999999998, -0.16000000000000014, -0.504, -0.544, -0.43199999999999994, -0.04800000000000004, -0.7519999999999998, 0.4320000000000004, 0.40000000000000036, -0.9359999999999999, -1.2079999999999997, -0.45599999999999996, 0.3440000000000003, 1.3120000000000003, 0.20800000000000018, -2.464, -0.1120000000000001, -2.904, -0.8399999999999999, 0.7439999999999998, -2.592, 0.4480000000000004, -0.31999999999999984, 0.4720000000000004, -0.6160000000000001, -0.472, -0.24799999999999978, -0.44799999999999995, -1.032, 0.944, -0.7199999999999998, 1.912, -0.2879999999999998, 0.6639999999999997, 0.1120000000000001, -1.7679999999999998, -0.952, 0.2160000000000002, -1.7599999999999998, 0.2160000000000002, -1.112, -1.2559999999999998, 0.49600000000000044, -0.44799999999999995, -0.8399999999999999, -1.6480000000000001, 1.4720000000000004, 0.3360000000000003, 0.28000000000000025, 1.3360000000000003, 0.4560000000000004, 0.03200000000000003, 
0.18400000000000016, -0.05600000000000005, 0.20000000000000018, 1.3840000000000003, -0.08000000000000007, 0.37600000000000033, 2.4720000000000004, 1.1920000000000002, -0.33599999999999985, 2.16, 1.3920000000000003, 0.3680000000000003, -1.6959999999999997, 0.976, 1.7199999999999998, -0.31999999999999984, 1.048, -0.3679999999999999, -0.7839999999999998, 0.8639999999999999, -0.504, 0.06400000000000006, -1.1919999999999997, -1.0, 0.8319999999999999, 0.2400000000000002, -1.48, -0.008000000000000007, 0.5040000000000004, 0.37600000000000033, 0.7279999999999998, -0.504, -0.552, -1.064, 0.7599999999999998, 0.3360000000000003, 2.184, 0.4240000000000004, 0.07200000000000006, -0.528, 0.5520000000000005, -0.56, 0.41600000000000037, -0.6560000000000001, 0.15200000000000014, -0.5840000000000001, 0.6239999999999997, 0.41600000000000037, 1.016, 0.28000000000000025, -1.8079999999999998, 1.4720000000000004, -0.06400000000000006, -1.016, 0.952, -0.5840000000000001, 1.08, -2.368]\n", - "[4.678, 10.158, 11.748000000000001, 17.282, 10.602, 6.242, 13.378, 9.016, 7.244, 10.58, 6.756, 10.318, 9.568, 13.392, 7.796, 8.59, 8.642, 6.876, 10.524000000000001, 9.558, 13.156, 6.796, 13.794, 11.572000000000001, 3.138, 10.120000000000001, 12.302, 14.126, 8.592, 6.28, 3.374, 3.484, 15.804, 9.586, 13.51, 12.876, 7.84, 9.782, 5.2780000000000005, 7.606, 12.276, 7.072, 8.522, 15.908, 12.394, 5.538, 10.836, 12.108, 6.678, 9.168000000000001, 11.212, 6.316, 4.554, 9.056000000000001, 8.752, 12.828, 8.578, 11.34, 10.602, 6.94, 13.558, 10.624, 11.364, 4.8260000000000005, 9.700000000000001, 7.418, 12.936, 6.066, 14.062000000000001, 9.244, 8.8, 7.898000000000001, 10.916, 7.768, 10.006, 13.14, 6.206, 8.004, 11.642, 10.94, 10.84, 9.104000000000001, 12.91, 10.352, 7.6240000000000006, 7.258, 12.714, 9.788, 9.466000000000001, 6.024, 5.636, 12.540000000000001, 7.958, 12.836, 12.620000000000001, 8.69, 8.896, 9.132, 13.028, 6.628, 8.006, 10.142, 14.450000000000001, 8.438, 9.362, 6.872, 5.172, 10.874, 3.592, 13.692, 
14.206, 10.552, 10.16, 12.394, 12.716000000000001, 6.498, 4.93, 6.2620000000000005, 10.058, 4.648, 13.92, 9.194, 10.648, 7.126, 8.056000000000001, 6.432, 5.308, 10.370000000000001, 7.726, 11.748000000000001, 11.648, 11.672, 5.368, 13.158, 6.602, 2.516, 10.304, 12.142, 10.886000000000001, 11.872, 9.996, 13.968, 9.450000000000001, 9.264, 6.878, 16.27, 13.666, 10.732000000000001, 9.068, 7.324, 11.68, 8.102, 11.904, 7.344, 8.006, 13.026, 8.620000000000001, 8.394, 13.126, 9.724, 8.782, 8.816, 10.94, 10.162, 10.612, 13.848, 11.116, 13.164, 11.128, 10.162, 7.816, 5.3660000000000005, 9.702, 11.886000000000001, 12.294, 10.41, 8.064, 2.572, 6.38, 6.106, 15.016, 11.562, 14.304, 9.15, 9.436, 5.45, 7.47, 8.218, 3.84, 9.92, 10.528, 4.22, 12.004, 7.946, 8.17, 8.196, 6.898000000000001, 12.978, 8.668000000000001, 5.18, 7.488, 10.21, 9.0, 12.356, 8.49, 8.016, 9.708, 14.912, 10.618, 11.276, 9.724, 7.694, 9.518, 11.568, 15.294, 12.944, 15.048, 8.814, 9.186, 12.516, 9.306000000000001, 8.286, 12.370000000000001, 9.732, 13.334, 11.236, 14.152000000000001, 3.928, 7.484, 11.794, 11.014, 8.984, 6.208, 6.788, 9.354000000000001, 14.21, 6.03, 11.156, 11.612, 9.524000000000001, 11.366, 11.218, 12.620000000000001, 6.692, 7.256, 16.576, 12.666, 8.58, 6.844, 11.742, 10.322000000000001, 9.542, 11.81, 12.158, 14.522, 15.132, 6.016, 8.628, 10.646, 11.82, 5.666, 10.216000000000001, 6.106, 7.86, 11.204, 8.768, 9.304, 6.988, 12.450000000000001, 6.798, 9.334, 8.098, 12.392, 11.446, 11.074, 8.23, 11.77, 7.332, 9.874, 10.69, 7.788, 10.498, 6.1000000000000005, 9.972, 3.158, 11.546, 15.226, 9.866, 10.458, 10.826, 13.488, 10.692, 11.154, 6.494, 14.69, 14.178, 9.722, 7.932, 9.212, 7.916, 5.13, 16.188, 3.5140000000000002, 9.832, 9.27, 13.002, 9.752, 8.224, 13.21, 17.21, 10.114, 9.646, 7.7940000000000005, 12.162, 2.162, 7.37, 12.112, 9.912, 8.132, 8.718, 9.626, 13.146, 9.704, 10.446, 12.094, 8.122, 14.906, 7.498, 12.508000000000001, 13.72, 8.784, 7.798, 9.966000000000001, 6.7620000000000005, 3.462, 
4.0040000000000004, 15.952, 11.822000000000001, 6.642, 6.732, 8.334, 7.532, 11.458, 5.7540000000000004, 15.9, 11.168000000000001, 12.684000000000001, 9.27, 9.052, 5.336, 9.568, 11.76, 9.978, 7.296, 11.258000000000001, 9.134, 8.404, 12.058, 8.654, 15.538, 12.504, 14.054, 9.828, 5.51, 13.530000000000001, 7.408, 12.128, 8.31, 13.162, 17.2, 9.714, 10.806000000000001, 10.492, 12.234, 9.024000000000001, 9.594, 9.16, 13.994, 9.784, 8.26, 3.8280000000000003, 11.694, 12.83, 9.24, 8.798, 9.424, 10.572000000000001, 11.4, 14.448, 13.222, 3.138, 9.794, 8.088000000000001, 9.028, 15.94, 16.48, 9.934000000000001, 8.376, 4.984, 8.46, 8.128, 10.950000000000001, 13.366, 11.566, 11.77, 9.72, 5.526, 2.62, 9.616, 7.0520000000000005, 13.97, 9.356, 8.982, 6.626, 7.51, 11.334, 5.8260000000000005, 11.308, 5.8740000000000006, 6.382000000000001, 5.248, 12.862, 8.352, 9.912, 7.08, 11.422, 5.96, 9.348, 14.644, 10.882, 8.598, 7.868, 16.65, 9.738, 8.83, 6.1080000000000005, 12.168000000000001, 8.008000000000001, 6.356, 10.646, 6.896, 7.354, 6.876, 12.004, 13.234, 10.28, 9.526, 9.536, 9.07, 6.312, 6.406, 7.632000000000001, 9.966000000000001, 11.638, 6.55, 13.97, 10.15, 6.838, 4.644, 9.766, 6.5040000000000004, 4.74, 13.484, 8.158, 8.104000000000001, 13.016, 7.558, 10.404, 9.038, 5.05, 10.9, 5.426, 12.21, 4.5, 12.498000000000001, 8.53, 3.66, 8.808, 4.09, 14.318, 9.66, 11.712, 4.508, 14.51, 14.01, 10.732000000000001, 9.172, 12.044, 12.02, 10.878, 13.084, 11.488, 13.084, 11.91, 7.914000000000001, 14.902000000000001, 5.21, 8.506, 8.218, 9.902000000000001, 10.334, 8.692, 4.7860000000000005, 12.544, 9.064, 9.332, 9.93, 11.924, 11.18, 7.6240000000000006, 11.768, 4.406, 12.13, 13.994, 12.620000000000001, 8.48, 8.45, 1.248, 12.876, 5.66, 4.224, 8.67, 12.666, 7.174, 14.334, 12.996, 11.806000000000001, 7.404, 8.178, 13.258000000000001, 6.736, 12.052, 10.138, 5.698, 8.564, 11.056000000000001, 13.232000000000001, 11.99, 8.122, 8.508000000000001, 14.012, 7.04, 11.316, 10.612, 9.200000000000001, 7.622, 6.436, 
7.746, 7.468, 12.94, 12.672, 4.972, 10.99, 15.85, 12.36, 5.868, 9.516, 8.082, 9.434000000000001, 8.938, 10.222, 8.112, 9.142, 8.126, 5.2700000000000005, 10.5, 14.07, 6.328, 14.946, 5.166, 11.394, 10.738, 8.014, 7.468, 9.682, 13.274000000000001, 12.682, 1.474, 8.518, 8.668000000000001, 11.082, 13.302, 10.318, 12.8, 7.696, 8.386000000000001, 8.388, 13.496, 7.3500000000000005, 10.292, 11.446, 9.816, 4.992, 11.022, 10.436, 7.08, 11.02, 7.83, 9.83, 13.858, 9.904, 13.948, 6.758, 10.224, 5.95, 9.01, 7.272, 10.064, 12.636000000000001, 9.506, 14.67, 9.648, 10.574, 7.34, 9.724, 11.048, 8.576, 4.0360000000000005, 8.036, 11.562, 6.442, 11.624, 6.3740000000000006, 12.716000000000001, 9.98, 10.284, 9.384, 8.924, 9.774000000000001, 10.03, 13.998000000000001, 11.742, 12.842, 11.262, 16.65, 6.996, 9.332, 7.288, 8.828, 7.364, 15.872, 7.728, 12.158, 5.796, 15.928, 6.848, 8.482, 10.054, 9.282, 11.106, 7.668, 11.398, 12.244, 5.8180000000000005, 17.112000000000002, 11.122, 13.870000000000001, 11.426, 13.026, 6.216, 14.164, 10.574, 13.656, 7.578, 6.922, 9.082, 14.378, 11.332, 8.972, 3.5420000000000003, 4.646, 11.558, 11.942, 11.508000000000001, 11.864, 12.842, 9.182, 9.954, 5.26, 7.7940000000000005, 7.854, 6.46, 6.88, 8.468, 6.142, 6.16, 6.51, 9.24, 16.674, 8.832, 12.532, 4.688, 3.08, 17.756, 13.154, 15.162, 10.256, 15.93, 11.824, 10.842, 10.744, 12.484, 10.76, 19.112000000000002, 10.53, 13.01, 9.684000000000001, 11.606, 14.46, 7.306, 12.424, 7.192, 3.128, 5.256, 12.686, 13.414, 13.656, 13.468, 9.446, 5.682, 9.704, 9.586, 12.562, 6.8420000000000005, 9.976, 9.89, 10.09, 11.296, 12.61, 13.418000000000001, 9.992, 5.482, 12.434000000000001, 9.984, 13.168000000000001, 1.05, 4.942, 10.396, 13.072000000000001, 12.956, 8.334, 10.294, 10.896, 10.074, 6.824, 8.784, 7.61, 9.78, 14.708, 10.378, 10.870000000000001, 7.344, 3.182, 9.442, 10.738, 10.232000000000001, 10.702, 7.522, 12.352, 11.912, 10.386000000000001, 6.272, 11.69, 12.204, 8.18, 12.896, 12.822000000000001, 10.58, 14.314, 5.98, 9.522, 
10.786, 6.714, 14.774000000000001, 10.364, 9.92, 7.682, 9.392, 5.774, 8.746, 17.44, 12.288, 10.948, 7.282, 12.728, 6.018, 6.016, 6.94, 10.672, 12.994, 10.012, 9.394, 13.46, 11.71, 12.222, 9.702, 7.642, 12.14, 8.97, 8.894, 11.966000000000001, 8.022, 13.58, 6.952, 4.904, 8.812, 15.996, 2.582, 12.65, 11.704, 11.838000000000001, 11.200000000000001, 12.086, 15.082, 7.746, 7.05, 5.684, 12.454, 6.732, 8.752, 13.436, 8.994, 5.804, 8.688, 9.348, 10.294, 5.996, 8.576, 11.422, 14.428, 11.886000000000001, 12.132, 12.3, 7.712, 11.302, 14.056000000000001, 12.026, 16.272000000000002, 11.898, 11.99, 14.214, 7.62, 11.412, 12.964, 14.874, 17.288, 13.302, 14.202, 8.45, 8.844, 4.87, 5.986, 9.778, 9.326, 11.31, 9.728, 5.486, 12.118, 8.742, 9.27, 8.61, 12.302, 12.75, 15.08, 7.8340000000000005, 10.994, 4.0840000000000005, 10.496, 10.11, 9.158, 10.766, 14.302, 7.984, 9.796, 11.202, 10.376, 4.962, 12.062, 7.72, 11.642, 10.386000000000001, 7.932, 11.93, 7.1160000000000005, 14.782, 5.76, 7.852, 13.996, 15.078000000000001, 9.828, 9.564, 11.424, 10.152000000000001, 9.968, 10.290000000000001, 15.63, 4.666, 16.316, 9.262, 12.672, 5.658, 7.838, 14.192, 12.116, 13.758000000000001, 14.57, 12.528, 14.22, 10.912, 13.244, 7.67, 6.412, 10.724, 5.508, 11.532, 10.048, 9.564, 7.178, 16.588, 9.82, 8.018, 13.706, 15.782, 10.144, 12.038, 7.328, 11.612, 14.94, 8.592, 12.6, 6.418, 15.358, 13.406, 12.466000000000001, 8.41, 5.5920000000000005, 14.354000000000001, 10.14, 9.34, 3.884, 9.294, 5.84, 6.472, 10.25, 9.540000000000001, 10.5, 9.972, 12.596, 11.398, 10.472, 10.582, 15.812000000000001, 6.94, 4.55, 6.0280000000000005, 16.990000000000002, 7.306, 10.982000000000001, 9.512, 8.468, 11.812, 8.592, 14.094, 8.202, 13.378, 15.38, 11.984, 9.904, 9.646, 10.93, 14.928, 9.196, 12.778, 9.022, 7.194, 13.236, 4.884, 8.578, 6.058, 9.38, 6.806, 9.744, 6.05, 5.362, 15.620000000000001, 13.504, 9.48, 7.53, 10.24, 6.594, 9.14, 14.168000000000001, 3.856, 4.298, 11.424, 5.0280000000000005, 10.216000000000001, 6.8580000000000005, 
7.878, 7.1240000000000006, 5.466, 8.96, 4.41, 18.924, 10.51, 11.772, 13.414, 7.646, 11.316, 12.754, 13.176, 9.86, 5.728, 4.23, 9.294, 9.124, 8.894, 11.23, 17.06, 7.998, 14.764000000000001, 8.806000000000001, 11.168000000000001, 12.762, 7.354, 10.862, 7.8260000000000005, 9.352, 10.428, 8.124, 13.638, 9.336, 8.09, 7.228, 7.6000000000000005, 12.282, 13.154, 10.882, 9.196, 9.466000000000001, 13.494, 6.138, 11.612, 12.122, 8.242, 14.040000000000001, 11.828, 5.3740000000000006, 9.292, 9.540000000000001, 9.274000000000001, 15.672, 6.76, 10.432, 4.848, 15.934000000000001, 5.99, 8.858, 11.886000000000001, 7.924, 9.906, 6.042, 12.828, 11.308, 10.568, 10.35, 6.344, 6.0920000000000005, 5.824, 10.448, 11.884, 16.2, 11.266, 11.098, 10.684000000000001, 13.448, 8.52, 5.0520000000000005, 7.962, 7.8660000000000005, 4.472, 8.24, 9.798, 6.22, 5.086, 9.948, 8.732, 11.486, 9.266, 10.626, 5.412, 11.268, 7.824, 12.968, 8.19, 9.540000000000001, 9.574, 16.378, 7.644, 8.816, 9.646, 9.412, 4.95, 15.082, 8.908, 11.868, 9.556000000000001, 6.908, 10.736, 11.134, 9.838000000000001, 6.878, 9.742, 10.57, 9.198, 11.056000000000001, 10.524000000000001, 3.306, 10.39, 9.040000000000001, 6.662, 11.724, 11.886000000000001, 12.790000000000001, 9.664, 10.156, 11.114, 14.876, 7.752, 9.468, 10.108, 9.762, 8.464, 7.128, 12.312, 12.002, 12.47, 10.168000000000001, 10.722, 9.936, 12.834, 14.306000000000001, 14.346, 7.414000000000001, 9.534, 8.334, 14.290000000000001, 13.362, 5.372, 8.67, 10.572000000000001, 13.794, 10.722, 13.922, 7.658, 12.066, 7.97, 4.532, 13.942, 4.82, 11.046, 12.666, 8.774000000000001, 7.1240000000000006, 9.174, 5.282, 10.200000000000001, 10.14, 12.284, 6.57, 6.42, 7.104, 7.356, 9.752, 13.27, 8.496, 14.64, 8.286, 9.386000000000001, 10.51, 4.154, 11.636000000000001, 8.48, 11.32, 10.546, 10.446, 11.408, 6.666, 6.392, 8.31, 14.122, 6.142, 6.228, 13.166, 13.514000000000001, 8.64, 11.644, 7.71, 13.028, 11.58, 16.306, 10.738, 10.518, 8.856, 13.958, 3.052, 10.556000000000001, 7.704, 6.604, 5.266, 
9.626, 10.448, 11.626, 12.834, 11.112, 9.492, 6.6240000000000006, 6.752, 7.23, 9.928, 8.016, 11.098, 13.42, 12.422, 12.14, 16.096, 12.280000000000001, 12.554, 14.774000000000001, 3.45, 9.370000000000001, 15.212, 5.166, 6.522, 8.148, 10.784, 9.96, 7.742, 6.936, 8.63, 10.898, 6.8260000000000005, 11.804, 6.974, 8.788, 8.522, 11.5, 9.084, 8.32, 9.06, 10.784, 6.758, 4.54, 12.886000000000001, 11.868, 12.408, 8.32, 10.388, 8.844, 10.81, 14.846, 7.654, 8.106, 11.336, 13.328, 7.74, 12.344, 7.746, 3.228, 12.634, 11.23, 10.626, 8.442, 15.518, 5.242, 9.278, 13.104000000000001, 7.824, 14.698, 8.796, 10.412, 6.456, 9.084, 10.694, 9.002, 9.158, 15.72, 9.734, 11.676, 15.174, 7.026, 15.834, 12.728, 12.272, 8.6, 16.41, 9.242, 4.498, 6.892, 5.126, 13.132, 11.56, 10.932, 11.354000000000001, 11.544, 13.232000000000001, 5.928, 10.142, 10.482000000000001, 10.408, 11.336, 4.234, 13.988, 4.312, 8.286, 6.868, 12.062, 7.978, 8.796, 15.83, 16.156, 13.668000000000001, 15.222, 13.838000000000001, 12.938, 14.576, 13.282, 7.494, 11.988, 12.142, 9.458, 8.322000000000001, 12.508000000000001, 10.958, 9.158, 11.832, 13.836, 8.274000000000001, 7.628, 13.638, 11.098, 6.138, 11.958, 9.496, 12.448, 14.442, 11.82, 11.004, 9.406, 7.0, 1.782, 4.014, 9.65, 10.992, 12.214, 9.738, 9.888, 12.352, 12.328, 14.418000000000001, 9.278, 11.16, 14.034, 9.894, 4.406, 5.882000000000001, 10.998000000000001, 8.808, 12.33, 11.214, 8.656, 12.898, 11.98, 9.518, 3.94, 12.65, 13.308, 8.19, 11.352, 10.086, 11.232000000000001, 16.168, 8.978, 12.362, 9.414, 10.428, 10.944, 11.906, 12.91, 9.18, 6.816, 11.016, 7.928, 5.958, 11.796, 10.676, 13.928, 11.536, 7.362, 8.28, 9.618, 8.818, 11.78, 8.736, 8.682, 13.97, 11.342, 5.516, 9.03, 9.562, 5.352, 6.832, 12.906, 6.766, 8.514, 4.346, 9.212, 12.086, 16.136, 8.474, 10.948, 7.912, 12.91, 13.672, 10.782, 7.312, 10.716000000000001, 10.5, 6.95, 9.512, 11.994, 13.532, 17.916, 6.538, 6.978, 7.486, 12.462, 9.586, 6.122, 11.862, 7.0360000000000005, 17.904, 11.372, 8.504, 9.846, 10.196, 6.312, 
9.462, 8.428, 13.748000000000001, 10.324, 8.92, 13.318, 12.838000000000001, 15.456, 7.214, 12.92, 9.388, 9.244, 8.492, 6.236, 10.636000000000001, 8.71, 14.214, 6.202, 11.616, 5.468, 12.206, 9.862, 11.326, 13.776, 7.23, 10.404, 11.156, 8.066, 5.482, 9.232, 11.78, 12.324, 8.858, 6.642, 3.888, 9.348, 12.536, 13.632, 10.404, 11.516, 8.59, 5.822, 12.672, 5.574, 13.236, 10.99, 7.872, 12.17, 17.228, 10.864, 8.022, 8.892, 8.56, 5.774, 11.606, 10.950000000000001, 8.896, 13.442, 9.852, 14.098, 10.23, 8.662, 12.946, 10.67, 9.464, 4.418, 9.218, 6.9, 13.84, 14.966000000000001, 8.866, 9.384, 10.642, 4.078, 12.446, 9.572000000000001, 8.158, 12.65, 9.912, 9.532, 13.746, 9.978, 8.49, 13.012, 14.594, 9.412, 13.554, 10.768, 10.182, 8.062, 11.488, 9.946, 8.52, 9.428, 9.288, 3.68, 12.052, 10.56, 9.684000000000001, 11.106, 7.098, 12.200000000000001, 8.188, 9.44, 8.64, 9.966000000000001, 9.388, 11.044, 11.004, 10.754, 10.742, 6.448, 9.334, 11.0, 6.458, 9.164, 11.538, 17.02, 10.898, 10.322000000000001, 12.868, 10.384, 16.296, 12.156, 11.57, 10.498, 6.096, 8.424, 9.57, 10.616, 10.984, 9.344, 6.688, 11.434000000000001, 11.726, 6.96, 11.536, 9.182, 11.694, 7.814, 13.132, 13.780000000000001, 9.124, 7.144, 11.466000000000001, 8.040000000000001, 8.466, 8.364, 6.07, 9.17, 8.892, 9.918000000000001, 13.404, 12.424, 11.4, 10.484, 14.47, 9.394, 11.158, 14.514000000000001, 6.602, 8.348, 8.324, 7.172, 8.914, 6.714, 8.708, 8.172, 9.798, 9.352, 10.816, 9.562, 13.944, 12.094, 10.950000000000001, 4.562, 9.816, 10.494, 8.34, 12.74, 8.934000000000001, 9.692, 7.84, 2.584, 8.894, 14.464, 9.72, 17.108, 10.89, 9.338000000000001, 9.376, 8.808, 3.922, 12.568, 8.484, 8.622, 11.276, 15.502, 6.188, 16.830000000000002, 6.712, 6.274, 13.686, 11.786, 8.84, 8.392, 7.132000000000001, 10.728, 5.5360000000000005, 14.21, 6.5600000000000005, 10.08, 10.178, 14.72, 11.718, 10.478, 6.432, 11.886000000000001, 6.868, 12.102, 12.628, 11.076, 8.896, 7.554, 13.624, 5.812, 11.262, 7.784, 7.538, 10.052, 8.858, 10.93, 7.704, 
5.6000000000000005, 6.424, 8.468, 10.994, 4.5120000000000005, 9.444, 5.3500000000000005, 10.802, 15.432, 8.248, 10.506, 4.558, 9.918000000000001, 11.188, 12.672, 9.452, 12.344, 9.82, 8.236, 7.684, 10.526, 7.684, 11.802, 5.7700000000000005, 11.354000000000001, 15.924, 7.972, 8.992, 10.07, 7.752, 7.524, 14.27, 4.14, 11.48, 10.478, 9.27, 9.858, 11.192, 12.878, 10.566, 7.5600000000000005, 13.348, 6.93, 5.468, 6.742, 10.868, 14.958, 11.352, 6.5920000000000005, 11.746, 9.414, 6.426, 10.742, 9.298, 15.604000000000001, 13.83, 9.602, 5.914, 9.504, 9.986, 7.572, 10.188, 14.442, 5.534, 5.338, 12.158, 11.362, 8.522, 15.348, 8.298, 11.712, 8.146, 7.0840000000000005, 11.948, 16.012, 13.542, 9.452, 10.818, 9.572000000000001, 8.842, 8.5, 9.15, 5.492, 8.724, 9.568, 13.226, 14.99, 5.904, 10.942, 9.752, 10.102, 2.782, 12.082, 9.478, 13.290000000000001, 6.134, 12.462, 7.618, 11.834, 15.058, 12.368, 11.540000000000001, 10.994, 11.788, 7.632000000000001, 4.782, 9.48, 8.362, 5.04, 8.286, 9.934000000000001, 16.144000000000002, 11.912, 6.798, 10.258000000000001, 11.378, 3.742, 13.406, 8.626, 13.9, 7.072, 7.736, 12.728, 9.702, 4.926, 7.756, 8.442, 12.394, 14.318, 11.714, 8.494, 8.55, 8.370000000000001, 9.124, 9.312, 13.148, 11.126, 4.434, 6.964, 8.118, 13.924, 13.688, 7.8, 11.382, 9.076, 9.196, 11.362, 11.15, 5.438, 7.496, 4.906, 14.938, 13.336, 14.292, 14.602, 10.058, 11.186, 9.712, 8.19, 8.22, 9.78, 9.494, 9.994, 13.498000000000001, 3.3280000000000003, 7.2700000000000005, 9.108, 9.796, 10.312, 9.586, 11.708, 7.788, 15.318, 9.07, 11.408, 6.8260000000000005, 8.446, 8.956, 11.592, 13.072000000000001, 9.542, 11.944, 10.048, 10.49, 10.718, 13.908, 8.256, 15.948, 7.046, 13.204, 14.676, 10.458, 11.678, 15.754, 9.540000000000001, 6.284, 9.296, 8.824, 11.344, 6.392, 1.672, 9.884, 11.540000000000001, 11.88, 13.726, 11.082, 12.442, 9.366, 8.858, 11.17, 12.504, 14.922, 9.468, 11.246, 13.494, 13.686, 11.38, 7.206, 8.790000000000001, 7.0280000000000005, 11.084, 10.032, 8.108, 8.826, 10.388, 9.55, 8.21, 
8.634, 10.290000000000001, 8.796, 11.132, 8.446, 8.03, 9.0, 14.928, 6.7940000000000005, 11.234, 9.768, 8.184000000000001, 6.96, 8.076, 8.24, 4.742, 10.496, 8.696, 9.186, 7.566, 11.644, 9.642, 13.546000000000001, 8.216, 6.944, 13.59, 8.612, 11.274000000000001, 11.468, 9.946, 11.452, 13.396, 11.952, 12.870000000000001, 7.058, 12.47, 12.864, 9.94, 13.398, 4.912, 11.912, 11.928, 9.23, 12.468, 15.598, 7.742, 6.144, 11.664, 10.048, 7.514, 10.828, 5.884, 10.448, 10.792, 12.504, 14.856, 6.524, 9.278, 13.700000000000001, 12.136000000000001, 11.77, 12.298, 8.58, 9.164, 8.642, 10.212, 12.370000000000001, 3.154, 12.898, 8.036, 14.72, 11.492, 5.946, 14.282, 9.578, 11.496, 8.954, 9.434000000000001, 5.88, 9.462, 7.416, 10.65, 11.768, 6.178, 8.176, 10.55, 9.498, 14.84, 9.472, 10.458, 7.71, 9.706, 18.242, 14.836, 7.396, 7.998, 7.474, 9.216000000000001, 4.242, 9.094, 11.01, 4.518, 8.56, 4.764, 9.822000000000001, 10.488, 12.272, 8.102, 13.8, 13.36, 10.358, 9.38, 8.106, 4.746, 8.57, 13.248000000000001, 7.782, 10.202, 6.07, 12.824, 12.322000000000001, 9.148, 7.0, 10.692, 9.19, 7.574, 11.17, 13.806000000000001, 10.838000000000001, 11.326, 12.448, 9.312, 5.3, 9.474, 9.298, 9.476, 11.226, 7.228, 10.334, 13.05, 7.046, 9.674, 9.352, 10.096, 18.696, 10.03, 6.026, 8.71, 8.208, 12.656, 12.008000000000001, 12.942, 8.238, 2.206, 10.620000000000001, 9.682, 5.976, 14.836, 10.39, 6.214, 6.3, 7.752, 7.5, 10.6, 6.332, 8.120000000000001, 8.31, 4.248, 13.194, 10.622, 13.534, 15.312000000000001, 8.736, 7.546, 14.622, 9.138, 7.914000000000001, 8.55, 10.216000000000001, 11.136000000000001, 11.222, 10.652000000000001, 15.524000000000001, 9.26, 7.95, 5.226, 12.506, 9.226, 10.422, 11.448, 5.3340000000000005, 11.028, 15.38, 14.272, 11.700000000000001, 9.542, 7.752, 6.890000000000001, 6.804, 7.998, 10.752, 9.9, 5.672, 15.328000000000001, 11.666, 8.532, 17.852, 8.668000000000001, 5.924, 12.318, 11.888, 7.05, 11.474, 9.492, 6.6080000000000005, 6.266, 8.588000000000001, 7.49, 10.802, 7.806, 9.318, 9.896, 11.802, 
9.972, 6.882000000000001, 13.868, 12.656, 8.032, 7.646, 9.378, 16.93, 8.304, 10.822000000000001, 7.716, 7.664000000000001, 13.878, 14.918000000000001, 6.392, 14.242, 4.804, 11.4, 6.026, 11.620000000000001, 6.508, 11.394, 7.606, 7.824, 6.926, 13.348, 15.632, 3.398, 7.132000000000001, 9.55, 10.06, 9.67, 6.164, 11.488, 9.422, 5.694, 10.23, 7.158, 13.334, 14.108, 10.638, 15.312000000000001, 7.732, 9.382, 10.948, 1.3940000000000001, 12.72, 12.172, 12.362, 8.58, 10.14, 6.768, 8.52, 8.776, 7.554, 9.998, 9.546, 9.904, 7.5520000000000005, 10.508000000000001, 7.558, 6.22, 11.832, 13.32, 2.992, 14.068, 8.456, 11.706, 9.76, 10.568, 12.416, 11.556000000000001, 12.394, 4.12, 9.59, 7.468, 7.792, 9.068, 6.936, 8.46, 11.25, 12.018, 10.98, 8.294, 7.456, 13.316, 12.558, 8.700000000000001, 8.418000000000001, 16.864, 4.0920000000000005, 6.654, 10.198, 13.61, 8.392, 5.97, 7.542, 13.696, 12.906, 8.198, 11.524000000000001, 5.472, 9.656, 9.13, 6.958, 11.588000000000001, 10.744, 7.0600000000000005, 10.222, 15.864, 10.894, 13.15, 15.434000000000001, 13.226, 11.658, 9.676, 3.606, 15.9, 8.884, 6.448, 9.55, 7.788, 18.076, 8.214, 15.184000000000001, 7.18, 11.812, 3.716, 10.986, 7.378, 8.442, 9.736, 11.496, 7.118, 11.274000000000001, 10.098, 10.082, 9.358, 12.848, 4.986, 6.692, 11.444, 9.64, 8.862, 8.352, 8.704, 12.486, 11.652000000000001, 11.156, 10.94, 15.754, 12.132, 9.75, 17.444, 9.844, 5.5680000000000005, 13.816, 12.376, 10.044, 7.328, 10.14, 3.914, 10.178, 16.992, 13.096, 10.322000000000001, 9.198, 13.756, 8.23, 7.12, 10.84, 12.11, 18.476, 6.01, 8.34, 13.122, 12.986, 9.742, 4.942, 10.870000000000001, 9.41, 12.290000000000001, 14.228, 8.700000000000001, 16.11, 8.118, 11.138, 7.33, 12.632, 9.708, 9.528, 11.084, 9.616, 9.908, 10.154, 7.464, 12.586, 13.034, 5.92, 12.382, 13.532, 11.414, 13.062, 13.736, 7.5760000000000005, 14.716000000000001, 13.618, 13.530000000000001, 15.008000000000001, 9.692, 10.988, 7.994, 13.524000000000001, 14.012, 7.538, 13.638, 9.274000000000001, 2.5460000000000003, 
9.224, 14.84, 7.298, 11.442, 5.382, 9.084, 10.57, 10.726, 6.242, 6.734, 11.968, 9.192, 10.65, 12.664, 11.718, 8.344, 9.806000000000001, 15.42, 9.132, 7.276, 10.078, 13.098, 4.952, 13.922, 17.176000000000002, 12.77, 9.426, 11.47, 10.102, 5.702, 10.248, 11.724, 7.5200000000000005, 9.862, 6.714, 5.588, 6.844, 8.626, 6.692, 14.726, 15.244, 8.312, 9.334, 12.414, 9.488, 7.78, 14.626, 11.784, 8.47, 15.256, 12.682, 11.742, 12.142, 11.290000000000001, 8.88, 10.918000000000001, 12.438, 10.076, 8.602, 13.206, 10.73, 8.358, 14.41, 8.138, 12.146, 3.612, 11.926, 12.530000000000001, 10.408, 8.566, 14.444, 12.252, 13.036, 7.916, 15.172, 10.766, 11.902000000000001, 7.064, 2.822, 12.466000000000001, 8.870000000000001, 7.604, 9.99, 11.41, 10.664, 7.034, 9.824, 11.342, 14.122, 12.370000000000001, 11.208, 9.996, 6.862, 10.182, 13.422, 11.858, 9.99, 4.662, 10.49, 2.728, 6.316, 9.216000000000001, 7.596, 7.862, 11.538, 8.768, 14.72, 10.700000000000001, 8.282, 13.0, 10.262, 11.214, 13.426, 12.66, 8.57, 7.422000000000001, 7.032, 8.028, 10.202, 11.072000000000001, 14.046000000000001, 4.15, 7.356, 12.242, 6.508, 8.862, 7.912, 4.66, 10.806000000000001, 9.322000000000001, 6.332, 8.162, 13.63, 9.566, 12.174, 14.112, 7.282, 9.388, 8.91, 10.452, 13.372, 16.678, 7.08, 9.13, 8.224, 14.712, 10.69, 5.798, 9.854000000000001, 8.872, 10.9, 10.536, 7.714, 8.802, 8.4, 2.834, 7.55, 13.68, 7.716, 12.66, 6.304, 10.624, 8.542, 9.528, 12.792, 5.658, 6.448, 13.218, 3.074, 10.38, 16.61, 10.838000000000001, 7.884, 10.478, 7.844, 6.054, 6.2, 9.488, 14.772, 11.778, 13.68, 5.498, 12.298, 9.08, 12.772, 7.734, 6.668, 11.482000000000001, 12.07, 9.736, 7.42, 11.934000000000001, 10.756, 14.374, 9.08, 10.312, 6.812, 14.536, 4.714, 12.372, 8.794, 5.55, 6.316, 10.16, 13.018, 7.48, 9.882, 10.842, 6.3580000000000005, 12.426, 8.596, 10.116, 9.086, 11.94, 6.982, 11.708, 7.244, 9.15, 8.618, 4.522, 7.246, 7.2700000000000005, 5.444, 10.422, 12.244, 10.518, 9.668000000000001, 10.296, 9.426, 7.934, 11.06, 9.794, 11.314, 11.406, 3.9, 
12.370000000000001, 13.934000000000001, 9.61, 9.86, 4.01, 7.054, 17.036, 10.996, 5.222, 5.514, 8.654, 8.486, 4.242, 4.636, 5.2540000000000004, 8.59, 10.122, 11.678, 7.46, 7.174, 8.372, 10.946, 9.618, 9.088000000000001, 10.322000000000001, 11.188, 9.994, 12.334, 7.644, 11.904, 15.016, 2.918, 11.952, 10.942, 10.534, 9.842, 12.536, 7.228, 7.082, 8.934000000000001, 10.518, 8.91, 12.248000000000001, 4.7780000000000005, 10.97, 13.14, 11.796, 6.41, 4.78, 9.974, 6.34, 9.082, 11.128, 9.368, 7.3420000000000005, 10.174, 14.168000000000001, 7.166, 17.122, 6.846, 11.32, 12.896, 6.76, 11.316, 8.218, 6.672, 13.43, 12.540000000000001, 8.144, 9.878, 6.398000000000001, 6.892, 6.722, 10.812, 14.924, 9.344, 7.708, 14.144, 2.162, 9.506, 8.788, 9.33, 11.006, 11.682, 11.624, 9.262, 11.518, 10.99, 15.734, 6.914, 10.65, 9.652000000000001, 14.298, 7.672000000000001, 12.584, 14.262, 11.616, 7.978, 10.356, 12.004, 12.66, 11.132, 13.522, 12.922, 10.540000000000001, 12.144, 10.182, 8.68, 11.252, 6.716, 8.978, 9.738, 3.218, 7.182, 13.51, 11.75, 10.92, 8.72, 11.864, 15.522, 12.196, 13.458, 12.682, 10.790000000000001, 8.906, 14.016, 8.238, 13.700000000000001, 7.704, 8.768, 6.388, 8.072000000000001, 7.758, 12.166, 5.906, 10.164, 8.112, 7.058, 7.158, 10.548, 8.3, 7.212, 9.188, 9.120000000000001, 6.5520000000000005, 12.192, 8.99, 12.078, 4.48, 8.694, 11.984, 8.482, 11.464, 11.742, 8.488, 12.1, 8.648, 10.8, 11.558, 4.21, 14.63, 11.508000000000001, 6.862, 8.276, 5.954, 13.024000000000001, 13.26, 14.864, 8.5, 10.106, 12.368, 9.276, 10.302, 9.968, 16.256, 10.084, 12.836, 11.096, 8.064, 10.668000000000001, 10.57, 10.288, 11.508000000000001, 10.402000000000001, 7.238, 15.126, 4.948, 4.422, 7.894, 8.69, 5.26, 8.682, 11.284, 12.6, 6.716, 10.804, 5.846, 13.462, 9.524000000000001, 5.664, 9.144, 11.02, 8.802, 13.556000000000001, 7.472, 13.084, 4.5920000000000005, 8.516, 10.68, 16.794, 9.726, 8.264, 18.736, 10.804, 10.282, 10.262, 6.312, 10.33, 5.26, 6.458, 12.790000000000001, 10.448, 13.188, 8.81, 10.116, 
8.976, 6.716, 6.878, 13.574, 11.084, 17.674, 11.128, 5.446, 7.332, 10.502, 12.314, 9.812, 9.316, 9.118, 12.502, 0.8180000000000001, 4.314, 10.72, 14.064, 12.178, 11.004, 6.3100000000000005, 9.268, 10.55, 7.016, 4.514, 5.34, 9.246, 4.0200000000000005, 11.758000000000001, 9.356, 7.8, 6.848, 12.97, 9.484, 10.542, 6.096, 5.622, 9.018, 11.540000000000001, 2.878, 11.508000000000001, 15.542, 12.728, 9.402000000000001, 12.098, 14.244, 9.282, 9.242, 11.962, 7.668, 13.018, 6.792, 12.07, 8.838000000000001, 4.212, 8.120000000000001, 10.67, 10.564, 6.764, 9.072000000000001, 10.778, 9.428, 9.036, 10.352, 7.694, 11.578, 7.656000000000001, 9.378, 15.894, 14.262, 5.934, 9.33, 12.966000000000001, 13.484, 3.314, 10.732000000000001, 13.23, 9.544, 8.496, 6.914, 12.004, 7.888, 14.854000000000001, 6.774, 12.186, 10.51, 13.768, 9.24, 16.438, 16.066, 13.816, 8.838000000000001, 6.37, 6.8660000000000005, 10.290000000000001, 9.748, 12.982000000000001, 12.388, 6.752, 9.312, 11.504, 7.44, 12.232000000000001, 3.77, 6.238, 9.26, 13.896, 7.164000000000001, 4.666, 11.266, 15.35, 6.892, 6.1080000000000005, 6.964, 14.868, 10.73, 13.766, 12.914, 13.982000000000001, 10.49, 10.24, 6.474, 15.044, 6.066, 16.94, 7.658, 9.544, 10.474, 10.894, 7.656000000000001, 5.888, 10.598, 8.162, 9.766, 9.974, 7.640000000000001, 14.536, 4.63, 9.862, 12.708, 11.414, 7.948, 12.902000000000001, 12.382, 12.224, 9.836, 6.772, 9.676, 14.572000000000001, 12.382, 11.450000000000001, 8.77, 15.108, 13.676, 9.832, 10.138, 7.474, 3.106, 7.484, 15.444, 9.402000000000001, 15.854000000000001, 16.002, 9.298, 8.386000000000001, 8.082, 9.884, 13.4, 9.432, 11.316, 11.75, 13.478, 12.608, 10.26, 8.154, 5.07, 15.5, 7.752, 12.072000000000001, 8.872, 6.12, 5.522, 12.238, 7.942, 10.796, 10.974, 9.014, 12.844, 9.558, 9.668000000000001, 8.306000000000001, 11.054, 9.88, 11.754, 6.798, 6.634, 8.09, 13.686, 7.272, 11.372, 10.024000000000001, 9.168000000000001, 9.582, 11.008000000000001, 9.814, 5.398, 12.392, 7.946, 7.064, 3.956, 13.718, 16.056, 
7.368, 11.17, 11.18, 6.072, 8.764, 6.382000000000001, 1.074, 11.348, 16.018, 11.38, 15.652000000000001, 6.0280000000000005, 12.124, 11.932, 8.09, 9.504, 8.762, 6.764, 9.97, 10.124, 4.68, 7.0680000000000005, 9.388, 12.316, 15.618, 4.328, 7.644, 11.992, 11.72, 6.176, 11.248000000000001, 10.382, 5.7, 6.836, 9.232, 10.224, 17.34, 7.918, 14.752, 13.31, 16.39, 10.084, 12.540000000000001, 5.946, 11.174, 4.522, 7.8660000000000005, 9.892, 12.324, 9.074, 7.632000000000001, 8.474, 10.864, 6.61, 9.65, 7.444, 9.552, 8.538, 9.404, 6.872, 15.062000000000001, 13.652000000000001, 10.292, 11.146, 8.26, 6.632000000000001, 7.0760000000000005, 8.084, 7.164000000000001, 11.766, 10.968, 14.016, 6.25, 8.206, 12.062, 14.874, 13.822000000000001, 8.876, 6.008, 11.524000000000001, 11.540000000000001, 9.522, 16.644000000000002, 6.404, 4.654, 8.052, 6.3660000000000005, 9.214, 8.328, 8.454, 7.908, 9.874, 15.444, 13.672, 8.166, 9.122, 11.484, 6.916, 9.436, 6.338, 10.518, 11.47, 9.408, 13.258000000000001, 8.752, 9.24, 10.088000000000001, 9.998, 7.7940000000000005, 15.982000000000001, 6.886, 8.25, 12.64, 8.476, 8.32, 7.912, 10.056000000000001, 10.064, 8.186, 10.392, 5.8180000000000005, 13.038, 4.992, 9.594, 9.35, 7.07, 11.03, 10.728, 14.068, 4.95, 11.374, 9.872, 12.264000000000001, 12.58, 11.17, 11.534, 3.5020000000000002, 10.456, 4.5, 3.124, 10.368, 12.71, 6.354, 7.812, 9.926, 10.194, 10.33, 13.032, 12.268, 12.432, 13.266, 12.394, 12.208, 4.356, 11.316, 9.416, 13.478, 15.082, 10.040000000000001, 15.784, 11.976, 10.972, 11.202, 12.366, 11.1, 11.6, 10.022, 13.376, 11.736, 12.134, 10.068, 14.88, 8.554, 7.8, 10.906, 7.2940000000000005, 6.902, 8.414, 10.082, 6.0, 11.536, 11.596, 10.338000000000001, 18.54, 15.89, 7.868, 6.41, 8.84, 5.174, 12.606, 9.43, 9.546, 6.178, 9.682, 11.974, 10.192, 12.67, 7.248, 16.844, 6.782, 14.376, 8.972, 6.088, 8.286, 8.828, 12.76, 9.062, 5.892, 8.852, 15.716000000000001, 6.654, 13.842, 5.766, 7.434, 8.502, 8.64, 7.98, 12.99, 13.222, 11.16, 11.104000000000001, 6.596, 9.88, 
16.816, 7.24, 8.92, 16.822, 10.766, 12.414, 7.488, 7.336, 10.246, 14.592, 8.602, 8.112, 14.296000000000001, 10.322000000000001, 9.91, 9.392, 13.21, 10.602, 11.796, 15.194, 7.058, 9.57, 6.54, 6.498, 6.852, 14.572000000000001, 13.946, 8.234, 7.768, 11.292, 9.772, 12.398, 10.61, 9.858, 13.57, 9.954, 8.002, 8.422, 8.332, 5.604, 13.152000000000001, 8.632, 10.342, 14.044, 9.018, 8.634, 9.482, 9.544, 8.956, 13.878, 7.688, 6.932, 5.618, 9.512, 9.714, 10.374, 13.096, 8.75, 9.696, 4.072, 7.8180000000000005, 6.61, 14.236, 14.016, 14.612, 8.55, 9.058, 8.986, 12.236, 6.314, 10.052, 5.2860000000000005, 7.244, 10.694, 11.834, 6.172, 9.518, 11.252, 9.812, 10.788, 10.106, 11.262, 9.508000000000001, 11.674, 13.472, 13.088000000000001, 7.966, 5.7, 8.876, 12.656, 8.83, 11.594, 11.92, 11.738, 11.496, 7.0200000000000005, 8.436, 9.208, 11.788, 10.966000000000001, 8.366, 9.976, 6.5280000000000005, 9.06, 12.994, 4.402, 5.958, 6.224, 4.934, 10.768, 8.564, 12.336, 9.228, 9.808, 12.488, 10.74, 12.428, 14.888, 7.462, 6.306, 10.058, 13.784, 14.262, 10.946, 9.504, 7.156000000000001, 10.02, 11.388, 6.752, 11.984, 15.17, 9.092, 8.608, 11.962, 13.592, 10.65, 6.232, 9.872, 6.704, 6.582, 7.95, 11.402000000000001, 7.38, 6.248, 7.752, 7.292, 7.144, 9.694, 8.598, 7.378, 9.06, 9.308, 12.316, 8.682, 8.75, 8.68, 12.658, 10.006, 10.588000000000001, 9.35, 2.222, 9.848, 8.896, 9.838000000000001, 14.142, 12.644, 17.14, 4.586, 11.34, 13.866, 10.81, 10.812, 5.758, 9.464, 16.07, 11.39, 9.31, 5.682, 9.712, 13.776, 3.0260000000000002, 9.75, 14.916, 8.58, 11.822000000000001, 12.484, 13.744, 6.6160000000000005, 8.31, 5.636, 11.988, 8.788, 9.232, 8.212, 8.15, 9.652000000000001, 10.73, 9.022, 9.824, 3.396, 9.578, 10.844, 8.458, 5.0920000000000005, 7.716, 11.15, 12.028, 13.58, 9.342, 16.232, 11.200000000000001, 12.328, 5.08, 13.534, 10.540000000000001, 13.138, 12.97, 10.212, 7.5760000000000005, 5.932, 13.008000000000001, 8.622, 12.486, 11.316, 15.418000000000001, 11.544, 8.78, 9.186, 11.856, 8.154, 17.196, 
12.636000000000001, 10.082, 8.904, 9.05, 9.526, 1.974, 8.714, 13.004, 9.788, 8.32, 7.888, 10.586, 8.452, 8.234, 10.386000000000001, 8.428, 10.678, 10.352, 14.534, 7.21, 11.22, 8.732, 11.816, 8.44, 11.938, 17.926000000000002, 5.848, 12.49, 4.956, 12.468, 6.904, 13.848, 13.956, 11.622, 11.82, 7.91, 12.968, 11.924, 8.208, 9.96, 8.536, 9.464, 10.958, 12.14, 10.412, 15.654, 7.292, 15.120000000000001, 9.182, 7.916, 13.496, 9.3, 9.916, 12.118, 5.8340000000000005, 14.258000000000001, 17.682, 10.554, 5.886, 12.106, 13.432, 13.77, 11.158, 12.596, 8.612, 7.11, 6.862, 6.708, 11.848, 12.732000000000001, 15.064, 11.576, 6.1000000000000005, 8.726, 6.756, 14.424, 9.118, 7.7860000000000005, 8.888, 6.458, 7.668, 8.416, 12.766, 11.524000000000001, 8.53, 18.988, 8.604000000000001, 9.120000000000001, 10.034, 10.978, 10.192, 12.246, 12.672, 7.198, 3.0660000000000003, 12.024000000000001, 11.546, 14.692, 13.352, 14.028, 14.752, 6.758, 6.63, 9.988, 14.994, 9.904, 10.318, 7.094, 7.464, 9.598, 5.5520000000000005, 7.956, 4.66, 11.174, 11.366, 10.726, 6.306, 12.136000000000001, 11.540000000000001, 14.886000000000001, 10.620000000000001, 12.102, 12.404, 6.376, 7.248, 10.28, 11.494, 10.18, 15.954, 9.038, 15.788, 7.618, 10.318, 7.0520000000000005, 11.774000000000001, 7.694, 14.41, 4.978, 8.69, 13.77, 10.688, 14.134, 2.864, 5.042, 10.322000000000001, 7.79, 9.188, 12.422, 10.512, 8.774000000000001, 11.578, 12.064, 6.3340000000000005, 17.364, 11.426, 9.71, 13.106, 10.51, 12.174, 10.638, 9.722, 4.442, 15.64, 5.5600000000000005, 6.804, 15.252, 9.8, 10.08, 8.382, 4.356, 12.412, 11.518, 11.978, 11.194, 10.376, 8.546, 12.012, 6.582, 10.856, 5.014, 6.638, 6.338, 5.476, 8.004, 6.974, 5.368, 6.534, 8.268, 11.394, 11.184000000000001, 8.012, 3.2840000000000003, 11.616, 11.116, 8.058, 5.478, 13.816, 11.14, 6.398000000000001, 12.816, 13.200000000000001, 6.382000000000001, 9.494, 8.808, 6.16, 5.32, 7.24, 10.24, 10.618, 11.152000000000001, 9.69, 12.478, 10.31, 7.5520000000000005, 11.082, 7.634, 7.378, 
13.030000000000001, 12.046, 8.998, 9.962, 14.976, 9.846, 12.832, 6.932, 15.592, 4.624, 5.104, 10.834, 12.552, 11.892, 6.468, 6.956, 11.286, 11.614, 6.964, 11.574, 15.610000000000001, 8.798, 9.794, 7.38, 13.476, 11.454, 10.968, 16.05, 7.478, 9.05, 11.268, 10.716000000000001, 9.72, 7.496, 9.094, 11.246, 11.986, 12.604000000000001, 11.13, 6.8, 10.784, 16.934, 11.9, 11.552, 10.526, 2.754, 5.756, 13.236, 14.616, 8.562, 10.454, 6.8260000000000005, 11.344, 7.346, 8.288, 10.932, 8.63, 9.524000000000001, 10.324, 5.868, 11.292, 5.65, 16.004, 5.62, 4.488, 10.692, 7.078, 7.258, 14.734, 11.018, 9.536, 8.21, 10.0, 10.19, 5.128, 6.622, 6.9, 12.348, 9.648, 13.042, 8.014, 12.234, 4.0120000000000005, 13.998000000000001, 11.788, 8.898, 7.0440000000000005, 7.994, 8.536, 3.648, 11.184000000000001, 12.074, 10.006, 6.494, 8.912, 10.83, 10.978, 8.91, 8.948, 8.678, 9.076, 14.574, 14.88, 14.620000000000001, 8.912, 9.592, 7.276, 9.58, 7.764, 12.07, 7.924, 8.006, 12.35, 6.946, 14.706, 15.302, 7.738, 11.108, 10.646, 15.996, 6.5760000000000005, 10.186, 9.826, 6.812, 6.242, 10.208, 12.89, 12.916, 8.442, 8.274000000000001, 5.088, 6.288, 10.49, 9.102, 14.462, 1.364, 9.042, 9.116, 13.866, 12.552, 17.456, 6.784, 11.138, 9.604000000000001, 5.538, 12.902000000000001, 8.11, 10.368, 8.022, 6.688, 10.328, 11.976, 7.45, 10.370000000000001, 12.314, 8.026, 8.232, 7.056, 9.43, 12.258000000000001, 10.498, 9.484, 17.3, 7.498, 10.268, 10.084, 12.374, 11.174, 10.906, 11.788, 5.558, 13.458, 9.386000000000001, 14.02, 10.212, 8.664, 14.272, 13.704, 7.0440000000000005, 10.456, 12.998000000000001, 6.298, 11.856, 9.526, 7.078, 10.450000000000001, 6.16, 9.346, 12.342, 7.102, 7.464, 8.174, 12.912, 13.824, 8.948, 14.030000000000001, 8.534, 7.83, 11.356, 13.276, 9.232, 13.132, 12.508000000000001, 12.784, 12.72, 9.504, 9.888, 13.156, 14.926, 7.96, 8.266, 4.776, 9.134, 7.0040000000000004, 6.654, 9.18, 9.184000000000001, 9.24, 8.790000000000001, 14.122, 8.404, 9.932, 7.73, 4.188, 4.522, 11.886000000000001, 8.968, 7.166, 
3.138, 14.076, 13.492, 7.8660000000000005, 13.204, 11.106, 10.188, 9.27, 11.96, 11.63, 9.184000000000001, 10.678, 3.228, 6.0680000000000005, 15.128, 12.328, 9.678, 10.702, 14.908, 10.41, 9.028, 13.084, 14.994, 9.950000000000001, 6.7, 7.72, 8.01, 12.51, 10.92, 10.852, 6.686, 12.226, 9.472, 8.544, 4.49, 7.474, 10.274000000000001, 13.558, 9.294, 6.76, 8.38, 6.0920000000000005, 12.540000000000001, 13.214, 8.864, 9.128, 5.736, 5.888, 9.5, 9.58, 9.664, 11.632, 9.578, 8.422, 7.074, 9.358, 10.656, 10.97, 11.322000000000001, 7.18, 9.818, 17.436, 11.028, 8.236, 8.998, 6.12, 10.9, 8.636000000000001, 4.434, 12.488, 11.066, 8.084, 11.01, 11.376, 7.564, 9.316, 11.638, 8.018, 10.038, 7.382000000000001, 15.572000000000001, 12.398, 9.622, 13.232000000000001, 10.48, 12.47, 15.008000000000001, 12.016, 8.684000000000001, 10.104000000000001, 8.674, 10.098, 6.816, 8.18, 6.372, 14.368, 8.468, 15.1, 2.62, 17.532, 8.922, 7.34, 12.992, 17.164, 7.516, 3.08, 9.548, 11.16, 7.68, 8.68, 13.358, 7.882000000000001, 7.16, 10.596, 13.248000000000001, 3.346, 14.91, 13.674, 12.33, 7.2700000000000005, 8.284, 6.708, 9.924, 9.424, 7.424, 12.8, 7.348, 9.046, 10.858, 15.094000000000001, 8.072000000000001, 4.828, 12.31, 6.606, 9.392, 8.898, 7.832, 8.462, 12.628, 8.746, 13.308, 12.856, 9.862, 7.492, 10.626, 11.128, 7.74, 7.942, 10.628, 10.152000000000001, 9.14, 8.784, 13.586, 11.19, 14.066, 14.882, 9.362, 12.232000000000001, 8.926, 9.726, 12.904, 11.92, 11.168000000000001, 6.196, 9.616, 9.166, 13.954, 10.732000000000001, 4.824, 8.672, 11.532, 7.952, 11.88, 12.738, 9.942, 7.426, 11.118, 5.0600000000000005, 5.416, 12.434000000000001, 11.404, 15.678, 6.774, 9.292, 6.392, 2.5420000000000003, 2.95, 9.91, 9.09, 6.982, 8.116, 7.088, 10.476, 7.954, 11.126, 7.876, 6.494, 9.642, 12.620000000000001, 13.44, 14.3, 12.298, 9.292, 2.316, 13.682, 6.908, 14.154, 12.046, 9.226, 19.312, 8.552, 9.302, 10.548, 7.844, 10.618, 11.624, 8.384, 5.32, 12.1, 9.64, 12.592, 10.546, 8.214, 10.984, 5.5680000000000005, 12.378, 11.194, 8.13, 
10.762, 16.56, 10.208, 7.86, 8.686, 10.08, 6.29, 8.262, 7.386, 11.532, 9.67, 14.974, 10.27, 13.05, 7.934, 11.466000000000001, 11.290000000000001, 9.574, 12.108, 12.702, 11.69, 17.088, 14.114, 10.674, 10.232000000000001, 15.902000000000001, 15.982000000000001, 10.108, 11.766, 12.694, 10.368, 9.164, 14.76, 7.664000000000001, 12.984, 11.374, 18.054000000000002, 10.55, 8.684000000000001, 8.342, 9.97, 5.404, 8.72, 6.78, 10.106, 9.512, 9.158, 15.378, 6.204, 8.074, 18.282, 16.092, 10.482000000000001, 11.586, 12.644, 7.938, 7.738, 14.824, 10.61, 8.874, 10.818, 12.262, 12.202, 15.118, 6.934, 11.088000000000001, 8.218, 8.064, 11.122, 7.16, 8.264, 14.52, 7.71, 5.244, 8.212, 10.472, 9.788, 13.134, 10.036, 11.64, 7.994, 7.32, 9.302, 8.592, 8.166, 9.004, 6.968, 14.86, 11.642, 11.962, 9.338000000000001, 10.018, 9.042, 8.228, 9.394, 7.92, 13.752, 8.51, 10.956, 13.258000000000001, 8.13, 10.256, 11.582, 9.798, 10.07, 9.852, 11.058, 10.68, 10.14, 12.46, 11.642, 10.08, 18.086000000000002, 12.5, 9.324, 7.59, 6.268, 15.754, 11.96, 4.654, 13.194, 13.264000000000001, 11.902000000000001, 4.932, 9.114, 11.174, 9.514, 10.43, 6.492, 12.106, 7.07, 6.3340000000000005, 10.966000000000001, 9.872, 11.348, 10.766, 10.450000000000001, 12.118, 7.416, 14.272, 10.418000000000001, 13.768, 6.356, 10.958, 13.544, 9.108, 3.348, 10.44, 12.97, 9.254, 3.398, 8.268, 8.906, 10.408, 7.7620000000000005, 6.8100000000000005, 10.154, 8.756, 7.988, 12.622, 16.974, 12.46, 9.368, 11.504, 11.674, 5.8, 17.544, 13.348, 7.538, 9.814, 5.166, 10.534, 9.002, 7.332, 10.088000000000001, 8.728, 11.264, 13.096, 12.038, 9.088000000000001, 9.842, 5.8340000000000005, 12.166, 11.24, 9.278, 5.212, 8.492, 8.32, 8.916, 9.796, 8.854000000000001, 7.582, 7.006, 11.592, 9.956, 8.774000000000001, 10.008000000000001, 12.796000000000001, 11.044, 9.306000000000001, 6.948, 9.832, 5.014, 14.544, 15.222, 13.014000000000001, 4.062, 10.554, 9.462, 6.5680000000000005, 11.034, 7.364, 7.158, 11.08, 11.36, 10.002, 11.056000000000001, 11.282, 11.542, 
9.374, 9.474, 13.004, 11.936, 9.452, 6.33, 11.98, 5.41, 7.3260000000000005, 12.536, 8.268, 3.482, 11.722, 9.602, 9.006, 10.688, 11.522, 8.808, 8.596, 8.602, 7.8100000000000005, 10.77, 5.71, 7.508, 11.174, 13.336, 5.21, 13.436, 10.708, 6.514, 12.138, 8.638, 5.144, 10.756, 12.818, 12.922, 10.446, 11.39, 8.178, 4.24, 6.564, 9.98, 11.806000000000001, 10.924, 12.274000000000001, 11.596, 11.35, 6.674, 12.32, 11.342, 6.2860000000000005, 6.04, 12.08, 13.040000000000001, 4.358, 9.392, 7.8580000000000005, 7.824, 11.896, 11.814, 19.21, 6.99, 9.450000000000001, 4.542, 10.256, 13.582, 6.582, 9.4, 12.780000000000001, 10.874, 10.424, 8.678, 9.81, 11.47, 6.79, 16.27, 13.524000000000001, 8.936, 9.27, 15.936, 7.526, 5.97, 13.398, 17.304000000000002, 11.478, 16.18, 9.22, 11.118, 9.66, 11.49, 11.348, 8.382, 12.372, 9.644, 9.348, 10.726, 10.82, 11.096, 11.074, 12.304, 11.068, 11.912, 8.904, 6.462, 16.456, 12.422, 11.24, 10.694, 8.168000000000001, 4.9, 7.176, 8.378, 16.42, 3.3160000000000003, 10.204, 8.99, 7.982, 16.428, 10.026, 8.514, 12.578, 10.236, 6.04, 8.042, 8.604000000000001, 7.908, 11.126, 7.038, 9.016, 8.978, 14.55, 13.67, 4.978, 8.248, 8.318, 7.768, 11.52, 7.3100000000000005, 7.518, 10.236, 7.516, 11.832, 8.74, 7.71, 9.98, 11.73, 13.034, 12.172, 11.732000000000001, 11.52, 9.066, 11.768, 14.418000000000001, 14.074, 10.356, 15.336, 8.974, 13.586, 12.812, 5.5920000000000005, 13.874, 6.162, 11.66, 8.022, 14.57, 10.348, 8.626, 9.838000000000001, 11.776, 9.120000000000001, 9.63, 10.48, 16.666, 15.176, 9.540000000000001, 8.552, 13.602, 9.178, 10.432, 4.934, 16.114, 9.26, 10.448, 9.266, 13.662, 4.168, 9.864, 4.808, 9.276, 8.02, 10.756, 9.244, 5.394, 5.916, 8.644, 6.672, 10.032, 5.812, 9.446, 8.008000000000001, 6.112, 10.026, 10.208, 7.966, 11.66, 14.142, 10.950000000000001, 12.906, 10.372, 7.982, 8.888, 14.612, 7.062, 6.0840000000000005, 8.984, 10.308, 17.162, 3.926, 11.608, 6.984, 12.512, 6.784, 9.008000000000001, 13.05, 3.226, 7.466, 14.944, 13.842, 14.358, 11.018, 
10.774000000000001, 11.536, 13.88, 10.728, 14.514000000000001, 15.13, 9.458, 5.104, 7.936, 7.694, 6.724, 12.336, 4.644, 12.678, 10.258000000000001, 11.700000000000001, 9.06, 11.814, 11.012, 8.526, 11.256, 12.528, 10.196, 9.77, 17.330000000000002, 10.168000000000001, 13.74, 8.034, 10.778, 15.216000000000001, 11.540000000000001, 10.66, 10.218, 9.032, 11.714, 8.790000000000001, 9.928, 8.044, 9.232, 10.608, 9.694, 10.398, 12.528, 8.738, 0.7000000000000001, 8.64, 12.074, 11.018, 9.232, 10.916, 14.566, 8.728, 13.182, 7.698, 8.014, 10.784, 7.922000000000001, 5.44, 3.672, 7.062, 11.784, 11.35, 8.904, 7.97, 6.946, 3.888, 15.902000000000001, 9.078, 11.978, 13.048, 11.138, 12.124, 11.398, 9.336, 7.71, 12.166, 11.952, 16.272000000000002, 9.508000000000001, 10.790000000000001, 8.942, 11.506, 9.788, 9.428, 8.352, 13.096, 10.128, 8.052, 8.816, 10.004, 13.846, 9.008000000000001, 11.794, 9.954, 4.792, 10.974, 16.22, 9.596, 8.86, 9.088000000000001, 12.026, 11.454, 6.018, 8.82, 11.776, 7.974, 13.894, 6.538, 11.28, 6.232, 14.962, 16.356, 13.948, 6.066, 5.458, 10.27, 8.51, 14.566, 8.35, 11.556000000000001, 10.39, 11.792, 6.96, 16.116, 8.536, 9.286, 14.71, 12.364, 14.952, 10.972, 13.542, 7.064, 7.444, 12.394, 19.262, 8.632, 14.790000000000001, 11.46, 12.832, 10.23, 5.216, 8.962, 9.958, 12.624, 14.370000000000001, 9.616, 7.452, 8.174, 8.244, 11.474, 8.244, 14.328000000000001, 4.5600000000000005, 13.098, 10.114, 14.566, 6.1000000000000005, 7.288, 11.892, 14.516, 11.876, 13.55, 5.938, 8.738, 5.558, 15.048, 6.05, 8.71, 9.152000000000001, 12.326, 7.972, 9.682, 14.44, 5.622, 10.538, 11.276, 13.104000000000001, 9.902000000000001, 5.166, 8.564, 17.898, 10.964, 14.148, 16.116, 6.432, 12.34, 3.43, 9.544, 13.398, 7.0760000000000005, 6.532, 8.294, 8.638, 10.062, 13.784, 6.216, 9.158, 11.378, 9.258000000000001, 8.244, 10.36, 11.482000000000001, 9.67, 7.814, 13.564, 6.522, 6.938, 12.790000000000001, 10.008000000000001, 7.992, 11.282, 5.498, 7.5200000000000005, 10.102, 13.436, 12.452, 
6.0120000000000005, 9.398, 6.872, 5.922, 12.938, 12.042, 10.238, 9.944, 7.026, 12.728, 15.36, 11.238, 13.282, 9.546, 13.026, 3.2600000000000002, 3.954, 10.154, 14.338000000000001, 14.09, 13.318, 8.86, 12.664, 9.8, 9.872, 9.944, 2.672, 9.042, 13.776, 6.72, 12.118, 6.964, 4.864, 7.732, 8.192, 14.116, 11.178, 9.062, 11.526, 9.294, 9.31, 14.874, 18.518, 9.966000000000001, 4.53, 7.448, 8.236, 11.896, 8.024000000000001, 10.732000000000001, 10.704, 10.48, 12.188, 14.752, 6.556, 6.338, 5.064, 11.19, 8.71, 4.494, 5.8020000000000005, 11.78, 5.572, 16.968, 7.1000000000000005, 10.05, 7.216, 10.748000000000001, 5.918, 11.216000000000001, 10.566, 7.884, 8.766, 9.988, 12.694, 3.0700000000000003, 8.076, 12.688, 11.574, 7.588, 13.458, 9.568, 6.746, 9.858, 7.966, 6.11, 9.438, 9.674, 8.696, 7.952, 10.754, 7.390000000000001, 11.292, 13.802, 12.870000000000001, 7.914000000000001, 6.6240000000000006, 7.516, 10.972, 7.894, 8.278, 11.098, 6.428, 10.754, 9.068, 15.696, 9.276, 10.134, 10.574, 10.638, 10.198, 9.798, 4.416, 9.88, 12.302, 10.624, 13.454, 12.736, 12.444, 5.906, 8.8, 8.11, 5.298, 11.572000000000001, 10.374, 11.212, 10.226, 8.756, 15.782, 11.4, 9.72, 11.008000000000001, 7.212, 15.17, 13.6, 8.496, 11.72, 10.22, 10.292, 10.522, 8.948, 9.448, 11.118, 12.56, 6.3500000000000005, 7.95, 9.178, 6.33, 4.876, 12.88, 8.65, 10.262, 9.418000000000001, 2.936, 9.916, 18.896, 9.05, 12.370000000000001, 7.574, 12.632, 4.354, 8.01, 12.338000000000001, 5.5920000000000005, 13.042, 10.290000000000001, 7.3660000000000005, 12.116, 4.892, 10.338000000000001, 13.016, 4.252, 12.678, 6.074, 17.43, 10.654, 6.216, 13.24, 13.802, 12.86, 5.216, 6.966, 8.16, 15.844000000000001, 8.438, 8.21, 10.884, 5.33, 7.7940000000000005, 11.84, 12.012, 9.64, 12.620000000000001, 10.256, 11.264, 6.146, 13.702, 9.608, 7.8660000000000005, 11.492, 14.0, 11.17, 10.96, 12.186, 7.51, 10.692, 14.134, 9.426, 4.376, 5.0040000000000004, 9.958, 12.926, 10.91, 9.302, 13.674, 11.57, 8.382, 6.5120000000000005, 12.724, 11.072000000000001, 
15.09, 10.71, 12.596, 7.2860000000000005, 10.48, 8.038, 11.28, 11.558, 7.878, 10.728, 10.242, 7.12, 6.6080000000000005, 12.19, 11.526, 8.648, 14.732000000000001, 8.648, 11.032, 10.384, 10.898, 13.078, 6.446, 13.414, 15.304, 5.428, 7.466, 10.620000000000001, 8.942, 10.89, 13.698, 7.408, 8.678, 5.5280000000000005, 8.97, 14.396, 8.866, 7.3740000000000006, 13.956, 8.322000000000001, 12.272, 8.954, 16.674, 5.99, 10.412, 10.848, 8.966, 4.678, 11.566, 8.176, 11.33, 12.442, 7.21, 9.35, 13.280000000000001, 8.834, 11.586, 8.186, 6.452, 5.666, 9.112, 4.158, 6.532, 10.626, 9.154, 6.996, 13.078, 13.116, 7.422000000000001, 14.398, 7.0, 8.934000000000001, 8.732, 7.844, 10.822000000000001, 12.76, 7.904, 6.684, 14.164, 9.004, 11.472, 9.774000000000001, 12.122, 9.328, 7.714, 8.65, 11.006, 11.97, 13.1, 8.728, 8.488, 13.948, 13.398, 10.316, 10.09, 8.734, 9.454, 11.508000000000001, 12.076, 17.248, 13.504, 10.43, 18.378, 12.394, 8.436, 10.582, 11.61, 7.618, 12.656, 10.788, 8.366, 11.99, 14.56, 1.6520000000000001, 13.85, 11.638, 11.624, 5.138, 16.754, 10.38, 4.178, 12.364, 11.71, 10.206, 7.58, 2.434, 13.43, 10.736, 10.272, 7.5680000000000005, 10.888, 9.736, 13.61, 7.506, 5.0440000000000005, 11.33, 13.344, 14.348, 8.644, 13.008000000000001, 5.578, 12.786, 10.392, 9.888, 16.838, 12.042, 10.068, 9.392, 10.198, 7.72, 15.478, 7.154, 11.59, 7.714, 10.338000000000001, 14.494, 8.328, 8.104000000000001, 8.964, 13.428, 15.406, 13.404, 8.502, 12.56, 2.988, 5.94, 14.546000000000001, 10.314, 5.836, 16.29, 8.950000000000001, 10.386000000000001, 9.722, 8.942, 13.620000000000001, 13.358, 15.538, 10.950000000000001, 10.166, 10.450000000000001, 9.1, 13.25, 6.066, 10.67, 8.984, 6.162, 9.266, 8.366, 10.372, 10.602, 5.182, 13.082, 15.136000000000001, 7.7620000000000005, 8.496, 15.370000000000001, 9.89, 3.93, 11.618, 12.244, 11.774000000000001, 11.784, 13.164, 9.854000000000001, 11.046, 10.68, 8.3, 14.516, 5.66, 7.364, 5.376, 13.536, 14.242, 10.186, 5.352, 10.376, 12.530000000000001, 12.398, 9.002, 
10.056000000000001, 11.292, 7.8420000000000005, 7.798, 9.922, 10.57, 11.024000000000001, 11.282, 9.966000000000001, 8.686, 9.938, 13.116, 8.632, 10.612, 8.674, 12.384, 7.112, 4.678, 8.596, 7.424, 9.894, 14.978, 12.338000000000001, 9.022, 15.372, 12.418000000000001, 9.02, 7.8500000000000005, 8.286, 14.558, 11.828, 9.702, 6.5, 9.364, 9.724, 5.6000000000000005, 9.186, 12.226, 2.358, 12.656, 10.044, 10.356, 3.222, 13.82, 9.552, 9.22, 10.624, 10.67, 8.758000000000001, 6.726, 15.128, 14.792, 10.654, 10.118, 10.338000000000001, 13.264000000000001, 15.882, 3.668, 16.854, 7.056, 18.830000000000002, 10.952, 10.352, 9.13, 14.304, 13.574, 7.438, 12.258000000000001, 5.438, 9.792, 4.146, 11.698, 9.808, 12.290000000000001, 4.3100000000000005, 9.188, 13.974, 13.856, 10.764, 10.66, 7.876, 8.218, 8.52, 18.276, 8.936, 6.956, 11.616, 5.268, 10.542, 9.348, 7.932, 9.058, 9.768, 9.274000000000001, 10.014, 7.132000000000001, 11.51, 5.132, 8.768, 9.36, 13.328, 8.45, 9.348, 11.976, 12.528, 8.246, 5.934, 6.806, 14.642, 9.956, 10.216000000000001, 5.524, 13.074, 12.586, 9.654, 13.004, 13.616, 11.516, 10.554, 9.016, 7.426, 6.642, 14.116, 11.156, 12.05, 10.312, 13.768, 6.0120000000000005, 8.066, 4.048, 12.58, 6.384, 6.5840000000000005, 12.338000000000001, 3.394, 7.908, 10.98, 9.228, 15.17, 11.648, 11.434000000000001, 11.804, 6.2, 9.842, 12.574, 10.63, 9.788, 9.674, 13.344, 11.026, 10.116, 9.944, 13.05, 15.626, 12.868, 12.23, 4.74, 11.024000000000001, 5.714, 4.0520000000000005, 15.486, 6.898000000000001, 9.568, 9.758000000000001, 12.664, 15.336, 5.112, 8.036, 9.44, 10.062, 13.634, 9.406, 10.014, 11.378, 10.264, 8.978, 11.57, 6.666, 10.968, 12.86, 7.166, 5.026, 10.852, 13.968, 9.588000000000001, 6.3340000000000005, 9.35, 4.894, 9.454, 8.406, 13.444, 9.694, 12.268, 10.022, 8.966, 7.976, 8.568, 10.82, 2.79, 10.468, 6.816, 2.122, 6.182, 14.474, 7.856, 6.864, 10.478, 15.49, 10.35, 7.532, 7.42, 11.388, 8.612, 13.074, 10.112, 13.450000000000001, 8.374, 8.744, 7.912, 15.118, 11.99, 15.716000000000001, 
7.256, 12.048, 7.388, 9.974, 13.218, 10.402000000000001, 13.744, 9.168000000000001, 10.1, 12.448, 12.118, 9.012, 11.006, 4.8420000000000005, 12.536, 11.336, 10.464, 9.566, 14.98, 7.162, 11.158, 14.484, 14.364, 9.548, 13.17, 10.578, 12.21, 8.608, 8.286, 12.772, 5.5840000000000005, 14.526, 12.922, 11.308, 13.036, 9.876, 12.612, 7.98, 5.4, 8.108, 10.138, 8.798, 9.692, 12.098, 6.824, 12.274000000000001, 12.618, 3.706, 7.434, 11.15, 7.398000000000001, 9.574, 6.884, 8.536, 10.418000000000001, 6.214, 12.732000000000001, 5.234, 10.268, 5.958, 10.870000000000001, 10.042, 12.334, 10.624, 14.348, 8.16, 9.612, 6.09, 10.17, 8.892, 10.464, 12.15, 7.68, 7.59, 11.368, 6.678, 8.836, 6.0760000000000005, 11.546, 13.314, 8.518, 9.982000000000001, 9.348, 6.652, 14.06, 14.700000000000001, 7.924, 9.628, 8.388, 9.046, 11.346, 8.654, 4.356, 11.024000000000001, 15.656, 9.044, 15.756, 9.664, 8.048, 8.222, 6.45, 6.446, 11.094, 11.352, 8.72, 12.462, 7.54, 5.84, 12.168000000000001, 9.1, 6.432, 12.790000000000001, 4.306, 10.964, 4.23, 15.122, 12.738, 11.472, 10.200000000000001, 11.364, 12.622, 6.944, 10.542, 10.512, 6.788, 10.81, 8.722, 10.552, 9.686, 10.192, 11.42, 10.818, 15.096, 16.904, 9.156, 5.54, 3.0700000000000003, 12.356, 11.686, 13.444, 7.6160000000000005, 10.17, 5.572, 15.96, 10.14, 9.194, 5.556, 7.064, 7.988, 9.21, 11.474, 12.604000000000001, 5.0280000000000005, 15.146, 11.01, 11.996, 10.478, 5.344, 15.0, 11.048, 11.296, 5.942, 9.622, 6.014, 16.852, 6.196, 9.46, 8.742, 8.31, 11.200000000000001, 11.526, 10.392, 7.0280000000000005, 10.592, 11.022, 13.938, 14.316, 4.484, 9.69, 7.65, 6.51, 9.086, 3.52, 8.074, 10.166, 7.442, 9.33, 11.044, 18.456, 9.598, 13.354000000000001, 16.732, 10.668000000000001, 7.618, 6.962, 10.472, 2.2760000000000002, 10.068, 9.706, 14.126, 10.344, 12.012, 9.83, 11.404, 11.268, 8.462, 13.176, 9.362, 7.752, 8.738, 8.08, 5.6160000000000005, 7.114, 11.620000000000001, 9.738, 5.994, 8.06, 11.082, 10.572000000000001, 14.244, 13.540000000000001, 13.51, 11.138, 9.458, 
14.85, 13.27, 7.23, 12.182, 12.684000000000001, 11.584, 5.564, 6.924, 11.076, 9.78, 7.958, 11.632, 3.892, 6.024, 7.354, 14.062000000000001, 13.43, 13.844, 11.578, 10.944, 14.756, 10.556000000000001, 13.052, 13.866, 7.906000000000001, 4.68, 10.168000000000001, 10.05, 11.908, 8.328, 9.452, 15.598, 11.164, 8.55, 13.98, 14.138, 6.806, 12.832, 13.288, 11.484, 7.658, 8.638, 10.844, 6.104, 17.136, 9.884, 6.372, 12.672, 15.4, 7.836, 14.31, 7.998, 9.042, 8.862, 14.492, 13.39, 10.03, 7.788, 10.652000000000001, 9.21, 6.11, 15.572000000000001, 6.134, 15.05, 13.18, 12.524000000000001, 11.636000000000001, 9.502, 7.640000000000001, 8.346, 17.858, 11.978, 5.21, 10.99, 9.01, 8.686, 8.572000000000001, 3.7840000000000003, 7.742, 10.162, 13.114, 15.146, 11.824, 9.618, 4.354, 10.594, 5.614, 11.708, 12.638, 8.31, 3.368, 9.636000000000001, 9.818, 9.438, 6.862, 7.876, 12.502, 7.164000000000001, 10.334, 13.058, 11.726, 14.49, 13.076, 9.36, 11.304, 10.068, 11.552, 9.064, 9.796, 9.356, 8.686, 12.618, 7.37, 13.092, 7.328, 9.868, 9.92, 14.216000000000001, 7.392, 12.01, 9.858, 14.652000000000001, 7.3020000000000005, 8.09, 5.276, 8.434000000000001, 14.282, 9.646, 18.982, 8.524000000000001, 9.02, 8.836, 9.682, 12.284, 12.782, 9.054, 12.294, 9.56, 7.026, 13.372, 6.074, 6.448, 14.682, 9.856, 7.356, 3.388, 7.212, 10.768, 3.436, 8.062, 10.134, 12.278, 12.536, 7.238, 8.436, 11.554, 8.934000000000001, 10.308, 4.14, 12.128, 8.286, 10.464, 11.632, 9.884, 11.586, 10.794, 12.662, 15.666, 10.602, 7.902, 9.76, 9.89, 8.76, 8.436, 9.668000000000001, 8.438, 15.280000000000001, 12.148, 6.352, 8.194, 8.424, 13.844, 12.856, 8.624, 5.816, 11.852, 8.49, 10.01, 5.94, 8.098, 13.09, 15.384, 16.63, 10.994, 9.814, 12.118, 11.908, 8.996, 7.3500000000000005, 3.116, 7.0680000000000005, 5.124, 11.512, 10.494, 8.352, 12.052, 12.98, 10.126, 8.776, 8.994, 5.622, 10.878, 10.722, 12.578, 10.978, 13.918000000000001, 17.48, 11.196, 6.444, 10.57, 14.448, 7.906000000000001, 7.268, 15.598, 11.208, 11.838000000000001, 8.21, 10.804, 
10.370000000000001, 10.678, 6.902, 7.006, 11.202, 6.104, 10.368, 8.1, 11.348, 10.254, 5.362, 9.424, 7.588, 9.236, 10.258000000000001, 13.776, 10.138, 12.068, 8.506, 9.068, 9.512, 5.344, 11.47, 10.620000000000001, 10.946, 9.836, 8.052, 14.34, 9.106, 9.516, 11.896, 10.258000000000001, 10.226, 10.126, 8.032, 9.818, 12.896, 12.322000000000001, 15.96, 11.472, 10.396, 5.918, 14.808, 9.842, 9.188, 14.620000000000001, 13.634, 3.984, 11.772, 12.904, 7.156000000000001, 10.762, 9.744, 7.904, 9.548, 8.488, 11.13, 10.544, 6.978, 5.32, 8.564, 12.418000000000001, 4.602, 10.58, 6.748, 13.444, 13.096, 7.654, 12.33, 11.024000000000001, 12.504, 9.478, 12.996, 11.142, 7.7860000000000005, 7.496, 13.022, 14.032, 12.950000000000001, 15.526, 7.7780000000000005, 12.528, 13.982000000000001, 11.482000000000001, 10.424, 9.69, 15.546000000000001, 9.89, 7.1000000000000005, 15.76, 14.062000000000001, 10.128, 12.48, 6.622, 7.482, 4.8500000000000005, 12.304, 8.714, 10.968, 11.076, 12.370000000000001, 12.814, 9.212, 10.916, 6.4, 5.484, 12.994, 6.04, 12.648, 7.848, 10.124, 13.93, 7.154, 4.864, 12.218, 7.66, 10.182, 6.24, 6.8100000000000005, 10.022, 8.382, 11.332, 5.984, 9.892, 8.314, 10.622, 5.62, 9.59, 11.754, 13.816, 8.756, 9.964, 13.98, 10.68, 13.176, 7.752, 8.84, 13.744, 13.216000000000001, 6.564, 7.43, 8.102, 7.216, 9.844, 8.264, 6.226, 10.344, 8.318, 11.704, 5.442, 8.966, 9.772, 13.208, 2.336, 8.106, 9.294, 11.454, 4.92, 11.432, 11.738, 9.928, 11.458, 6.648000000000001, 17.356, 9.626, 7.148000000000001, 5.99, 16.428, 6.734, 13.144, 8.07, 9.1, 10.646, 6.648000000000001, 11.226, 11.528, 11.276, 12.772, 5.0520000000000005, 7.088, 4.782, 12.024000000000001, 6.7620000000000005, 8.478, 15.762, 9.052, 7.564, 9.894, 8.73, 9.568, 16.078, 11.956, 11.216000000000001, 7.804, 12.192, 9.706, 7.3260000000000005, 11.238, 9.91, 9.872, 13.696, 7.518, 9.284, 7.748, 12.27, 9.326, 14.3, 14.378, 11.244, 9.952, 9.502, 11.638, 8.624, 8.904, 8.924, 12.362, 10.576, 11.352, 12.694, 12.902000000000001, 6.382000000000001, 
9.392, 12.808, 10.988, 9.452, 9.4, 14.13, 14.908, 3.866, 9.234, 11.124, 8.122, 9.428, 5.518, 9.32, 3.462, 6.238, 8.218, 12.136000000000001, 10.884, 3.948, 6.332, 8.24, 8.46, 7.566, 14.038, 11.450000000000001, 11.084, 6.714, 15.286, 12.044, 7.126, 8.662, 5.212, 8.044, 13.816, 10.056000000000001, 7.876, 13.952, 8.376, 8.652000000000001, 7.392, 13.290000000000001, 11.658, 10.908, 10.612, 6.518, 7.0200000000000005, 6.652, 6.298, 12.616, 11.356, 4.838, 10.47, 10.022, 3.636, 13.344, 8.024000000000001, 8.76, 9.848, 10.658, 10.784, 13.01, 9.052, 12.798, 13.3, 14.416, 11.78, 9.562, 10.4, 11.168000000000001, 11.956, 8.49, 10.876, 11.486, 8.18, 9.792, 9.042, 9.370000000000001, 11.188, 14.018, 19.87, 10.562, 8.346, 7.002, 8.202, 10.304, 6.952, 10.722, 8.278, 13.374, 11.092, 15.394, 14.226, 10.882, 13.524000000000001, 15.49, 14.368, 6.682, 12.852, 13.18, 11.824, 7.1000000000000005, 4.71, 15.688, 11.85, 9.356, 10.032, 9.73, 12.168000000000001, 11.158, 12.574, 14.094, 13.864, 13.18, 3.184, 6.916, 11.932, 10.262, 8.68, 7.246, 12.134, 12.084, 10.75, 5.6160000000000005, 3.198, 7.55, 13.554, 3.334, 14.31, 7.2780000000000005, 5.62, 9.126, 11.708, 15.082, 6.072, 9.842, 14.18, 6.1000000000000005, 12.996, 7.532, 12.158, 13.752, 10.632, 12.362, 8.654, 6.284, 16.528, 9.638, 6.7, 11.404, 18.336000000000002, 5.764, 9.88, 8.11, 6.662, 11.82, 13.008000000000001, 2.056, 12.32, 9.112, 11.372, 8.208, 14.008000000000001, 6.426, 9.73, 11.120000000000001, 9.284, 10.562, 5.978, 6.556, 10.346, 10.938, 11.426, 9.93, 11.012, 9.176, 7.5280000000000005, 10.086, 16.558, 14.256, 10.348, 9.116, 7.848, 12.612, 12.368, 12.216000000000001, 3.858, 18.608, 9.592, 10.57, 14.174, 9.512, 10.22, 12.040000000000001, 5.066, 9.156, 11.106, 15.378, 9.236, 13.362, 12.074, 9.83, 9.722, 7.992, 13.304, 10.746, 12.242, 9.552, 7.234, 10.532, 13.608, 8.818, 11.054, 7.352, 4.714, 10.82, 14.408, 12.592, 11.062, 12.484, 11.486, 13.376, 9.316, 10.608, 11.646, 11.074, 7.972, 10.714, 7.0840000000000005, 9.132, 7.924, 8.664, 14.56, 
7.516, 9.004, 14.458, 16.376, 9.728, 11.484, 9.3, 8.554, 13.012, 7.336, 9.544, 7.428, 15.568, 10.616, 6.63, 8.206, 10.724, 5.692, 9.836, 11.63, 10.96, 6.92, 10.024000000000001, 12.308, 15.424, 15.076, 11.0, 11.218, 10.994, 8.504, 11.068, 5.776, 10.384, 7.188, 12.142, 11.166, 10.19, 11.562, 15.862, 11.794, 11.374, 6.922, 16.918, 11.102, 11.564, 12.5, 4.744, 7.72, 12.556000000000001, 10.136000000000001, 8.382, 15.158, 11.128, 10.922, 6.8, 10.066, 10.0, 7.192, 13.472, 13.19, 11.1, 12.542, 13.014000000000001, 9.44, 6.276, 10.620000000000001, 11.156, 7.542, 3.332, 11.298, 15.092, 8.78, 11.702, 15.106, 7.36, 7.246, 11.018, 9.882, 13.506, 8.134, 5.41, 10.03, 8.69, 3.86, 9.058, 19.698, 6.37, 13.388, 16.976, 13.196, 8.786, 10.886000000000001, 9.802, 11.548, 7.894, 4.804, 8.454, 9.414, 13.278, 10.082, 6.514, 2.658, 13.55, 9.072000000000001, 10.6, 16.042, 12.81, 9.756, 7.94, 12.048, 7.926, 9.77, 5.9, 10.61, 9.774000000000001, 13.032, 9.076, 6.244, 4.566, 6.312, 10.586, 6.16, 12.196, 6.876, 12.682, 6.798, 13.944, 15.036, 11.258000000000001, 14.362, 5.3, 10.056000000000001, 12.816, 10.23, 12.386000000000001, 10.856, 8.326, 10.81, 6.474, 8.148, 11.354000000000001, 11.968, 10.148, 12.538, 11.48, 8.158, 11.178, 5.82, 9.888, 10.846, 9.612, 12.01, 7.962, 9.21, 6.522, 9.6, 11.364, 12.736, 11.652000000000001, 13.794, 12.062, 10.646, 7.71, 5.484, 9.062, 13.536, 13.894, 7.182, 11.01, 10.216000000000001, 11.634, 12.584, 8.63, 7.152, 9.772, 15.936, 14.066, 10.28, 8.984, 9.588000000000001, 10.162, 12.038, 8.898, 8.798, 9.128, 14.044, 6.5200000000000005, 12.722, 8.106, 14.268, 5.6080000000000005, 10.812, 10.156, 13.01, 8.594, 11.292, 13.786, 12.732000000000001, 11.864, 5.192, 14.696, 13.586, 9.452, 13.708, 5.868, 10.824, 13.48, 15.99, 10.296, 9.786, 8.724, 8.294, 10.452, 10.374, 9.048, 7.384, 10.734, 6.862, 9.768, 5.146, 5.614, 11.302, 7.492, 14.134, 12.256, 5.148, 3.4, 9.8, 14.334, 14.064, 3.5660000000000003, 12.172, 6.29, 3.462, 9.24, 9.034, 13.634, 6.316, 4.438, 6.972, 7.554, 13.238, 
10.792, 12.21, 3.426, 10.002, 10.376, 8.904, 11.468, 6.042, 5.422, 10.362, 10.322000000000001, 5.174, 13.544, 8.054, 14.68, 15.942, 9.208, 10.184000000000001, 13.608, 10.194, 15.164, 11.026, 7.6080000000000005, 10.596, 6.122, 9.458, 14.464, 12.782, 8.3, 9.578, 11.9, 6.284, 7.948, 19.82, 11.656, 7.906000000000001, 9.19, 11.716000000000001, 9.632, 6.032, 5.0, 7.0200000000000005, 9.936, 14.700000000000001, 5.718, 8.620000000000001, 12.262, 11.048, 8.888, 11.05, 11.426, 10.56, 9.806000000000001, 9.27, 9.968, 13.218, 14.21, 11.43, 11.52, 15.682, 6.882000000000001, 7.594, 13.126, 11.858, 11.540000000000001, 13.17, 12.238, 13.702, 16.684, 8.936, 10.898, 8.768, 12.956, 10.792, 10.146, 12.31, 14.534, 7.2780000000000005, 15.012, 13.67, 7.612, 10.212, 8.07, 9.292, 11.728, 13.948, 10.144, 8.19, 13.824, 9.082, 8.832, 9.376, 5.01, 11.588000000000001, 12.556000000000001, 7.892, 6.68, 6.0280000000000005, 10.146, 6.338, 6.894, 11.200000000000001, 7.8500000000000005, 8.05, 14.392, 9.764, 11.058, 9.966000000000001, 14.716000000000001, 10.882, 9.198, 7.05, 7.282, 16.352, 10.902000000000001, 12.552, 10.032, 11.48, 8.572000000000001, 9.426, 11.456, 10.676, 10.206, 12.438, 9.812, 12.238, 12.536, 6.348, 9.848, 15.4, 8.288, 11.024000000000001, 15.83, 13.632, 7.65, 9.178, 6.042, 10.61, 7.416, 12.776, 6.188, 11.946, 17.318, 6.41, 10.026, 10.25, 8.148, 5.844, 8.414, 11.902000000000001, 7.6160000000000005, 15.094000000000001, 12.106, 13.394, 5.8740000000000006, 12.39, 11.934000000000001, 9.224, 7.3100000000000005, 12.798, 9.21, 10.994, 7.1080000000000005, 6.972, 9.61, 14.544, 9.354000000000001, 7.494, 11.128, 10.436, 7.654, 16.934, 8.804, 4.236, 8.718, 7.88, 10.762, 11.304, 8.112, 13.046000000000001, 14.108, 14.086, 9.824, 7.312, 10.092, 13.014000000000001, 11.796, 14.796000000000001, 11.388, 11.962, 14.278, 3.728, 15.986, 13.772, 9.994, 9.946, 12.858, 11.806000000000001, 13.108, 14.114, 8.614, 10.054, 9.118, 14.508000000000001, 6.3500000000000005, 9.702, 8.708, 9.214, 10.574, 
14.290000000000001, 10.224, 13.546000000000001, 16.564, 6.416, 7.3180000000000005, 5.522, 9.478, 14.804, 5.692, 16.312, 2.196, 8.186, 16.25, 12.23, 7.378, 11.200000000000001, 11.918000000000001, 8.802, 12.038, 11.68, 11.656, 6.774, 9.138, 10.306000000000001, 9.822000000000001, 9.556000000000001, 9.08, 3.118, 14.518, 6.694, 9.628, 13.838000000000001, 7.692, 11.358, 8.096, 12.618, 14.524000000000001, 15.106, 8.558, 11.232000000000001, 6.914, 5.756, 12.724, 10.136000000000001, 8.742, 8.098, 11.112, 8.42, 9.268, 10.726, 8.686, 10.422, 8.586, 13.862, 14.642, 9.036, 7.59, 10.808, 6.902, 10.604000000000001, 14.434000000000001, 7.574, 14.096, 7.482, 5.992, 12.996, 8.946, 8.004, 7.362, 13.242, 13.346, 6.5760000000000005, 12.47, 7.466, 10.582, 8.69, 7.684, 17.28, 12.52, 9.33, 8.654, 9.792, 9.078, 9.73, 6.5280000000000005, 7.274, 6.088, 5.578, 13.756, 10.368, 12.874, 12.16, 12.754, 13.236, 4.054, 12.278, 5.054, 4.5200000000000005, 12.724, 7.574, 12.382, 5.704, 14.61, 4.224, 10.612, 10.758000000000001, 6.744, 9.822000000000001, 7.276, 6.376, 11.73, 7.71, 10.724, 10.524000000000001, 11.856, 11.392, 5.2, 13.976, 9.518, 8.344, 8.182, 11.196, 11.812, 7.514, 4.738, 9.99, 12.032, 13.17, 10.754, 13.288, 5.928, 11.46, 8.472, 10.426, 12.34, 11.306000000000001, 10.106, 6.664, 13.606, 10.336, 11.374, 8.76, 10.83, 14.126, 5.642, 11.376, 8.554, 12.298, 8.598, 16.952, 15.132, 16.058, 10.988, 13.722, 10.466000000000001, 8.832, 8.18, 7.554, 11.916, 11.836, 4.742, 8.622, 8.808, 8.222, 7.718, 9.696, 11.644, 10.976, 15.614, 7.574, 12.622, 9.08, 8.488, 13.14, 10.086, 10.268, 8.728, 13.224, 7.644, 8.424, 5.764, 6.828, 12.012, 12.664, 9.614, 6.928, 15.266, 11.118, 9.896, 12.034, 9.606, 14.25, 7.886, 11.548, 11.86, 9.596, 14.68, 12.596, 8.662, 14.51, 14.962, 19.794, 8.51, 9.856, 12.474, 7.8340000000000005, 10.47, 9.5, 14.450000000000001, 13.166, 10.156, 11.686, 10.77, 6.538, 6.026, 9.118, 7.742, 9.076, 14.338000000000001, 12.376, 8.348, 11.48, 6.5200000000000005, 9.716000000000001, 11.266, 9.89, 
11.238, 7.656000000000001, 11.394, 11.27, 7.3660000000000005, 8.254, 7.736, 8.162, 9.82, 12.542, 7.46, 7.87, 9.912, 10.024000000000001, 7.996, 7.518, 12.532, 10.96, 10.256, 9.986, 7.8340000000000005, 9.874, 8.638, 6.486, 10.07, 9.264, 11.124, 9.146, 13.790000000000001, 6.386, 12.056000000000001, 8.964, 9.678, 10.766, 8.856, 11.48, 4.104, 5.656, 1.3840000000000001, 2.63, 18.288, 9.468, 9.462, 7.0920000000000005, 14.02, 11.72, 8.342, 7.5680000000000005, 10.01, 7.8, 12.956, 12.094, 8.656, 14.414, 7.588, 11.288, 13.652000000000001, 9.448, 6.908, 9.038, 16.616, 10.46, 2.954, 7.36, 10.968, 8.376, 13.846, 9.0, 9.828, 7.516, 11.354000000000001, 8.166, 12.496, 13.91, 16.974, 9.006, 8.324, 12.238, 14.144, 13.17, 6.5440000000000005, 7.798, 5.414, 11.72, 8.834, 10.496, 7.8, 17.162, 11.406, 8.462, 12.854000000000001, 11.012, 9.15, 12.870000000000001, 5.94, 7.258, 6.01, 12.548, 10.482000000000001, 15.31, 14.18, 16.89, 11.5, 15.072000000000001, 8.062, 13.06, 6.0360000000000005, 1.412, 8.02, 8.174, 13.282, 7.8340000000000005, 10.554, 11.802, 13.77, 14.138, 11.888, 14.776, 13.624, 7.558, 8.846, 14.02, 12.536, 7.176, 8.766, 7.908, 8.394, 13.702, 16.018, 11.786, 13.278, 10.798, 6.142, 14.778, 9.384, 7.514, 11.518, 9.824, 15.276, 14.986, 4.654, 5.4, 9.472, 12.624, 8.908, 7.5440000000000005, 15.806000000000001, 7.702, 9.698, 7.002, 9.386000000000001, 10.262, 8.71, 6.5200000000000005, 13.626, 14.636000000000001, 14.412, 7.854, 11.77, 14.372, 14.104000000000001, 10.692, 8.264, 12.744, 9.304, 11.034, 13.86, 8.206, 8.496, 10.058, 15.308, 11.376, 9.618, 5.564, 10.868, 14.154, 9.700000000000001, 11.07, 10.796, 11.834, 13.018, 2.09, 5.712, 5.97, 9.876, 9.118, 14.23, 14.858, 13.906, 7.208, 13.154, 10.224, 11.13, 12.856, 5.172, 8.964, 9.84, 7.252, 7.244, 13.358, 10.724, 11.628, 10.83, 11.598, 8.99, 12.488, 9.824, 8.744, 12.476, 8.14, 10.662, 7.390000000000001, 8.714, 9.002, 10.620000000000001, 8.32, 8.73, 9.67, 17.632, 12.232000000000001, 13.494, 6.8100000000000005, 12.254, 10.948, 10.238, 
11.284, 6.964, 13.31, 7.1080000000000005, 12.574, 12.844, 9.178, 6.526, 8.934000000000001, 9.862, 10.614, 13.752, 7.642, 12.028, 7.822, 13.258000000000001, 10.73, 12.24, 5.824, 8.494, 8.89, 7.208, 8.134, 9.764, 9.428, 10.106, 8.418000000000001, 14.128, 10.316, 2.07, 11.054, 12.984, 11.892, 8.136000000000001, 7.412, 11.628, 11.27, 7.708, 5.634, 8.662, 12.258000000000001, 14.032, 13.858, 6.51, 14.866, 8.532, 14.016, 10.51, 4.046, 5.058, 12.016, 12.542, 9.666, 6.352, 14.02, 7.148000000000001, 7.348, 11.572000000000001, 9.728, 5.742, 5.39, 13.808, 4.518, 5.8, 10.392, 6.936, 15.014000000000001, 5.954, 6.868, 11.198, 6.796, 7.632000000000001, 11.866, 6.514, 12.288, 9.884, 9.354000000000001, 15.386000000000001, 7.972, 6.902, 13.91, 7.346, 11.838000000000001, 12.462, 6.312, 12.42, 11.646, 5.814, 15.75, 12.486, 7.306, 11.512, 8.838000000000001, 11.792, 10.528, 10.134, 5.7780000000000005, 4.428, 10.378, 13.502, 11.422, 8.69, 8.802, 8.068, 9.132, 11.200000000000001, 9.864, 10.878, 11.368, 7.992, 6.258, 10.702, 16.896, 4.846, 7.936, 11.144, 4.204, 14.472, 11.200000000000001, 10.808, 11.102, 5.514, 8.822000000000001, 2.376, 11.44, 10.662, 1.29, 8.584, 12.11, 6.6160000000000005, 9.382, 11.81, 4.042, 4.232, 6.964, 9.746, 6.8340000000000005, 14.502, 11.044, 14.118, 9.652000000000001, 11.954, 12.268, 6.390000000000001, 12.008000000000001, 11.19, 13.18, 11.018, 4.678, 7.8500000000000005, 7.388, 12.894, 9.354000000000001, 10.954, 9.986, 10.366, 12.124, 7.8, 16.052, 11.092, 10.406, 12.07, 6.3740000000000006, 5.706, 11.964, 5.862, 8.66, 16.22, 13.488, 7.42, 8.13, 15.88, 4.04, 12.226, 9.468, 10.072000000000001, 12.904, 7.656000000000001, 10.5, 10.514, 12.034, 9.356, 11.526, 9.556000000000001, 14.356, 12.474, 12.314, 8.682, 10.956, 11.950000000000001, 11.71, 12.08, 13.58, 4.46, 12.748000000000001, 8.272, 8.654, 11.562, 5.862, 14.454, 13.246, 7.712, 10.578, 10.104000000000001, 12.892, 10.834, 6.0200000000000005, 12.112, 7.808, 4.722, 4.8260000000000005, 10.112, 8.112, 10.784, 10.186, 
12.108, 12.88, 7.8180000000000005, 8.558, 9.118, 10.966000000000001, 11.262, 9.348, 7.862, 12.372, 6.5280000000000005, 4.406, 12.292, 10.306000000000001, 13.614, 8.290000000000001, 11.376, 7.416, 12.792, 6.678, 11.99, 8.324, 3.0620000000000003, 3.166, 6.2700000000000005, 9.182, 8.268, 10.268, 9.014, 8.724, 9.698, 4.0280000000000005, 4.248, 8.902000000000001, 9.11, 10.762, 10.118, 6.0840000000000005, 15.542, 6.894, 8.568, 7.332, 5.402, 4.0840000000000005, 8.128, 14.144, 16.45, 5.968, 13.11, 11.156, 7.132000000000001, 11.744, 10.398, 11.02, 14.732000000000001, 7.61, 13.916, 9.432, 11.702, 9.41, 9.82, 8.498, 9.378, 15.59, 5.932, 10.898, 13.552, 9.038, 9.99, 4.5680000000000005, 15.312000000000001, 11.466000000000001, 4.8020000000000005, 7.048, 9.386000000000001, 7.352, 14.14, 13.044, 5.422, 11.784, 9.402000000000001, 11.352, 14.284, 8.616, 6.996, 7.112, 10.74, 5.516, 11.744, 8.82, 12.342, 12.688, 14.63, 10.502, 9.3, 10.700000000000001, 11.758000000000001, 10.734, 9.422, 8.422, 11.316, 7.034, 8.244, 6.666, 7.9, 11.844, 9.02, 6.032, 11.726, 5.99, 6.974, 13.736, 7.5280000000000005, 10.052, 13.974, 10.858, 12.582, 9.176, 12.308, 12.68, 8.646, 11.816, 10.074, 10.950000000000001, 14.34, 9.424, 11.834, 15.414, 14.592, 8.11, 8.744, 10.618, 8.856, 14.49, 10.104000000000001, 1.948, 6.0440000000000005, 6.184, 11.284, 14.43, 12.26, 11.752, 7.184, 17.014, 6.248, 10.158, 6.37, 12.322000000000001, 15.782, 8.63, 9.040000000000001, 11.442, 9.642, 14.206, 12.162, 11.578, 11.164, 11.358, 9.924, 13.068, 11.666, 8.746, 16.056, 8.71, 7.542, 8.786, 6.08, 7.074, 9.164, 11.594, 13.658, 4.846, 9.504, 7.352, 4.308, 9.986, 9.778, 10.412, 9.35, 6.292, 9.044, 13.418000000000001, 10.358, 7.232, 13.854000000000001, 8.26, 9.308, 6.46, 15.948, 12.194, 8.242, 11.074, 13.818, 9.932, 12.006, 10.686, 13.356, 9.106, 10.164, 13.562000000000001, 8.870000000000001, 13.25, 8.334, 8.476, 9.738, 12.872, 7.936, 16.842, 9.642, 1.462, 12.11, 7.598, 4.738, 7.936, 6.656000000000001, 8.696, 10.964, 13.648, 
5.5520000000000005, 7.926, 9.776, 8.362, 7.856, 8.324, 5.436, 11.1, 4.3100000000000005, 10.46, 9.598, 8.192, 8.22, 16.254, 12.102, 8.498, 6.244, 9.186, 10.236, 3.532, 10.576, 7.324, 11.832, 12.786, 11.008000000000001, 14.224, 9.274000000000001, 9.078, 5.034, 9.138, 7.3500000000000005, 8.922, 7.152, 8.008000000000001, 8.396, 8.286, 4.782, 11.758000000000001, 10.022, 8.38, 7.432, 7.686, 13.46, 5.632, 11.352, 11.136000000000001, 2.578, 9.46, 6.668, 13.224, 8.862, 16.602, 12.656, 11.51, 5.994, 5.344, 8.97, 9.876, 5.954, 7.736, 10.290000000000001, 10.922, 7.8020000000000005, 7.132000000000001, 8.96, 7.694, 3.246, 12.464, 9.608, 2.266, 10.822000000000001, 12.788, 9.746, 11.182, 6.872, 11.594, 10.05, 13.744, 9.71, 9.24, 8.832, 7.386, 11.386000000000001, 12.25, 5.222, 6.86, 9.682, 13.176, 9.228, 16.104, 6.288, 14.232000000000001, 13.772, 8.694, 8.2, 9.804, 8.256, 10.700000000000001, 11.418000000000001, 12.762, 14.714, 5.44, 10.84, 11.688, 12.496, 10.382, 8.546, 10.518, 12.984, 13.266, 10.546, 11.158, 10.116, 7.582, 12.018, 11.924, 12.49, 15.708, 10.07, 7.12, 14.144, 14.75, 7.408, 10.31, 9.186, 8.906, 7.474, 12.844, 11.39, 5.372, 10.056000000000001, 8.714, 7.832, 8.462, 7.964, 14.154, 10.44, 10.48, 7.614, 14.096, 7.672000000000001, 9.318, 6.948, 17.978, 18.092, 14.036, 0.784, 5.214, 8.978, 9.488, 15.540000000000001, 10.22, 9.67, 11.57, 5.72, 10.044, 12.974, 6.332, 10.044, 12.646, 6.356, 10.49, 8.762, 9.878, 12.206, 3.624, 3.774, 9.144, 12.904, 8.07, 8.27, 8.792, 15.394, 7.476, 9.822000000000001, 14.762, 8.342, 7.37, 15.346, 12.662, 12.718, 8.874, 8.016, 13.126, 11.548, 10.262, 9.69, 7.84, 16.046, 16.61, 10.796, 8.72, 12.51, 8.354000000000001, 14.27, 12.592, 2.862, 14.188, 10.496, 13.3, 7.596, 9.244, 12.142, 12.794, 13.582, 6.554, 13.51, 7.8580000000000005, 8.858, 9.636000000000001, 8.268, 6.996, 7.446, 9.608, 8.166, 14.936, 7.824, 15.736, 13.284, 6.324, 12.452, 9.888, 12.368, 10.05, 11.256, 11.19, 11.912, 11.902000000000001, 9.612, 6.298, 15.328000000000001, 12.5, 6.726, 
8.198, 7.394, 6.314, 6.2, 9.098, 7.936, 14.06, 6.24, 9.984, 7.016, 7.9, 12.41, 13.208, 12.244, 10.794, 13.85, 13.914, 6.474, 9.516, 14.568, 9.61, 10.092, 6.95, 8.282, 8.994, 13.528, 9.584, 11.484, 5.128, 8.448, 9.436, 16.176000000000002, 5.5600000000000005, 8.782, 5.502, 7.658, 9.632, 10.362, 8.092, 10.676, 8.368, 13.172, 11.228, 7.284, 10.834, 8.124, 8.63, 12.234, 12.852, 5.796, 9.392, 7.388, 14.31, 8.586, 10.298, 17.604, 16.794, 12.502, 12.818, 10.076, 11.058, 11.532, 10.968, 9.686, 12.424, 13.386000000000001, 10.618, 13.56, 6.936, 9.044, 10.66, 10.222, 9.738, 9.862, 12.546, 8.278, 7.542, 11.874, 7.368, 11.134, 12.192, 4.83, 8.93, 5.14, 13.666, 12.646, 12.612, 13.734, 11.622, 10.116, 13.514000000000001, 11.056000000000001, 9.808, 10.426, 8.252, 11.658, 14.382, 5.966, 12.194, 8.562, 3.8320000000000003, 8.534, 1.006, 7.784, 7.054, 7.5200000000000005, 11.022, 11.124, 5.998, 9.778, 10.368, 12.01, 9.892, 13.1, 8.584, 9.976, 9.662, 10.56, 6.634, 11.776, 12.628, 9.200000000000001, 14.598, 13.870000000000001, 4.894, 4.772, 7.886, 10.306000000000001, 12.934000000000001, 9.282, 17.114, 6.8500000000000005, 12.06, 8.512, 8.276, 6.236, 8.8, 5.024, 7.882000000000001, 5.7, 8.158, 4.478, 10.584, 12.622, 5.352, 8.894, 15.148, 12.008000000000001, 9.99, 8.726, 11.040000000000001, 8.538, 7.572, 7.354, 12.588000000000001, 9.812, 10.58, 10.506, 9.200000000000001, 13.156, 4.678, 10.672, 12.128, 11.728, 7.848, 6.742, 9.336, 9.352, 8.224, 6.628, 9.706, 13.1, 9.442, 16.052, 7.392, 7.256, 9.448, 12.188, 12.486, 12.872, 11.26, 2.968, 12.422, 8.222, 12.988, 10.356, 12.71, 13.048, 13.05, 13.120000000000001, 9.22, 12.586, 7.324, 13.122, 7.92, 13.422, 11.122, 15.23, 6.976, 9.892, 7.988, 8.984, 8.828, 10.368, 7.006, 13.764000000000001, 14.664, 11.478, 14.002, 12.386000000000001, 11.94, 6.2780000000000005, 10.044, 10.138, 6.142, 7.23, 9.016, 11.446, 13.994, 7.526, 7.844, 10.414, 7.622, 6.6160000000000005, 5.894, 10.64, 12.008000000000001, 12.646, 12.192, 11.09, 6.726, 9.198, 12.09, 11.85, 12.304, 
10.61, 12.44, 15.030000000000001, 10.464, 14.542, 6.338, 9.518, 8.644, 12.934000000000001, 4.186, 11.468, 9.886000000000001, 16.194, 6.8420000000000005, 12.27, 10.132, 4.902, 6.086, 5.91, 12.586, 11.224, 7.182, 14.096, 11.412, 11.178, 9.398, 7.538, 9.556000000000001, 5.3180000000000005, 12.838000000000001, 10.774000000000001, 8.016, 13.69, 10.016, 15.966000000000001, 12.870000000000001, 7.016, 8.996, 8.782, 6.106, 10.984, 5.7940000000000005, 10.1, 7.976, 8.57, 13.716000000000001, 11.97, 9.446, 9.594, 6.554, 10.186, 9.304, 7.774, 11.406, 11.058, 8.846, 9.91, 6.816, 12.936, 8.620000000000001, 8.374, 7.956, 6.434, 10.494, 11.962, 9.126, 9.838000000000001, 13.244, 11.252, 7.71, 7.184, 8.42, 14.68, 12.982000000000001, 8.442, 7.390000000000001, 10.984, 3.536, 4.28, 9.03, 12.884, 11.348, 11.532, 6.236, 9.32, 6.776, 2.668, 12.726, 6.284, 4.178, 15.112, 6.138, 9.154, 14.23, 11.912, 11.86, 12.916, 11.676, 5.126, 8.804, 13.442, 9.126, 7.1000000000000005, 6.244, 12.97, 8.664, 9.404, 13.344, 9.592, 7.7940000000000005, 11.938, 12.674, 6.312, 8.07, 9.870000000000001, 13.974, 7.5200000000000005, 12.556000000000001, 7.176, 14.332, 7.196, 13.084, 11.948, 8.922, 8.016, 12.248000000000001, 8.638, 11.1, 8.702, 9.664, 8.988, 12.586, 9.484, 10.876, 14.32, 15.916, 8.346, 9.004, 2.072, 9.006, 12.202, 7.354, 6.532, 12.162, 8.612, 9.76, 17.118000000000002, 15.756, 8.436, 10.536, 14.062000000000001, 6.572, 13.318, 14.496, 11.596, 8.442, 10.046, 11.34, 11.972, 16.14, 9.296, 5.414, 8.69, 12.692, 11.254, 6.838, 9.96, 12.762, 11.858, 10.81, 4.722, 12.356, 4.664, 7.426, 15.984, 13.888, 8.232, 12.542, 4.706, 13.35, 9.398, 9.244, 4.486, 9.538, 11.542, 12.286, 11.73, 6.224, 7.242, 8.768, 10.558, 7.916, 12.776, 6.38, 11.154, 7.97, 9.286, 12.228, 11.338000000000001, 11.648, 10.468, 10.074, 11.264, 9.648, 8.05, 7.5360000000000005, 12.768, 7.708, 8.506, 8.056000000000001, 10.86, 10.066, 9.232, 9.93, 10.034, 10.288, 14.826, 5.8, 13.972, 11.89, 10.476, 12.034, 11.238, 13.712, 10.446, 7.97, 11.956, 
8.322000000000001, 12.638, 14.326, 13.544, 16.608, 16.676000000000002, 11.432, 9.998, 8.01, 12.11, 8.986, 13.744, 4.886, 6.234, 9.388, 7.006, 9.352, 14.594, 9.196, 10.172, 11.966000000000001, 7.704, 12.620000000000001, 5.08, 2.996, 8.582, 7.55, 7.728, 13.604000000000001, 11.006, 12.786, 5.34, 4.732, 9.088000000000001, 10.06, 6.308, 9.22, 12.358, 6.988, 14.488, 15.262, 14.07, 11.36, 9.21, 11.886000000000001, 9.416, 10.978, 13.582, 9.862, 11.5, 10.294, 11.346, 13.192, 12.968, 7.43, 15.056000000000001, 12.36, 11.342, 13.834, 6.93, 5.5680000000000005, 12.958, 7.890000000000001, 8.954, 7.92, 12.726, 8.262, 6.646, 10.620000000000001, 14.484, 8.528, 12.13, 9.026, 12.318, 10.36, 7.198, 8.676, 8.136000000000001, 6.496, 12.332, 14.280000000000001, 8.688, 10.606, 9.768, 10.788, 3.452, 9.918000000000001, 10.152000000000001, 9.732, 10.81, 7.7860000000000005, 5.514, 11.992, 9.546, 6.11, 16.532, 13.712, 14.42, 11.202, 6.38, 10.912, 12.048, 15.006, 12.362, 6.296, 11.27, 7.244, 9.506, 0.374, 15.878, 8.092, 7.5200000000000005, 8.502, 12.576, 12.146, 10.766, 10.502, 10.098, 8.194, 6.972, 8.588000000000001, 8.808, 13.738, 7.88, 10.772, 9.874, 16.832, 9.434000000000001, 10.71, 8.718, 12.446, 9.162, 9.868, 13.844, 8.48, 10.396, 7.7780000000000005, 12.01, 5.986, 8.638, 13.89, 6.916, 11.134, 8.612, 9.952, 8.044, 11.292, 11.91, 10.008000000000001, 10.914, 16.22, 11.572000000000001, 12.374, 10.766, 12.918000000000001, 11.904, 6.2860000000000005, 7.78, 11.49, 10.756, 7.986, 7.216, 9.258000000000001, 7.796, 10.422, 7.432, 4.712, 14.222, 7.016, 8.77, 6.942, 10.744, 11.144, 5.396, 11.89, 5.99, 8.942, 8.85, 7.258, 12.268, 10.252, 11.176, 7.390000000000001, 10.31, 11.124, 7.228, 13.686, 10.196, 14.438, 6.8420000000000005, 4.946, 12.226, 8.02, 11.126, 7.708, 12.426, 6.2, 10.812, 2.474, 9.858, 11.55, 7.0280000000000005, 9.988, 5.3740000000000006, 3.954, 7.422000000000001, 8.25, 8.736, 10.678, 10.82, 13.836, 7.122, 14.652000000000001, 12.902000000000001, 14.126, 8.562, 9.568, 9.618, 8.17, 
9.540000000000001, 11.364, 9.618, 7.096, 7.482, 11.096, 12.344, 9.028, 15.932, 15.834, 7.0120000000000005, 5.8180000000000005, 12.078, 9.934000000000001, 12.916, 7.518, 8.790000000000001, 12.424, 7.094, 9.046, 8.89, 8.794, 13.216000000000001, 8.502, 6.782, 11.1, 10.454, 6.736, 13.43, 10.972, 8.596, 5.632, 10.552, 0.87, 13.062, 8.13, 5.76, 10.848, 9.018, 14.22, 14.038, 9.434000000000001, 9.38, 7.288, 12.886000000000001, 10.118, 9.492, 9.488, 12.068, 19.394000000000002, 9.202, 12.898, 12.49, 6.916, 11.234, 5.5520000000000005, 12.956, 10.352, 4.662, 10.784, 9.222, 12.276, 9.92, 6.958, 10.932, 10.182, 5.522, 12.15, 7.3, 12.296, 4.21, 10.842, 10.024000000000001, 8.278, 14.26, 10.038, 12.988, 11.174, 14.65, 8.978, 6.932, 8.85, 14.676, 7.266, 15.860000000000001, 13.748000000000001, 6.898000000000001, 10.958, 12.966000000000001, 7.29, 12.198, 14.258000000000001, 6.6000000000000005, 8.436, 10.16, 9.858, 9.168000000000001, 10.484, 5.632, 13.298, 12.530000000000001, 4.0040000000000004, 2.81, 12.754, 10.904, 11.568, 9.522, 7.008, 10.266, 10.3, 8.336, 10.594, 10.488, 12.212, 10.782, 8.15, 9.496, 4.632, 12.296, 8.266, 11.166, 7.55, 10.086, 11.02, 13.314, 10.608, 10.158, 10.316, 10.392, 6.008, 11.98, 9.518, 10.076, 9.57, 9.084, 12.478, 7.208, 10.178, 13.276, 14.898, 10.416, 8.016, 7.508, 13.41, 13.65, 11.398, 6.664, 11.628, 12.798, 9.786, 11.59, 12.888, 8.782, 7.502, 10.346, 8.284, 11.040000000000001, 9.782, 10.116, 10.434000000000001, 8.648, 10.912, 15.16, 13.076, 6.598, 12.584, 5.652, 8.482, 9.784, 10.284, 10.048, 7.976, 6.164, 9.006, 12.462, 13.42, 10.136000000000001, 11.576, 9.266, 8.592, 16.306, 8.632, 3.198, 11.746, 8.51, 10.112, 9.762, 11.322000000000001, 9.492, 10.114, 11.872, 4.946, 8.658, 9.646, 7.75, 16.312, 8.888, 9.496, 6.51, 5.306, 4.978, 15.624, 5.4, 10.902000000000001, 11.906, 8.896, 12.6, 12.040000000000001, 10.194, 10.784, 11.444, 11.352, 11.012, 10.74, 7.406000000000001, 12.56, 16.794, 12.842, 4.202, 12.028, 10.058, 11.594, 9.02, 13.718, 8.646, 11.186, 
8.136000000000001, 10.126, 8.97, 9.126, 10.596, 5.944, 8.92, 3.614, 6.8740000000000006, 5.674, 11.35, 10.428, 12.706, 15.588000000000001, 10.552, 7.706, 8.27, 7.206, 8.83, 6.7940000000000005, 12.796000000000001, 11.432, 14.238, 6.968, 8.192, 8.18, 10.406, 10.714, 4.422, 8.642, 12.27, 9.572000000000001, 5.69, 10.484, 7.436, 7.394, 12.208, 11.028, 7.8, 10.986, 10.78, 8.492, 12.714, 4.682, 15.31, 8.434000000000001, 9.738, 15.328000000000001, 8.354000000000001, 13.126, 9.128, 6.7780000000000005, 0.858, 9.68, 8.47, 6.622, 11.522, 11.714, 7.558, 10.58, 9.91, 11.392, 9.892, 11.27, 13.72, 9.248, 5.198, 7.19, 11.534, 7.74, 14.256, 12.184000000000001, 9.486, 10.21, 14.74, 4.5360000000000005, 11.902000000000001, 11.762, 5.466, 8.266, 17.886, 13.22, 13.528, 8.396, 9.11, 5.414, 8.766, 11.788, 11.514000000000001, 11.014, 9.442, 4.896, 16.42, 12.426, 18.35, 7.314, 9.736, 11.062, 9.018, 6.434, 7.322, 14.098, 8.96, 7.098, 12.674, 8.454, 9.4, 12.522, 10.666, 9.092, 12.084, 13.754, 6.17, 6.612, 13.306000000000001, 9.986, 9.76, 10.540000000000001, 12.366, 8.84, 8.782, 9.736, 9.712, 12.844, 14.214, 12.408, 7.58, 14.058, 6.69, 12.284, 11.388, 12.07, 8.284, 9.064, 17.494, 8.838000000000001, 6.78, 9.298, 12.424, 10.904, 8.3, 6.93, 16.404, 11.706, 9.352, 8.41, 17.884, 9.620000000000001, 7.7620000000000005, 8.378, 7.214, 9.61, 11.59, 13.514000000000001, 8.088000000000001, 10.352, 11.086, 6.704, 7.618, 10.52, 6.276, 11.266, 6.128, 13.994, 7.63, 12.786, 7.256, 7.588, 9.61, 15.452, 8.016, 15.524000000000001, 10.658, 12.56, 4.96, 7.922000000000001, 11.836, 8.972, 6.462, 10.42, 14.004, 14.722, 7.182, 11.59, 14.692, 10.132, 11.486, 10.988, 8.528, 9.412, 11.724, 14.17, 9.874, 8.278, 12.992, 8.738, 9.768, 13.986, 8.886000000000001, 8.632, 12.534, 5.5, 7.44, 6.862, 9.958, 8.42, 12.026, 12.01, 9.59, 8.864, 7.684, 9.346, 10.858, 11.858, 16.062, 7.546, 15.22, 10.856, 11.026, 15.552, 11.312, 10.702, 6.942, 14.794, 16.604, 7.684, 8.498, 5.244, 9.450000000000001, 11.094, 9.612, 8.952, 10.122, 11.11, 
14.184000000000001, 8.06, 7.2940000000000005, 5.8100000000000005, 7.902, 9.332, 12.47, 9.532, 8.578, 9.396, 9.632, 11.664, 11.752, 11.208, 9.372, 5.442, 10.122, 11.506, 12.906, 14.344, 10.442, 11.708, 10.07, 8.546, 12.436, 9.056000000000001, 9.790000000000001, 8.594, 3.126, 9.202, 10.214, 12.958, 6.8, 13.636000000000001, 13.166, 8.34, 6.17, 10.734, 11.604000000000001, 13.408, 7.136, 13.442, 6.654, 13.142, 16.766000000000002, 12.620000000000001, 12.104000000000001, 11.308, 8.25, 11.1, 9.786, 10.654, 9.644, 7.36, 5.312, 8.476, 10.772, 8.598, 5.0680000000000005, 11.78, 5.88, 5.782, 13.758000000000001, 15.288, 13.538, 12.17, 12.616, 12.308, 11.922, 10.534, 7.67, 11.450000000000001, 8.462, 16.274, 13.194, 12.248000000000001, 9.456, 8.974, 13.568, 10.552, 11.19, 10.688, 15.516, 12.426, 11.584, 11.454, 9.646, 5.606, 9.370000000000001, 4.7780000000000005, 9.544, 11.092, 8.728, 10.344, 6.79, 18.304000000000002, 7.692, 13.358, 9.162, 8.540000000000001, 8.42, 10.208, 13.516, 5.712, 16.93, 6.922, 6.764, 7.204, 13.144, 8.026, 6.5, 7.898000000000001, 11.278, 11.764000000000001, 8.798, 3.7800000000000002, 10.598, 12.73, 13.062, 11.258000000000001, 6.348, 11.206, 7.046, 8.054, 4.19, 12.176, 12.386000000000001, 9.636000000000001, 12.316, 10.244, 14.078000000000001, 10.744, 7.698, 13.428, 11.856, 9.94, 12.52, 12.834, 2.548, 6.3180000000000005, 8.882, 7.972, 12.012, 6.8740000000000006, 13.192, 6.946, 7.612, 9.464, 10.066, 9.450000000000001, 6.684, 14.280000000000001, 6.458, 12.562, 3.71, 10.292, 9.822000000000001, 10.896, 10.756, 12.870000000000001, 7.784, 13.964, 5.664, 7.344, 12.636000000000001, 5.18, 12.736, 10.0, 10.63, 9.218, 10.05, 13.036, 6.33, 10.222, 11.974, 9.854000000000001, 13.924, 10.498, 7.672000000000001, 14.936, 7.008, 9.258000000000001, 9.454, 9.48, 13.248000000000001, 2.83, 8.982, 9.262, 10.066, 13.738, 14.042, 11.254, 7.214, 11.272, 7.33, 11.288, 11.568, 10.286, 11.82, 7.048, 5.01, 11.218, 10.288, 10.438, 8.456, 14.198, 9.774000000000001, 8.326, 9.73, 
8.354000000000001, 11.492, 10.03, 3.67, 14.69, 13.738, 11.266, 11.372, 12.834, 11.526, 11.342, 11.302, 17.650000000000002, 9.526, 10.266, 13.286, 12.858, 8.904, 12.714, 7.264, 6.684, 7.546, 16.46, 6.744, 7.84, 13.708, 16.136, 8.82, 15.116, 7.898000000000001, 13.926, 7.84, 14.832, 12.224, 11.282, 12.21, 12.98, 13.644, 16.916, 5.0920000000000005, 13.278, 9.65, 8.044, 12.632, 8.736, 12.556000000000001, 6.0680000000000005, 4.63, 9.120000000000001, 11.370000000000001, 10.338000000000001, 12.194, 9.824, 11.064, 9.306000000000001, 8.494, 7.34, 7.104, 11.09, 7.5120000000000005, 10.498, 8.236, 12.61, 12.658, 14.614, 15.71, 6.776, 10.950000000000001, 9.762, 4.972, 12.796000000000001, 13.586, 14.502, 9.11, 12.748000000000001, 11.256, 13.13, 16.126, 4.282, 4.426, 5.566, 7.532, 8.808, 8.996, 16.696, 8.206, 6.206, 11.604000000000001, 5.792, 12.912, 7.122, 10.518, 11.004, 12.682, 11.540000000000001, 12.200000000000001, 5.686, 9.13, 4.912, 9.074, 9.798, 8.15, 13.694, 11.77, 11.444, 17.338, 13.562000000000001, 12.492, 11.062, 8.372, 7.708, 8.754, 6.15, 7.606, 4.86, 8.57, 14.548, 12.244, 11.956, 5.846, 11.608, 13.376, 6.094, 15.138, 10.206, 8.672, 13.178, 10.338000000000001, 10.07, 10.44, 7.194, 13.556000000000001, 7.596, 13.374, 8.58, 10.858, 6.634, 7.394, 10.322000000000001, 12.09, 5.694, 8.156, 6.94, 8.454, 7.478, 8.6, 10.058, 13.694, 7.414000000000001, 9.458, 5.71, 9.592, 12.622, 12.722, 8.542, 10.964, 9.174, 4.082, 9.368, 8.546, 4.838, 13.638, 15.808, 13.578, 15.006, 9.846, 14.31, 8.55, 9.548, 6.82, 5.868, 12.33, 13.332, 9.764, 6.7780000000000005, 7.426, 6.022, 10.092, 6.218, 10.428, 11.048, 8.032, 6.5440000000000005, 9.258000000000001, 10.648, 7.008, 6.446, 10.438, 15.676, 9.02, 4.986, 10.03, 10.404, 8.366, 8.790000000000001, 14.786, 6.892, 6.8100000000000005, 11.72, 14.328000000000001, 11.364, 5.274, 13.522, 8.964, 12.22, 11.628, 4.506, 13.976, 13.396, 13.812000000000001, 6.0760000000000005, 4.784, 11.962, 11.022, 6.25, 7.948, 8.75, 11.978, 9.462, 12.322000000000001, 7.272, 
9.774000000000001, 7.822, 9.094, 9.574, 11.472, 7.856, 6.336, 11.06, 12.676, 5.728, 10.132, 9.796, 9.652000000000001, 9.756, 6.188, 8.716, 10.988, 5.016, 12.478, 8.076, 17.582, 7.902, 9.604000000000001, 9.932, 11.536, 11.362, 11.36, 11.452, 7.82, 8.544, 10.072000000000001, 8.24, 14.142, 8.884, 11.218, 5.208, 9.342, 11.85, 8.6, 5.892, 7.882000000000001, 12.16, 11.3, 9.61, 13.97, 8.464, 7.428, 10.338000000000001, 10.522, 11.082, 10.542, 5.748, 11.664, 16.07, 7.636, 6.822, 13.922, 6.98, 9.0, 8.14, 7.662, 10.602, 5.396, 10.678, 7.572, 11.024000000000001, 10.952, 13.200000000000001, 8.446, 14.368, 5.244, 10.106, 10.692, 11.634, 14.492, 5.434, 12.344, 5.524, 8.584, 10.042, 13.204, 11.578, 13.138, 13.174, 10.514, 11.03, 4.816, 6.894, 8.334, 14.934000000000001, 13.55, 15.138, 6.598, 7.556, 9.844, 8.59, 6.304, 13.958, 8.062, 8.476, 11.198, 8.688, 10.826, 11.852, 9.346, 11.22, 12.73, 13.528, 11.49, 8.326, 11.124, 8.606, 4.882, 16.242, 10.794, 6.388, 6.666, 16.256, 9.716000000000001, 8.262, 13.426, 13.538, 11.46, 14.804, 12.26, 13.11, 10.046, 9.13, 13.006, 9.766, 11.81, 11.1, 9.374, 8.68, 9.454, 7.51, 9.994, 16.528, 13.658, 12.392, 10.984, 5.55, 8.934000000000001, 5.022, 11.148, 4.616, 11.34, 10.632, 9.84, 7.8740000000000006, 10.938, 10.246, 4.078, 17.306, 9.526, 12.794, 14.108, 10.83, 10.594, 10.88, 12.046, 6.228, 11.89, 7.904, 6.398000000000001, 15.968, 8.734, 9.444, 7.45, 15.486, 6.438, 10.74, 12.624, 12.948, 12.448, 7.58, 12.018, 6.424, 5.978, 9.732, 10.26, 11.914, 8.404, 10.248, 8.58, 6.614, 4.166, 7.392, 8.644, 15.5, 10.914, 13.638, 11.582, 7.7860000000000005, 5.38, 11.968, 7.344, 12.734, 12.906, 11.354000000000001, 11.152000000000001, 13.918000000000001, 14.4, 8.946, 9.362, 13.002, 13.138, 12.448, 12.534, 7.498, 12.306000000000001, 11.408, 9.362, 11.828, 10.962, 8.386000000000001, 7.522, 11.19, 9.792, 14.412, 8.136000000000001, 11.726, 13.492, 7.508, 11.094, 11.11, 8.744, 7.784, 11.574, 9.672, 9.282, 9.142, 11.966000000000001, 5.808, 8.53, 11.158, 12.426, 13.564, 
8.474, 12.76, 8.298, 9.338000000000001, 11.23, 13.776, 10.302, 13.404, 6.942, 9.96, 10.612, 13.586, 9.018, 13.09, 6.008, 16.9, 5.548, 10.396, 4.532, 10.904, 8.332, 11.564, 12.282, 4.71, 11.896, 10.008000000000001, 6.0600000000000005, 10.646, 12.768, 11.18, 15.038, 8.65, 10.936, 9.486, 17.964, 10.852, 13.57, 10.596, 15.34, 5.63, 5.316, 12.812, 7.352, 9.282, 10.19, 9.65, 6.472, 5.488, 9.76, 11.778, 14.186, 7.156000000000001, 13.168000000000001, 7.828, 8.308, 12.808, 8.132, 10.68, 8.57, 8.736, 10.678, 6.974, 10.906, 13.018, 13.168000000000001, 14.348, 8.822000000000001, 7.96, 8.436, 13.4, 12.72, 8.058, 8.498, 12.540000000000001, 9.866, 8.742, 5.128, 13.58, 12.92, 10.422, 16.146, 8.288, 6.242, 11.33, 9.308, 7.412, 14.032, 10.826, 9.196, 9.916, 9.808, 7.194, 14.982000000000001, 8.466, 9.938, 11.902000000000001, 10.088000000000001, 13.384, 10.972, 13.23, 14.002, 10.452, 6.772, 7.812, 7.8020000000000005, 8.192, 11.454, 13.518, 10.728, 5.3260000000000005, 11.18, 9.796, 8.824, 10.9, 11.844, 11.376, 9.698, 8.28, 6.896, 9.05, 14.442, 10.414, 8.156, 18.046, 7.292, 9.518, 11.676, 9.734, 8.198, 2.77, 13.498000000000001, 11.562, 14.234, 4.892, 9.75, 7.218, 9.26, 12.348, 10.622, 12.164, 9.272, 9.792, 8.768, 12.866, 9.44, 10.772, 11.014, 8.856, 8.68, 7.578, 12.092, 6.488, 12.43, 7.666, 9.666, 9.336, 12.032, 9.666, 8.708, 10.344, 2.858, 6.96, 9.69, 12.452, 11.562, 10.608, 13.166, 12.994, 7.84, 10.316, 6.09, 11.354000000000001, 10.848, 9.954, 11.168000000000001, 7.868, 10.236, 7.564, 15.514000000000001, 4.25, 9.374, 13.054, 6.19, 9.262, 6.162, 6.5760000000000005, 7.194, 9.966000000000001, 11.032, 15.81, 9.544, 12.346, 8.22, 9.282, 8.778, 12.842, 4.812, 9.068, 5.908, 9.518, 6.642, 8.396, 12.484, 5.89, 14.33, 10.97, 13.636000000000001, 14.896, 7.094, 12.07, 5.89, 9.33, 5.67, 10.266, 13.514000000000001, 14.176, 13.006, 11.838000000000001, 9.13, 11.884, 9.428, 8.092, 3.56, 7.0360000000000005, 7.638, 9.758000000000001, 9.704, 9.632, 12.814, 10.18, 11.578, 13.264000000000001, 16.11, 9.828, 
8.204, 13.062, 17.236, 9.976, 13.88, 5.368, 10.564, 12.482000000000001]\n", - "[ -3.448, -3.115] : \n", - "[ -3.115, -2.782] : \n", - "[ -2.782, -2.448] : #\n", - "[ -2.448, -2.115] : #\n", - "[ -2.115, -1.782] : ##\n", - "[ -1.782, -1.449] : #####\n", - "[ -1.449, -1.116] : ########\n", - "[ -1.116, -0.782] : ##########\n", - "[ -0.782, -0.449] : ################\n", - "[ -0.449, -0.116] : ##################\n", - "[ -0.116, 0.217] : ####################\n", - "[ 0.217, 0.550] : #################\n", - "[ 0.550, 0.884] : #############\n", - "[ 0.884, 1.217] : ###########\n", - "[ 1.217, 1.550] : #######\n", - "[ 1.550, 1.883] : ###\n", - "[ 1.883, 2.216] : #\n", - "[ 2.216, 2.550] : #\n", - "[ 2.550, 2.883] : \n", - "[ 2.883, 3.216] : \n", - "g1 mean = -0.016807999999999896\n", - "g1 variance = 1.020684979135999\n", - "[ 0.374, 1.349] : \n", - "[ 1.349, 2.324] : \n", - "[ 2.324, 3.298] : #\n", - "[ 3.298, 4.273] : ##\n", - "[ 4.273, 5.248] : ####\n", - "[ 5.248, 6.223] : #######\n", - "[ 6.223, 7.198] : ###########\n", - "[ 7.198, 8.172] : ##############\n", - "[ 8.172, 9.147] : #################\n", - "[ 9.147, 10.122] : ####################\n", - "[ 10.122, 11.097] : #################\n", - "[ 11.097, 12.072] : ################\n", - "[ 12.072, 13.046] : #############\n", - "[ 13.046, 14.021] : #########\n", - "[ 14.021, 14.996] : ######\n", - "[ 14.996, 15.971] : ###\n", - "[ 15.971, 16.946] : ##\n", - "[ 16.946, 17.920] : \n", - "[ 17.920, 18.895] : \n", - "[ 18.895, 19.870] : \n", - "g2 mean = 10.014089200000036\n", - "g2 variance = 8.766713087243378\n" + "[-0.6879999999999997, 1.3200000000000003, -1.424, -1.0, -0.7199999999999998, -0.3839999999999999, 0.8479999999999999, -1.1680000000000001, -0.43199999999999994, 0.41600000000000037, -0.3919999999999999, 0.13600000000000012, 0.48800000000000043, 0.28000000000000025, 0.16800000000000015, 2.4000000000000004, -0.472, -0.5920000000000001, -0.3679999999999999, 0.3440000000000003, 0.3360000000000003, 
1.4960000000000004, -1.56, 1.096, -0.43199999999999994, -0.008000000000000007, -0.496, 1.1600000000000001, -1.592, 0.016000000000000014, 0.8959999999999999, 0.8639999999999999, 0.26400000000000023, 0.2240000000000002, -0.6959999999999997, -0.008000000000000007, -0.7919999999999998, 1.4160000000000004, -1.528, 0.41600000000000037, 0.6319999999999997, -2.7119999999999997, -0.6320000000000001, 2.6480000000000006, 1.0, -1.072, -0.7199999999999998, -0.43999999999999995, -0.6959999999999997, 0.3520000000000003, -1.3359999999999999, -0.6959999999999997, 0.8559999999999999, -2.112, 0.40800000000000036, -1.96, 1.12, 0.5200000000000005, 1.96, -0.08800000000000008, 0.4720000000000004, 0.15200000000000014, 1.1440000000000001, -0.31199999999999983, 1.2400000000000002, -0.552, -0.2639999999999998, -0.21599999999999975, -0.7759999999999998, -0.45599999999999996, -0.2879999999999998, -1.048, 1.032, 0.09600000000000009, 1.1920000000000002, 0.3520000000000003, -0.4079999999999999, -0.6320000000000001, -0.3039999999999998, -1.1440000000000001, 1.3920000000000003, -0.10400000000000009, -1.472, 0.5360000000000005, 0.4240000000000004, -0.3599999999999999, 0.7839999999999998, 0.6959999999999997, 1.5920000000000005, -0.05600000000000005, -0.48, 0.5440000000000005, -0.2559999999999998, 0.30400000000000027, 1.4240000000000004, -0.968, -0.7999999999999998, 2.128, 0.5680000000000005, -2.416, 0.3200000000000003, 0.08000000000000007, 0.5760000000000005, 0.30400000000000027, -0.3759999999999999, -1.616, -1.2719999999999998, -1.1999999999999997, -0.7279999999999998, 1.3920000000000003, 0.14400000000000013, 0.28800000000000026, -0.3679999999999999, 0.5200000000000005, -0.7599999999999998, -0.7279999999999998, -1.968, -0.7439999999999998, -1.944, 0.17600000000000016, -0.18399999999999972, 0.2240000000000002, -0.976, -0.3679999999999999, 0.03200000000000003, 0.26400000000000023, -0.19199999999999973, 1.7519999999999998, -1.1680000000000001, 0.5600000000000005, 1.4000000000000004, 
-0.31199999999999983, -0.16000000000000014, 0.16000000000000014, 1.8399999999999999, 0.6159999999999997, 0.02400000000000002, 0.2320000000000002, -1.2319999999999998, 0.6719999999999997, 0.7119999999999997, 0.29600000000000026, 1.2240000000000002, 0.5920000000000005, -0.2719999999999998, 0.16000000000000014, -0.512, -0.8319999999999999, -0.536, 1.4240000000000004, -1.1840000000000002, -0.08000000000000007, 0.14400000000000013, 1.2320000000000002, 0.26400000000000023, 0.8159999999999998, -0.2639999999999998, 0.8159999999999998, -1.592, -0.6879999999999997, 0.2320000000000002, 0.944, -0.528, 1.12, 0.5200000000000005, -0.19199999999999973, -0.14400000000000013, 0.008000000000000007, -1.6400000000000001, -0.3599999999999999, 0.10400000000000009, -0.23199999999999976, 0.2400000000000002, 0.1120000000000001, 1.1600000000000001, 0.4240000000000004, -0.56, -1.1840000000000002, -1.064, -1.6560000000000001, 0.7359999999999998, 0.6799999999999997, 0.8239999999999998, 0.6080000000000005, 0.03200000000000003, -0.22399999999999975, 0.7759999999999998, -2.528, -0.5920000000000001, -0.23999999999999977, 2.008, 0.41600000000000037, -1.032, -0.06400000000000006, 0.6799999999999997, -0.6959999999999997, 1.3040000000000003, -0.31999999999999984, 1.024, -0.040000000000000036, -1.1360000000000001, 0.48800000000000043, 1.032, 0.7839999999999998, -0.3599999999999999, -0.952, -0.1120000000000001, -0.488, 0.24800000000000022, -0.8879999999999999, -2.016, -1.2719999999999998, -2.168, 1.952, -0.6799999999999997, 1.088, -1.2879999999999998, -0.7679999999999998, -0.7119999999999997, -0.2879999999999998, 0.5600000000000005, 1.4880000000000004, 1.8719999999999999, -1.8479999999999999, -1.472, 0.15200000000000014, -0.6480000000000001, 1.024, -0.33599999999999985, -0.16000000000000014, 0.17600000000000016, 0.7439999999999998, 0.1200000000000001, -0.96, 0.10400000000000009, -0.9039999999999999, -0.2639999999999998, -0.10400000000000009, 0.1120000000000001, -0.8639999999999999, 1.088, 
-0.1679999999999997, -1.7199999999999998, 1.7439999999999998, -0.1280000000000001, 1.5200000000000005, -0.7119999999999997, 2.152, 0.18400000000000016, -0.45599999999999996, -1.0, 0.16000000000000014, -0.23999999999999977, -0.20799999999999974, -0.008000000000000007, 0.9199999999999999, 0.5200000000000005, 2.2800000000000002, 0.4640000000000004, 0.24800000000000022, -0.8479999999999999, -0.24799999999999978, -2.0, 0.40800000000000036, 1.056, -0.2799999999999998, 0.7839999999999998, 0.18400000000000016, -0.7759999999999998, -0.31999999999999984, 0.5360000000000005, -0.7679999999999998, -0.8639999999999999, 0.14400000000000013, 0.05600000000000005, 0.3520000000000003, 0.5280000000000005, 1.4320000000000004, 2.184, 1.992, 0.3360000000000003, 1.8399999999999999, 0.6879999999999997, 1.2880000000000003, -0.8239999999999998, -0.6240000000000001, 1.912, -1.536, 0.37600000000000033, 0.2560000000000002, -0.7119999999999997, 0.9039999999999999, -1.6560000000000001, 0.7119999999999997, 1.912, -0.45599999999999996, 0.7039999999999997, -0.96, 0.8479999999999999, 0.8159999999999998, -0.3999999999999999, 0.3680000000000003, 0.6000000000000005, 0.6559999999999997, 0.7039999999999997, -0.44799999999999995, -0.7039999999999997, -0.19199999999999973, 0.6719999999999997, 0.7359999999999998, 0.37600000000000033, -1.056, 0.2320000000000002, 0.2560000000000002, -0.2959999999999998, -0.5920000000000001, -0.7679999999999998, -1.2239999999999998, -1.008, -0.14400000000000013, -0.96, 0.992, 1.4720000000000004, 0.16000000000000014, 0.26400000000000023, 0.39200000000000035, -0.6480000000000001, 0.15200000000000014, -1.3679999999999999, -0.31999999999999984, -0.5840000000000001, 0.9279999999999999, 1.2880000000000003, -0.8799999999999999, -2.576, -0.24799999999999978, -1.576, 1.112, -0.06400000000000006, -1.1760000000000002, -0.3999999999999999, -1.0, 0.8799999999999999, 0.04800000000000004, -0.21599999999999975, 0.49600000000000044, 1.1680000000000001, -0.2879999999999998, -0.8799999999999999, 
0.6719999999999997, -0.528, -1.4, 0.7439999999999998, -2.216, -0.09600000000000009, 0.5840000000000005, 0.8079999999999998, 0.6399999999999997, -1.536, 0.952, -0.1679999999999997, -0.6080000000000001, 0.7919999999999998, -0.2799999999999998, 1.1440000000000001, -0.504, -0.42399999999999993, -1.2079999999999997, 1.6000000000000005, -1.912, -0.7919999999999998, -0.15200000000000014, -1.624, -1.608, 0.9279999999999999, -0.15200000000000014, -0.24799999999999978, -0.13600000000000012, -1.584, 1.024, -0.35199999999999987, 0.38400000000000034, -1.7119999999999997, 0.8559999999999999, -0.8639999999999999, -0.34399999999999986, 0.5040000000000004, 0.3680000000000003, -3.216, 0.18400000000000016, 0.5920000000000005, -0.05600000000000005, -1.088, 0.5840000000000005, -0.56, 1.888, -0.472, -0.2879999999999998, -1.536, -2.832, -0.1759999999999997, -0.968, -0.8079999999999998, -0.3039999999999998, -0.31199999999999983, 0.04800000000000004, -2.168, 0.016000000000000014, 1.5040000000000004, 0.1120000000000001, -1.3199999999999998, -0.22399999999999975, -0.6719999999999997, -1.2559999999999998, -0.7279999999999998, 0.39200000000000035, -1.056, -0.21599999999999975, -1.024, -0.6160000000000001, 1.2880000000000003, 0.08800000000000008, -1.8159999999999998, 0.15200000000000014, 0.016000000000000014, 0.2240000000000002, -1.376, 0.96, 1.4960000000000004, 0.8879999999999999, 0.6080000000000005, 0.5360000000000005, 0.8079999999999998, -2.376, 0.8079999999999998, 1.2000000000000002, -2.888, -0.5920000000000001, -0.544, 0.6559999999999997, -1.0, -1.512, -0.19999999999999973, -0.3679999999999999, 2.216, 2.5440000000000005, -0.6240000000000001, -0.5920000000000001, -0.472, -0.16000000000000014, 0.7599999999999998, -0.3839999999999999, -0.3679999999999999, -1.2479999999999998, -1.2479999999999998, -0.2639999999999998, -0.42399999999999993, -1.1680000000000001, 1.064, -1.088, -3.2640000000000002, 1.032, -1.8719999999999999, 1.5040000000000004, -0.35199999999999987, 0.1280000000000001, 
-0.20799999999999974, -1.912, 1.088, 0.05600000000000005, 0.13600000000000012, 2.3600000000000003, 0.13600000000000012, 0.28000000000000025, -0.34399999999999986, 0.07200000000000006, -0.24799999999999978, 1.6959999999999997, 0.3440000000000003, -0.3839999999999999, 0.4800000000000004, -0.02400000000000002, 0.040000000000000036, -0.6240000000000001, 0.1200000000000001, -0.33599999999999985, 0.6639999999999997, -0.1679999999999997, 0.5360000000000005, -0.8879999999999999, 0.28800000000000026, 0.8159999999999998, 1.12, -0.8079999999999998, 2.152, -0.23999999999999977, -1.096, 0.8479999999999999, 0.7439999999999998, 0.39200000000000035, -0.15200000000000014, 0.2160000000000002, 0.4640000000000004, -0.9039999999999999, 0.976, -1.1440000000000001, -0.472, -0.536, -1.12, -0.552, 0.02400000000000002, 0.06400000000000006, -1.04, -0.04800000000000004, -1.4, -1.376, -0.8159999999999998, 1.008, 1.3280000000000003, 0.08000000000000007, -0.952, -1.1280000000000001, -1.1360000000000001, 0.30400000000000027, 1.888, -0.2639999999999998, 0.10400000000000009, 1.7199999999999998, -0.20799999999999974, -1.032, -0.7039999999999997, -0.7119999999999997, -0.6480000000000001, -0.5680000000000001, 0.29600000000000026, -0.2639999999999998, -1.2559999999999998, -0.7519999999999998, -0.23999999999999977, 0.7999999999999998, -0.9119999999999999, -0.4159999999999999, -1.416, -0.7359999999999998, 0.9359999999999999, 1.6160000000000005, 0.6559999999999997, 0.9359999999999999, -0.7999999999999998, -0.7279999999999998, 0.09600000000000009, 0.6559999999999997, -0.5760000000000001, -0.6640000000000001, 1.8159999999999998, -0.2559999999999998, 0.6799999999999997, -0.3919999999999999, -1.576, 1.2400000000000002, 0.20800000000000018, -0.6080000000000001, 0.7919999999999998, 0.10400000000000009, -1.048, -0.6160000000000001, -0.016000000000000014, 0.40000000000000036, -0.24799999999999978, -0.040000000000000036, 0.5360000000000005, -0.984, 0.27200000000000024, -0.536, 0.7759999999999998, 
0.2560000000000002, -0.8239999999999998, 3.6879999999999997, -0.984, -1.392, 0.19200000000000017, 0.7679999999999998, -1.104, 0.29600000000000026, -0.22399999999999975, -0.7359999999999998, 2.048, 0.4400000000000004, 0.24800000000000022, 0.5200000000000005, -3.432, -0.33599999999999985, 2.008, -0.18399999999999972, -0.7119999999999997, -0.7199999999999998, 1.2800000000000002, -0.48, -1.3599999999999999, -1.92, 0.20000000000000018, 1.1680000000000001, 0.20000000000000018, -0.008000000000000007, -0.2639999999999998, -0.20799999999999974, -0.504, -0.016000000000000014, -0.3599999999999999, -0.952, -0.02400000000000002, 0.26400000000000023, 0.6639999999999997, -0.3919999999999999, 0.07200000000000006, -0.7039999999999997, 1.4720000000000004, -0.7119999999999997, 0.2240000000000002, 0.24800000000000022, -0.488, -0.6799999999999997, 0.4640000000000004, -1.1680000000000001, 0.15200000000000014, 0.8479999999999999, -0.5680000000000001, 3.224, 0.16000000000000014, -0.7599999999999998, -0.42399999999999993, 2.208, -0.3759999999999999, -0.8079999999999998, -1.7679999999999998, -1.496, 1.5360000000000005, -1.912, 0.08800000000000008, -0.952, -0.984, 0.8239999999999998, -0.016000000000000014, -0.6640000000000001, 0.6719999999999997, 0.9039999999999999, 0.8559999999999999, 1.2880000000000003, -0.6879999999999997, 0.7199999999999998, -1.2399999999999998, -0.2879999999999998, -0.5920000000000001, 2.144, -0.44799999999999995, 0.8239999999999998, -1.016, 2.224, -0.3759999999999999, -0.03200000000000003, -0.2639999999999998, -0.8559999999999999, -1.096, 1.7599999999999998, -0.9199999999999999, 1.04, -0.22399999999999975, -0.6480000000000001, 0.15200000000000014, -0.32799999999999985, -1.968, -1.528, 1.7199999999999998, -1.448, 2.3360000000000003, 0.5120000000000005, -1.7039999999999997, 1.032, 0.7039999999999997, 0.7359999999999998, 0.5600000000000005, -0.8879999999999999, -1.608, -0.6879999999999997, -1.1280000000000001, 1.3520000000000003, 0.20000000000000018, 0.2560000000000002, 
-0.6640000000000001, 0.02400000000000002, 2.7359999999999998, -1.12, -1.376, -0.6400000000000001, 2.16, 0.37600000000000033, -0.504, -0.3599999999999999, -1.904, -0.34399999999999986, -0.992, -1.424, -1.1919999999999997, -0.22399999999999975, 0.9359999999999999, 0.37600000000000033, -0.8719999999999999, -0.13600000000000012, -1.6959999999999997, 0.17600000000000016, -0.6640000000000001, 1.3120000000000003, -0.3919999999999999, 1.5280000000000005, 1.2000000000000002, 0.09600000000000009, -0.09600000000000009, -1.6400000000000001, 0.6239999999999997, 1.1600000000000001, -0.24799999999999978, 1.2640000000000002, 0.5840000000000005, -0.48, -1.072, 1.008, -1.1600000000000001, 2.3920000000000003, 0.29600000000000026, 0.3440000000000003, 0.4320000000000004, 0.1280000000000001, -0.552, 1.7679999999999998, -0.7599999999999998, -0.3599999999999999, -0.31999999999999984, 0.17600000000000016, -0.3839999999999999, -0.32799999999999985, 0.7679999999999998, 0.6000000000000005, -1.064, -0.2719999999999998, 0.39200000000000035, -0.6719999999999997, 0.09600000000000009, 0.6000000000000005, 0.41600000000000037, 0.6239999999999997, -1.8159999999999998, -0.6400000000000001, 1.904, -0.22399999999999975, 0.7039999999999997, -0.4159999999999999, -1.52, -0.31199999999999983, 0.3200000000000003, 1.6799999999999997, 0.6239999999999997, 1.4240000000000004, -0.6400000000000001, -0.44799999999999995, 0.40000000000000036, 0.09600000000000009, -0.6000000000000001, 1.2800000000000002, -0.7199999999999998, -0.6879999999999997, 1.4240000000000004, -0.31999999999999984, 1.6319999999999997, 0.38400000000000034, 0.40000000000000036, -0.08000000000000007, 1.976, 0.6479999999999997, 0.5280000000000005, -1.3439999999999999, -1.096, -0.23199999999999976, 3.192, -0.7759999999999998, 0.24800000000000022, -0.16000000000000014, 0.4400000000000004, -1.952, 0.5600000000000005, -0.44799999999999995, 1.3200000000000003, -0.10400000000000009, 0.7919999999999998, 1.3520000000000003, -1.072, -0.7199999999999998, 
0.6799999999999997, -1.2639999999999998, -1.6, 0.3680000000000003, 0.19200000000000017, 0.952, -1.7279999999999998, -0.8319999999999999, -0.15200000000000014, 0.7679999999999998, 0.5120000000000005, -0.23999999999999977, 0.39200000000000035, 0.7039999999999997, 0.9039999999999999, 2.088, 0.944, 1.5600000000000005, 0.6239999999999997, 0.2160000000000002, -0.8879999999999999, -0.5680000000000001, 2.2560000000000002, -0.1679999999999997, 0.6879999999999997, -1.2319999999999998, 2.048, 1.2720000000000002, 1.008, -0.8319999999999999, -0.6799999999999997, -1.584, 0.08800000000000008, -1.112, -2.0, 0.30400000000000027, 2.08, 1.088, -0.3999999999999999, 0.27200000000000024, 0.5680000000000005, -0.6400000000000001, 0.41600000000000037, -0.21599999999999975, 0.4240000000000004, -0.7359999999999998, -1.6880000000000002, 0.5680000000000005, -0.08800000000000008, -1.544, 1.2560000000000002, 1.8159999999999998, -1.1999999999999997, 0.944, 0.6559999999999997, -0.7359999999999998, 0.7199999999999998, 0.8799999999999999, -0.42399999999999993, -0.22399999999999975, 0.6959999999999997, -0.23999999999999977, -0.8719999999999999, 1.3760000000000003, 1.4640000000000004, 1.3360000000000003, 0.3120000000000003, 0.20000000000000018, -3.248, 0.5760000000000005, 1.912, 0.4480000000000004, 0.05600000000000005, -0.952, 0.3600000000000003, -1.6880000000000002, -0.6719999999999997, 0.14400000000000013, 0.6080000000000005, -0.07200000000000006, 0.9039999999999999, 1.7679999999999998, 0.07200000000000006, 0.2320000000000002, -0.8879999999999999, -0.06400000000000006, -1.112, -0.3919999999999999, -1.936, -0.02400000000000002, -0.18399999999999972, -1.072, 0.3280000000000003, -0.7039999999999997, -0.16000000000000014, -0.2959999999999998, -0.24799999999999978, 2.7359999999999998, 1.2480000000000002, 0.4240000000000004, 0.6559999999999997, -0.992, -0.7359999999999998, -2.2800000000000002, -0.35199999999999987, -0.48, 0.07200000000000006, -0.4159999999999999, -0.10400000000000009, 1.0, 
1.6000000000000005, 0.952, -0.536, -1.52, -0.6240000000000001, -0.32799999999999985, 2.216, 1.5440000000000005, 0.49600000000000044, 0.984, -1.944, -1.1280000000000001, 1.2640000000000002, -1.4, -1.48, -1.8559999999999999, -0.46399999999999997, 0.1120000000000001, 0.3680000000000003, 1.5440000000000005, 0.06400000000000006, 2.192, -1.064, -2.144, 0.9039999999999999, -0.9199999999999999, -0.43199999999999994, -0.3599999999999999, -1.472, 1.7519999999999998, -0.6640000000000001, -1.8639999999999999, -0.984, 1.7119999999999997, -2.072, -0.05600000000000005, -0.08800000000000008, 0.8719999999999999, 0.49600000000000044, 0.976, 0.05600000000000005, 0.02400000000000002, 0.28800000000000026, -0.9039999999999999, -1.616, 0.2240000000000002, 0.8719999999999999, 0.2320000000000002, -0.07200000000000006, 0.5760000000000005, 2.04, 1.4000000000000004, 1.4320000000000004, 1.024, -0.6000000000000001, 0.4800000000000004, 1.2640000000000002, -0.512, -1.1600000000000001, -0.43199999999999994, -1.2559999999999998, 0.5920000000000005, -1.6560000000000001, 0.8719999999999999, 0.5440000000000005, -1.584, -0.952, 1.3360000000000003, -0.1759999999999997, -0.528, -1.08, 0.5920000000000005, -1.456, -1.424, 0.19200000000000017, 1.2800000000000002, 1.3200000000000003, 0.30400000000000027, -0.43999999999999995, -0.496, -0.48, -0.944, -0.02400000000000002, -0.8879999999999999, -1.384, 1.2800000000000002, 1.04, -0.984, 0.8239999999999998, 0.5680000000000005, -0.22399999999999975, -0.2639999999999998, 1.0, -0.5760000000000001]\n", + "[8.9, 7.72, 6.34, 13.56, 9.36, 8.2, 7.08, 7.72, 9.120000000000001, 10.98, 10.9, 5.12, 8.58, 14.16, 16.0, 10.44, 7.46, 9.3, 7.04, 10.700000000000001, 8.4, 8.1, 9.66, 9.56, 10.9, 5.16, 11.700000000000001, 12.38, 9.26, 7.96, 6.8, 8.18, 6.48, 8.34, 10.92, 13.84, 11.200000000000001, 13.36, 11.4, 7.22, 11.56, 9.040000000000001, 17.16, 7.18, 3.3200000000000003, 6.88, 10.0, 9.56, 12.82, 10.92, 11.72, 10.08, 5.38, 7.26, 8.36, 5.16, 12.280000000000001, 12.620000000000001, 
8.32, 16.38, 9.74, 11.52, 14.14, 14.76, 7.4, 7.5600000000000005, 9.700000000000001, 13.66, 14.120000000000001, 13.16, 10.06, 10.28, 10.92, 8.36, 9.0, 12.82, 12.92, 8.92, 12.44, 9.540000000000001, 5.42, 7.72, 5.7, 16.5, 14.66, 12.780000000000001, 12.6, 11.02, 9.96, 12.3, 11.66, 3.8200000000000003, 10.540000000000001, 14.9, 9.72, 11.4, 10.56, 11.46, 6.5600000000000005, 8.26, 14.66, 12.620000000000001, 9.42, 10.8, 12.08, 6.3, 6.44, 12.66, 9.26, 10.56, 5.68, 4.0200000000000005, 13.780000000000001, 12.8, 16.18, 2.82, 17.68, 7.44, 9.96, 6.9, 4.36, 9.82, 8.18, 11.040000000000001, 8.4, 7.76, 3.22, 8.22, 8.4, 8.94, 11.56, 12.8, 8.9, 7.9, 11.36, 11.38, 7.92, 13.98, 6.76, 7.72, 8.66, 9.98, 14.540000000000001, 8.8, 12.4, 6.44, 10.6, 2.7800000000000002, 6.32, 15.64, 10.620000000000001, 8.88, 16.18, 13.44, 9.48, 10.52, 7.74, 8.040000000000001, 12.82, 16.02, 8.32, 12.56, 14.02, 11.200000000000001, 9.36, 6.34, 7.8, 8.98, 7.78, 11.9, 11.0, 8.2, 4.6000000000000005, 13.52, 12.120000000000001, 7.92, 7.78, 7.46, 8.94, 12.96, 9.52, 13.280000000000001, 8.2, 16.7, 11.9, 6.8, 9.96, 4.6000000000000005, 11.76, 8.38, 14.620000000000001, 10.48, 12.18, 11.08, 4.94, 11.96, 11.76, 9.1, 10.6, 12.38, 12.9, 12.22, 6.72, 3.88, 6.48, 9.700000000000001, 8.82, 13.38, 12.86, 12.82, 13.26, 10.84, 6.62, 13.36, 14.200000000000001, 12.34, 8.16, 10.72, 8.34, 5.66, 7.4, 14.4, 8.5, 10.86, 11.96, 9.88, 11.48, 7.9, 6.42, 14.26, 8.5, 7.42, 7.32, 10.94, 5.0200000000000005, 10.6, 10.18, 8.96, 12.36, 14.08, 10.48, 10.98, 5.86, 14.02, 8.32, 9.040000000000001, 8.700000000000001, 10.18, 10.16, 6.2, 14.68, 7.44, 13.4, 9.18, 6.72, 8.1, 3.36, 12.44, 8.24, 6.46, 6.98, 13.06, 9.46, 8.46, 4.62, 8.540000000000001, 7.42, 7.44, 12.0, 12.18, 14.44, 12.88, 4.48, 13.08, 9.76, 7.18, 11.36, 13.66, 4.42, 7.4, 10.24, 14.48, 10.200000000000001, 7.16, 10.5, 12.56, 10.02, 7.16, 12.44, 10.28, 10.3, 15.9, 8.14, 10.56, 4.62, 9.64, 8.22, 12.14, 11.700000000000001, 13.32, 9.34, 6.24, 10.040000000000001, 11.700000000000001, 12.38, 
15.200000000000001, 11.86, 8.22, 10.76, 10.28, 10.44, 6.6000000000000005, 10.96, 12.040000000000001, 8.1, 10.86, 10.5, 7.98, 8.44, 11.02, 10.68, 14.64, 5.76, 10.72, 14.120000000000001, 10.14, 11.1, 9.38, 10.8, 12.9, 6.36, 7.96, 10.82, 8.700000000000001, 10.24, 10.22, 8.72, 10.88, 10.92, 14.24, 10.1, 9.78, 8.2, 5.98, 8.74, 13.36, 8.26, 10.16, 18.82, 7.140000000000001, 7.54, 11.66, 8.46, 8.88, 4.44, 10.26, 12.6, 10.84, 10.3, 13.200000000000001, 9.68, 8.94, 7.54, 9.74, 9.26, 9.16, 9.96, 16.8, 11.46, 10.96, 8.620000000000001, 9.48, 9.18, 8.82, 11.34, 8.84, 5.38, 14.52, 12.620000000000001, 12.72, 11.28, 7.8, 5.66, 9.200000000000001, 11.4, 12.120000000000001, 11.0, 12.18, 9.6, 7.28, 8.72, 11.16, 9.56, 13.4, 6.640000000000001, 10.52, 9.42, 8.08, 10.46, 4.54, 13.5, 6.92, 9.94, 10.6, 12.3, 5.18, 9.9, 11.44, 9.9, 8.66, 12.120000000000001, 12.76, 10.3, 8.6, 8.42, 7.86, 7.72, 15.6, 6.640000000000001, 7.88, 10.58, 11.4, 8.52, 12.32, 8.02, 13.68, 11.84, 7.34, 9.48, 11.64, 7.8, 11.4, 10.32, 7.32, 11.06, 9.96, 6.42, 10.1, 8.620000000000001, 13.86, 12.6, 9.540000000000001, 8.120000000000001, 11.32, 8.96, 9.42, 12.44, 13.36, 13.08, 9.6, 11.02, 12.26, 9.44, 9.86, 6.28, 10.72, 10.84, 9.44, 10.9, 13.24, 10.84, 9.78, 4.92, 10.56, 5.58, 7.68, 12.280000000000001, 15.700000000000001, 9.28, 9.18, 14.98, 10.18, 10.86, 6.9, 13.74, 8.5, 12.040000000000001, 8.36, 7.88, 10.84, 11.52, 12.92, 18.42, 13.780000000000001, 13.24, 13.56, 11.16, 9.78, 9.94, 9.84, 12.42, 8.8, 7.8, 7.76, 10.700000000000001, 8.48, 12.34, 12.32, 5.74, 8.86, 8.9, 11.64, 8.74, 12.06, 7.84, 12.56, 11.02, 12.280000000000001, 9.9, 6.32, 7.86, 13.4, 9.44, 10.78, 8.08, 5.42, 10.06, 10.64, 12.22, 12.84, 8.74, 9.84, 9.4, 8.94, 10.06, 7.34, 7.9, 6.98, 13.620000000000001, 16.44, 10.040000000000001, 6.62, 12.18, 6.640000000000001, 6.82, 5.4, 12.22, 8.540000000000001, 9.22, 10.48, 9.92, 15.540000000000001, 9.24, 12.68, 3.04, 11.78, 8.620000000000001, 9.68, 9.8, 9.68, 10.24, 7.12, 9.18, 10.88, 13.96, 13.44, 6.640000000000001, 6.68, 13.0, 
15.26, 15.6, 8.44, 12.26, 13.34, 9.44, 9.46, 8.78, 9.08, 9.06, 10.08, 9.200000000000001, 7.58, 14.94, 13.72, 11.98, 8.74, 10.76, 12.58, 9.26, 12.700000000000001, 6.38, 14.38, 5.5200000000000005, 7.640000000000001, 6.7, 9.96, 2.3000000000000003, 10.88, 9.9, 10.52, 7.3, 15.8, 11.06, 6.74, 8.42, 6.26, 8.66, 9.6, 9.540000000000001, 15.42, 12.280000000000001, 11.26, 10.72, 7.88, 8.74, 11.3, 11.16, 11.4, 7.82, 9.700000000000001, 8.1, 7.78, 10.88, 7.34, 5.24, 6.2, 4.28, 9.700000000000001, 8.86, 10.28, 15.08, 9.3, 9.52, 8.4, 3.24, 10.68, 9.78, 13.88, 14.1, 13.92, 15.34, 10.14, 10.700000000000001, 10.46, 10.86, 5.2, 12.82, 9.92, 9.1, 7.32, 18.36, 13.0, 5.6000000000000005, 4.5600000000000005, 3.96, 10.94, 8.94, 13.98, 14.120000000000001, 6.84, 6.92, 8.8, 9.200000000000001, 12.94, 7.0200000000000005, 11.4, 12.620000000000001, 14.84, 11.24, 10.8, 10.46, 8.5, 15.48, 5.26, 12.02, 11.38, 11.32, 8.68, 4.5600000000000005, 9.48, 10.74, 9.74, 7.140000000000001, 11.58, 7.98, 8.76, 13.02, 10.5, 7.36, 8.620000000000001, 5.48, 10.06, 11.700000000000001, 15.34, 11.8, 11.120000000000001, 8.92, 6.66, 14.24, 7.12, 11.66, 11.68, 9.88, 7.5600000000000005, 10.8, 5.92, 13.42, 10.68, 12.24, 8.6, 15.56, 10.28, 10.26, 5.62, 8.08, 7.68, 6.88, 13.58, 11.08, 7.92, 11.540000000000001, 5.62, 11.1, 4.82, 11.72, 8.8, 11.78, 14.84, 6.3, 5.2, 7.18, 9.58, 4.0600000000000005, 10.040000000000001, 10.78, 5.94, 10.84, 10.9, 6.86, 7.68, 11.32, 6.6000000000000005, 2.08, 7.66, 7.46, 8.26, 11.22, 6.96, 6.6000000000000005, 5.84, 13.06, 10.44, 9.46, 5.44, 14.22, 10.700000000000001, 10.44, 12.120000000000001, 6.0600000000000005, 9.38, 15.96, 11.1, 12.3, 9.68, 9.34, 10.9, 11.58, 11.48, 11.94, 14.8, 9.66, 10.1, 11.28, 10.26, 9.82, 16.36, 4.68, 10.16, 11.6, 11.0, 8.68, 14.74, 9.48, 12.14, 10.42, 10.28, 11.82, 9.42, 12.540000000000001, 8.98, 5.0600000000000005, 14.3, 15.1, 11.5, 12.9, 9.74, 6.34, 13.18, 6.3, 8.82, 11.6, 8.540000000000001, 4.14, 9.96, 9.24, 4.5, 10.34, 12.620000000000001, 11.56, 12.4, 12.22, 11.32, 5.14, 
15.860000000000001, 10.72, 8.42, 6.76, 8.1, 3.38, 4.32, 6.26, 0.9400000000000001, 13.120000000000001, 13.16, 3.9, 10.9, 12.780000000000001, 12.84, 7.42, 12.4, 10.040000000000001, 13.700000000000001, 12.86, 6.24, 14.1, 4.72, 13.48, 8.82, 6.8, 12.42, 9.98, 5.44, 9.76, 14.84, 14.86, 11.64, 11.700000000000001, 7.7, 7.66, 15.52, 11.98, 4.48, 8.5, 5.14, 11.4, 11.96, 10.06, 11.92, 10.42, 4.62, 9.540000000000001, 13.9, 11.42, 10.700000000000001, 1.16, 8.52, 14.98, 10.14, 6.86, 9.18, 7.74, 8.82, 7.36, 10.66, 9.08, 12.64, 10.08, 5.92, 5.38, 7.22, 10.6, 7.36, 9.28, 11.48, 5.18, 8.96, 9.0, 10.48, 14.06, 12.42, 8.28, 12.16, 4.34, 5.68, 11.68, 8.82, 5.32, 11.82, 7.46, 7.96, 10.120000000000001, 8.96, 10.22, 8.040000000000001, 9.32, 11.32, 11.46, 9.02, 11.98, 12.700000000000001, 14.72, 4.9, 8.06, 8.4, 8.700000000000001, 11.64, 12.58, 10.38, 6.44, 7.22, 14.9, 13.040000000000001, 8.120000000000001, 7.28, 14.02, 11.48, 8.36, 7.72, 10.98, 14.58, 9.1, 14.34, 7.2, 16.46, 10.3, 12.06, 8.82, 11.0, 7.0, 8.52, 13.58, 9.28, 12.76, 2.56, 12.280000000000001, 6.9, 11.38, 11.02, 6.08, 6.24, 6.04, 6.38, 10.0, 14.02, 12.76, 2.64, 3.18, 9.02, 5.64, 11.78, 10.26, 9.14, 8.700000000000001, 12.96, 12.280000000000001, 9.6, 6.54, 9.78, 5.96, 9.66, 11.540000000000001, 10.56, 10.98, 6.98, 10.200000000000001, 8.86, 9.4, 8.3, 9.5, 8.42, 7.24, 7.7, 9.48, 11.68, 13.040000000000001, 9.38, 8.6, 10.32, 8.92, 7.5200000000000005, 6.66, 7.08, 13.88, 9.16, 13.9, 11.26, 7.72, 10.76, 12.08, 7.4, 9.6, 9.040000000000001, 7.78, 16.76, 13.22, 9.92, 16.22, 5.0200000000000005, 9.84, 9.64]\n", + "[ -3.432, -3.076] : \n", + "[ -3.076, -2.720] : \n", + "[ -2.720, -2.364] : \n", + "[ -2.364, -2.008] : #\n", + "[ -2.008, -1.652] : ####\n", + "[ -1.652, -1.296] : ######\n", + "[ -1.296, -0.940] : ###########\n", + "[ -0.940, -0.584] : ################\n", + "[ -0.584, -0.228] : ####################\n", + "[ -0.228, 0.128] : ################\n", + "[ 0.128, 0.484] : ##################\n", + "[ 0.484, 0.840] : ###############\n", + 
"[ 0.840, 1.196] : #########\n", + "[ 1.196, 1.552] : #######\n", + "[ 1.552, 1.908] : ###\n", + "[ 1.908, 2.264] : ###\n", + "[ 2.264, 2.620] : \n", + "[ 2.620, 2.976] : \n", + "[ 2.976, 3.332] : \n", + "[ 3.332, 3.688] : \n", + "g1 mean = -0.03141599999999989\n", + "g1 variance = 1.0479690029439992\n", + "[ 0.940, 1.834] : \n", + "[ 1.834, 2.728] : \n", + "[ 2.728, 3.622] : #\n", + "[ 3.622, 4.516] : ##\n", + "[ 4.516, 5.410] : #####\n", + "[ 5.410, 6.304] : ######\n", + "[ 6.304, 7.198] : ##########\n", + "[ 7.198, 8.092] : ##############\n", + "[ 8.092, 8.986] : ###################\n", + "[ 8.986, 9.880] : ##################\n", + "[ 9.880, 10.774] : ####################\n", + "[ 10.774, 11.668] : ##################\n", + "[ 11.668, 12.562] : ##############\n", + "[ 12.562, 13.456] : ###########\n", + "[ 13.456, 14.350] : #######\n", + "[ 14.350, 15.244] : ####\n", + "[ 15.244, 16.138] : ##\n", + "[ 16.138, 17.032] : #\n", + "[ 17.032, 17.926] : \n", + "[ 17.926, 18.820] : \n", + "g2 mean = 9.907799999999993\n", + "g2 variance = 8.300915960000015\n" ], "name": "stdout" } @@ -1004,38 +1004,54 @@ "metadata": { "id": "A51dSStW_1iF", "colab_type": "code", + "outputId": "8071f6c6-8402-44f2-9df9-b67698eaf0a6", "colab": { "base_uri": "https://localhost:8080/", - "height": 51 - }, - "outputId": "0ddd25e6-821c-4f09-daeb-12cca565bc7a" + "height": 119 + } }, "source": [ "def integrate(func, x_min, x_max, n_points=1000):\n", - " '''\n", - " points = generate_function(func, x_min, x_max, n_points)\n", - " m = mean(points)\n", - " #draw_histogram(points, 10, x_min, x_max, '#', 75)\n", - " dx = (x_max - x_min) / n_points\n", - " integral = m * sum(len(where(points, in_range(x_min + i * dx, x_min + (i + 1) * dx))) for i in range(n_points))\n", - " '''\n", - " i = x_min\n", - " integral = 0\n", - " dx = (x_max - x_min) / n_points\n", - " while i < x_max:\n", - " integral += func(i) * dx\n", - " i += dx\n", - " return integral\n", - "print(integrate(lambda x: -((x - 10) ** 2) + 
100, 0, 20, 1000))\n", - "print(integrate(g2, 7, 13, 1000)/integrate(g2, 0, 20, 1000))" + " # monte carlo integral\n", + " # max f(x) over [x_min, x_max] in steps of (1/1000 or 1/n_points) * the difference\n", + " f_max = max([func(x) for x in [x_min + n * (x_max - x_min) / max(1000, n_points) for n in range(max(1000, n_points))]])\n", + " # generate n_points random x and y values on a grid x = [x_min, x_max] and y = [0, f_max]\n", + " x_rand = [x_min + random.random() * (x_max - x_min) for n in range(n_points)]\n", + " y_rand = [random.random() * f_max for n in range(n_points)]\n", + " # return height * width * (# of points on grid below the function curve)\n", + " # # of pts where passing x and y as tuples 0 < y < func(x)\n", + " return f_max * (x_max - x_min) * len(where(list(zip(x_rand, y_rand)), lambda xy: in_range(0, func(xy[0]))(xy[1]))) / n_points\n", + "\n", + "\n", + "def integrate_properly(func, x_min, x_max, n_points=1000):\n", + " # proper integral\n", + " i = x_min\n", + " integral = 0\n", + " dx = (x_max - x_min) / n_points\n", + " while i < x_max:\n", + " integral += func(i) * dx\n", + " i += dx\n", + " return integral\n", + "\n", + "\n", + "print(\" data: {:4.3f}\".format(integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", + "print(\"theory: {:4.3f}\".format(integrate_properly(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", + "print(\" d/t: {:3.1f}%\".format(100*integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)/integrate_properly(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", + "print(\" data: {:1.3f}\".format(integrate(g2, 7, 13, 10000)/integrate(g2, 0, 20, 10000)))\n", + "print(\"theory: {:1.3f}\".format(integrate_properly(g2, 7, 13, 10000)/integrate_properly(g2, 0, 20, 10000)))\n", + "print(\" d/t: {:3.1f}%\".format(100*(integrate(g2, 7, 13, 10000)/integrate(g2, 0, 20, 10000))/(integrate_properly(g2, 7, 13, 10000)/integrate_properly(g2, 0, 20, 10000))))" ], - "execution_count": 256, + "execution_count": 146, 
"outputs": [ { "output_type": "stream", "text": [ - "1333.3320000000258\n", - "0.6832686585265082\n" + " data: 1330.600\n", + "theory: 1333.333\n", + " d/t: 99.8%\n", + " data: 0.680\n", + "theory: 0.683\n", + " d/t: 99.5%\n" ], "name": "stdout" } From 9c1d32355ab2058f8bf45a28637565278d569b67 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 2 Mar 2020 14:51:29 -0600 Subject: [PATCH 09/24] improved formatting for exercise 9 output --- Labs/Lab-4/Copy_of_Lab_4.ipynb | 354 +++++++++++++++++---------------- 1 file changed, 181 insertions(+), 173 deletions(-) diff --git a/Labs/Lab-4/Copy_of_Lab_4.ipynb b/Labs/Lab-4/Copy_of_Lab_4.ipynb index a8b285d..48b9607 100644 --- a/Labs/Lab-4/Copy_of_Lab_4.ipynb +++ b/Labs/Lab-4/Copy_of_Lab_4.ipynb @@ -21,7 +21,8 @@ }, "colab": { "name": "Copy of Lab-4.ipynb", - "provenance": [] + "provenance": [], + "collapsed_sections": [] } }, "cells": [ @@ -48,7 +49,7 @@ "metadata": { "id": "TmrUVAv1_1ff", "colab_type": "code", - "outputId": "058f7f18-c5ce-4f3b-fae6-b60a75b39d15", + "outputId": "055500e9-f47c-49e2-fcad-e9430e15fecd", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -59,12 +60,12 @@ "x=random.random()\n", "print(\"The Value of x is\", x)" ], - "execution_count": 125, + "execution_count": 1, "outputs": [ { "output_type": "stream", "text": [ - "The Value of x is 0.3386614209151734\n" + "The Value of x is 0.32871666542362765\n" ], "name": "stdout" } @@ -109,7 +110,7 @@ "metadata": { "id": "eiWTH4-H_1f6", "colab_type": "code", - "outputId": "0decf992-1467-425a-9f63-6ea6d5b5e9ff", + "outputId": "37ef9700-6316-4947-9baa-fc05fd121af2", "colab": { "base_uri": "https://localhost:8080/", "height": 102 @@ -125,7 +126,7 @@ " print(\"Data Minimum:\", min(data))\n", " print(\"Data Maximum:\", max(data))" ], - "execution_count": 127, + "execution_count": 3, "outputs": [ { "output_type": "stream", @@ -133,8 +134,8 @@ "Data Type: \n", "Data Length: 1000\n", "Type of Data Contents: \n", - "Data Minimum: 
-9.993645208468072\n", - "Data Maximum: 9.999142279088858\n" + "Data Minimum: -9.975816305169623\n", + "Data Maximum: 9.995291400820292\n" ], "name": "stdout" } @@ -179,7 +180,7 @@ "metadata": { "id": "0Z8u7_Hq_1gK", "colab_type": "code", - "outputId": "f48293be-eb50-4d9c-83ea-b80f755f8a79", + "outputId": "25a2074a-f16a-4229-c2ad-e3827754aad1", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -189,12 +190,12 @@ "# Test your solution here\n", "print(\"Mean of Data:\", mean(data))" ], - "execution_count": 129, + "execution_count": 5, "outputs": [ { "output_type": "stream", "text": [ - "Mean of Data: -0.43159352484900165\n" + "Mean of Data: -0.03451181690297556\n" ], "name": "stdout" } @@ -240,7 +241,7 @@ "metadata": { "id": "IbasE7ma_1gZ", "colab_type": "code", - "outputId": "7fef8507-1e5f-440d-87c4-96c6190b5a1e", + "outputId": "654d0f50-968d-4669-e051-e12d3905741c", "colab": { "base_uri": "https://localhost:8080/", "height": 34 @@ -250,12 +251,12 @@ "# Test your solution here\n", "print(\"Variance of Data:\", variance(data))" ], - "execution_count": 131, + "execution_count": 7, "outputs": [ { "output_type": "stream", "text": [ - "Variance of Data: 33.533919118720135\n" + "Variance of Data: 31.635785180548243\n" ], "name": "stdout" } @@ -332,7 +333,7 @@ "metadata": { "id": "CQg5EFMg_1gn", "colab_type": "code", - "outputId": "e8539894-70b2-4a3c-d686-851a83c64c9c", + "outputId": "11e0626a-665e-4487-c67d-9a915e38acef", "colab": { "base_uri": "https://localhost:8080/", "height": 54 @@ -343,12 +344,12 @@ "h,b=histogram(data,100)\n", "print(len(b),h)" ], - "execution_count": 133, + "execution_count": 9, "outputs": [ { "output_type": "stream", "text": [ - "101 [11, 11, 11, 11, 12, 16, 11, 13, 7, 18, 11, 13, 10, 5, 6, 13, 7, 9, 11, 11, 5, 14, 10, 12, 11, 18, 7, 13, 10, 11, 11, 13, 5, 9, 8, 15, 8, 9, 8, 14, 16, 14, 8, 18, 8, 12, 4, 11, 10, 11, 9, 9, 11, 5, 8, 11, 9, 10, 10, 10, 5, 11, 15, 13, 11, 10, 8, 12, 6, 9, 5, 9, 8, 6, 8, 13, 10, 14, 10, 10, 6, 4, 7, 8, 
10, 13, 9, 4, 10, 7, 9, 11, 4, 10, 11, 8, 16, 12, 6, 10]\n" + "101 [8, 7, 8, 9, 10, 11, 13, 5, 15, 6, 8, 14, 11, 10, 11, 14, 7, 4, 7, 4, 15, 11, 16, 5, 11, 7, 12, 10, 11, 12, 10, 14, 17, 9, 7, 8, 7, 9, 8, 13, 7, 9, 8, 11, 13, 12, 11, 15, 15, 10, 9, 13, 8, 13, 14, 8, 16, 11, 15, 14, 6, 13, 12, 8, 5, 9, 5, 13, 7, 13, 6, 4, 11, 12, 9, 13, 11, 10, 3, 9, 16, 11, 7, 13, 9, 7, 13, 3, 8, 11, 8, 13, 13, 14, 6, 9, 5, 13, 10, 6]\n" ], "name": "stdout" } @@ -411,7 +412,7 @@ "metadata": { "id": "EsQxIs9U_1gy", "colab_type": "code", - "outputId": "f48acce2-3a92-4e7d-b855-7e741433df87", + "outputId": "4ccda8ad-657b-4977-b32a-16e14dca8033", "colab": { "base_uri": "https://localhost:8080/", "height": 357 @@ -421,31 +422,31 @@ "# Test your solution here\n", "h,b=draw_histogram(data,20)" ], - "execution_count": 135, + "execution_count": 11, "outputs": [ { "output_type": "stream", "text": [ - "[ -9.994, -8.994] : #################\n", - "[ -8.994, -7.994] : ####################\n", - "[ -7.994, -6.995] : #############\n", - "[ -6.995, -5.995] : ###############\n", - "[ -5.995, -4.995] : ################\n", - "[ -4.995, -3.996] : ##################\n", - "[ -3.996, -2.996] : ##############\n", - "[ -2.996, -1.997] : ################\n", - "[ -1.997, -0.997] : ###################\n", - "[ -0.997, 0.003] : ##############\n", - "[ 0.003, 1.002] : ############\n", - "[ 1.002, 2.002] : ###############\n", - "[ 2.002, 3.002] : ################\n", - "[ 3.002, 4.001] : #############\n", - "[ 4.001, 5.001] : ###########\n", - "[ 5.001, 6.001] : #################\n", - "[ 6.001, 7.000] : ##########\n", - "[ 7.000, 8.000] : #############\n", - "[ 8.000, 9.000] : #############\n", - "[ 9.000, 9.999] : ################\n" + "[ -9.976, -8.977] : #############\n", + "[ -8.977, -7.979] : ###############\n", + "[ -7.979, -6.980] : ################\n", + "[ -6.980, -5.982] : ###########\n", + "[ -5.982, -4.983] : ##################\n", + "[ -4.983, -3.984] : ################\n", + "[ -3.984, -2.986] : 
#################\n", + "[ -2.986, -1.987] : ##############\n", + "[ -1.987, -0.989] : ###############\n", + "[ -0.989, 0.010] : ###################\n", + "[ 0.010, 1.008] : #################\n", + "[ 1.008, 2.007] : ####################\n", + "[ 2.007, 3.005] : #############\n", + "[ 3.005, 4.004] : ##############\n", + "[ 4.004, 5.003] : #############\n", + "[ 5.003, 6.001] : ##############\n", + "[ 6.001, 7.000] : #################\n", + "[ 7.000, 7.998] : #############\n", + "[ 7.998, 8.997] : ################\n", + "[ 8.997, 9.995] : #############\n" ], "name": "stdout" } @@ -491,7 +492,7 @@ "metadata": { "id": "ZyXbNZK7_1hS", "colab_type": "code", - "outputId": "b088491e-1fd0-461d-e7eb-de2985e7d13f", + "outputId": "3953bc9b-ae61-44b9-c7e9-c05e3548f65d", "colab": { "base_uri": "https://localhost:8080/", "height": 54 @@ -504,12 +505,12 @@ "\n", "print(where(data, myfunc))" ], - "execution_count": 137, + "execution_count": 13, "outputs": [ { "output_type": "stream", "text": [ - "[2, 3, 4, 5, 7, 8, 10, 16, 18, 22, 24, 30, 32, 33, 37, 39, 42, 43, 45, 47, 48, 49, 50, 58, 61, 63, 66, 70, 74, 75, 77, 80, 81, 82, 83, 85, 89, 93, 96, 97, 99, 101, 103, 105, 106, 107, 109, 112, 113, 115, 118, 119, 120, 123, 124, 126, 128, 129, 134, 135, 138, 140, 141, 142, 144, 146, 147, 149, 151, 152, 153, 155, 156, 159, 161, 170, 172, 173, 175, 176, 178, 181, 182, 186, 189, 191, 192, 193, 194, 195, 198, 199, 203, 204, 205, 207, 208, 212, 216, 221, 222, 223, 225, 227, 228, 231, 232, 233, 234, 235, 240, 241, 243, 247, 249, 251, 252, 253, 254, 255, 256, 257, 261, 262, 267, 269, 271, 274, 278, 280, 281, 282, 284, 287, 288, 292, 295, 296, 298, 305, 306, 311, 313, 316, 317, 318, 319, 320, 323, 325, 326, 327, 331, 333, 334, 339, 342, 346, 349, 350, 356, 359, 361, 362, 364, 366, 367, 369, 370, 372, 373, 374, 378, 380, 381, 382, 389, 394, 395, 399, 400, 401, 403, 406, 409, 416, 419, 421, 422, 423, 424, 425, 426, 429, 431, 432, 434, 437, 438, 439, 442, 444, 450, 451, 452, 455, 456, 458, 460, 
461, 468, 470, 472, 473, 478, 479, 480, 488, 489, 491, 493, 494, 496, 497, 498, 502, 503, 504, 507, 511, 512, 515, 516, 517, 519, 521, 523, 525, 526, 527, 531, 532, 533, 534, 540, 541, 543, 546, 547, 549, 551, 553, 556, 559, 561, 566, 567, 568, 569, 570, 574, 576, 578, 581, 584, 585, 587, 588, 589, 594, 598, 601, 603, 604, 605, 606, 607, 611, 613, 614, 617, 622, 625, 626, 627, 629, 634, 635, 636, 638, 639, 640, 642, 645, 646, 648, 652, 654, 655, 656, 658, 659, 661, 671, 674, 680, 682, 683, 686, 687, 688, 692, 695, 696, 699, 701, 702, 703, 705, 706, 709, 711, 717, 718, 722, 726, 727, 728, 730, 731, 732, 735, 738, 739, 742, 747, 749, 750, 752, 753, 755, 759, 761, 762, 764, 769, 770, 773, 774, 778, 782, 783, 784, 785, 790, 801, 802, 804, 808, 810, 813, 814, 815, 820, 822, 826, 827, 828, 835, 839, 841, 844, 847, 850, 854, 855, 857, 860, 861, 862, 863, 864, 866, 867, 868, 871, 876, 880, 881, 887, 889, 891, 892, 893, 897, 898, 900, 902, 903, 909, 911, 912, 915, 919, 920, 926, 930, 932, 933, 936, 938, 941, 943, 944, 946, 948, 951, 952, 956, 957, 958, 959, 961, 962, 964, 967, 968, 975, 976, 978, 979, 982, 983, 988, 990, 993, 997, 999]\n" + "[2, 3, 4, 7, 9, 13, 15, 16, 19, 22, 23, 24, 25, 27, 29, 30, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 43, 48, 50, 53, 59, 61, 66, 69, 73, 74, 78, 82, 85, 87, 89, 93, 94, 96, 97, 98, 101, 102, 105, 106, 111, 114, 115, 116, 117, 120, 122, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 140, 144, 145, 146, 148, 149, 150, 151, 155, 158, 159, 161, 162, 163, 164, 165, 167, 170, 171, 173, 174, 175, 178, 187, 188, 189, 190, 193, 195, 196, 198, 199, 201, 202, 204, 208, 209, 210, 212, 213, 215, 220, 222, 225, 226, 227, 228, 229, 230, 235, 236, 238, 241, 245, 246, 253, 258, 259, 262, 263, 267, 268, 270, 271, 275, 277, 279, 280, 284, 290, 291, 295, 297, 298, 300, 302, 304, 306, 307, 308, 309, 312, 313, 314, 318, 319, 321, 326, 327, 329, 330, 331, 334, 337, 338, 340, 341, 343, 346, 353, 354, 355, 356, 358, 359, 361, 362, 363, 364, 365, 366, 
369, 370, 371, 376, 377, 378, 379, 381, 388, 393, 396, 401, 403, 405, 406, 408, 409, 411, 412, 413, 419, 420, 423, 424, 425, 427, 429, 434, 436, 440, 442, 443, 445, 446, 447, 454, 455, 462, 463, 466, 468, 470, 471, 473, 474, 475, 478, 489, 490, 491, 494, 497, 498, 501, 502, 503, 504, 505, 506, 507, 509, 515, 518, 523, 524, 526, 528, 529, 530, 532, 533, 534, 535, 536, 537, 540, 541, 544, 545, 546, 547, 550, 551, 552, 554, 555, 559, 568, 569, 572, 574, 575, 576, 580, 581, 584, 587, 588, 589, 591, 592, 593, 598, 600, 601, 605, 613, 614, 615, 617, 620, 622, 625, 627, 628, 629, 630, 632, 633, 635, 639, 643, 648, 649, 650, 654, 655, 660, 662, 664, 665, 669, 671, 674, 675, 676, 677, 679, 680, 683, 684, 689, 690, 691, 693, 699, 701, 702, 703, 706, 707, 710, 712, 713, 718, 719, 720, 724, 728, 731, 733, 734, 736, 737, 739, 741, 743, 744, 745, 747, 748, 751, 755, 756, 769, 770, 774, 775, 777, 778, 779, 780, 783, 785, 787, 788, 790, 794, 795, 796, 797, 801, 807, 808, 810, 811, 814, 815, 816, 817, 818, 820, 821, 822, 823, 824, 826, 827, 829, 837, 838, 840, 847, 848, 851, 852, 853, 854, 855, 857, 859, 860, 863, 864, 866, 869, 870, 871, 873, 874, 876, 877, 878, 881, 882, 885, 886, 887, 889, 890, 892, 893, 895, 897, 898, 899, 900, 903, 906, 907, 910, 911, 920, 921, 922, 923, 924, 928, 932, 933, 935, 937, 942, 943, 944, 945, 950, 951, 952, 954, 956, 958, 959, 960, 961, 963, 965, 966, 972, 973, 981, 983, 984, 986, 987, 988, 989, 990, 993, 996, 997, 999]\n" ], "name": "stdout" } @@ -536,7 +537,7 @@ "metadata": { "id": "kkchtWxy_1hb", "colab_type": "code", - "outputId": "bc52f15f-2477-4d2b-f859-50ab6b3304d5", + "outputId": "ff8b5e85-d03d-4652-bca9-63a18f201af4", "colab": { "base_uri": "https://localhost:8080/", "height": 85 @@ -559,14 +560,14 @@ "print(\"Number of Entries passing F1:\", len(where(data,F1)))\n", "print(\"Number of Entries passing F2:\", len(where(data,F2)))" ], - "execution_count": 138, + "execution_count": 14, "outputs": [ { "output_type": "stream", "text": [ "True 
True False False False\n", "False False True True False\n", - "Number of Entries passing F1: 461\n", + "Number of Entries passing F1: 496\n", "Number of Entries passing F2: 0\n" ], "name": "stdout" @@ -623,7 +624,7 @@ "metadata": { "id": "3AhyJZjf_1hj", "colab_type": "code", - "outputId": "d2d12fa2-9ecb-4f72-fae8-955c52fed6bd", + "outputId": "5f6d8f63-6e9a-4cd0-9932-31e0650a90cf", "colab": { "base_uri": "https://localhost:8080/", "height": 153 @@ -648,7 +649,7 @@ "print(\"Number of Entries passing EQ:\", len(where(data,EQ)))\n", "print(\"Number of Entries passing D:\", len(where(data,D)))" ], - "execution_count": 140, + "execution_count": 16, "outputs": [ { "output_type": "stream", @@ -657,8 +658,8 @@ "False True False True False False\n", "Number of Entries passing E: 0\n", "Number of Entries passing O: 0\n", - "Number of Entries passing G: 461\n", - "Number of Entries passing L: 539\n", + "Number of Entries passing G: 496\n", + "Number of Entries passing L: 504\n", "Number of Entries passing EQ: 1\n", "Number of Entries passing D: 1\n" ], @@ -681,7 +682,7 @@ "metadata": { "id": "VlWCyUXL_1hr", "colab_type": "code", - "outputId": "b4fc8f30-8ccd-4c30-8363-5cf121cfc1ee", + "outputId": "8cc4be15-dcd4-4016-8234-2188876aa9e6", "colab": { "base_uri": "https://localhost:8080/", "height": 119 @@ -698,15 +699,15 @@ "print(\"Number of Entries passing D:\", sum(map(lambda x: x % data[50] == 0, data)))\n", "### END SOLUTION" ], - "execution_count": 141, + "execution_count": 17, "outputs": [ { "output_type": "stream", "text": [ "Number of Entries passing E: 0\n", "Number of Entries passing O: 0\n", - "Number of Entries passing G: 461\n", - "Number of Entries passing L: 539\n", + "Number of Entries passing G: 496\n", + "Number of Entries passing L: 504\n", "Number of Entries passing EQ: 1\n", "Number of Entries passing D: 1\n" ], @@ -770,7 +771,7 @@ "metadata": { "id": "IOfuFLBX_1h6", "colab_type": "code", - "outputId": "2a9dd381-1f7f-4ae0-f0f4-13b584dbd89a", + "outputId": 
"a5dc3747-a7ed-4987-c196-1ec4cb29e877", "colab": { "base_uri": "https://localhost:8080/", "height": 1000 @@ -787,50 +788,50 @@ "draw_histogram(generate_function(lambda x: -((x - 5)**2) + 5 ** 2, x_min, x_max, N), n_bins, x_min, x_max, \"#\", 50)\n", "draw_histogram(generate_function(test_func, x_min, x_max, N), n_bins, x_min, x_max, \"#\", 50)" ], - "execution_count": 143, + "execution_count": 19, "outputs": [ { "output_type": "stream", "text": [ - "[ 0.000, 0.500] : ##\n", - "[ 0.500, 1.000] : #############\n", + "[ 0.000, 0.500] : ######\n", + "[ 0.500, 1.000] : ######\n", "[ 1.000, 1.500] : ###################\n", - "[ 1.500, 2.000] : ##################\n", - "[ 2.000, 2.500] : ##########################\n", - "[ 2.500, 3.000] : #################################\n", - "[ 3.000, 3.500] : ########################################\n", - "[ 3.500, 4.000] : #########################################\n", - "[ 4.000, 4.500] : ###########################################\n", - "[ 4.500, 5.000] : ##################################################\n", + "[ 1.500, 2.000] : #########################\n", + "[ 2.000, 2.500] : #########################\n", + "[ 2.500, 3.000] : #########################################\n", + "[ 3.000, 3.500] : #####################################\n", + "[ 3.500, 4.000] : ###########################################\n", + "[ 4.000, 4.500] : #########################################\n", + "[ 4.500, 5.000] : #############################################\n", "[ 5.000, 5.500] : ###########################################\n", - "[ 5.500, 6.000] : #########################################\n", - "[ 6.000, 6.500] : ########################################\n", - "[ 6.500, 7.000] : ###################################\n", - "[ 7.000, 7.500] : #######################################\n", - "[ 7.500, 8.000] : #########################\n", - "[ 8.000, 8.500] : ########################\n", - "[ 8.500, 9.000] : ##############\n", - "[ 9.000, 9.500] : 
#################\n", - "[ 9.500, 10.000] : ###\n", - "[ 0.000, 0.500] : ######\n", - "[ 0.500, 1.000] : ###########\n", - "[ 1.000, 1.500] : ############\n", - "[ 1.500, 2.000] : #########\n", - "[ 2.000, 2.500] : #############\n", - "[ 2.500, 3.000] : ################\n", - "[ 3.000, 3.500] : #######################\n", - "[ 3.500, 4.000] : ##########################\n", - "[ 4.000, 4.500] : ##########################\n", + "[ 5.500, 6.000] : ##################################################\n", + "[ 6.000, 6.500] : ################################################\n", + "[ 6.500, 7.000] : ########################################\n", + "[ 7.000, 7.500] : ####################################\n", + "[ 7.500, 8.000] : ##############################\n", + "[ 8.000, 8.500] : #########################\n", + "[ 8.500, 9.000] : ###################\n", + "[ 9.000, 9.500] : #######\n", + "[ 9.500, 10.000] : #####\n", + "[ 0.000, 0.500] : #####\n", + "[ 0.500, 1.000] : ########\n", + "[ 1.000, 1.500] : ##############\n", + "[ 1.500, 2.000] : ############\n", + "[ 2.000, 2.500] : ##############\n", + "[ 2.500, 3.000] : ###################\n", + "[ 3.000, 3.500] : ####################\n", + "[ 3.500, 4.000] : #########################\n", + "[ 4.000, 4.500] : ##################\n", "[ 4.500, 5.000] : ################################\n", - "[ 5.000, 5.500] : ###########################\n", - "[ 5.500, 6.000] : ################################\n", - "[ 6.000, 6.500] : ########################################\n", - "[ 6.500, 7.000] : ###########################################\n", - "[ 7.000, 7.500] : ##################################\n", - "[ 7.500, 8.000] : ###################################\n", - "[ 8.000, 8.500] : ############################################\n", - "[ 8.500, 9.000] : ###############################################\n", - "[ 9.000, 9.500] : ##############################################\n", + "[ 5.000, 5.500] : #############################\n", + "[ 5.500, 
6.000] : ###########################\n", + "[ 6.000, 6.500] : ###################################\n", + "[ 6.500, 7.000] : ##################################\n", + "[ 7.000, 7.500] : #############################################\n", + "[ 7.500, 8.000] : #######################################\n", + "[ 8.000, 8.500] : #################################################\n", + "[ 8.500, 9.000] : ############################################\n", + "[ 9.000, 9.500] : ###############################################\n", "[ 9.500, 10.000] : ##################################################\n" ], "name": "stdout" @@ -839,26 +840,26 @@ "output_type": "execute_result", "data": { "text/plain": [ - "([12,\n", - " 19,\n", - " 21,\n", - " 17,\n", - " 23,\n", - " 29,\n", - " 40,\n", - " 45,\n", - " 45,\n", - " 56,\n", - " 48,\n", - " 56,\n", - " 70,\n", - " 74,\n", - " 59,\n", + "([10,\n", + " 14,\n", + " 26,\n", + " 22,\n", + " 25,\n", + " 34,\n", + " 35,\n", + " 44,\n", + " 33,\n", + " 57,\n", + " 52,\n", + " 47,\n", " 61,\n", + " 60,\n", + " 79,\n", + " 69,\n", + " 86,\n", " 77,\n", " 82,\n", - " 80,\n", - " 86],\n", + " 87],\n", " [0,\n", " 0.5,\n", " 1.0,\n", @@ -885,7 +886,7 @@ "metadata": { "tags": [] }, - "execution_count": 143 + "execution_count": 19 } ] }, @@ -904,7 +905,7 @@ "metadata": { "id": "wnZFkATK_1h_", "colab_type": "code", - "outputId": "befea955-38c3-416e-d872-97c4511b2a6a", + "outputId": "ca30831f-6686-410f-b844-8d58e42c02ff", "colab": { "base_uri": "https://localhost:8080/", "height": 819 @@ -933,57 +934,57 @@ "print(\"g2 mean = \" + str(mean(g2_data)))\n", "print(\"g2 variance = \" + str(variance(g2_data)))" ], - "execution_count": 144, + "execution_count": 20, "outputs": [ { "output_type": "stream", "text": [ - "[-0.6879999999999997, 1.3200000000000003, -1.424, -1.0, -0.7199999999999998, -0.3839999999999999, 0.8479999999999999, -1.1680000000000001, -0.43199999999999994, 0.41600000000000037, -0.3919999999999999, 0.13600000000000012, 0.48800000000000043, 
0.28000000000000025, 0.16800000000000015, 2.4000000000000004, -0.472, -0.5920000000000001, -0.3679999999999999, 0.3440000000000003, 0.3360000000000003, 1.4960000000000004, -1.56, 1.096, -0.43199999999999994, -0.008000000000000007, -0.496, 1.1600000000000001, -1.592, 0.016000000000000014, 0.8959999999999999, 0.8639999999999999, 0.26400000000000023, 0.2240000000000002, -0.6959999999999997, -0.008000000000000007, -0.7919999999999998, 1.4160000000000004, -1.528, 0.41600000000000037, 0.6319999999999997, -2.7119999999999997, -0.6320000000000001, 2.6480000000000006, 1.0, -1.072, -0.7199999999999998, -0.43999999999999995, -0.6959999999999997, 0.3520000000000003, -1.3359999999999999, -0.6959999999999997, 0.8559999999999999, -2.112, 0.40800000000000036, -1.96, 1.12, 0.5200000000000005, 1.96, -0.08800000000000008, 0.4720000000000004, 0.15200000000000014, 1.1440000000000001, -0.31199999999999983, 1.2400000000000002, -0.552, -0.2639999999999998, -0.21599999999999975, -0.7759999999999998, -0.45599999999999996, -0.2879999999999998, -1.048, 1.032, 0.09600000000000009, 1.1920000000000002, 0.3520000000000003, -0.4079999999999999, -0.6320000000000001, -0.3039999999999998, -1.1440000000000001, 1.3920000000000003, -0.10400000000000009, -1.472, 0.5360000000000005, 0.4240000000000004, -0.3599999999999999, 0.7839999999999998, 0.6959999999999997, 1.5920000000000005, -0.05600000000000005, -0.48, 0.5440000000000005, -0.2559999999999998, 0.30400000000000027, 1.4240000000000004, -0.968, -0.7999999999999998, 2.128, 0.5680000000000005, -2.416, 0.3200000000000003, 0.08000000000000007, 0.5760000000000005, 0.30400000000000027, -0.3759999999999999, -1.616, -1.2719999999999998, -1.1999999999999997, -0.7279999999999998, 1.3920000000000003, 0.14400000000000013, 0.28800000000000026, -0.3679999999999999, 0.5200000000000005, -0.7599999999999998, -0.7279999999999998, -1.968, -0.7439999999999998, -1.944, 0.17600000000000016, -0.18399999999999972, 0.2240000000000002, -0.976, -0.3679999999999999, 
0.03200000000000003, 0.26400000000000023, -0.19199999999999973, 1.7519999999999998, -1.1680000000000001, 0.5600000000000005, 1.4000000000000004, -0.31199999999999983, -0.16000000000000014, 0.16000000000000014, 1.8399999999999999, 0.6159999999999997, 0.02400000000000002, 0.2320000000000002, -1.2319999999999998, 0.6719999999999997, 0.7119999999999997, 0.29600000000000026, 1.2240000000000002, 0.5920000000000005, -0.2719999999999998, 0.16000000000000014, -0.512, -0.8319999999999999, -0.536, 1.4240000000000004, -1.1840000000000002, -0.08000000000000007, 0.14400000000000013, 1.2320000000000002, 0.26400000000000023, 0.8159999999999998, -0.2639999999999998, 0.8159999999999998, -1.592, -0.6879999999999997, 0.2320000000000002, 0.944, -0.528, 1.12, 0.5200000000000005, -0.19199999999999973, -0.14400000000000013, 0.008000000000000007, -1.6400000000000001, -0.3599999999999999, 0.10400000000000009, -0.23199999999999976, 0.2400000000000002, 0.1120000000000001, 1.1600000000000001, 0.4240000000000004, -0.56, -1.1840000000000002, -1.064, -1.6560000000000001, 0.7359999999999998, 0.6799999999999997, 0.8239999999999998, 0.6080000000000005, 0.03200000000000003, -0.22399999999999975, 0.7759999999999998, -2.528, -0.5920000000000001, -0.23999999999999977, 2.008, 0.41600000000000037, -1.032, -0.06400000000000006, 0.6799999999999997, -0.6959999999999997, 1.3040000000000003, -0.31999999999999984, 1.024, -0.040000000000000036, -1.1360000000000001, 0.48800000000000043, 1.032, 0.7839999999999998, -0.3599999999999999, -0.952, -0.1120000000000001, -0.488, 0.24800000000000022, -0.8879999999999999, -2.016, -1.2719999999999998, -2.168, 1.952, -0.6799999999999997, 1.088, -1.2879999999999998, -0.7679999999999998, -0.7119999999999997, -0.2879999999999998, 0.5600000000000005, 1.4880000000000004, 1.8719999999999999, -1.8479999999999999, -1.472, 0.15200000000000014, -0.6480000000000001, 1.024, -0.33599999999999985, -0.16000000000000014, 0.17600000000000016, 0.7439999999999998, 0.1200000000000001, -0.96, 
0.10400000000000009, -0.9039999999999999, -0.2639999999999998, -0.10400000000000009, 0.1120000000000001, -0.8639999999999999, 1.088, -0.1679999999999997, -1.7199999999999998, 1.7439999999999998, -0.1280000000000001, 1.5200000000000005, -0.7119999999999997, 2.152, 0.18400000000000016, -0.45599999999999996, -1.0, 0.16000000000000014, -0.23999999999999977, -0.20799999999999974, -0.008000000000000007, 0.9199999999999999, 0.5200000000000005, 2.2800000000000002, 0.4640000000000004, 0.24800000000000022, -0.8479999999999999, -0.24799999999999978, -2.0, 0.40800000000000036, 1.056, -0.2799999999999998, 0.7839999999999998, 0.18400000000000016, -0.7759999999999998, -0.31999999999999984, 0.5360000000000005, -0.7679999999999998, -0.8639999999999999, 0.14400000000000013, 0.05600000000000005, 0.3520000000000003, 0.5280000000000005, 1.4320000000000004, 2.184, 1.992, 0.3360000000000003, 1.8399999999999999, 0.6879999999999997, 1.2880000000000003, -0.8239999999999998, -0.6240000000000001, 1.912, -1.536, 0.37600000000000033, 0.2560000000000002, -0.7119999999999997, 0.9039999999999999, -1.6560000000000001, 0.7119999999999997, 1.912, -0.45599999999999996, 0.7039999999999997, -0.96, 0.8479999999999999, 0.8159999999999998, -0.3999999999999999, 0.3680000000000003, 0.6000000000000005, 0.6559999999999997, 0.7039999999999997, -0.44799999999999995, -0.7039999999999997, -0.19199999999999973, 0.6719999999999997, 0.7359999999999998, 0.37600000000000033, -1.056, 0.2320000000000002, 0.2560000000000002, -0.2959999999999998, -0.5920000000000001, -0.7679999999999998, -1.2239999999999998, -1.008, -0.14400000000000013, -0.96, 0.992, 1.4720000000000004, 0.16000000000000014, 0.26400000000000023, 0.39200000000000035, -0.6480000000000001, 0.15200000000000014, -1.3679999999999999, -0.31999999999999984, -0.5840000000000001, 0.9279999999999999, 1.2880000000000003, -0.8799999999999999, -2.576, -0.24799999999999978, -1.576, 1.112, -0.06400000000000006, -1.1760000000000002, -0.3999999999999999, -1.0, 
0.8799999999999999, 0.04800000000000004, -0.21599999999999975, 0.49600000000000044, 1.1680000000000001, -0.2879999999999998, -0.8799999999999999, 0.6719999999999997, -0.528, -1.4, 0.7439999999999998, -2.216, -0.09600000000000009, 0.5840000000000005, 0.8079999999999998, 0.6399999999999997, -1.536, 0.952, -0.1679999999999997, -0.6080000000000001, 0.7919999999999998, -0.2799999999999998, 1.1440000000000001, -0.504, -0.42399999999999993, -1.2079999999999997, 1.6000000000000005, -1.912, -0.7919999999999998, -0.15200000000000014, -1.624, -1.608, 0.9279999999999999, -0.15200000000000014, -0.24799999999999978, -0.13600000000000012, -1.584, 1.024, -0.35199999999999987, 0.38400000000000034, -1.7119999999999997, 0.8559999999999999, -0.8639999999999999, -0.34399999999999986, 0.5040000000000004, 0.3680000000000003, -3.216, 0.18400000000000016, 0.5920000000000005, -0.05600000000000005, -1.088, 0.5840000000000005, -0.56, 1.888, -0.472, -0.2879999999999998, -1.536, -2.832, -0.1759999999999997, -0.968, -0.8079999999999998, -0.3039999999999998, -0.31199999999999983, 0.04800000000000004, -2.168, 0.016000000000000014, 1.5040000000000004, 0.1120000000000001, -1.3199999999999998, -0.22399999999999975, -0.6719999999999997, -1.2559999999999998, -0.7279999999999998, 0.39200000000000035, -1.056, -0.21599999999999975, -1.024, -0.6160000000000001, 1.2880000000000003, 0.08800000000000008, -1.8159999999999998, 0.15200000000000014, 0.016000000000000014, 0.2240000000000002, -1.376, 0.96, 1.4960000000000004, 0.8879999999999999, 0.6080000000000005, 0.5360000000000005, 0.8079999999999998, -2.376, 0.8079999999999998, 1.2000000000000002, -2.888, -0.5920000000000001, -0.544, 0.6559999999999997, -1.0, -1.512, -0.19999999999999973, -0.3679999999999999, 2.216, 2.5440000000000005, -0.6240000000000001, -0.5920000000000001, -0.472, -0.16000000000000014, 0.7599999999999998, -0.3839999999999999, -0.3679999999999999, -1.2479999999999998, -1.2479999999999998, -0.2639999999999998, -0.42399999999999993, 
-1.1680000000000001, 1.064, -1.088, -3.2640000000000002, 1.032, -1.8719999999999999, 1.5040000000000004, -0.35199999999999987, 0.1280000000000001, -0.20799999999999974, -1.912, 1.088, 0.05600000000000005, 0.13600000000000012, 2.3600000000000003, 0.13600000000000012, 0.28000000000000025, -0.34399999999999986, 0.07200000000000006, -0.24799999999999978, 1.6959999999999997, 0.3440000000000003, -0.3839999999999999, 0.4800000000000004, -0.02400000000000002, 0.040000000000000036, -0.6240000000000001, 0.1200000000000001, -0.33599999999999985, 0.6639999999999997, -0.1679999999999997, 0.5360000000000005, -0.8879999999999999, 0.28800000000000026, 0.8159999999999998, 1.12, -0.8079999999999998, 2.152, -0.23999999999999977, -1.096, 0.8479999999999999, 0.7439999999999998, 0.39200000000000035, -0.15200000000000014, 0.2160000000000002, 0.4640000000000004, -0.9039999999999999, 0.976, -1.1440000000000001, -0.472, -0.536, -1.12, -0.552, 0.02400000000000002, 0.06400000000000006, -1.04, -0.04800000000000004, -1.4, -1.376, -0.8159999999999998, 1.008, 1.3280000000000003, 0.08000000000000007, -0.952, -1.1280000000000001, -1.1360000000000001, 0.30400000000000027, 1.888, -0.2639999999999998, 0.10400000000000009, 1.7199999999999998, -0.20799999999999974, -1.032, -0.7039999999999997, -0.7119999999999997, -0.6480000000000001, -0.5680000000000001, 0.29600000000000026, -0.2639999999999998, -1.2559999999999998, -0.7519999999999998, -0.23999999999999977, 0.7999999999999998, -0.9119999999999999, -0.4159999999999999, -1.416, -0.7359999999999998, 0.9359999999999999, 1.6160000000000005, 0.6559999999999997, 0.9359999999999999, -0.7999999999999998, -0.7279999999999998, 0.09600000000000009, 0.6559999999999997, -0.5760000000000001, -0.6640000000000001, 1.8159999999999998, -0.2559999999999998, 0.6799999999999997, -0.3919999999999999, -1.576, 1.2400000000000002, 0.20800000000000018, -0.6080000000000001, 0.7919999999999998, 0.10400000000000009, -1.048, -0.6160000000000001, -0.016000000000000014, 
0.40000000000000036, -0.24799999999999978, -0.040000000000000036, 0.5360000000000005, -0.984, 0.27200000000000024, -0.536, 0.7759999999999998, 0.2560000000000002, -0.8239999999999998, 3.6879999999999997, -0.984, -1.392, 0.19200000000000017, 0.7679999999999998, -1.104, 0.29600000000000026, -0.22399999999999975, -0.7359999999999998, 2.048, 0.4400000000000004, 0.24800000000000022, 0.5200000000000005, -3.432, -0.33599999999999985, 2.008, -0.18399999999999972, -0.7119999999999997, -0.7199999999999998, 1.2800000000000002, -0.48, -1.3599999999999999, -1.92, 0.20000000000000018, 1.1680000000000001, 0.20000000000000018, -0.008000000000000007, -0.2639999999999998, -0.20799999999999974, -0.504, -0.016000000000000014, -0.3599999999999999, -0.952, -0.02400000000000002, 0.26400000000000023, 0.6639999999999997, -0.3919999999999999, 0.07200000000000006, -0.7039999999999997, 1.4720000000000004, -0.7119999999999997, 0.2240000000000002, 0.24800000000000022, -0.488, -0.6799999999999997, 0.4640000000000004, -1.1680000000000001, 0.15200000000000014, 0.8479999999999999, -0.5680000000000001, 3.224, 0.16000000000000014, -0.7599999999999998, -0.42399999999999993, 2.208, -0.3759999999999999, -0.8079999999999998, -1.7679999999999998, -1.496, 1.5360000000000005, -1.912, 0.08800000000000008, -0.952, -0.984, 0.8239999999999998, -0.016000000000000014, -0.6640000000000001, 0.6719999999999997, 0.9039999999999999, 0.8559999999999999, 1.2880000000000003, -0.6879999999999997, 0.7199999999999998, -1.2399999999999998, -0.2879999999999998, -0.5920000000000001, 2.144, -0.44799999999999995, 0.8239999999999998, -1.016, 2.224, -0.3759999999999999, -0.03200000000000003, -0.2639999999999998, -0.8559999999999999, -1.096, 1.7599999999999998, -0.9199999999999999, 1.04, -0.22399999999999975, -0.6480000000000001, 0.15200000000000014, -0.32799999999999985, -1.968, -1.528, 1.7199999999999998, -1.448, 2.3360000000000003, 0.5120000000000005, -1.7039999999999997, 1.032, 0.7039999999999997, 0.7359999999999998, 
0.5600000000000005, -0.8879999999999999, -1.608, -0.6879999999999997, -1.1280000000000001, 1.3520000000000003, 0.20000000000000018, 0.2560000000000002, -0.6640000000000001, 0.02400000000000002, 2.7359999999999998, -1.12, -1.376, -0.6400000000000001, 2.16, 0.37600000000000033, -0.504, -0.3599999999999999, -1.904, -0.34399999999999986, -0.992, -1.424, -1.1919999999999997, -0.22399999999999975, 0.9359999999999999, 0.37600000000000033, -0.8719999999999999, -0.13600000000000012, -1.6959999999999997, 0.17600000000000016, -0.6640000000000001, 1.3120000000000003, -0.3919999999999999, 1.5280000000000005, 1.2000000000000002, 0.09600000000000009, -0.09600000000000009, -1.6400000000000001, 0.6239999999999997, 1.1600000000000001, -0.24799999999999978, 1.2640000000000002, 0.5840000000000005, -0.48, -1.072, 1.008, -1.1600000000000001, 2.3920000000000003, 0.29600000000000026, 0.3440000000000003, 0.4320000000000004, 0.1280000000000001, -0.552, 1.7679999999999998, -0.7599999999999998, -0.3599999999999999, -0.31999999999999984, 0.17600000000000016, -0.3839999999999999, -0.32799999999999985, 0.7679999999999998, 0.6000000000000005, -1.064, -0.2719999999999998, 0.39200000000000035, -0.6719999999999997, 0.09600000000000009, 0.6000000000000005, 0.41600000000000037, 0.6239999999999997, -1.8159999999999998, -0.6400000000000001, 1.904, -0.22399999999999975, 0.7039999999999997, -0.4159999999999999, -1.52, -0.31199999999999983, 0.3200000000000003, 1.6799999999999997, 0.6239999999999997, 1.4240000000000004, -0.6400000000000001, -0.44799999999999995, 0.40000000000000036, 0.09600000000000009, -0.6000000000000001, 1.2800000000000002, -0.7199999999999998, -0.6879999999999997, 1.4240000000000004, -0.31999999999999984, 1.6319999999999997, 0.38400000000000034, 0.40000000000000036, -0.08000000000000007, 1.976, 0.6479999999999997, 0.5280000000000005, -1.3439999999999999, -1.096, -0.23199999999999976, 3.192, -0.7759999999999998, 0.24800000000000022, -0.16000000000000014, 0.4400000000000004, -1.952, 
0.5600000000000005, -0.44799999999999995, 1.3200000000000003, -0.10400000000000009, 0.7919999999999998, 1.3520000000000003, -1.072, -0.7199999999999998, 0.6799999999999997, -1.2639999999999998, -1.6, 0.3680000000000003, 0.19200000000000017, 0.952, -1.7279999999999998, -0.8319999999999999, -0.15200000000000014, 0.7679999999999998, 0.5120000000000005, -0.23999999999999977, 0.39200000000000035, 0.7039999999999997, 0.9039999999999999, 2.088, 0.944, 1.5600000000000005, 0.6239999999999997, 0.2160000000000002, -0.8879999999999999, -0.5680000000000001, 2.2560000000000002, -0.1679999999999997, 0.6879999999999997, -1.2319999999999998, 2.048, 1.2720000000000002, 1.008, -0.8319999999999999, -0.6799999999999997, -1.584, 0.08800000000000008, -1.112, -2.0, 0.30400000000000027, 2.08, 1.088, -0.3999999999999999, 0.27200000000000024, 0.5680000000000005, -0.6400000000000001, 0.41600000000000037, -0.21599999999999975, 0.4240000000000004, -0.7359999999999998, -1.6880000000000002, 0.5680000000000005, -0.08800000000000008, -1.544, 1.2560000000000002, 1.8159999999999998, -1.1999999999999997, 0.944, 0.6559999999999997, -0.7359999999999998, 0.7199999999999998, 0.8799999999999999, -0.42399999999999993, -0.22399999999999975, 0.6959999999999997, -0.23999999999999977, -0.8719999999999999, 1.3760000000000003, 1.4640000000000004, 1.3360000000000003, 0.3120000000000003, 0.20000000000000018, -3.248, 0.5760000000000005, 1.912, 0.4480000000000004, 0.05600000000000005, -0.952, 0.3600000000000003, -1.6880000000000002, -0.6719999999999997, 0.14400000000000013, 0.6080000000000005, -0.07200000000000006, 0.9039999999999999, 1.7679999999999998, 0.07200000000000006, 0.2320000000000002, -0.8879999999999999, -0.06400000000000006, -1.112, -0.3919999999999999, -1.936, -0.02400000000000002, -0.18399999999999972, -1.072, 0.3280000000000003, -0.7039999999999997, -0.16000000000000014, -0.2959999999999998, -0.24799999999999978, 2.7359999999999998, 1.2480000000000002, 0.4240000000000004, 0.6559999999999997, -0.992, 
-0.7359999999999998, -2.2800000000000002, -0.35199999999999987, -0.48, 0.07200000000000006, -0.4159999999999999, -0.10400000000000009, 1.0, 1.6000000000000005, 0.952, -0.536, -1.52, -0.6240000000000001, -0.32799999999999985, 2.216, 1.5440000000000005, 0.49600000000000044, 0.984, -1.944, -1.1280000000000001, 1.2640000000000002, -1.4, -1.48, -1.8559999999999999, -0.46399999999999997, 0.1120000000000001, 0.3680000000000003, 1.5440000000000005, 0.06400000000000006, 2.192, -1.064, -2.144, 0.9039999999999999, -0.9199999999999999, -0.43199999999999994, -0.3599999999999999, -1.472, 1.7519999999999998, -0.6640000000000001, -1.8639999999999999, -0.984, 1.7119999999999997, -2.072, -0.05600000000000005, -0.08800000000000008, 0.8719999999999999, 0.49600000000000044, 0.976, 0.05600000000000005, 0.02400000000000002, 0.28800000000000026, -0.9039999999999999, -1.616, 0.2240000000000002, 0.8719999999999999, 0.2320000000000002, -0.07200000000000006, 0.5760000000000005, 2.04, 1.4000000000000004, 1.4320000000000004, 1.024, -0.6000000000000001, 0.4800000000000004, 1.2640000000000002, -0.512, -1.1600000000000001, -0.43199999999999994, -1.2559999999999998, 0.5920000000000005, -1.6560000000000001, 0.8719999999999999, 0.5440000000000005, -1.584, -0.952, 1.3360000000000003, -0.1759999999999997, -0.528, -1.08, 0.5920000000000005, -1.456, -1.424, 0.19200000000000017, 1.2800000000000002, 1.3200000000000003, 0.30400000000000027, -0.43999999999999995, -0.496, -0.48, -0.944, -0.02400000000000002, -0.8879999999999999, -1.384, 1.2800000000000002, 1.04, -0.984, 0.8239999999999998, 0.5680000000000005, -0.22399999999999975, -0.2639999999999998, 1.0, -0.5760000000000001]\n", - "[8.9, 7.72, 6.34, 13.56, 9.36, 8.2, 7.08, 7.72, 9.120000000000001, 10.98, 10.9, 5.12, 8.58, 14.16, 16.0, 10.44, 7.46, 9.3, 7.04, 10.700000000000001, 8.4, 8.1, 9.66, 9.56, 10.9, 5.16, 11.700000000000001, 12.38, 9.26, 7.96, 6.8, 8.18, 6.48, 8.34, 10.92, 13.84, 11.200000000000001, 13.36, 11.4, 7.22, 11.56, 9.040000000000001, 17.16, 
7.18, 3.3200000000000003, 6.88, 10.0, 9.56, 12.82, 10.92, 11.72, 10.08, 5.38, 7.26, 8.36, 5.16, 12.280000000000001, 12.620000000000001, 8.32, 16.38, 9.74, 11.52, 14.14, 14.76, 7.4, 7.5600000000000005, 9.700000000000001, 13.66, 14.120000000000001, 13.16, 10.06, 10.28, 10.92, 8.36, 9.0, 12.82, 12.92, 8.92, 12.44, 9.540000000000001, 5.42, 7.72, 5.7, 16.5, 14.66, 12.780000000000001, 12.6, 11.02, 9.96, 12.3, 11.66, 3.8200000000000003, 10.540000000000001, 14.9, 9.72, 11.4, 10.56, 11.46, 6.5600000000000005, 8.26, 14.66, 12.620000000000001, 9.42, 10.8, 12.08, 6.3, 6.44, 12.66, 9.26, 10.56, 5.68, 4.0200000000000005, 13.780000000000001, 12.8, 16.18, 2.82, 17.68, 7.44, 9.96, 6.9, 4.36, 9.82, 8.18, 11.040000000000001, 8.4, 7.76, 3.22, 8.22, 8.4, 8.94, 11.56, 12.8, 8.9, 7.9, 11.36, 11.38, 7.92, 13.98, 6.76, 7.72, 8.66, 9.98, 14.540000000000001, 8.8, 12.4, 6.44, 10.6, 2.7800000000000002, 6.32, 15.64, 10.620000000000001, 8.88, 16.18, 13.44, 9.48, 10.52, 7.74, 8.040000000000001, 12.82, 16.02, 8.32, 12.56, 14.02, 11.200000000000001, 9.36, 6.34, 7.8, 8.98, 7.78, 11.9, 11.0, 8.2, 4.6000000000000005, 13.52, 12.120000000000001, 7.92, 7.78, 7.46, 8.94, 12.96, 9.52, 13.280000000000001, 8.2, 16.7, 11.9, 6.8, 9.96, 4.6000000000000005, 11.76, 8.38, 14.620000000000001, 10.48, 12.18, 11.08, 4.94, 11.96, 11.76, 9.1, 10.6, 12.38, 12.9, 12.22, 6.72, 3.88, 6.48, 9.700000000000001, 8.82, 13.38, 12.86, 12.82, 13.26, 10.84, 6.62, 13.36, 14.200000000000001, 12.34, 8.16, 10.72, 8.34, 5.66, 7.4, 14.4, 8.5, 10.86, 11.96, 9.88, 11.48, 7.9, 6.42, 14.26, 8.5, 7.42, 7.32, 10.94, 5.0200000000000005, 10.6, 10.18, 8.96, 12.36, 14.08, 10.48, 10.98, 5.86, 14.02, 8.32, 9.040000000000001, 8.700000000000001, 10.18, 10.16, 6.2, 14.68, 7.44, 13.4, 9.18, 6.72, 8.1, 3.36, 12.44, 8.24, 6.46, 6.98, 13.06, 9.46, 8.46, 4.62, 8.540000000000001, 7.42, 7.44, 12.0, 12.18, 14.44, 12.88, 4.48, 13.08, 9.76, 7.18, 11.36, 13.66, 4.42, 7.4, 10.24, 14.48, 10.200000000000001, 7.16, 10.5, 12.56, 10.02, 7.16, 12.44, 10.28, 10.3, 15.9, 
8.14, 10.56, 4.62, 9.64, 8.22, 12.14, 11.700000000000001, 13.32, 9.34, 6.24, 10.040000000000001, 11.700000000000001, 12.38, 15.200000000000001, 11.86, 8.22, 10.76, 10.28, 10.44, 6.6000000000000005, 10.96, 12.040000000000001, 8.1, 10.86, 10.5, 7.98, 8.44, 11.02, 10.68, 14.64, 5.76, 10.72, 14.120000000000001, 10.14, 11.1, 9.38, 10.8, 12.9, 6.36, 7.96, 10.82, 8.700000000000001, 10.24, 10.22, 8.72, 10.88, 10.92, 14.24, 10.1, 9.78, 8.2, 5.98, 8.74, 13.36, 8.26, 10.16, 18.82, 7.140000000000001, 7.54, 11.66, 8.46, 8.88, 4.44, 10.26, 12.6, 10.84, 10.3, 13.200000000000001, 9.68, 8.94, 7.54, 9.74, 9.26, 9.16, 9.96, 16.8, 11.46, 10.96, 8.620000000000001, 9.48, 9.18, 8.82, 11.34, 8.84, 5.38, 14.52, 12.620000000000001, 12.72, 11.28, 7.8, 5.66, 9.200000000000001, 11.4, 12.120000000000001, 11.0, 12.18, 9.6, 7.28, 8.72, 11.16, 9.56, 13.4, 6.640000000000001, 10.52, 9.42, 8.08, 10.46, 4.54, 13.5, 6.92, 9.94, 10.6, 12.3, 5.18, 9.9, 11.44, 9.9, 8.66, 12.120000000000001, 12.76, 10.3, 8.6, 8.42, 7.86, 7.72, 15.6, 6.640000000000001, 7.88, 10.58, 11.4, 8.52, 12.32, 8.02, 13.68, 11.84, 7.34, 9.48, 11.64, 7.8, 11.4, 10.32, 7.32, 11.06, 9.96, 6.42, 10.1, 8.620000000000001, 13.86, 12.6, 9.540000000000001, 8.120000000000001, 11.32, 8.96, 9.42, 12.44, 13.36, 13.08, 9.6, 11.02, 12.26, 9.44, 9.86, 6.28, 10.72, 10.84, 9.44, 10.9, 13.24, 10.84, 9.78, 4.92, 10.56, 5.58, 7.68, 12.280000000000001, 15.700000000000001, 9.28, 9.18, 14.98, 10.18, 10.86, 6.9, 13.74, 8.5, 12.040000000000001, 8.36, 7.88, 10.84, 11.52, 12.92, 18.42, 13.780000000000001, 13.24, 13.56, 11.16, 9.78, 9.94, 9.84, 12.42, 8.8, 7.8, 7.76, 10.700000000000001, 8.48, 12.34, 12.32, 5.74, 8.86, 8.9, 11.64, 8.74, 12.06, 7.84, 12.56, 11.02, 12.280000000000001, 9.9, 6.32, 7.86, 13.4, 9.44, 10.78, 8.08, 5.42, 10.06, 10.64, 12.22, 12.84, 8.74, 9.84, 9.4, 8.94, 10.06, 7.34, 7.9, 6.98, 13.620000000000001, 16.44, 10.040000000000001, 6.62, 12.18, 6.640000000000001, 6.82, 5.4, 12.22, 8.540000000000001, 9.22, 10.48, 9.92, 15.540000000000001, 9.24, 
12.68, 3.04, 11.78, 8.620000000000001, 9.68, 9.8, 9.68, 10.24, 7.12, 9.18, 10.88, 13.96, 13.44, 6.640000000000001, 6.68, 13.0, 15.26, 15.6, 8.44, 12.26, 13.34, 9.44, 9.46, 8.78, 9.08, 9.06, 10.08, 9.200000000000001, 7.58, 14.94, 13.72, 11.98, 8.74, 10.76, 12.58, 9.26, 12.700000000000001, 6.38, 14.38, 5.5200000000000005, 7.640000000000001, 6.7, 9.96, 2.3000000000000003, 10.88, 9.9, 10.52, 7.3, 15.8, 11.06, 6.74, 8.42, 6.26, 8.66, 9.6, 9.540000000000001, 15.42, 12.280000000000001, 11.26, 10.72, 7.88, 8.74, 11.3, 11.16, 11.4, 7.82, 9.700000000000001, 8.1, 7.78, 10.88, 7.34, 5.24, 6.2, 4.28, 9.700000000000001, 8.86, 10.28, 15.08, 9.3, 9.52, 8.4, 3.24, 10.68, 9.78, 13.88, 14.1, 13.92, 15.34, 10.14, 10.700000000000001, 10.46, 10.86, 5.2, 12.82, 9.92, 9.1, 7.32, 18.36, 13.0, 5.6000000000000005, 4.5600000000000005, 3.96, 10.94, 8.94, 13.98, 14.120000000000001, 6.84, 6.92, 8.8, 9.200000000000001, 12.94, 7.0200000000000005, 11.4, 12.620000000000001, 14.84, 11.24, 10.8, 10.46, 8.5, 15.48, 5.26, 12.02, 11.38, 11.32, 8.68, 4.5600000000000005, 9.48, 10.74, 9.74, 7.140000000000001, 11.58, 7.98, 8.76, 13.02, 10.5, 7.36, 8.620000000000001, 5.48, 10.06, 11.700000000000001, 15.34, 11.8, 11.120000000000001, 8.92, 6.66, 14.24, 7.12, 11.66, 11.68, 9.88, 7.5600000000000005, 10.8, 5.92, 13.42, 10.68, 12.24, 8.6, 15.56, 10.28, 10.26, 5.62, 8.08, 7.68, 6.88, 13.58, 11.08, 7.92, 11.540000000000001, 5.62, 11.1, 4.82, 11.72, 8.8, 11.78, 14.84, 6.3, 5.2, 7.18, 9.58, 4.0600000000000005, 10.040000000000001, 10.78, 5.94, 10.84, 10.9, 6.86, 7.68, 11.32, 6.6000000000000005, 2.08, 7.66, 7.46, 8.26, 11.22, 6.96, 6.6000000000000005, 5.84, 13.06, 10.44, 9.46, 5.44, 14.22, 10.700000000000001, 10.44, 12.120000000000001, 6.0600000000000005, 9.38, 15.96, 11.1, 12.3, 9.68, 9.34, 10.9, 11.58, 11.48, 11.94, 14.8, 9.66, 10.1, 11.28, 10.26, 9.82, 16.36, 4.68, 10.16, 11.6, 11.0, 8.68, 14.74, 9.48, 12.14, 10.42, 10.28, 11.82, 9.42, 12.540000000000001, 8.98, 5.0600000000000005, 14.3, 15.1, 11.5, 12.9, 9.74, 6.34, 
13.18, 6.3, 8.82, 11.6, 8.540000000000001, 4.14, 9.96, 9.24, 4.5, 10.34, 12.620000000000001, 11.56, 12.4, 12.22, 11.32, 5.14, 15.860000000000001, 10.72, 8.42, 6.76, 8.1, 3.38, 4.32, 6.26, 0.9400000000000001, 13.120000000000001, 13.16, 3.9, 10.9, 12.780000000000001, 12.84, 7.42, 12.4, 10.040000000000001, 13.700000000000001, 12.86, 6.24, 14.1, 4.72, 13.48, 8.82, 6.8, 12.42, 9.98, 5.44, 9.76, 14.84, 14.86, 11.64, 11.700000000000001, 7.7, 7.66, 15.52, 11.98, 4.48, 8.5, 5.14, 11.4, 11.96, 10.06, 11.92, 10.42, 4.62, 9.540000000000001, 13.9, 11.42, 10.700000000000001, 1.16, 8.52, 14.98, 10.14, 6.86, 9.18, 7.74, 8.82, 7.36, 10.66, 9.08, 12.64, 10.08, 5.92, 5.38, 7.22, 10.6, 7.36, 9.28, 11.48, 5.18, 8.96, 9.0, 10.48, 14.06, 12.42, 8.28, 12.16, 4.34, 5.68, 11.68, 8.82, 5.32, 11.82, 7.46, 7.96, 10.120000000000001, 8.96, 10.22, 8.040000000000001, 9.32, 11.32, 11.46, 9.02, 11.98, 12.700000000000001, 14.72, 4.9, 8.06, 8.4, 8.700000000000001, 11.64, 12.58, 10.38, 6.44, 7.22, 14.9, 13.040000000000001, 8.120000000000001, 7.28, 14.02, 11.48, 8.36, 7.72, 10.98, 14.58, 9.1, 14.34, 7.2, 16.46, 10.3, 12.06, 8.82, 11.0, 7.0, 8.52, 13.58, 9.28, 12.76, 2.56, 12.280000000000001, 6.9, 11.38, 11.02, 6.08, 6.24, 6.04, 6.38, 10.0, 14.02, 12.76, 2.64, 3.18, 9.02, 5.64, 11.78, 10.26, 9.14, 8.700000000000001, 12.96, 12.280000000000001, 9.6, 6.54, 9.78, 5.96, 9.66, 11.540000000000001, 10.56, 10.98, 6.98, 10.200000000000001, 8.86, 9.4, 8.3, 9.5, 8.42, 7.24, 7.7, 9.48, 11.68, 13.040000000000001, 9.38, 8.6, 10.32, 8.92, 7.5200000000000005, 6.66, 7.08, 13.88, 9.16, 13.9, 11.26, 7.72, 10.76, 12.08, 7.4, 9.6, 9.040000000000001, 7.78, 16.76, 13.22, 9.92, 16.22, 5.0200000000000005, 9.84, 9.64]\n", - "[ -3.432, -3.076] : \n", - "[ -3.076, -2.720] : \n", - "[ -2.720, -2.364] : \n", - "[ -2.364, -2.008] : #\n", - "[ -2.008, -1.652] : ####\n", - "[ -1.652, -1.296] : ######\n", - "[ -1.296, -0.940] : ###########\n", - "[ -0.940, -0.584] : ################\n", - "[ -0.584, -0.228] : ####################\n", - "[ 
-0.228, 0.128] : ################\n", - "[ 0.128, 0.484] : ##################\n", - "[ 0.484, 0.840] : ###############\n", - "[ 0.840, 1.196] : #########\n", - "[ 1.196, 1.552] : #######\n", - "[ 1.552, 1.908] : ###\n", - "[ 1.908, 2.264] : ###\n", - "[ 2.264, 2.620] : \n", - "[ 2.620, 2.976] : \n", - "[ 2.976, 3.332] : \n", - "[ 3.332, 3.688] : \n", - "g1 mean = -0.03141599999999989\n", - "g1 variance = 1.0479690029439992\n", - "[ 0.940, 1.834] : \n", - "[ 1.834, 2.728] : \n", - "[ 2.728, 3.622] : #\n", - "[ 3.622, 4.516] : ##\n", - "[ 4.516, 5.410] : #####\n", - "[ 5.410, 6.304] : ######\n", - "[ 6.304, 7.198] : ##########\n", - "[ 7.198, 8.092] : ##############\n", - "[ 8.092, 8.986] : ###################\n", - "[ 8.986, 9.880] : ##################\n", - "[ 9.880, 10.774] : ####################\n", - "[ 10.774, 11.668] : ##################\n", - "[ 11.668, 12.562] : ##############\n", - "[ 12.562, 13.456] : ###########\n", - "[ 13.456, 14.350] : #######\n", - "[ 14.350, 15.244] : ####\n", - "[ 15.244, 16.138] : ##\n", - "[ 16.138, 17.032] : #\n", - "[ 17.032, 17.926] : \n", - "[ 17.926, 18.820] : \n", - "g2 mean = 9.907799999999993\n", - "g2 variance = 8.300915960000015\n" + "[0.15200000000000014, 0.7519999999999998, 0.3360000000000003, -1.08, -1.104, -0.05600000000000005, -0.08000000000000007, -0.7839999999999998, -0.992, -0.32799999999999985, 0.3120000000000003, -0.21599999999999975, -0.8959999999999999, -0.552, 0.2240000000000002, 0.16800000000000015, 0.3600000000000003, -1.096, 0.04800000000000004, 1.12, 0.6479999999999997, -1.112, -0.3759999999999999, 0.5520000000000005, -0.45599999999999996, 0.984, 0.16000000000000014, -0.6640000000000001, 1.3680000000000003, -0.9039999999999999, -1.3599999999999999, 0.08000000000000007, -0.496, 0.8959999999999999, 1.096, -1.2719999999999998, 0.15200000000000014, -0.7439999999999998, -0.14400000000000013, 0.5120000000000005, 0.4400000000000004, 0.1200000000000001, -0.44799999999999995, 0.944, 0.3360000000000003, 0.968, 
-0.3039999999999998, -1.1840000000000002, -1.6959999999999997, -0.552, 0.07200000000000006, 0.6559999999999997, -1.024, 0.4240000000000004, -0.7599999999999998, -0.8959999999999999, -1.52, -0.6719999999999997, -0.9039999999999999, -1.496, 1.5120000000000005, 0.40800000000000036, -0.48, -0.04800000000000004, -0.8239999999999998, -0.7039999999999997, -0.8799999999999999, 1.3360000000000003, 0.09600000000000009, -0.9359999999999999, -1.1840000000000002, 0.08800000000000008, -1.3199999999999998, 0.7999999999999998, 0.2240000000000002, 0.16800000000000015, -0.3599999999999999, -0.8959999999999999, 0.17600000000000016, -0.33599999999999985, -0.528, -1.992, 1.2160000000000002, 1.2160000000000002, 1.3600000000000003, 1.7519999999999998, -0.472, -1.6720000000000002, -0.944, -0.528, 0.6879999999999997, 0.0, -0.3679999999999999, -0.7039999999999997, -1.6320000000000001, 1.1360000000000001, 0.7039999999999997, 2.4960000000000004, 1.2640000000000002, -0.8559999999999999, 0.2400000000000002, -0.8639999999999999, 0.3200000000000003, 0.8399999999999999, 0.20800000000000018, -0.2879999999999998, 1.3200000000000003, 1.6000000000000005, 0.5360000000000005, -0.976, -2.152, -1.064, -0.13600000000000012, 0.992, -0.536, -0.10400000000000009, -0.1120000000000001, -0.56, -0.7839999999999998, 0.7999999999999998, -0.2719999999999998, -1.2319999999999998, -0.13600000000000012, 2.032, -1.2559999999999998, -1.1999999999999997, -0.6959999999999997, 0.8399999999999999, 0.1280000000000001, 1.8559999999999999, 0.944, 0.7119999999999997, -1.376, -0.4159999999999999, -0.4079999999999999, 0.6239999999999997, 1.7359999999999998, 0.3440000000000003, -1.6480000000000001, 0.040000000000000036, 0.4400000000000004, 0.6959999999999997, -0.7599999999999998, 0.5520000000000005, -0.496, -0.6959999999999997, 0.3200000000000003, 0.952, 0.15200000000000014, 0.08800000000000008, 0.976, 0.19200000000000017, 0.38400000000000034, 0.7919999999999998, 0.9039999999999999, 1.6799999999999997, 0.3360000000000003, 
1.3520000000000003, 0.03200000000000003, -1.464, -0.23999999999999977, -0.15200000000000014, 0.26400000000000023, -0.6959999999999997, 0.0, 0.4640000000000004, -0.944, 0.4560000000000004, 0.4640000000000004, 0.8879999999999999, -2.3120000000000003, -0.8319999999999999, 0.5200000000000005, -0.06400000000000006, 1.3840000000000003, -1.072, 2.056, -0.8639999999999999, -0.3839999999999999, 0.6799999999999997, -1.12, -0.7279999999999998, -0.22399999999999975, -2.048, 0.2320000000000002, -0.8159999999999998, 0.6319999999999997, -0.19999999999999973, 1.1840000000000002, -1.2639999999999998, 0.02400000000000002, -1.4, 0.03200000000000003, -1.576, -0.3599999999999999, 0.9119999999999999, -1.7119999999999997, -0.6799999999999997, -0.5760000000000001, -1.7519999999999998, -0.008000000000000007, -0.8079999999999998, -1.456, -0.5920000000000001, 0.9359999999999999, 0.8799999999999999, -0.08000000000000007, -0.09600000000000009, -1.2559999999999998, 1.6879999999999997, -1.112, -2.48, -0.7359999999999998, 0.5600000000000005, 0.30400000000000027, -0.5680000000000001, 0.6000000000000005, -0.6080000000000001, -1.6, 0.06400000000000006, 1.024, -1.1520000000000001, 2.024, 0.944, 1.8159999999999998, 0.06400000000000006, -0.8079999999999998, -0.008000000000000007, 0.4640000000000004, -0.008000000000000007, 0.3600000000000003, -0.6080000000000001, -0.976, 0.6639999999999997, -1.44, 1.8079999999999998, 0.20800000000000018, -0.6560000000000001, -0.6799999999999997, 0.8639999999999999, -0.8319999999999999, 0.16000000000000014, 0.6639999999999997, -0.008000000000000007, 0.4240000000000004, -0.952, 1.2000000000000002, 1.2640000000000002, -0.3919999999999999, -0.45599999999999996, -0.96, 2.0, 0.14400000000000013, 1.3520000000000003, -0.552, 0.5760000000000005, 0.0, 0.8719999999999999, -0.8079999999999998, -0.03200000000000003, -1.528, -0.05600000000000005, 0.008000000000000007, 0.4560000000000004, 1.04, 0.3520000000000003, -0.5680000000000001, -0.31999999999999984, 0.8079999999999998, 
-0.6240000000000001, 0.5120000000000005, -1.616, -0.46399999999999997, -0.1200000000000001, 0.7439999999999998, 0.19200000000000017, -0.8239999999999998, 0.008000000000000007, 1.976, 1.096, 0.8719999999999999, 1.024, 2.3200000000000003, 0.4800000000000004, 0.5200000000000005, -1.384, 0.3280000000000003, 0.8879999999999999, 0.6000000000000005, -0.03200000000000003, 0.8319999999999999, 0.7999999999999998, 0.5840000000000005, 1.064, -0.8959999999999999, -1.2159999999999997, -0.8639999999999999, -0.016000000000000014, 0.6399999999999997, 2.4640000000000004, -0.6320000000000001, -1.096, -1.2159999999999997, 0.24800000000000022, -0.7679999999999998, -0.2959999999999998, 0.15200000000000014, 2.152, -0.536, -0.8319999999999999, 0.4400000000000004, 0.16800000000000015, -2.7119999999999997, 1.032, -0.06400000000000006, -0.6160000000000001, -1.3279999999999998, -0.7599999999999998, 1.8159999999999998, -0.528, -0.6320000000000001, -0.7839999999999998, 0.5520000000000005, 0.016000000000000014, -0.6000000000000001, 0.992, 2.16, -0.22399999999999975, 1.2000000000000002, -0.6240000000000001, -0.13600000000000012, -1.6320000000000001, -0.7119999999999997, 1.056, 1.3360000000000003, -0.3919999999999999, 0.5840000000000005, 0.5680000000000005, -0.6000000000000001, 0.5200000000000005, -0.3679999999999999, 0.49600000000000044, -1.448, -0.23199999999999976, 1.3920000000000003, 0.2320000000000002, -0.46399999999999997, 0.3360000000000003, 0.26400000000000023, 0.5040000000000004, -0.984, 1.08, -0.976, -0.8079999999999998, -1.3119999999999998, 0.38400000000000034, -0.3919999999999999, 0.7279999999999998, 1.08, 0.8559999999999999, -0.19999999999999973, 1.032, 1.3280000000000003, -1.3439999999999999, 0.4640000000000004, -1.4, 1.5280000000000005, 0.2320000000000002, 0.7919999999999998, 0.08800000000000008, -1.016, -0.2559999999999998, 1.024, 0.15200000000000014, 0.3280000000000003, 0.7439999999999998, -0.8319999999999999, 0.7119999999999997, -0.7839999999999998, -0.22399999999999975, 1.984, 
0.27200000000000024, -2.384, 0.6719999999999997, -0.7759999999999998, -0.1759999999999997, -1.7519999999999998, -1.1280000000000001, 0.96, 0.7679999999999998, 0.40800000000000036, 0.7199999999999998, 0.3600000000000003, -0.6320000000000001, -1.112, 0.3120000000000003, -0.1759999999999997, -1.032, -0.6640000000000001, 0.29600000000000026, -0.496, -0.7999999999999998, -1.448, 1.992, -0.7439999999999998, 0.5040000000000004, 1.4320000000000004, 0.38400000000000034, -0.4079999999999999, 1.6399999999999997, -0.8879999999999999, -1.8239999999999998, -0.7279999999999998, -0.7759999999999998, 1.3200000000000003, -1.048, -0.14400000000000013, 2.872, -0.9199999999999999, -2.528, 1.4480000000000004, -1.1999999999999997, -0.3759999999999999, 2.088, 0.13600000000000012, 0.6479999999999997, 0.008000000000000007, 1.2000000000000002, 0.08000000000000007, 0.28800000000000026, 0.8159999999999998, 0.24800000000000022, -0.04800000000000004, 1.4800000000000004, 0.8479999999999999, 1.08, -1.7519999999999998, -2.0, 0.03200000000000003, 0.30400000000000027, 0.02400000000000002, -2.248, 0.7039999999999997, 0.016000000000000014, 1.5520000000000005, 0.5360000000000005, 0.26400000000000023, 0.2240000000000002, 0.26400000000000023, -0.7039999999999997, -1.432, 0.6239999999999997, -0.6240000000000001, -1.968, -1.072, -0.15200000000000014, 0.2240000000000002, 0.16000000000000014, 1.064, -1.8479999999999999, 0.14400000000000013, 0.6959999999999997, 2.04, -0.02400000000000002, -0.1679999999999997, 0.6879999999999997, -0.040000000000000036, 0.7679999999999998, 2.856, -0.31999999999999984, -0.1280000000000001, 0.6959999999999997, 0.20000000000000018, -0.944, 0.8079999999999998, -1.2719999999999998, -1.984, -1.1360000000000001, -0.42399999999999993, 0.7999999999999998, -0.08800000000000008, -0.3839999999999999, 0.38400000000000034, -0.42399999999999993, 0.8799999999999999, 2.6080000000000005, -2.2720000000000002, 1.7999999999999998, 0.5680000000000005, -0.14400000000000013, 0.8719999999999999, 
0.2560000000000002, 0.3280000000000003, 0.5840000000000005, 1.6799999999999997, -0.9359999999999999, -0.504, 0.5280000000000005, 0.08000000000000007, 1.7679999999999998, 0.5360000000000005, 0.5680000000000005, -0.1200000000000001, 0.20800000000000018, -1.416, -0.6719999999999997, -1.1760000000000002, -3.016, 2.6240000000000006, -0.6959999999999997, -0.56, -0.2719999999999998, 0.04800000000000004, -0.03200000000000003, -1.008, 2.088, 1.96, -1.1760000000000002, 0.28000000000000025, 0.040000000000000036, -0.3919999999999999, -0.1679999999999997, -0.6799999999999997, -0.8639999999999999, -1.56, 0.8799999999999999, 0.5040000000000004, 1.6959999999999997, -0.10400000000000009, -2.448, 0.07200000000000006, -0.9119999999999999, 0.016000000000000014, -1.408, -0.8559999999999999, -1.3439999999999999, 0.5440000000000005, 1.2640000000000002, -0.2799999999999998, -0.1679999999999997, -0.040000000000000036, 1.5920000000000005, -0.23199999999999976, 0.24800000000000022, 0.7279999999999998, 0.4320000000000004, 0.28000000000000025, -0.9279999999999999, 0.976, -1.1360000000000001, -0.8319999999999999, 0.6879999999999997, -0.04800000000000004, 0.48800000000000043, -0.33599999999999985, -0.22399999999999975, -1.888, -0.20799999999999974, -0.6160000000000001, 0.4320000000000004, 0.6080000000000005, -1.1360000000000001, -0.52, -0.008000000000000007, 1.2400000000000002, -0.7759999999999998, -0.1200000000000001, 0.17600000000000016, -0.18399999999999972, 2.4880000000000004, -0.7999999999999998, -1.6, -0.5760000000000001, -0.02400000000000002, -0.8399999999999999, 0.5120000000000005, 0.8079999999999998, 0.6399999999999997, -0.19999999999999973, 1.2560000000000002, 0.4320000000000004, -0.44799999999999995, 0.9359999999999999, 0.976, 2.7439999999999998, 0.3680000000000003, -1.432, -0.7199999999999998, 0.3200000000000003, 1.088, -0.1679999999999997, 0.28000000000000025, -0.3679999999999999, -0.040000000000000036, -0.34399999999999986, 1.6000000000000005, 0.6159999999999997, 
-1.2159999999999997, 0.09600000000000009, -2.2800000000000002, -1.0, -2.456, -1.2719999999999998, -1.08, -0.6000000000000001, -0.552, -0.23199999999999976, 0.5040000000000004, -0.8559999999999999, 0.03200000000000003, 0.040000000000000036, -0.1200000000000001, -0.06400000000000006, 0.02400000000000002, 1.6639999999999997, -2.56, -2.3040000000000003, -0.6959999999999997, 0.2400000000000002, 0.1280000000000001, 0.8639999999999999, 0.20000000000000018, -1.024, 0.1200000000000001, 2.4720000000000004, -0.2799999999999998, -0.15200000000000014, 0.04800000000000004, -1.6640000000000001, -1.3199999999999998, 0.16800000000000015, 0.06400000000000006, 1.0, 1.8479999999999999, -0.1759999999999997, -1.408, 0.5920000000000005, -1.072, 1.8719999999999999, -0.544, -0.18399999999999972, -1.8079999999999998, -1.392, 1.2160000000000002, 0.5920000000000005, -1.376, 1.072, -0.008000000000000007, 2.848, 2.096, -1.2719999999999998, -1.3199999999999998, -2.336, -1.1919999999999997, -0.7919999999999998, -0.544, 1.024, 0.03200000000000003, 0.5200000000000005, 0.3360000000000003, 0.2240000000000002, -0.536, 0.8399999999999999, 0.6639999999999997, -0.7519999999999998, -0.528, 1.2800000000000002, -0.05600000000000005, -0.1759999999999997, -0.7199999999999998, 1.3040000000000003, -0.33599999999999985, -1.096, 1.3280000000000003, 0.9119999999999999, -0.976, -0.5840000000000001, -1.1760000000000002, -0.544, -0.2879999999999998, -1.064, 0.09600000000000009, -0.7999999999999998, -0.3999999999999999, -1.2479999999999998, 1.2320000000000002, -0.1679999999999997, -0.488, 1.064, -0.9279999999999999, -0.31199999999999983, 0.3280000000000003, -0.8959999999999999, -0.34399999999999986, -0.5760000000000001, -0.8239999999999998, -0.7519999999999998, 0.8159999999999998, -0.4159999999999999, -0.10400000000000009, -0.6879999999999997, 0.7759999999999998, 0.27200000000000024, 0.5760000000000005, 1.8079999999999998, -2.096, 0.6239999999999997, 1.6319999999999997, 1.6240000000000006, -0.14400000000000013, 2.208, 
0.16000000000000014, -1.016, 2.12, -1.456, -0.96, -0.008000000000000007, -1.6720000000000002, -1.0, 1.936, -1.2639999999999998, 0.06400000000000006, 0.5360000000000005, 0.8079999999999998, 1.064, -0.19999999999999973, 0.7999999999999998, -2.064, -1.056, 0.6479999999999997, -0.19199999999999973, 0.6159999999999997, -2.184, 0.29600000000000026, 0.49600000000000044, -0.6560000000000001, 0.8879999999999999, 0.8319999999999999, 0.6479999999999997, 1.1360000000000001, -1.7439999999999998, -1.592, 0.15200000000000014, 0.06400000000000006, 0.29600000000000026, -1.3439999999999999, -0.2799999999999998, -0.6959999999999997, -0.6320000000000001, -0.1679999999999997, -0.9039999999999999, -0.016000000000000014, -1.1760000000000002, -0.21599999999999975, -2.2640000000000002, -1.576, 2.128, -1.048, -0.5840000000000001, 0.1200000000000001, 0.38400000000000034, -0.6400000000000001, 1.1360000000000001, 1.1600000000000001, 1.072, 0.3440000000000003, 0.952, 1.08, 1.8399999999999999, 0.3120000000000003, -0.46399999999999997, -0.9039999999999999, -0.2719999999999998, -0.1759999999999997, -0.16000000000000014, 1.6000000000000005, 0.984, -2.088, 0.4800000000000004, -0.552, -1.7999999999999998, 0.28800000000000026, 1.3840000000000003, 0.5680000000000005, -1.6800000000000002, 1.5440000000000005, 0.7519999999999998, 0.1200000000000001, -0.488, 0.6080000000000005, 0.7199999999999998, 0.040000000000000036, -0.4159999999999999, -1.96, 0.16000000000000014, 1.5760000000000005, 0.14400000000000013, 0.6479999999999997, 0.0, 1.048, -0.496, 0.38400000000000034, 1.4640000000000004, -0.6480000000000001, 1.1760000000000002, 1.2320000000000002, 1.6559999999999997, -1.112, -2.392, -0.7599999999999998, -1.7919999999999998, -0.08000000000000007, -1.1999999999999997, 0.30400000000000027, -1.12, -0.5920000000000001, 0.49600000000000044, -0.7119999999999997, -0.2799999999999998, -0.22399999999999975, -0.9119999999999999, -0.3759999999999999, -1.3119999999999998, 0.17600000000000016, -0.08800000000000008, 
0.1280000000000001, 1.7839999999999998, -0.2799999999999998, -0.2879999999999998, 0.7759999999999998, -0.7199999999999998, -1.7839999999999998, 0.6239999999999997, 0.05600000000000005, -0.21599999999999975, -1.04, -0.1120000000000001, -1.3519999999999999, -0.32799999999999985, 0.8559999999999999, -0.23199999999999976, -0.3839999999999999, 1.1920000000000002, -0.7199999999999998, 0.38400000000000034, -0.496, -0.23999999999999977, -0.7759999999999998, -0.1120000000000001, -0.23999999999999977, -1.392, 0.7999999999999998, -0.9359999999999999, -1.92, -0.07200000000000006, -1.7759999999999998, 0.2240000000000002, 1.4320000000000004, 1.08, -0.03200000000000003, -0.6160000000000001, 0.38400000000000034, 0.18400000000000016, -0.7359999999999998, -0.56, 0.8799999999999999, -0.3919999999999999, 1.1600000000000001, -1.456, -0.7999999999999998, -0.7119999999999997, -1.6, -1.6800000000000002, -1.2559999999999998, 1.1600000000000001, 0.5200000000000005, 2.176, -0.45599999999999996, -1.48, 1.0, -0.7919999999999998, -2.112, 0.7439999999999998, -1.3679999999999999, -0.496, -1.3359999999999999, 0.8879999999999999, -0.040000000000000036, -0.6400000000000001, 0.06400000000000006, -0.6879999999999997, -1.1999999999999997, -0.18399999999999972, 0.5200000000000005, 0.38400000000000034, -1.7839999999999998, -0.8319999999999999, 1.04, -1.7359999999999998, 0.5520000000000005, 1.2400000000000002, 0.4240000000000004, -0.040000000000000036, -0.8719999999999999, -0.7119999999999997, 0.20800000000000018, 1.4720000000000004, 0.4640000000000004, 0.5200000000000005, 0.5760000000000005, 0.4640000000000004, -1.1919999999999997, -0.9039999999999999, -1.2399999999999998, -1.584, 0.5280000000000005, 0.3120000000000003, 0.2320000000000002, -0.3839999999999999, 0.2400000000000002, 0.3200000000000003, -1.1680000000000001, 1.072, -1.7359999999999998, -0.31199999999999983, -0.15200000000000014, -1.4, -0.7839999999999998, -1.2079999999999997, 0.08800000000000008, 0.9199999999999999, 1.072, 
-0.1280000000000001, -0.992, -0.33599999999999985, -0.4159999999999999, 0.4720000000000004, 0.07200000000000006, 1.008, -1.096, -1.568, 1.0, 1.6559999999999997, -1.032, 2.6639999999999997, -1.1680000000000001, -1.096, 1.5200000000000005, 0.4240000000000004, -1.7279999999999998, -1.944, -0.10400000000000009, 0.984, -0.544, -1.3519999999999999, -0.5760000000000001, 0.8319999999999999, 0.4720000000000004, 0.24800000000000022, -1.072, -1.1999999999999997, -1.6959999999999997, -0.2719999999999998, -0.8079999999999998, 1.3200000000000003, 1.3040000000000003, 0.992, -0.3679999999999999, 1.6319999999999997, 0.1200000000000001, 0.016000000000000014, -0.512, 0.08000000000000007, -1.432, -0.10400000000000009, 0.040000000000000036, -0.7679999999999998, 1.984, 1.4080000000000004, 0.07200000000000006, 0.09600000000000009, 0.7759999999999998, 0.6399999999999997, 1.1840000000000002, 1.032, -0.7359999999999998, 0.7759999999999998, 0.4720000000000004, 0.28800000000000026, -0.536, 1.912, 0.9039999999999999, 1.4320000000000004, 1.5280000000000005, 0.16000000000000014, 0.9039999999999999, 0.3440000000000003, -2.376, -0.8479999999999999, 0.03200000000000003]\n", + "[9.74, 13.88, 8.42, 3.56, 8.4, 6.88, 8.78, 10.8, 11.78, 8.5, 12.26, 11.08, 10.0, 10.38, 11.72, 13.9, 8.9, 6.54, 8.52, 7.26, 3.5, 10.98, 11.64, 8.38, 10.14, 12.98, 8.84, 4.0200000000000005, 16.92, 11.94, 8.74, 9.34, 18.16, 8.64, 11.18, 12.58, 6.84, 11.120000000000001, 11.0, 9.56, 10.46, 10.72, 9.040000000000001, 5.64, 12.700000000000001, 11.46, 13.200000000000001, 11.44, 8.46, 10.76, 9.78, 9.72, 19.04, 9.56, 9.84, 11.3, 9.56, 11.14, 11.4, 7.2, 15.34, 8.540000000000001, 7.68, 7.16, 11.16, 10.1, 6.3, 9.94, 11.32, 4.22, 8.64, 5.08, 9.5, 10.620000000000001, 8.56, 11.6, 9.78, 12.68, 14.6, 9.540000000000001, 11.48, 11.14, 10.08, 9.84, 8.94, 6.3, 9.9, 13.96, 12.96, 8.4, 10.76, 7.34, 7.24, 11.700000000000001, 12.56, 13.74, 8.68, 12.58, 8.76, 12.72, 17.54, 5.66, 10.74, 8.72, 15.68, 7.62, 13.52, 11.36, 12.48, 12.58, 10.68, 5.62, 2.62, 
9.64, 19.14, 13.14, 10.98, 10.56, 13.94, 11.58, 10.66, 4.92, 8.66, 8.6, 12.1, 10.32, 5.5200000000000005, 15.620000000000001, 8.32, 10.64, 8.84, 7.76, 7.96, 12.56, 13.14, 10.44, 9.700000000000001, 6.04, 8.22, 3.52, 5.36, 11.42, 9.66, 7.16, 12.6, 10.72, 12.040000000000001, 8.120000000000001, 10.18, 9.94, 8.4, 7.34, 10.44, 6.16, 10.02, 13.52, 15.06, 9.5, 12.64, 10.98, 8.94, 6.88, 11.36, 9.200000000000001, 8.3, 10.84, 11.040000000000001, 5.94, 10.48, 10.42, 6.08, 12.200000000000001, 10.72, 6.98, 12.88, 12.96, 5.08, 4.12, 10.86, 1.9000000000000001, 8.18, 10.1, 7.66, 10.18, 13.02, 9.36, 14.700000000000001, 9.200000000000001, 7.38, 10.06, 16.36, 14.96, 11.68, 8.68, 13.34, 10.5, 11.3, 12.22, 10.84, 9.14, 8.72, 8.52, 6.94, 9.78, 4.7, 2.9, 4.78, 12.84, 8.38, 15.700000000000001, 10.1, 8.96, 13.1, 8.66, 14.26, 16.12, 7.84, 12.200000000000001, 5.86, 14.16, 9.08, 12.6, 7.0200000000000005, 9.64, 9.84, 11.540000000000001, 11.82, 10.620000000000001, 15.860000000000001, 6.1000000000000005, 11.28, 14.120000000000001, 5.6000000000000005, 9.64, 6.68, 13.76, 10.84, 4.68, 11.32, 17.56, 8.44, 9.94, 7.140000000000001, 9.18, 9.94, 10.700000000000001, 8.6, 10.72, 11.9, 12.34, 9.4, 8.88, 9.5, 12.1, 8.32, 14.24, 14.64, 10.3, 7.92, 8.9, 9.42, 3.5, 5.96, 4.18, 11.200000000000001, 13.24, 13.34, 11.56, 10.42, 6.0, 12.040000000000001, 10.68, 6.62, 11.02, 12.32, 16.44, 8.4, 13.48, 13.66, 7.08, 7.08, 9.8, 10.620000000000001, 12.74, 8.18, 6.16, 10.72, 8.94, 5.96, 9.22, 8.28, 9.94, 13.42, 9.0, 10.86, 5.88, 5.4, 7.38, 11.8, 11.52, 12.540000000000001, 8.040000000000001, 7.82, 8.22, 7.4, 9.9, 14.4, 10.120000000000001, 4.68, 9.5, 9.44, 11.98, 12.06, 7.12, 13.8, 5.26, 13.02, 11.94, 6.5, 7.5, 15.46, 11.3, 9.98, 9.38, 7.08, 12.96, 15.0, 7.84, 10.94, 12.82, 8.44, 11.02, 11.620000000000001, 7.94, 7.12, 8.24, 4.0600000000000005, 7.18, 11.08, 15.34, 11.58, 10.98, 10.74, 4.36, 7.58, 13.82, 9.66, 6.8, 13.280000000000001, 10.32, 11.200000000000001, 16.56, 12.48, 7.26, 10.52, 10.94, 7.12, 10.58, 5.4, 7.54, 
11.620000000000001, 12.42, 12.84, 10.84, 9.540000000000001, 11.02, 8.9, 11.44, 7.22, 10.76, 13.22, 14.36, 11.14, 8.72, 9.4, 6.54, 12.34, 7.08, 9.32, 7.140000000000001, 8.26, 13.0, 8.82, 12.98, 6.5200000000000005, 8.64, 6.28, 13.38, 8.44, 9.92, 8.8, 10.8, 12.9, 12.94, 13.4, 13.120000000000001, 7.74, 11.88, 9.1, 11.46, 8.18, 8.44, 6.48, 13.0, 7.94, 9.38, 8.700000000000001, 14.92, 11.3, 13.34, 1.4000000000000001, 7.54, 9.52, 8.22, 15.82, 9.48, 8.64, 7.9, 7.3, 11.36, 13.14, 5.78, 10.36, 6.5600000000000005, 5.96, 12.700000000000001, 11.620000000000001, 15.68, 9.88, 14.24, 8.88, 8.76, 11.52, 9.72, 12.32, 12.1, 4.12, 7.7, 9.92, 14.44, 2.44, 13.36, 11.44, 14.46, 16.82, 10.38, 9.14, 5.66, 15.0, 10.44, 9.74, 7.34, 12.48, 9.14, 7.22, 4.34, 11.02, 10.92, 9.28, 9.44, 7.7, 9.24, 10.18, 9.78, 12.540000000000001, 11.08, 9.28, 11.58, 8.18, 11.36, 12.96, 7.22, 6.4, 6.9, 10.4, 11.28, 12.48, 5.0200000000000005, 15.4, 11.0, 10.28, 12.6, 6.5200000000000005, 14.46, 3.2800000000000002, 10.74, 10.48, 13.5, 9.120000000000001, 8.620000000000001, 3.38, 7.24, 6.32, 12.76, 11.48, 10.58, 7.38, 10.56, 8.34, 8.66, 12.44, 6.0, 13.92, 12.9, 11.72, 10.620000000000001, 7.18, 12.98, 14.6, 8.38, 10.040000000000001, 12.48, 12.32, 9.06, 12.0, 9.700000000000001, 9.040000000000001, 12.22, 11.700000000000001, 0.86, 11.22, 5.0, 6.94, 11.32, 13.44, 13.6, 5.46, 11.66, 10.0, 8.58, 3.02, 9.26, 8.96, 10.88, 8.02, 11.9, 16.96, 8.38, 11.9, 12.040000000000001, 13.96, 13.94, 9.34, 5.36, 13.86, 9.14, 7.5600000000000005, 7.88, 10.64, 10.4, 14.280000000000001, 10.14, 8.52, 7.9, 8.78, 11.540000000000001, 10.18, 8.040000000000001, 10.56, 9.92, 8.08, 10.26, 7.54, 6.34, 5.14, 9.3, 10.64, 9.200000000000001, 7.140000000000001, 7.44, 7.54, 13.42, 4.34, 6.4, 9.38, 10.96, 10.82, 11.66, 9.0, 8.02, 10.14, 10.82, 10.4, 12.44, 8.620000000000001, 9.98, 6.58, 12.02, 9.14, 12.18, 7.28, 7.24, 11.120000000000001, 12.88, 8.38, 11.48, 12.64, 10.120000000000001, 13.14, 9.06, 11.700000000000001, 9.02, 7.42, 8.620000000000001, 8.26, 9.78, 
8.48, 14.02, 5.34, 11.46, 5.94, 6.62, 12.9, 8.3, 15.58, 6.0600000000000005, 7.62, 14.8, 6.36, 10.120000000000001, 6.38, 9.72, 10.68, 7.48, 9.72, 6.54, 10.72, 14.16, 9.120000000000001, 9.86, 8.44, 9.82, 12.66, 5.7, 8.48, 14.58, 10.02, 6.640000000000001, 12.06, 6.22, 8.28, 13.46, 11.42, 9.88, 4.6000000000000005, 9.32, 13.200000000000001, 5.7, 9.48, 5.42, 6.1000000000000005, 5.6000000000000005, 11.9, 11.24, 10.700000000000001, 16.2, 12.92, 9.86, 13.26, 9.5, 9.38, 5.4, 9.0, 8.68, 9.94, 3.8000000000000003, 9.3, 10.8, 9.6, 17.1, 10.96, 13.48, 12.88, 15.42, 11.72, 14.780000000000001, 13.620000000000001, 10.78, 9.52, 5.3, 14.200000000000001, 8.64, 11.14, 6.9, 2.42, 9.28, 13.74, 9.96, 11.18, 17.52, 8.42, 12.02, 12.620000000000001, 12.58, 11.24, 10.32, 8.36, 13.98, 12.38, 13.86, 8.86, 11.44, 10.3, 10.92, 5.5600000000000005, 15.280000000000001, 10.32, 9.02, 5.1000000000000005, 10.6, 10.78, 7.3, 7.44, 8.22, 9.26, 7.12, 2.46, 12.700000000000001, 6.3, 12.44, 7.3, 7.28, 16.3, 9.76, 3.92, 10.44, 7.34, 9.46, 6.76, 7.640000000000001, 2.2600000000000002, 7.08, 10.52, 14.3, 3.0, 8.36, 8.620000000000001, 9.88, 14.040000000000001, 10.0, 14.44, 5.54, 12.48, 8.8, 12.16, 11.68, 10.42, 10.8, 7.32, 13.42, 11.26, 6.82, 5.08, 6.6000000000000005, 7.92, 11.120000000000001, 14.1, 9.82, 7.24, 6.58, 15.18, 7.5600000000000005, 9.82, 4.36, 10.58, 14.32, 6.9, 9.58, 10.34, 5.58, 11.540000000000001, 11.26, 5.5600000000000005, 9.6, 9.06, 8.74, 4.5200000000000005, 9.92, 10.72, 10.52, 5.72, 6.86, 10.78, 14.84, 15.82, 4.72, 10.120000000000001, 10.74, 10.68, 15.36, 12.92, 9.74, 10.52, 6.9, 4.3, 10.52, 4.04, 5.22, 11.540000000000001, 8.9, 12.8, 1.3, 10.66, 8.64, 9.8, 11.94, 9.200000000000001, 11.88, 7.7, 6.72, 9.8, 10.46, 15.56, 10.88, 7.0200000000000005, 9.200000000000001, 11.540000000000001, 8.06, 6.5, 13.56, 9.08, 10.6, 10.120000000000001, 9.32, 5.5600000000000005, 8.28, 10.9, 9.96, 9.16, 11.44, 13.72, 12.32, 12.72, 4.6000000000000005, 13.0, 11.46, 14.92, 5.98, 11.18, 11.48, 5.12, 15.58, 9.44, 12.3, 8.84, 
12.68, 10.58, 16.34, 10.94, 10.46, 7.5600000000000005, 11.18, 12.5, 8.02, 7.94, 12.56, 8.2, 6.76, 11.58, 13.48, 9.52, 8.2, 9.34, 6.38, 13.280000000000001, 10.14, 9.16, 11.26, 8.9, 9.88, 9.6, 9.82, 6.86, 7.86, 13.0, 13.92, 9.22, 7.38, 9.98, 11.16, 10.46, 11.96, 6.98, 11.08, 12.34, 10.1, 12.52, 4.12, 8.86, 9.200000000000001, 8.08, 12.66, 15.88, 10.02, 6.24, 8.92, 12.98, 14.76, 9.74, 8.36, 14.84, 9.98, 10.74, 11.78, 12.64, 10.82, 13.3, 3.7600000000000002, 16.02, 4.38, 13.120000000000001, 11.06, 10.8, 9.88, 6.16, 11.58, 12.0, 10.24, 7.48, 11.22, 12.72, 5.62, 6.98, 7.2, 5.98, 8.5, 12.4, 9.02, 8.82, 13.02, 11.200000000000001, 8.5, 9.78, 7.7, 6.7, 12.22, 7.1000000000000005, 13.36, 5.04, 10.78, 8.76, 10.42, 7.6000000000000005, 3.48, 9.78, 8.0, 10.78, 10.56, 5.04, 8.64, 13.76, 12.84, 12.94, 7.6000000000000005, 13.48, 15.0, 4.0, 14.280000000000001, 9.28, 7.16, 7.08, 4.5200000000000005, 10.14, 11.72, 7.16, 6.26, 6.44, 10.44, 10.8, 11.76, 12.38, 9.92, 12.5, 12.120000000000001, 12.46, 6.32, 7.54, 14.18, 7.5200000000000005, 14.620000000000001, 9.8, 9.16, 6.9, 13.040000000000001, 14.74, 11.02, 6.32, 9.700000000000001, 15.280000000000001, 6.3, 13.8, 9.200000000000001, 6.18, 8.94, 12.72, 8.56, 8.5, 7.96, 7.48, 12.92]\n", + "[ -3.016, -2.722] : \n", + "[ -2.722, -2.427] : #\n", + "[ -2.427, -2.133] : ##\n", + "[ -2.133, -1.838] : ##\n", + "[ -1.838, -1.544] : #####\n", + "[ -1.544, -1.250] : ########\n", + "[ -1.250, -0.955] : ###########\n", + "[ -0.955, -0.661] : #################\n", + "[ -0.661, -0.366] : ################\n", + "[ -0.366, -0.072] : ################\n", + "[ -0.072, 0.222] : ####################\n", + "[ 0.222, 0.517] : #################\n", + "[ 0.517, 0.811] : ###############\n", + "[ 0.811, 1.106] : #############\n", + "[ 1.106, 1.400] : #######\n", + "[ 1.400, 1.694] : ####\n", + "[ 1.694, 1.989] : ###\n", + "[ 1.989, 2.283] : ##\n", + "[ 2.283, 2.578] : \n", + "[ 2.578, 2.872] : #\n", + "g1 mean = -0.0460959999999999\n", + "g1 variance = 
1.0227577187840005\n", + "[ 0.860, 1.774] : \n", + "[ 1.774, 2.688] : \n", + "[ 2.688, 3.602] : #\n", + "[ 3.602, 4.516] : ##\n", + "[ 4.516, 5.430] : ####\n", + "[ 5.430, 6.344] : #######\n", + "[ 6.344, 7.258] : ###########\n", + "[ 7.258, 8.172] : ##########\n", + "[ 8.172, 9.086] : ##################\n", + "[ 9.086, 10.000] : ####################\n", + "[ 10.000, 10.914] : ##################\n", + "[ 10.914, 11.828] : ################\n", + "[ 11.828, 12.742] : ############\n", + "[ 12.742, 13.656] : ##########\n", + "[ 13.656, 14.570] : ######\n", + "[ 14.570, 15.484] : ####\n", + "[ 15.484, 16.398] : ##\n", + "[ 16.398, 17.312] : \n", + "[ 17.312, 18.226] : \n", + "[ 18.226, 19.140] : \n", + "g2 mean = 9.911780000000002\n", + "g2 variance = 8.671936831599995\n" ], "name": "stdout" } @@ -1004,10 +1005,10 @@ "metadata": { "id": "A51dSStW_1iF", "colab_type": "code", - "outputId": "8071f6c6-8402-44f2-9df9-b67698eaf0a6", + "outputId": "7d1b1f31-1786-4d1c-ae00-0249023581e9", "colab": { "base_uri": "https://localhost:8080/", - "height": 119 + "height": 153 } }, "source": [ @@ -1033,25 +1034,32 @@ " i += dx\n", " return integral\n", "\n", - "\n", - "print(\" data: {:4.3f}\".format(integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", - "print(\"theory: {:4.3f}\".format(integrate_properly(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", - "print(\" d/t: {:3.1f}%\".format(100*integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)/integrate_properly(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)))\n", - "print(\" data: {:1.3f}\".format(integrate(g2, 7, 13, 10000)/integrate(g2, 0, 20, 10000)))\n", - "print(\"theory: {:1.3f}\".format(integrate_properly(g2, 7, 13, 10000)/integrate_properly(g2, 0, 20, 10000)))\n", - "print(\" d/t: {:3.1f}%\".format(100*(integrate(g2, 7, 13, 10000)/integrate(g2, 0, 20, 10000))/(integrate_properly(g2, 7, 13, 10000)/integrate_properly(g2, 0, 20, 10000))))" + "mc_integral = integrate(lambda x: -((x - 10) ** 2) + 100, 0, 20, 
10000)\n", + "proper_integral = integrate_properly(lambda x: -((x - 10) ** 2) + 100, 0, 20, 10000)\n", + "print(\"Integral from {:d} to \".format(x_min) + \"{:d} of -((x - 10) ^ 2) + 100:\".format(x_max))\n", + "print(\" data: {:4.3f}\".format(mc_integral))\n", + "print(\" theory: {:4.3f}\".format(proper_integral))\n", + "print(\" % error: {:3.1f}%\".format(100*abs(mc_integral-proper_integral)/proper_integral))\n", + "data_norm_integral = integrate(g2, 7, 13, 10000)/integrate(g2, 0, 20, 10000)\n", + "proper_norm_integral = integrate_properly(g2, 7, 13, 10000)/integrate_properly(g2, 0, 20, 10000)\n", + "print(\"Percent of Normal distribution within one variance:\")\n", + "print(\" data: {:3.1f}%\".format(100*data_norm_integral))\n", + "print(\" theory: {:3.1f}%\".format(100*proper_norm_integral))\n", + "print(\" % error: {:3.1f}%\".format(100*abs(data_norm_integral-proper_norm_integral)/proper_norm_integral))" ], - "execution_count": 146, + "execution_count": 36, "outputs": [ { "output_type": "stream", "text": [ - " data: 1330.600\n", - "theory: 1333.333\n", - " d/t: 99.8%\n", - " data: 0.680\n", - "theory: 0.683\n", - " d/t: 99.5%\n" + "Integral from 0 to 10 of -((x - 10) ^ 2) + 100:\n", + " data: 1324.600\n", + " theory: 1333.333\n", + " % error: 0.7%\n", + "Percent of Normal distribution within one variance:\n", + " data: 68.2%\n", + " theory: 68.3%\n", + " % error: 0.1%\n" ], "name": "stdout" } From 506e8db2e0ca8d6e38d7cf71b454ad8201fb11c9 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Wed, 1 Apr 2020 15:59:39 -0500 Subject: [PATCH 10/24] Finished up to p5 --- Exams/Mid-term/Exam.ipynb | 258 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 250 insertions(+), 8 deletions(-) diff --git a/Exams/Mid-term/Exam.ipynb b/Exams/Mid-term/Exam.ipynb index a1350fc..ef91be7 100644 --- a/Exams/Mid-term/Exam.ipynb +++ b/Exams/Mid-term/Exam.ipynb @@ -18,7 +18,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": 
[], "source": [ @@ -41,9 +41,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "False\n", + "True\n", + "False\n", + "True\n" + ] + } + ], "source": [ "print(\"A\">\"B\")\n", "print(\"B\">\"A\")\n", @@ -58,6 +69,32 @@ "Make sure your implementation isn't case sensitive. Do not use python's built-in `sort` or any other sort function you find." ] }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Alabama\n" + ] + } + ], + "source": [ + "def first_alphabetically(lst):\n", + " # return min(lst)\n", + " first = lst[0]\n", + " for state in lst:\n", + " if first.upper() > state.upper():\n", + " first = state\n", + " return first\n", + "\n", + "\n", + "print(first_alphabetically(states))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -65,6 +102,35 @@ "2. Write a function `arg_first_alphabetically(lst)`, which does the same thing as in exercise 1 but returns the index of the first string alphabetically." ] }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "23\n" + ] + } + ], + "source": [ + "def arg_first_alphabetically(lst):\n", + " # return lst.index(min(lst))\n", + " \n", + " first = lst[0]\n", + " first_index = 0\n", + " for i in range(1, len(lst)):\n", + " if first.upper() > lst[i].upper():\n", + " first = lst[i]\n", + " first_index = i\n", + " return first_index\n", + "\n", + "\n", + "print(arg_first_alphabetically(states))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -72,6 +138,79 @@ "3. Use your result in question 2 to implement a function `arg_sort_alphabetically(lst)` that returns a list that is alphabetically sorted. 
Sorting can be accomplished by successively applying the function in question 1 and removing the first element alphabetically. You can remove an element from a list using `pop()`. Do not use python's built-in `sort` or any other sort function you find. " ] }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Alabama',\n", + " 'Alaska',\n", + " 'Arizona',\n", + " 'Arkansas',\n", + " 'California',\n", + " 'Colorado',\n", + " 'Connecticut',\n", + " 'Delaware',\n", + " 'Florida',\n", + " 'Georgia',\n", + " 'Hawaii',\n", + " 'Idaho',\n", + " 'Illinois',\n", + " 'Indiana',\n", + " 'Iowa',\n", + " 'Kansas',\n", + " 'Kentucky',\n", + " 'Louisiana',\n", + " 'Maine',\n", + " 'Maryland',\n", + " 'Massachusetts',\n", + " 'Michigan',\n", + " 'Minnesota',\n", + " 'Mississippi',\n", + " 'Missouri',\n", + " 'Montana',\n", + " 'Nebraska',\n", + " 'Nevada',\n", + " 'New Hampshire',\n", + " 'New Jersey',\n", + " 'New Mexico',\n", + " 'New York',\n", + " 'North Carolina',\n", + " 'North Dakota',\n", + " 'Ohio',\n", + " 'Oklahoma',\n", + " 'Oregon',\n", + " 'Pennsylvania',\n", + " 'Rhode Island',\n", + " 'South Carolina',\n", + " 'South Dakota',\n", + " 'Tennessee',\n", + " 'Texas',\n", + " 'Utah',\n", + " 'Vermont',\n", + " 'Virginia',\n", + " 'Washington',\n", + " 'West Virginia',\n", + " 'Wisconsin',\n", + " 'Wyoming']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def arg_sort_alphabetically(lst):\n", + " return [lst.pop(arg_first_alphabetically(lst)) for i in range(len(lst))]\n", + "\n", + "\n", + "arg_sort_alphabetically(states)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -91,6 +230,38 @@ "In other words the elements of matrix C which is the outer product of A and B are $c_{ij} = a_i b_j$." 
] }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[6, 7, 8]\n", + "[12, 14, 16]\n", + "[18, 21, 24]\n", + "[24, 28, 32]\n", + "[30, 35, 40]\n" + ] + } + ], + "source": [ + "def outer_product(lst1, lst2):\n", + " lst3 = [[0 for i in range(len(lst2))] for j in range(len(lst1))]\n", + " for i in range(len(lst1)):\n", + " for j in range(len(lst2)):\n", + " lst3[i][j] = lst1[i]*lst2[j]\n", + " return lst3\n", + "\n", + "tl1 = [1, 2, 3, 4, 5]\n", + "tl2 = [6, 7, 8]\n", + "op = outer_product(tl1, tl2)\n", + "for l in op:\n", + " print(l)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -98,6 +269,32 @@ "5. Implement a function `cumulative_sum(lst)` that takes a list of numbers and returns a list of same size where the element `i` is the sum of the elements `0` to `i` of the input list. For example given `[1,2,3]`, you should return [1,3,6]." ] }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1, 2, 3, 4, 5]\n", + "[1, 3, 6, 10, 15]\n" + ] + } + ], + "source": [ + "def cumulative_sum(lst):\n", + " for i in range(1, len(lst)):\n", + " lst[i] += lst[i-1]\n", + " return lst\n", + "\n", + "\n", + "tl = list(range(1,6))\n", + "print(tl)\n", + "print(cumulative_sum(tl))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -116,10 +313,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 68, "metadata": {}, "outputs": [], "source": [ + "import random\n", + "import math\n", + "\n", + "\n", + "# all code in this block from lectures #\n", + "\n", + "\n", + "def arange(x_min, x_max, steps=10):\n", + " step_size = (x_max - x_min) /steps\n", + " x = x_min\n", + " out = list()\n", + " for i in range(steps):\n", + " out.append(x)\n", + " x += step_size\n", + " return out\n", + "\n", + "\n", "def generate_normal(N,m=0,s=1):\n", " out = list() 
\n", " \n", @@ -156,10 +370,38 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 130, "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1.3119894260482794\n", + " x_90: 1.311989\n", + "under x_90: 90.0%\n" + ] + } + ], + "source": [ + "def find_x_90(x):\n", + " h = histogram(x, len(x))\n", + " N = float(len(x))\n", + " x_90 = h[1][1] # second to lowest edge by default\n", + " for i in range(1, len(h[0])):\n", + " if cumulative_sum(h[0][:i])[-1] < 0.9*N:\n", + " x_90 = h[1][i+1]\n", + " else:\n", + " break\n", + " return x_90\n", + "\n", + "x = generate_normal(1000)\n", + "x_90 = find_x_90(x)\n", + "hist = histogram(x,len(x))\n", + "print(hist[1][hist[1].index(x_90)])\n", + "print(\" x_90: {:f}\".format(x_90))\n", + "print(\"under x_90: {:3.1f}%\".format(100 * cumulative_sum(hist[0][:hist[1].index(x_90)])[-1]/len(x)))\n" + ] } ], "metadata": { @@ -178,7 +420,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.1" + "version": "3.7.2" } }, "nbformat": 4, From b866399c3b3aa8f312b0f828152732413ef6b145 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Wed, 1 Apr 2020 16:56:24 -0500 Subject: [PATCH 11/24] completed p6 --- Exams/Mid-term/Exam.ipynb | 53 ++++++++++++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 12 deletions(-) diff --git a/Exams/Mid-term/Exam.ipynb b/Exams/Mid-term/Exam.ipynb index ef91be7..f6e3ec9 100644 --- a/Exams/Mid-term/Exam.ipynb +++ b/Exams/Mid-term/Exam.ipynb @@ -370,21 +370,42 @@ }, { "cell_type": "code", - "execution_count": 130, + "execution_count": 243, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "1.3119894260482794\n", - " x_90: 1.311989\n", - "under x_90: 90.0%\n" + " N: 1000\n", + " sigma: 1\n", + " x_90: 1.261\n", + " x_90*s: 1.261\n", + "% < x_90*s: 90.2%\n", + "\n", + " N: 1000\n", + " sigma: 2\n", + " x_90: 1.231\n", + " 
x_90*s: 2.463\n", + "% < x_90*s: 90.1%\n", + "\n", + " N: 1000\n", + " sigma: 3\n", + " x_90: 1.200\n", + " x_90*s: 3.600\n", + "% < x_90*s: 90.0%\n", + "\n", + " N: 1000\n", + " sigma: 4\n", + " x_90: 1.401\n", + " x_90*s: 5.605\n", + "% < x_90*s: 90.1%\n", + "\n" ] } ], "source": [ - "def find_x_90(x):\n", + "def find_x_90(x, m=0, s=1):\n", " h = histogram(x, len(x))\n", " N = float(len(x))\n", " x_90 = h[1][1] # second to lowest edge by default\n", @@ -393,14 +414,22 @@ " x_90 = h[1][i+1]\n", " else:\n", " break\n", - " return x_90\n", + " return x_90 / s # in sigma\n", "\n", - "x = generate_normal(1000)\n", - "x_90 = find_x_90(x)\n", - "hist = histogram(x,len(x))\n", - "print(hist[1][hist[1].index(x_90)])\n", - "print(\" x_90: {:f}\".format(x_90))\n", - "print(\"under x_90: {:3.1f}%\".format(100 * cumulative_sum(hist[0][:hist[1].index(x_90)])[-1]/len(x)))\n" + "\n", + "N = 1000\n", + "for sigma in range(1,5):\n", + " x = generate_normal(N, 0, sigma)\n", + " x_90 = find_x_90(x, 0, sigma)\n", + " hist = histogram(x,len(x))\n", + " first_index_above_x_90_sigma = hist[1].index([i for i in hist[1] if i >= x_90*sigma][0])\n", + " percentage = 100 * cumulative_sum(hist[0][:first_index_above_x_90_sigma])[-1]/len(x)\n", + " print(\" N:\", N)\n", + " print(\" sigma:\", sigma)\n", + " print(\" x_90: {:1.3f}\".format(x_90)) # I expect ~ 1.3\n", + " print(\" x_90*s: {:1.3f}\".format(x_90 * sigma))\n", + " print(\"% < x_90*s: {:3.1f}%\".format(percentage))\n", + " print()" ] } ], From 5f63a11a1eb8899abcc5308f16323aa0e2f07cee Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 13 Apr 2020 15:08:48 -0500 Subject: [PATCH 12/24] Revert "completed p6" This reverts commit b866399c3b3aa8f312b0f828152732413ef6b145. 
--- Exams/Mid-term/Exam.ipynb | 53 +++++++++------------------------------ 1 file changed, 12 insertions(+), 41 deletions(-) diff --git a/Exams/Mid-term/Exam.ipynb b/Exams/Mid-term/Exam.ipynb index c376fc0..420876e 100644 --- a/Exams/Mid-term/Exam.ipynb +++ b/Exams/Mid-term/Exam.ipynb @@ -386,42 +386,21 @@ }, { "cell_type": "code", - "execution_count": 243, + "execution_count": 130, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - " N: 1000\n", - " sigma: 1\n", - " x_90: 1.261\n", - " x_90*s: 1.261\n", - "% < x_90*s: 90.2%\n", - "\n", - " N: 1000\n", - " sigma: 2\n", - " x_90: 1.231\n", - " x_90*s: 2.463\n", - "% < x_90*s: 90.1%\n", - "\n", - " N: 1000\n", - " sigma: 3\n", - " x_90: 1.200\n", - " x_90*s: 3.600\n", - "% < x_90*s: 90.0%\n", - "\n", - " N: 1000\n", - " sigma: 4\n", - " x_90: 1.401\n", - " x_90*s: 5.605\n", - "% < x_90*s: 90.1%\n", - "\n" + "1.3119894260482794\n", + " x_90: 1.311989\n", + "under x_90: 90.0%\n" ] } ], "source": [ - "def find_x_90(x, m=0, s=1):\n", + "def find_x_90(x):\n", " h = histogram(x, len(x))\n", " N = float(len(x))\n", " x_90 = h[1][1] # second to lowest edge by default\n", @@ -430,22 +409,14 @@ " x_90 = h[1][i+1]\n", " else:\n", " break\n", - " return x_90 / s # in sigma\n", + " return x_90\n", "\n", - "\n", - "N = 1000\n", - "for sigma in range(1,5):\n", - " x = generate_normal(N, 0, sigma)\n", - " x_90 = find_x_90(x, 0, sigma)\n", - " hist = histogram(x,len(x))\n", - " first_index_above_x_90_sigma = hist[1].index([i for i in hist[1] if i >= x_90*sigma][0])\n", - " percentage = 100 * cumulative_sum(hist[0][:first_index_above_x_90_sigma])[-1]/len(x)\n", - " print(\" N:\", N)\n", - " print(\" sigma:\", sigma)\n", - " print(\" x_90: {:1.3f}\".format(x_90)) # I expect ~ 1.3\n", - " print(\" x_90*s: {:1.3f}\".format(x_90 * sigma))\n", - " print(\"% < x_90*s: {:3.1f}%\".format(percentage))\n", - " print()" + "x = generate_normal(1000)\n", + "x_90 = find_x_90(x)\n", + "hist = 
histogram(x,len(x))\n", + "print(hist[1][hist[1].index(x_90)])\n", + "print(\" x_90: {:f}\".format(x_90))\n", + "print(\"under x_90: {:3.1f}%\".format(100 * cumulative_sum(hist[0][:hist[1].index(x_90)])[-1]/len(x)))\n" ] } ], From 922854d2e487dec57c80965ac71e80a840e6ff3e Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Mon, 13 Apr 2020 15:19:00 -0500 Subject: [PATCH 13/24] added axam-checkpoint.ipynb because pull conflicts caused issues --- Exams/Mid-term/Exam-checkpoint.ipynb | 457 +++++++++++++++++++++++++++ 1 file changed, 457 insertions(+) create mode 100644 Exams/Mid-term/Exam-checkpoint.ipynb diff --git a/Exams/Mid-term/Exam-checkpoint.ipynb b/Exams/Mid-term/Exam-checkpoint.ipynb new file mode 100644 index 0000000..f6e3ec9 --- /dev/null +++ b/Exams/Mid-term/Exam-checkpoint.ipynb @@ -0,0 +1,457 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Mid-term Exam\n", + "\n", + "Add cells to this notebook as you need for you solutions and your test of your solutions." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "1. Write a function `first_alphabetically(lst)` that takes a list `lst` of strings and returns the string that is alphabetically first. 
For example, calling your function with the list of states:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "states=['Mississippi', 'Maryland', 'Delaware', 'Connecticut', 'Virginia', 'Utah', 'Kansas',\n", + " 'Wyoming', 'Indiana', 'Louisiana', 'Missouri', 'Illinois', 'Minnesota', 'Vermont', \n", + " 'New Mexico', 'North Dakota', 'Wisconsin', 'Tennessee', 'New York', 'Oklahoma', \n", + " 'Colorado', 'Pennsylvania', 'West Virginia', 'Alabama', 'Montana', 'Texas', \n", + " 'Washington', 'Michigan', 'New Hampshire', 'Arkansas', 'Hawaii', 'Iowa', \n", + " 'Idaho', 'Kentucky', 'Ohio', 'Nebraska', 'Alaska', 'Oregon', 'South Dakota', \n", + " 'New Jersey', 'Florida', 'Georgia', 'Rhode Island', 'Arizona', 'Maine', \n", + " 'South Carolina', 'California', 'Nevada', 'Massachusetts', 'North Carolina']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "should return the string `\"Alabama\"`. Note that you can compare strings:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "False\n", + "True\n", + "False\n", + "True\n" + ] + } + ], + "source": [ + "print(\"A\">\"B\")\n", + "print(\"B\">\"A\")\n", + "print(\"A\">\"a\")\n", + "print(\"bca\">\"bbc\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Make sure your implementation isn't case sensitive. Do not use python's built-in `sort` or any other sort function you find." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Alabama\n" + ] + } + ], + "source": [ + "def first_alphabetically(lst):\n", + " # return min(lst)\n", + " first = lst[0]\n", + " for state in lst:\n", + " if first.upper() > state.upper():\n", + " first = state\n", + " return first\n", + "\n", + "\n", + "print(first_alphabetically(states))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "2. Write a function `arg_first_alphabetically(lst)`, which does the same thing as in exercise 1 but returns the index of the first string alphabetically." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "23\n" + ] + } + ], + "source": [ + "def arg_first_alphabetically(lst):\n", + " # return lst.index(min(lst))\n", + " \n", + " first = lst[0]\n", + " first_index = 0\n", + " for i in range(1, len(lst)):\n", + " if first.upper() > lst[i].upper():\n", + " first = lst[i]\n", + " first_index = i\n", + " return first_index\n", + "\n", + "\n", + "print(arg_first_alphabetically(states))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "3. Use your result in question 2 to implement a function `arg_sort_alphabetically(lst)` that returns a list that is alphabetically sorted. Sorting can be accomplished by successively applying the function in question 1 and removing the first element alphabetically. You can remove an element from a list using `pop()`. Do not use python's built-in `sort` or any other sort function you find. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Alabama',\n", + " 'Alaska',\n", + " 'Arizona',\n", + " 'Arkansas',\n", + " 'California',\n", + " 'Colorado',\n", + " 'Connecticut',\n", + " 'Delaware',\n", + " 'Florida',\n", + " 'Georgia',\n", + " 'Hawaii',\n", + " 'Idaho',\n", + " 'Illinois',\n", + " 'Indiana',\n", + " 'Iowa',\n", + " 'Kansas',\n", + " 'Kentucky',\n", + " 'Louisiana',\n", + " 'Maine',\n", + " 'Maryland',\n", + " 'Massachusetts',\n", + " 'Michigan',\n", + " 'Minnesota',\n", + " 'Mississippi',\n", + " 'Missouri',\n", + " 'Montana',\n", + " 'Nebraska',\n", + " 'Nevada',\n", + " 'New Hampshire',\n", + " 'New Jersey',\n", + " 'New Mexico',\n", + " 'New York',\n", + " 'North Carolina',\n", + " 'North Dakota',\n", + " 'Ohio',\n", + " 'Oklahoma',\n", + " 'Oregon',\n", + " 'Pennsylvania',\n", + " 'Rhode Island',\n", + " 'South Carolina',\n", + " 'South Dakota',\n", + " 'Tennessee',\n", + " 'Texas',\n", + " 'Utah',\n", + " 'Vermont',\n", + " 'Virginia',\n", + " 'Washington',\n", + " 'West Virginia',\n", + " 'Wisconsin',\n", + " 'Wyoming']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def arg_sort_alphabetically(lst):\n", + " return [lst.pop(arg_first_alphabetically(lst)) for i in range(len(lst))]\n", + "\n", + "\n", + "arg_sort_alphabetically(states)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "4. 
Implement a function `outer_product` that takes two one-dimensional lists of numbers and returns the two-dimensional outer product matrix defined as:\n", + "\n", + "\\begin{equation*}\n", + "\\begin{pmatrix} x_1\\\\x_2\\\\ \\vdots \\\\x_m \\end{pmatrix} \\begin{pmatrix} y_1&y_2& \\dots &y_n\\end{pmatrix} =\n", + "\\begin{pmatrix}\n", + "x_1y_1 & x_1y_2 & \\dots & x_1y_n\\\\\n", + "x_2y_1 & x_2y_2 & \\dots & x_2y_n\\\\\n", + "\\vdots & \\vdots & \\ddots & \\vdots \\\\\n", + "x_my_1 & x_my_2 & \\dots & x_my_n\n", + "\\end{pmatrix}\n", + "\\end{equation*}\n", + "\n", + "In other words the elements of matrix C which is the outer product of A and B are $c_{ij} = a_i b_j$." + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[6, 7, 8]\n", + "[12, 14, 16]\n", + "[18, 21, 24]\n", + "[24, 28, 32]\n", + "[30, 35, 40]\n" + ] + } + ], + "source": [ + "def outer_product(lst1, lst2):\n", + " lst3 = [[0 for i in range(len(lst2))] for j in range(len(lst1))]\n", + " for i in range(len(lst1)):\n", + " for j in range(len(lst2)):\n", + " lst3[i][j] = lst1[i]*lst2[j]\n", + " return lst3\n", + "\n", + "tl1 = [1, 2, 3, 4, 5]\n", + "tl2 = [6, 7, 8]\n", + "op = outer_product(tl1, tl2)\n", + "for l in op:\n", + " print(l)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "5. Implement a function `cumulative_sum(lst)` that takes a list of numbers and returns a list of same size where the element `i` is the sum of the elements `0` to `i` of the input list. For example given `[1,2,3]`, you should return [1,3,6]." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1, 2, 3, 4, 5]\n", + "[1, 3, 6, 10, 15]\n" + ] + } + ], + "source": [ + "def cumulative_sum(lst):\n", + " for i in range(1, len(lst)):\n", + " lst[i] += lst[i-1]\n", + " return lst\n", + "\n", + "\n", + "tl = list(range(1,6))\n", + "print(tl)\n", + "print(cumulative_sum(tl))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "6. Imagine you have a normal distributed random variable `x`. For example `x` can be grades on this exam. Using the normal distribution generator and histogram functions from lecture (provided below) and `cumulative_sum` from previous question to compute what is the value of `x_{90}` in $\\sigma$ such that 90% of the values $x$ are below `x_{90}`. In other words:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "$$\n", + "\\int_{-\\infty}^{x_{90}} N(x;\\mu=0,\\sigma=1) dx = 0.9\n", + "$$" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": {}, + "outputs": [], + "source": [ + "import random\n", + "import math\n", + "\n", + "\n", + "# all code in this block from lectures #\n", + "\n", + "\n", + "def arange(x_min, x_max, steps=10):\n", + " step_size = (x_max - x_min) /steps\n", + " x = x_min\n", + " out = list()\n", + " for i in range(steps):\n", + " out.append(x)\n", + " x += step_size\n", + " return out\n", + "\n", + "\n", + "def generate_normal(N,m=0,s=1):\n", + " out = list() \n", + " \n", + " while len(out)=bin_edges[i] and d= x_90*sigma][0])\n", + " percentage = 100 * cumulative_sum(hist[0][:first_index_above_x_90_sigma])[-1]/len(x)\n", + " print(\" N:\", N)\n", + " print(\" sigma:\", sigma)\n", + " print(\" x_90: {:1.3f}\".format(x_90)) # I expect ~ 1.3\n", + " print(\" x_90*s: {:1.3f}\".format(x_90 * sigma))\n", + " print(\"% < x_90*s: {:3.1f}%\".format(percentage))\n", + " print()" + ] + } + ], + 
"metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 5362e770009294f71ffdd50dcf304bf5fbca4dfc Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Tue, 14 Apr 2020 21:33:12 -0500 Subject: [PATCH 14/24] started work on lab 5 --- Labs/Lab-5/Lab-5 - Copy.ipynb | 459 ++++++++++++++++++++++++++++++++++ 1 file changed, 459 insertions(+) create mode 100644 Labs/Lab-5/Lab-5 - Copy.ipynb diff --git a/Labs/Lab-5/Lab-5 - Copy.ipynb b/Labs/Lab-5/Lab-5 - Copy.ipynb new file mode 100644 index 0000000..3ca0505 --- /dev/null +++ b/Labs/Lab-5/Lab-5 - Copy.ipynb @@ -0,0 +1,459 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Lab 5- Object Oriented Programming\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github//afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-5/Lab-5.ipynb)\n", + "\n", + "For all of the exercises below, make sure you provide tests of your solutions.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "1. Write a \"counter\" class that can be incremented up to a specified maximum value, will print an error if an attempt is made to increment beyond that value, and allows reseting the counter. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "class counter:\n", + " def __init__(self,max_val):\n", + " self.max_val=max_val\n", + " self.cur_val=1\n", + " \n", + " def increment(self):\n", + " if self.cur_val>self.max_val:\n", + " print(\"Max value reached.\")\n", + " else:\n", + " self.cur_val+=1\n", + " \n", + " def reset(self):\n", + " self.cur_val=1\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "my_counter=counter(3)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Max value reached.\n", + "Max value reached.\n" + ] + } + ], + "source": [ + "my_counter.increment()\n", + "my_counter.increment()\n", + "my_counter.increment()\n", + "my_counter.increment()\n", + "my_counter.increment()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "my_counter.cur_val=100" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "2. Copy and paste your solution to question 1 and modify it so that all the data held by the counter is private. Implement functions to check the value of the counter, check the maximum value, and check if the counter is at the maximum." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "class counter:\n", + " def __init__(self,max_val):\n", + " self.__max_val=max_val\n", + " self.__cur_val=1\n", + " \n", + " def increment(self):\n", + " if self.__cur_val>self.__max_val:\n", + " print(\"Max value reached.\")\n", + " else:\n", + " self.__cur_val+=1\n", + " \n", + " def reset(self):\n", + " self.__cur_val=1\n", + " \n", + " def cur_val(self):\n", + " return self.__cur_val\n", + "\n", + " def max_val(self):\n", + " return self.__max_val\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "my_counter=counter(3)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "my_counter.cur_val()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "3. Implement a class to represent a rectangle, holding the length, width, and $x$ and $y$ coordinates of a corner of the object. Implement functions that compute the area and parameter of the rectangle. Make all data members private and privide accessors to retrieve values of data members. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "class rectangle:\n", + " def __init__(self,width,length,x,y):\n", + " self.__width=width\n", + " self.__length=length\n", + " self.__x=x\n", + " self.__y=y\n", + " \n", + " def area(self):\n", + " return self.__width*self.__length\n", + " \n", + " def perimeter(self):\n", + " return 2*(self.__width+self.__length)\n", + " \n", + " def x(self):\n", + " return self.__x\n", + " \n", + " def y(self):\n", + " return self.__y\n", + " \n", + " def width(self):\n", + " return self.__width\n", + " \n", + " def length(self):\n", + " return self.__length\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "6\n", + "10\n", + "1\n", + "4\n", + "2\n", + "3\n" + ] + } + ], + "source": [ + "my_rectangle = rectangle(2,3,1,4)\n", + "print(my_rectangle.area())\n", + "print(my_rectangle.perimeter())\n", + "print(my_rectangle.x())\n", + "print(my_rectangle.y())\n", + "print(my_rectangle.width())\n", + "print(my_rectangle.length())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "4. Implement a class to represent a circle, holding the radius and $x$ and $y$ coordinates of center of the object. Implement functions that compute the area and parameter of the rectangle. Make all data members private and privide accessors to retrieve values of data members. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "\n", + "\n", + "class circle:\n", + " def __init__(self, radius, x, y):\n", + " self.__radius = radius\n", + " self.__x = x\n", + " self.__y = y\n", + " \n", + " def area(self):\n", + " return math.pi * self.__radius**2\n", + " \n", + " def perimeter(self):\n", + " return 2 * math.pi * self.__radius\n", + " \n", + " def radius(self):\n", + " return self.__radius\n", + " \n", + " def x(self):\n", + " return self.__x\n", + " \n", + " def y(self):\n", + " return self.__y" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "28.274333882308138\n", + "18.84955592153876\n", + "3\n", + "5\n", + "5\n" + ] + } + ], + "source": [ + "my_circle = circle(3,5,5)\n", + "print(my_circle.area())\n", + "print(my_circle.perimeter())\n", + "print(my_circle.radius())\n", + "print(my_circle.x())\n", + "print(my_circle.y())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "5. Implement a common base class for the classes implemented in 3 and 4 above which implements all common methods as dummy functions. Re-implement those classes to inherit from the base class and overload the functions accordingly. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "class shape:\n", + " def __init__(self, x, y):\n", + " self.__x = x\n", + " self.__y = y\n", + " \n", + " def x(self):\n", + " return self.__x\n", + " \n", + " def y(self):\n", + " return self.__y\n", + " \n", + " def area(self):\n", + " raise NotImplementedError\n", + " \n", + " def perimeter(self):\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5\n", + "5\n" + ] + }, + { + "ename": "NotImplementedError", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNotImplementedError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0my\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 4\u001b[1;33m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0marea\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 5\u001b[0m 
\u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mperimeter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;32m\u001b[0m in \u001b[0;36marea\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 11\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0marea\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 13\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mNotImplementedError\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 14\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0mperimeter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;31mNotImplementedError\u001b[0m: " + ] + } + ], + "source": [ + "my_shape = shape(5,5)\n", + "print(my_shape.x())\n", + "print(my_shape.y())\n", + "print(my_shape.area())\n", + "print(my_shape.perimeter())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "6. Implement an analogous triangle class." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "class triangle(shape):\n", + " def __init__(self, x1, y1, x2, y2, x3, y3):\n", + " super().__init__(x1, y1)\n", + " self.__x2 = x2\n", + " self.__y2 = y2\n", + " self.__x3 = x3\n", + " self.__y3 = y3\n", + " \n", + " def x1(self):\n", + " return super().x()\n", + " def y1(self):\n", + " return super().y()\n", + " def x2(self):\n", + " return self.__x2\n", + " def y2(self):\n", + " return self.__y2\n", + " def x3(self):\n", + " return self.__x3\n", + " def y3(self):\n", + " return self.__y3\n", + " \n", + " def area(self):\n", + " return abs((self.x1()*(self.__y2-self.__y3) + self.__x2*(self.__y3-self.y1()) + self.__x3*(self.y1()-self.__y2))/2)\n", + " \n", + " def perimeter(self):\n", + " return math.sqrt((self.__x2-super().x())**2+(self.__y2-super().y())**2) + math.sqrt((self.__x3-self.__x2)**2+(self.__y3-self.__y2)**2) + math.sqrt((super().x()-self.__x3)**2+(super().x()-self.__y3)**2)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0\n", + "0\n", + "1.0\n", + "5.23606797749979\n" + ] + } + ], + "source": [ + "my_triangle = triangle(0,0,1,0,1,2)\n", + "print(my_triangle.x())\n", + "print(my_triangle.x1())\n", + "print(my_triangle.area())\n", + "print(my_triangle.perimeter())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "7. Add a function to the object classes that test if a given set of $x$ and $y$ coordinates are inside of the object." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "8. 
Add a function to the object classes that return a list of up to 16 pairs of $x$ and $y$ points on the parameter of the object.\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "9. Add a function in the base class of the object classes that returns true/false testing that the object overlaps with another object." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 94f76f334929f1ffa467eda5d97bc450d7bbff94 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Wed, 15 Apr 2020 15:38:26 -0500 Subject: [PATCH 15/24] completed lab 5 thru p7 --- Labs/Lab-5/Lab-5 - Copy.ipynb | 172 ++++++++++++++++++++++++++-------- 1 file changed, 132 insertions(+), 40 deletions(-) diff --git a/Labs/Lab-5/Lab-5 - Copy.ipynb b/Labs/Lab-5/Lab-5 - Copy.ipynb index 3ca0505..3b82dad 100644 --- a/Labs/Lab-5/Lab-5 - Copy.ipynb +++ b/Labs/Lab-5/Lab-5 - Copy.ipynb @@ -20,7 +20,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 102, "metadata": {}, "outputs": [], "source": [ @@ -42,7 +42,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 103, "metadata": {}, "outputs": [], "source": [ @@ -51,7 +51,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 104, "metadata": {}, "outputs": [ { @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 105, "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 106, "metadata": {}, "outputs": [], "source": [ @@ -117,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 6, 
+ "execution_count": 107, "metadata": {}, "outputs": [], "source": [ @@ -126,7 +126,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 108, "metadata": {}, "outputs": [ { @@ -135,7 +135,7 @@ "1" ] }, - "execution_count": 7, + "execution_count": 108, "metadata": {}, "output_type": "execute_result" } @@ -153,7 +153,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 109, "metadata": {}, "outputs": [], "source": [ @@ -185,7 +185,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 110, "metadata": {}, "outputs": [ { @@ -220,7 +220,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 111, "metadata": {}, "outputs": [], "source": [ @@ -251,23 +251,23 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 112, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "28.274333882308138\n", - "18.84955592153876\n", - "3\n", - "5\n", - "5\n" + "12.566370614359172\n", + "12.566370614359172\n", + "2\n", + "0\n", + "0\n" ] } ], "source": [ - "my_circle = circle(3,5,5)\n", + "my_circle = circle(2,0,0)\n", "print(my_circle.area())\n", "print(my_circle.perimeter())\n", "print(my_circle.radius())\n", @@ -284,7 +284,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 125, "metadata": {}, "outputs": [], "source": [ @@ -303,12 +303,56 @@ " raise NotImplementedError\n", " \n", " def perimeter(self):\n", - " raise NotImplementedError" + " raise NotImplementedError\n", + "\n", + "class rectangle(shape):\n", + " def __init__(self,width,length,x,y):\n", + " super().__init__(x, y)\n", + " self.__width=width\n", + " self.__length=length\n", + " \n", + " def area(self):\n", + " return self.__width*self.__length\n", + " \n", + " def perimeter(self):\n", + " return 2*(self.__width+self.__length)\n", + " \n", + " def x(self):\n", + " return super().x()\n", + " \n", + " def y(self):\n", + " return super().y()\n", + " \n", + " 
def width(self):\n", + " return self.__width\n", + " \n", + " def length(self):\n", + " return self.__length\n", + "\n", + "class circle(shape):\n", + " def __init__(self, radius, x, y):\n", + " super().__init__(x, y)\n", + " self.__radius = radius\n", + " \n", + " def area(self):\n", + " return math.pi * self.__radius**2\n", + " \n", + " def perimeter(self):\n", + " return 2 * math.pi * self.__radius\n", + " \n", + " def radius(self):\n", + " return self.__radius\n", + " \n", + " def x(self):\n", + " return super().x()\n", + " \n", + " def y(self):\n", + " return super().y()" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 149, "metadata": {}, "outputs": [ { @@ -316,19 +360,11 @@ "output_type": "stream", "text": [ "5\n", - "5\n" - ] - }, - { - "ename": "NotImplementedError", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mNotImplementedError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0my\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 4\u001b[1;33m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0marea\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 5\u001b[0m 
\u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmy_shape\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mperimeter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m\u001b[0m in \u001b[0;36marea\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 11\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0marea\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 13\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mNotImplementedError\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 14\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0mperimeter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mNotImplementedError\u001b[0m: " + "5\n", + "6\n", + "10\n", + "12.566370614359172\n", + "12.566370614359172\n" ] } ], @@ -336,8 +372,12 @@ "my_shape = shape(5,5)\n", "print(my_shape.x())\n", "print(my_shape.y())\n", - "print(my_shape.area())\n", - "print(my_shape.perimeter())" + "my_rectangle = rectangle(3, 2, -1.5,-0.5)\n", + "print(my_rectangle.area())\n", + "print(my_rectangle.perimeter())\n", + "my_circle = circle(2, 0, 0)\n", + "print(my_circle.area())\n", + "print(my_circle.perimeter())\n" ] }, { @@ -349,7 +389,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 150, "metadata": {}, "outputs": [], "source": [ @@ -383,7 +423,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 151, "metadata": {}, "outputs": [ { @@ -414,10 +454,55 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 152, "metadata": {}, "outputs": [], - "source": [] + "source": [ + 
"setattr(triangle,'inside', lambda self, x, y: triangle(x, y, self.x2(), self.y2(), self.x3(), self.y3()).area() + triangle(self.x1(), self.y1(), x, y, self.x3(), self.y3()).area() + triangle(self.x1(), self.y1(), self.x2(), self.y2(), x, y).area() == self.area())\n", + "setattr(circle,'inside', lambda self, x, y: math.sqrt((self.x()-x)**2+(self.y()-y)**2) <= self.radius())\n", + "setattr(rectangle,'inside', lambda self, x, y: self.x()<=x<=self.x()+self.width() and self.y()<=y<=self.y()+self.length())" + ] + }, + { + "cell_type": "code", + "execution_count": 154, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n", + "False\n", + "True\n", + "\n", + "True\n", + "True\n", + "True\n", + "False\n", + "\n", + "True\n", + "True\n", + "True\n", + "False\n" + ] + } + ], + "source": [ + "print(my_triangle.inside(0.75, 0.25))\n", + "print(my_triangle.inside(0.25, 0.75))\n", + "print(my_triangle.inside(0.5,0.5))\n", + "print()\n", + "print(my_circle.inside(0,0))\n", + "print(my_circle.inside(0,1))\n", + "print(my_circle.inside(0,2))\n", + "print(my_circle.inside(0,3))\n", + "print()\n", + "print(my_rectangle.inside(-1.5,0))\n", + "print(my_rectangle.inside(0,0))\n", + "print(my_rectangle.inside(1.5,0))\n", + "print(my_rectangle.inside(1.6,0))" + ] }, { "cell_type": "markdown", @@ -427,6 +512,13 @@ "\n" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "markdown", "metadata": {}, From 23431b13d55581dd432a637a012d9a20fa2b82f0 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Thu, 23 Apr 2020 04:23:18 -0500 Subject: [PATCH 16/24] finished up to problem 4 --- Labs/Lab-6/Lab-6 - Copy.ipynb | 390 ++++++++++++++++++++++++++++++++++ 1 file changed, 390 insertions(+) create mode 100644 Labs/Lab-6/Lab-6 - Copy.ipynb diff --git a/Labs/Lab-6/Lab-6 - Copy.ipynb b/Labs/Lab-6/Lab-6 - Copy.ipynb new file mode 100644 index 0000000..d4b947a --- /dev/null +++ 
b/Labs/Lab-6/Lab-6 - Copy.ipynb @@ -0,0 +1,390 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Lab 6\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Matrix Representation: In this lab you will be creating a simple linear algebra system. In memory, we will represent matrices as nested python lists as we have done in lecture. \n", + "\n", + "1. Create a `matrix` class with the following properties:\n", + " * It can be initialized in 2 ways:\n", + " 1. with arguments `n` and `m`, the size of the matrix. A newly instanciated matrix will contain all zeros.\n", + " 2. with a list of lists of values. Note that since we are using lists of lists to implement matrices, it is possible that not all rows have the same number of columns. Test explicitly that the matrix is properly specified.\n", + " * Matrix instances `M` can be indexed with `M[i][j]` and `M[i,j]`.\n", + " * Matrix assignment works in 2 ways:\n", + " 1. If `M_1` and `M_2` are `matrix` instances `M_1=M_2` sets the values of `M_1` to those of `M_2`, if they are the same size. Error otherwise.\n", + " 2. 
In example above `M_2` can be a list of lists of correct size.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": {}, + "outputs": [], + "source": [ + "class matrix:\n", + " _M = None\n", + " def __init__(self, *args):\n", + " if isinstance(args[0], list):\n", + " if (isinstance(args[0][_], list) for _ in range(len(args[0]))):\n", + " lengths = set(len(args[0][_]) for _ in range(len(args[0])))\n", + " if len(lengths) == 1:\n", + " self._M = args[0]\n", + " elif isinstance(args[0], int) and isinstance(args[1], int):\n", + " self._M = [[0 for _ in range(args[1])] for __ in range(args[0])]\n", + " \n", + " def __getitem__(self, index):\n", + " if isinstance(index, tuple):\n", + " i, j = index\n", + " return self._M[i][j]\n", + " else:\n", + " return self._M[index]" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "M=matrix(2,3): [[0, 0, 0], [0, 0, 0]]\n", + "M=matrix([[],[]]): [[1, 2, 3], [4, 5, 6]]\n", + "M[1][0]: 4\n", + "M[1,0]: 4\n", + "M = M2: [[2, 3], [4, 5], [6, 7]]\n" + ] + } + ], + "source": [ + "M = matrix(2,3)\n", + "print(\"M=matrix(2,3): {}\".format(M._M))\n", + "M = matrix([[1,2,3],[4,5,6]])\n", + "print(\"M=matrix([[],[]]): {}\".format(M._M))\n", + "print(\"M[1][0]: {}\".format(M[1][0]))\n", + "print(\"M[1,0]: {}\".format(M[1,0]))\n", + "# can't figure out how to override assignment operator\n", + "M2 = matrix([[2,3],[4,5],[6,7]])\n", + "M = M2\n", + "print(\"M = M2: {}\".format(M._M))\n", + "#M = [[3,4],[5,6],[7,8]]\n", + "#print(\"M=[[],[],[]]\".format(M.M()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "2. 
Add the following methods:\n", + " * `shape()`: returns a tuple `(n,m)` of the shape of the matrix.\n", + " * `transpose()`: returns a new matrix instance which is the transpose of the matrix.\n", + " * `row(n)` and `column(n)`: that return the nth row or column of the matrix M as a new appropriately shaped matrix object.\n", + " * `to_list()`: which returns the matrix as a list of lists.\n", + " * `block(n_0,n_1,m_0,m_1)` that returns a smaller matrix located at the n_0 to n_1 columns and m_0 to m_1 rows. \n", + " * (Extra credit) Modify `__getitem__` implemented above to support slicing.\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "metadata": {}, + "outputs": [], + "source": [ + "def shape(self):\n", + " return len(self._M), len(self._M[0])\n", + "\n", + "def transpose(self):\n", + " n, m = self.shape()\n", + " M = matrix(m,n)\n", + " for _ in range(m):\n", + " for __ in range(n):\n", + " M[_][__] = self._M[__][_]\n", + " return M\n", + "\n", + "def row(self, n):\n", + " return matrix([self._M[n]])\n", + "def column(self, n):\n", + " return matrix([self.transpose()._M[n]])\n", + "\n", + "def to_list(self):\n", + " return self._M\n", + "\n", + "def block(self, n_0, n_1, m_0, m_1):\n", + " if 0 <= n_0 and 0 <= m_0 and n_1 <= len(self._M) and m_1 <= len(self.transpose()._M):\n", + " ml = [[0 for _ in range(m_1-m_0)] for __ in range(n_1-n_0)]\n", + " for n in range(n_0, n_1):\n", + " for m in range(m_0, m_1):\n", + " ml[n-n_0][m-m_0] = self._M[n][m]\n", + " return matrix(ml)\n", + " else:\n", + " return None\n", + "\n", + "# modification was not necessary but now explicit\n", + "def __getitem__(self, index):\n", + " if isinstance(index, tuple):\n", + " i, j = index\n", + " return self._M[i][j]\n", + " elif isinstance(index, slice):\n", + " return self._M[index]\n", + " else:\n", + " return self._M[index]\n", + "\n", + "setattr(matrix, 'shape', shape)\n", + "setattr(matrix, 'transpose', transpose)\n", + "setattr(matrix, 'row', row)\n", + 
"setattr(matrix, 'column', column)\n", + "setattr(matrix, 'to_list', to_list)\n", + "setattr(matrix, 'block', block)\n", + "setattr(matrix, '__getitem__', __getitem__)" + ] + }, + { + "cell_type": "code", + "execution_count": 126, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "M: [[2, 3], [4, 5], [6, 7]]\n", + "M.shape(): (3, 2)\n", + "M.transpose()._M: [[2, 4, 6], [3, 5, 7]]\n", + "M.row(0)._M: [[2, 3]]\n", + "M.column(0)._M: [[2, 4, 6]]\n", + "M.to_list(): [[2, 3], [4, 5], [6, 7]]\n", + "M.block(0,2,0,2): [[2, 3], [4, 5]]\n", + "M[:][:2]: [[2, 3], [4, 5]]\n" + ] + } + ], + "source": [ + "print(\"M: {}\".format(M._M))\n", + "print(\"M.shape(): {}\".format(M.shape()))\n", + "print(\"M.transpose()._M: {}\".format(M.transpose()._M))\n", + "print(\"M.row(0)._M: {}\".format(M.row(0)._M))\n", + "print(\"M.column(0)._M: {}\".format(M.column(0)._M))\n", + "print(\"M.to_list(): {}\".format(M.to_list()))\n", + "print(\"M.block(0,2,0,2): {}\".format(M.block(0,2,0,2).to_list()))\n", + "print(\"M[:][:2]: {}\".format(M[:][:2]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "3. Write functions that create special matrices (note these are standalone functions, not member functions of your `matrix` class):\n", + " * `constant(n,m,c)`: returns a `n` by `m` matrix filled with floats of value `c`.\n", + " * `zeros(n,m)` and `ones(n,m)`: return `n` by `m` matrices filled with floats of value `0` and `1`, respectively.\n", + " * `eye(n)`: returns the n by n identity matrix." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": {}, + "outputs": [], + "source": [ + "def constant(n,m,c):\n", + " return matrix([[c for _ in range(m)] for __ in range(n)])\n", + "\n", + "def zeros(n,m):\n", + " return matrix([[0.0 for _ in range(m)] for __ in range(n)])\n", + "\n", + "def ones(n,m):\n", + " return matrix([[1.0 for _ in range(m)] for __ in range(n)])\n", + "\n", + "def eye(n):\n", + " return matrix([[0 if _ != __ else 1 for _ in range(n)] for __ in range(n)])" + ] + }, + { + "cell_type": "code", + "execution_count": 137, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[5, 5, 5, 5], [5, 5, 5, 5], [5, 5, 5, 5]]\n", + "[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]\n", + "[[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]]\n", + "[[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1]]\n" + ] + } + ], + "source": [ + "print(constant(3,4,5).to_list())\n", + "print(zeros(3,4).to_list())\n", + "print(ones(3,4).to_list())\n", + "print(eye(5).to_list())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "4. Add the following member functions to your class. Make sure to appropriately test the dimensions of the matrices to make sure the operations are correct.\n", + " * `M.scalarmul(c)`: a matrix that is scalar product $cM$, where every element of $M$ is multiplied by $c$.\n", + " * `M.add(N)`: adds two matrices $M$ and $N$. Don’t forget to test that the sizes of the matrices are compatible for this and all other operations.\n", + " * `M.sub(N)`: subtracts two matrices $M$ and $N$.\n", + " * `M.mat_mult(N)`: returns a matrix that is the matrix product of two matrices $M$ and $N$.\n", + " * `M.element_mult(N)`: returns a matrix that is the element-wise product of two matrices $M$ and $N$.\n", + " * `M.equals(N)`: returns true/false if $M==N$." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 170, + "metadata": {}, + "outputs": [], + "source": [ + "def scalarmul(self, c):\n", + " n,m = self.shape()\n", + " M = matrix(n,m)\n", + " for _ in range(len(self._M)):\n", + " for __ in range(len(self._M[_])):\n", + " M._M[_][__] = self._M[_][__] * c\n", + " return M\n", + "def add(self, N):\n", + " n,m = self.shape()\n", + " M = matrix(n,m)\n", + " if self.shape() == N.shape():\n", + " for _ in range(n):\n", + " for __ in range(m):\n", + " M._M[_][__] = self._M[_][__] + N._M[_][__]\n", + " return M\n", + "def sub(self, N):\n", + " if self.shape() == N.shape():\n", + " n,m = self.shape()\n", + " M = matrix(n,m)\n", + " for _ in range(n):\n", + " for __ in range(m):\n", + " M._M[_][__] = self._M[_][__] - N._M[_][__]\n", + " return M\n", + "def mat_mult(self, N):\n", + " if len(self._M[0]) == len(N._M):\n", + " M = matrix(len(self._M), len(N._M[0]))\n", + " for i in range(len(self._M)):\n", + " for j in range(len(N._M[0])):\n", + " for k in range(len(N._M)):\n", + " M[i][j] += self._M[i][k] * N._M[k][j]\n", + " return M\n", + "def element_mult(self, N):\n", + " if self.shape() == N.shape():\n", + " n,m = self.shape()\n", + " M = matrix(n,m)\n", + " for _ in range(n):\n", + " for __ in range(m):\n", + " M._M[_][__] = self._M[_][__] * N._M[_][__]\n", + " return M\n", + "setattr(matrix, 'scalarmul', scalarmul)\n", + "setattr(matrix, 'add', add)\n", + "setattr(matrix, 'sub', sub)\n", + "setattr(matrix, 'mat_mult', mat_mult)\n", + "setattr(matrix, 'element_mult', element_mult)" + ] + }, + { + "cell_type": "code", + "execution_count": 176, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1, 2, 3], [4, 5, 6]]\n", + "[[2, 4, 6], [8, 10, 12]]\n", + "[[4, 4, 4], [10, 10, 10]]\n", + "[[1, 1, 1], [2, 2, 2]]\n", + "[[1, 2, 3], [4, 5, 6]]\n", + "[[2, 6, 12], [0, 5, 12]]\n" + ] + } + ], + "source": [ + "M1 = matrix([[1,2,3],[4,5,6]])\n", + "print(M1.to_list())\n", + 
"print(M1.scalarmul(2).to_list())\n", + "print(M1.add(matrix([[3,2,1],[6,5,4]])).to_list())\n", + "print(M1.sub(matrix([[0,1,2],[2,3,4]])).to_list())\n", + "print(M1.mat_mult(eye(3)).to_list())\n", + "print(M1.element_mult(matrix([[2,3,4],[0,1,2]])).to_list())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "5. Overload python operators to appropriately use your functions in 4 and allow expressions like:\n", + " * 2*M\n", + " * M*2\n", + " * M+N\n", + " * M-N\n", + " * M*N\n", + " * M==N\n", + " * M=N\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "6. Demonstrate the basic properties of matrices with your matrix class by creating two 2 by 2 example matrices using your Matrix class and illustrating the following:\n", + "\n", + "$$\n", + "(AB)C=A(BC)\n", + "$$\n", + "$$\n", + "A(B+C)=AB+AC\n", + "$$\n", + "$$\n", + "AB\\neq BA\n", + "$$\n", + "$$\n", + "AI=A\n", + "$$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From bdf98757a7ab0439112203339d49a61873bd04ef Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Thu, 23 Apr 2020 19:18:46 -0500 Subject: [PATCH 17/24] worked thru p6 on lab 6 --- Labs/Lab-6/Lab-6 - Copy.ipynb | 177 ++++++++++++++++++++++++++++------ 1 file changed, 148 insertions(+), 29 deletions(-) diff --git a/Labs/Lab-6/Lab-6 - Copy.ipynb b/Labs/Lab-6/Lab-6 - Copy.ipynb index d4b947a..07da0cf 100644 --- a/Labs/Lab-6/Lab-6 - Copy.ipynb +++ b/Labs/Lab-6/Lab-6 - Copy.ipynb @@ -25,7 +25,7 @@ }, { "cell_type": "code", - 
"execution_count": 93, + "execution_count": 248, "metadata": {}, "outputs": [], "source": [ @@ -45,12 +45,15 @@ " i, j = index\n", " return self._M[i][j]\n", " else:\n", - " return self._M[index]" + " return self._M[index]\n", + " \n", + " def __str__(self):\n", + " return str(self._M)" ] }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 249, "metadata": {}, "outputs": [ { @@ -67,15 +70,15 @@ ], "source": [ "M = matrix(2,3)\n", - "print(\"M=matrix(2,3): {}\".format(M._M))\n", + "print(\"M=matrix(2,3): {}\".format(M))\n", "M = matrix([[1,2,3],[4,5,6]])\n", - "print(\"M=matrix([[],[]]): {}\".format(M._M))\n", + "print(\"M=matrix([[],[]]): {}\".format(M))\n", "print(\"M[1][0]: {}\".format(M[1][0]))\n", "print(\"M[1,0]: {}\".format(M[1,0]))\n", "# can't figure out how to override assignment operator\n", "M2 = matrix([[2,3],[4,5],[6,7]])\n", "M = M2\n", - "print(\"M = M2: {}\".format(M._M))\n", + "print(\"M = M2: {}\".format(M))\n", "#M = [[3,4],[5,6],[7,8]]\n", "#print(\"M=[[],[],[]]\".format(M.M()))" ] @@ -96,7 +99,7 @@ }, { "cell_type": "code", - "execution_count": 125, + "execution_count": 250, "metadata": {}, "outputs": [], "source": [ @@ -150,7 +153,7 @@ }, { "cell_type": "code", - "execution_count": 126, + "execution_count": 251, "metadata": {}, "outputs": [ { @@ -169,11 +172,11 @@ } ], "source": [ - "print(\"M: {}\".format(M._M))\n", + "print(\"M: {}\".format(M))\n", "print(\"M.shape(): {}\".format(M.shape()))\n", - "print(\"M.transpose()._M: {}\".format(M.transpose()._M))\n", - "print(\"M.row(0)._M: {}\".format(M.row(0)._M))\n", - "print(\"M.column(0)._M: {}\".format(M.column(0)._M))\n", + "print(\"M.transpose()._M: {}\".format(M.transpose()))\n", + "print(\"M.row(0)._M: {}\".format(M.row(0)))\n", + "print(\"M.column(0)._M: {}\".format(M.column(0)))\n", "print(\"M.to_list(): {}\".format(M.to_list()))\n", "print(\"M.block(0,2,0,2): {}\".format(M.block(0,2,0,2).to_list()))\n", "print(\"M[:][:2]: {}\".format(M[:][:2]))" @@ -191,7 +194,7 @@ }, 
{ "cell_type": "code", - "execution_count": 134, + "execution_count": 252, "metadata": {}, "outputs": [], "source": [ @@ -210,7 +213,7 @@ }, { "cell_type": "code", - "execution_count": 137, + "execution_count": 253, "metadata": {}, "outputs": [ { @@ -225,10 +228,10 @@ } ], "source": [ - "print(constant(3,4,5).to_list())\n", - "print(zeros(3,4).to_list())\n", - "print(ones(3,4).to_list())\n", - "print(eye(5).to_list())" + "print(constant(3,4,5))\n", + "print(zeros(3,4))\n", + "print(ones(3,4))\n", + "print(eye(5))" ] }, { @@ -246,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 170, + "execution_count": 254, "metadata": {}, "outputs": [], "source": [ @@ -266,29 +269,32 @@ " M._M[_][__] = self._M[_][__] + N._M[_][__]\n", " return M\n", "def sub(self, N):\n", + " M = None\n", " if self.shape() == N.shape():\n", " n,m = self.shape()\n", " M = matrix(n,m)\n", " for _ in range(n):\n", " for __ in range(m):\n", " M._M[_][__] = self._M[_][__] - N._M[_][__]\n", - " return M\n", + " return M\n", "def mat_mult(self, N):\n", + " M = None\n", " if len(self._M[0]) == len(N._M):\n", " M = matrix(len(self._M), len(N._M[0]))\n", " for i in range(len(self._M)):\n", " for j in range(len(N._M[0])):\n", " for k in range(len(N._M)):\n", " M[i][j] += self._M[i][k] * N._M[k][j]\n", - " return M\n", + " return M\n", "def element_mult(self, N):\n", + " M = None\n", " if self.shape() == N.shape():\n", " n,m = self.shape()\n", " M = matrix(n,m)\n", " for _ in range(n):\n", " for __ in range(m):\n", " M._M[_][__] = self._M[_][__] * N._M[_][__]\n", - " return M\n", + " return M\n", "setattr(matrix, 'scalarmul', scalarmul)\n", "setattr(matrix, 'add', add)\n", "setattr(matrix, 'sub', sub)\n", @@ -298,7 +304,7 @@ }, { "cell_type": "code", - "execution_count": 176, + "execution_count": 255, "metadata": {}, "outputs": [ { @@ -316,12 +322,12 @@ ], "source": [ "M1 = matrix([[1,2,3],[4,5,6]])\n", - "print(M1.to_list())\n", - "print(M1.scalarmul(2).to_list())\n", - 
"print(M1.add(matrix([[3,2,1],[6,5,4]])).to_list())\n", - "print(M1.sub(matrix([[0,1,2],[2,3,4]])).to_list())\n", - "print(M1.mat_mult(eye(3)).to_list())\n", - "print(M1.element_mult(matrix([[2,3,4],[0,1,2]])).to_list())" + "print(M1)\n", + "print(M1.scalarmul(2))\n", + "print(M1.add(matrix([[3,2,1],[6,5,4]])))\n", + "print(M1.sub(matrix([[0,1,2],[2,3,4]])))\n", + "print(M1.mat_mult(eye(3)))\n", + "print(M1.element_mult(matrix([[2,3,4],[0,1,2]])))" ] }, { @@ -338,6 +344,85 @@ " * M=N\n" ] }, + { + "cell_type": "code", + "execution_count": 256, + "metadata": {}, + "outputs": [], + "source": [ + "def __mul__(self,o):\n", + " if isinstance(o, int) or isinstance(o, float):\n", + " return self.scalarmul(o)\n", + " elif len(self._M[0]) == len(o._M):\n", + " return self.mat_mult(o)\n", + " elif self.shape() == o.shape():\n", + " return self.element_mult(o)\n", + " else:\n", + " print(\"invalid __mul__ usage\")\n", + " return None\n", + "def __rmul__(self,o):\n", + " return self * o\n", + "def __add__(self,o):\n", + " if isinstance(o, matrix) and self.shape() == o.shape():\n", + " return self.add(o)\n", + " else:\n", + " print(\"invalid __add__ usage\")\n", + " return None\n", + "def __sub__(self,o):\n", + " if isinstance(o, matrix) and self.shape() == o.shape():\n", + " return self.sub(o)\n", + " else:\n", + " print(\"invalid __sub__ usage\")\n", + " return None\n", + "def __eq__(self,o):\n", + " if isinstance(o, matrix) and self.shape() == o.shape():\n", + " n,m = self.shape()\n", + " for i in range(n):\n", + " for j in range(m):\n", + " if self.to_list()[i][j] != o.to_list()[i][j]:\n", + " return False\n", + " return True\n", + " else:\n", + " return False\n", + "setattr(matrix, '__mul__', __mul__)\n", + "setattr(matrix, '__rmul__', __rmul__)\n", + "setattr(matrix, '__add__', __add__)\n", + "setattr(matrix, '__sub__', __sub__)\n", + "setattr(matrix, '__eq__', __eq__)" + ] + }, + { + "cell_type": "code", + "execution_count": 257, + "metadata": {}, + "outputs": [ + { + 
"name": "stdout", + "output_type": "stream", + "text": [ + "[[1, 2, 3], [4, 5, 6]]\n", + "[[2, 4, 6], [8, 10, 12]]\n", + "[[2, 4, 6], [8, 10, 12]]\n", + "[[4, 4, 4], [10, 10, 10]]\n", + "[[1, 1, 1], [2, 2, 2]]\n", + "[[1, 2, 3], [4, 5, 6]]\n", + "[[2, 6, 12], [0, 5, 12]]\n", + "True\n" + ] + } + ], + "source": [ + "M1 = matrix([[1,2,3],[4,5,6]])\n", + "print(M1)\n", + "print(M1*2)\n", + "print(2*M1)\n", + "print(M1+matrix([[3,2,1],[6,5,4]]))\n", + "print(M1-matrix([[0,1,2],[2,3,4]]))\n", + "print(M1*eye(3))\n", + "print(M1*matrix([[2,3,4],[0,1,2]]))\n", + "print(M1==matrix([[1,2,3],[4,5,6]]))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -358,6 +443,40 @@ "$$" ] }, + { + "cell_type": "code", + "execution_count": 258, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(AB)C: [[95, 118], [211, 262]]\n", + "A(BC): [[95, 118], [211, 262]]\n", + "A(B+C): [[23, 29], [51, 65]]\n", + "AB+AC: [[23, 29], [51, 65]]\n", + "AB: [[10, 13], [22, 29]]\n", + "BA: [[11, 16], [19, 28]]\n", + "AI: [[1, 2], [3, 4]]\n", + "A: [[1, 2], [3, 4]]\n" + ] + } + ], + "source": [ + "A = matrix([[1,2],[3,4]])\n", + "B = matrix([[2,3],[4,5]])\n", + "C = matrix([[3,4],[5,6]])\n", + "print(\"(AB)C: {}\".format((A*B)* C))\n", + "print(\"A(BC): {}\".format(A*(B*C)))\n", + "print(\"A(B+C): {}\".format(A*(B+C)))\n", + "print(\"AB+AC: {}\".format(A*B + A*C))\n", + "print(\"AB: {}\".format(A*B))\n", + "print(\"BA: {}\".format(B*A))\n", + "print(\"AI: {}\".format(A*eye(2)))\n", + "print(\"A: {}\".format(A))" + ] + }, { "cell_type": "code", "execution_count": null, From 579ef858b9cc551385d466e793169c97ea996550 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Tue, 28 Apr 2020 17:30:13 -0500 Subject: [PATCH 18/24] update for next lab --- Labs/Lab-6/Lab-6 - Copy.ipynb | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/Labs/Lab-6/Lab-6 - Copy.ipynb b/Labs/Lab-6/Lab-6 - Copy.ipynb index 07da0cf..b172fda 100644 --- 
a/Labs/Lab-6/Lab-6 - Copy.ipynb +++ b/Labs/Lab-6/Lab-6 - Copy.ipynb @@ -153,25 +153,26 @@ }, { "cell_type": "code", - "execution_count": 251, + "execution_count": 265, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "M: [[2, 3], [4, 5], [6, 7]]\n", - "M.shape(): (3, 2)\n", - "M.transpose()._M: [[2, 4, 6], [3, 5, 7]]\n", - "M.row(0)._M: [[2, 3]]\n", - "M.column(0)._M: [[2, 4, 6]]\n", - "M.to_list(): [[2, 3], [4, 5], [6, 7]]\n", - "M.block(0,2,0,2): [[2, 3], [4, 5]]\n", - "M[:][:2]: [[2, 3], [4, 5]]\n" + "M: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\n", + "M.shape(): (3, 3)\n", + "M.transpose()._M: [[1, 4, 7], [2, 5, 8], [3, 6, 9]]\n", + "M.row(0)._M: [[1, 2, 3]]\n", + "M.column(0)._M: [[1, 4, 7]]\n", + "M.to_list(): [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\n", + "M.block(0,2,0,2): [[1, 2], [4, 5]]\n", + "M[:][:2]: [[1, 2, 3], [4, 5, 6]]\n" ] } ], "source": [ + "M = matrix([[1,2,3],[4,5,6],[7,8,9]])\n", "print(\"M: {}\".format(M))\n", "print(\"M.shape(): {}\".format(M.shape()))\n", "print(\"M.transpose()._M: {}\".format(M.transpose()))\n", @@ -179,7 +180,7 @@ "print(\"M.column(0)._M: {}\".format(M.column(0)))\n", "print(\"M.to_list(): {}\".format(M.to_list()))\n", "print(\"M.block(0,2,0,2): {}\".format(M.block(0,2,0,2).to_list()))\n", - "print(\"M[:][:2]: {}\".format(M[:][:2]))" + "print(\"M[:][:2]: {}\".format(M[:2]))" ] }, { From db5eb18c9b6fb96b265eda8cdacb4e995f47dfb4 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Thu, 30 Apr 2020 03:50:49 -0500 Subject: [PATCH 19/24] completed exercise 2 --- Labs/Lab-7/Lab-7.ipynb | 999 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 948 insertions(+), 51 deletions(-) diff --git a/Labs/Lab-7/Lab-7.ipynb b/Labs/Lab-7/Lab-7.ipynb index 5a9197d..a44062c 100644 --- a/Labs/Lab-7/Lab-7.ipynb +++ b/Labs/Lab-7/Lab-7.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -41,7 +41,7 @@ }, { "cell_type": "code", 
- "execution_count": null, + "execution_count": 35, "metadata": {}, "outputs": [], "source": [ @@ -115,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ @@ -129,7 +129,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ @@ -181,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 38, "metadata": {}, "outputs": [], "source": [ @@ -236,7 +236,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ @@ -266,47 +266,30 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 40, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "l1_n,l1_1,12_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15\r", - "\r\n", - "1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0\r", - "\r\n", - "1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\r", - "\r\n", - "1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0\r", - "\r\n", - "1,10,7,10,10,3,9.5,10,10,9.5,14,10,10,10,8,5,10,5,10,3,0,10,3,10,8,11,10,10,10,10,10,10,0,0,10,5,0,1,10,15,9,9,10,9,7,9,0,0,10,10,9,5,10,8,10\r", - "\r\n", - "1,10,7,10,10,9.5,0,10,10,0,14,9.5,0,0,10,0,10,5,10,7,0,10,6,10,0,11,10,10,6,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,5,0,7,0,3,3,3,0,3,0,0\r", - "\r\n", - "1,10,7,10,10,10,9.5,10,10,9.5,14,5,9.5,9.5,8,10,10,8,10,8,0,5,6,0,0,11,0,10,10,10,0,5,0,0,0,0,0,1,9.5,15,9,9,10,9,9,10,7,0,9,9,9,0,5,0,0\r", - 
"\r\n", - "1,10,7,10,10,0,5,10,10,9.5,14,9.5,10,10,8,10,8,9,0,0,0,0,0,0,0,11,0,10,10,0,0,10,0,0,0,0,0,1,10,15,9,9,10,9,0,0,0,0,0,0,0,0,0,0,0\r", - "\r\n", - "1,10,7,10,10,10,9.5,10,10,9.5,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,3,3,0,0,5,0,0,1,10,15,9,9,10,0,10,0,7,5,9,9,9,0,0,0,0\r", - "\r\n", - "1,10,7,0,10,9.5,0,10,10,0,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,5,3,0,3,10,7,0,1,9.5,15,9,9,10,5,10,0,9,9,9,9,9,10,5,0,0\r", - "\r\n", - "1,10,7,10,10,0,10,10,10,10,14,10,6,10,0,0,0,0,0,0,0,0,0,0,0,11,10,10,0,7,0,0,0,0,0,0,0,1,9.5,15,9,9,10,9,5,9,7,9,10,10,10,5,10,5,0\r", - "\r\n", - "1,10,7,10,10,0,0,10,10,7,14,10,10,10,10,7,10,6,3,10,10,10,10,10,10,11,10,10,10,10,10,5,10,10,10,10,10,1,0,15,9,9,9,9,9,10,9,9,10,10,10,10,10,5,10\r", - "\r\n", - "1,10,7,10,10,9.5,9.5,10,10,9.5,14,9.5,10,10,10,8,10,8,10,10,7,5,0,0,0,11,10,10,10,10,5,6,0,0,0,0,0,1,10,15,9,9,10,9,8,9,7,9,10,10,10,10,0,0,0\r", - "\r\n", - "1,10,7,10,10,5,9.5,10,10,9.5,14,5,9,9,10,7,10,10,10,10,7,10,3,5,10,11,0,0,0,0,0,0,0,0,0,0,0,1,10,15,9,9,9,8,7,10,0,9,10,9,10,9,5,0,0\r", - "\r\n", - "1,10,7,10,10,9.5,0,10,10,0,14,9.5,10,10,10,10,10,10,10,0,0,10,5,10,10,11,0,10,10,0,0,5,0,0,0,0,0,1,0,15,9,9,10,0,8,9,7,9,10,10,10,10,10,0,0\r", - "\r\n", - "1,10,7,10,10,9.5,9,10,10,9.5,14,10,10,10,10,10,10,9,10,3,0,3,3,5,2,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,9,9,10,5,5,0,0,10,10,10,10,0,10,5,10\r", - "\r\n", - "1,10,7,10,10,3,7,10,10,9,14,10,10,10,10,0,10,9,10,7,7,3,7,5,8,11,10,10,10,8,5,3,0,0,7,0,0,1,9.5,15,9,9,10,10,7,10,10,10,10,10,10,10,9,8,2\r", - "\r\n" + "l1_n,l1_1,12_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15\n", + "1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0\n", + 
"1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,10,10,3,9.5,10,10,9.5,14,10,10,10,8,5,10,5,10,3,0,10,3,10,8,11,10,10,10,10,10,10,0,0,10,5,0,1,10,15,9,9,10,9,7,9,0,0,10,10,9,5,10,8,10\n", + "1,10,7,10,10,9.5,0,10,10,0,14,9.5,0,0,10,0,10,5,10,7,0,10,6,10,0,11,10,10,6,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,5,0,7,0,3,3,3,0,3,0,0\n", + "1,10,7,10,10,10,9.5,10,10,9.5,14,5,9.5,9.5,8,10,10,8,10,8,0,5,6,0,0,11,0,10,10,10,0,5,0,0,0,0,0,1,9.5,15,9,9,10,9,9,10,7,0,9,9,9,0,5,0,0\n", + "1,10,7,10,10,0,5,10,10,9.5,14,9.5,10,10,8,10,8,9,0,0,0,0,0,0,0,11,0,10,10,0,0,10,0,0,0,0,0,1,10,15,9,9,10,9,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,10,10,10,9.5,10,10,9.5,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,3,3,0,0,5,0,0,1,10,15,9,9,10,0,10,0,7,5,9,9,9,0,0,0,0\n", + "1,10,7,0,10,9.5,0,10,10,0,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,5,3,0,3,10,7,0,1,9.5,15,9,9,10,5,10,0,9,9,9,9,9,10,5,0,0\n", + "1,10,7,10,10,0,10,10,10,10,14,10,6,10,0,0,0,0,0,0,0,0,0,0,0,11,10,10,0,7,0,0,0,0,0,0,0,1,9.5,15,9,9,10,9,5,9,7,9,10,10,10,5,10,5,0\n", + "1,10,7,10,10,0,0,10,10,7,14,10,10,10,10,7,10,6,3,10,10,10,10,10,10,11,10,10,10,10,10,5,10,10,10,10,10,1,0,15,9,9,9,9,9,10,9,9,10,10,10,10,10,5,10\n", + "1,10,7,10,10,9.5,9.5,10,10,9.5,14,9.5,10,10,10,8,10,8,10,10,7,5,0,0,0,11,10,10,10,10,5,6,0,0,0,0,0,1,10,15,9,9,10,9,8,9,7,9,10,10,10,10,0,0,0\n", + "1,10,7,10,10,5,9.5,10,10,9.5,14,5,9,9,10,7,10,10,10,10,7,10,3,5,10,11,0,0,0,0,0,0,0,0,0,0,0,1,10,15,9,9,9,8,7,10,0,9,10,9,10,9,5,0,0\n", + "1,10,7,10,10,9.5,0,10,10,0,14,9.5,10,10,10,10,10,10,10,0,0,10,5,10,10,11,0,10,10,0,0,5,0,0,0,0,0,1,0,15,9,9,10,0,8,9,7,9,10,10,10,10,10,0,0\n", + "1,10,7,10,10,9.5,9,10,10,9.5,14,10,10,10,10,10,10,9,10,3,0,3,3,5,2,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,9,9,10,5,5,0,0,10,10,10,10,0,10,5,10\n", + 
"1,10,7,10,10,3,7,10,10,9,14,10,10,10,10,0,10,9,10,7,7,3,7,5,8,11,10,10,10,8,5,3,0,0,7,0,0,1,9.5,15,9,9,10,10,7,10,10,10,10,10,10,10,9,8,2\n" ] } ], @@ -333,16 +316,918 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 41, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[{'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '8',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '10',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '0',\n", + " 'e1_4': '9',\n", + " 'e1_5': '8',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '0',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '0',\n", + " 'l2_6': '0',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '0',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': 
'0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '0',\n", + " 'e1_2': '0',\n", + " 'e1_3': '0',\n", + " 'e1_4': '0',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '0',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '0',\n", + " 'l2_6': '0',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '3',\n", + " 'l3_8': '6',\n", + " 'l3_9': '3',\n", + " 'l3_10': '3',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '5',\n", + " 'e1_2': '5',\n", + " 'e1_3': '5',\n", + " 'e1_4': '5',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': 
'3',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '8',\n", + " 'l3_5': '5',\n", + " 'l3_6': '10',\n", + " 'l3_7': '5',\n", + " 'l3_8': '10',\n", + " 'l3_9': '3',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", + " 'l3_12': '3',\n", + " 'l3_13': '10',\n", + " 'l3_14': '8',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '10',\n", + " 'l4_6': '10',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '10',\n", + " 'l4_10': '5',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '7',\n", + " 'e1_6': '9',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '9',\n", + " 'e1_12': '5',\n", + " 'e1_13': '10',\n", + " 'e1_14': '8',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '10',\n", + " 'l3_7': '5',\n", + " 'l3_8': '10',\n", + " 'l3_9': '7',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", + " 'l3_12': '6',\n", + " 'l3_13': '10',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '6',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '0',\n", + " 'e1_2': '0',\n", + " 
'e1_3': '0',\n", + " 'e1_4': '0',\n", + " 'e1_5': '5',\n", + " 'e1_6': '0',\n", + " 'e1_7': '7',\n", + " 'e1_8': '0',\n", + " 'e1_9': '3',\n", + " 'e1_10': '3',\n", + " 'e1_11': '3',\n", + " 'e1_12': '0',\n", + " 'e1_13': '3',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '5',\n", + " 'l3_2': '9.5',\n", + " 'l3_3': '9.5',\n", + " 'l3_4': '8',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '8',\n", + " 'l3_8': '10',\n", + " 'l3_9': '8',\n", + " 'l3_10': '0',\n", + " 'l3_11': '5',\n", + " 'l3_12': '6',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '0',\n", + " 'l4_6': '5',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '9',\n", + " 'e1_6': '10',\n", + " 'e1_7': '7',\n", + " 'e1_8': '0',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '0',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '8',\n", + " 'l3_5': '10',\n", + " 'l3_6': '8',\n", + " 'l3_7': '9',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 
'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '10',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '3',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '5',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '0',\n", + " 'e1_5': '10',\n", + " 'e1_6': '0',\n", + " 'e1_7': '7',\n", + " 'e1_8': '5',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 
'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '5',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '3',\n", + " 'l4_9': '10',\n", + " 'l4_10': '7',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '5',\n", + " 'e1_5': '10',\n", + " 'e1_6': '0',\n", + " 'e1_7': '9',\n", + " 'e1_8': '9',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '10',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '10',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '10',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '6',\n", + " 'l3_3': '10',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '0',\n", + " 'l4_4': '7',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': 
'9',\n", + " 'e1_5': '5',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '5',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '7',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '6',\n", + " 'l3_8': '3',\n", + " 'l3_9': '10',\n", + " 'l3_10': '10',\n", + " 'l3_11': '10',\n", + " 'l3_12': '10',\n", + " 'l3_13': '10',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '10',\n", + " 'l4_6': '5',\n", + " 'l4_7': '10',\n", + " 'l4_8': '10',\n", + " 'l4_9': '10',\n", + " 'l4_10': '10',\n", + " 'l4_11': '10',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '9',\n", + " 'e1_4': '9',\n", + " 'e1_5': '9',\n", + " 'e1_6': '10',\n", + " 'e1_7': '9',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '8',\n", + " 'l3_6': '10',\n", + " 'l3_7': '8',\n", + " 'l3_8': '10',\n", + " 'l3_9': '10',\n", + " 'l3_10': '7',\n", + " 'l3_11': '5',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': 
'0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '5',\n", + " 'l4_6': '6',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '8',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '5',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '5',\n", + " 'l3_2': '9',\n", + " 'l3_3': '9',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '10',\n", + " 'l3_8': '10',\n", + " 'l3_9': '10',\n", + " 'l3_10': '7',\n", + " 'l3_11': '10',\n", + " 'l3_12': '3',\n", + " 'l3_13': '5',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '9',\n", + " 'e1_4': '8',\n", + " 'e1_5': '7',\n", + " 'e1_6': '10',\n", + " 'e1_7': '0',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '9',\n", + " 'e1_11': '10',\n", + " 'e1_12': '9',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': 
'0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '10',\n", + " 'l3_8': '10',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", + " 'l3_12': '5',\n", + " 'l3_13': '10',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '5',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '0',\n", + " 'e1_5': '8',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '10',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '9',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '9',\n", + " 'l3_8': '10',\n", + " 'l3_9': '3',\n", + " 'l3_10': '0',\n", + " 'l3_11': '3',\n", + " 'l3_12': '3',\n", + " 'l3_13': '5',\n", + " 'l3_14': '2',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': 
'5',\n", + " 'e1_5': '5',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '10',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '0',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " '12_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '3',\n", + " 'l2_4': '7',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '10',\n", + " 'l3_7': '9',\n", + " 'l3_8': '10',\n", + " 'l3_9': '7',\n", + " 'l3_10': '7',\n", + " 'l3_11': '3',\n", + " 'l3_12': '7',\n", + " 'l3_13': '5',\n", + " 'l3_14': '8',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '8',\n", + " 'l4_5': '5',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '7',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '10',\n", + " 'e1_5': '7',\n", + " 'e1_6': '10',\n", + " 'e1_7': '10',\n", + " 'e1_8': '10',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '9',\n", + " 'e1_14': '8',\n", + " 'e1_15': '2'}]" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Your solution here.\n", "\n", "def cvs_reader(filename):\n", " data=list() # if you choose first option\n", + " f=open(filename,\"r\")\n", + " columns = f.readline().rstrip().split(\",\")\n", + " \n", + " for line in f:\n", + " row = dict()\n", + " l = line.rstrip()\n", + " items = l.split(\",\")\n", + " for i, item in enumerate(items):\n", + " row[columns[i]] = item\n", + " data.append(row)\n", " \n", - " return data" + " return 
data\n", + "\n", + "cvs_reader(\"Data1401-Grades.csv\")" ] }, { @@ -356,20 +1241,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 51, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "TypeError", + "evalue": "list indices must be integers or slices, not str", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 8\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 9\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mclass_data\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mstudent_i\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mkeys\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 10\u001b[1;33m \u001b[0ma_student_0\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd_grade\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mgrade\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mvalue\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfloat\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mclass_data\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mstudent_i\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 11\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd_student\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0ma_student_0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;31mTypeError\u001b[0m: list indices must be integers or slices, not str" + ] + } + ], "source": [ 
"import pandas as pd\n", - "class_data=pd.read_csv(\"Data1401-Grades.csv\")\n", + "class_data=cvs_reader(\"Data1401-Grades.csv\")\n", "\n", "a_grade_book=grade_book(\"Data 1401\")\n", "\n", - "for student_i in range(class_data.shape[0]):\n", + "for student_i in range(len(class_data)):\n", " a_student_0=student(\"Student\",str(student_i),student_i)\n", "\n", - " for k in class_data.keys():\n", - " a_student_0.add_grade(grade(k,value=class_data[k][student_i]))\n", + " for k in class_data[student_i].keys():\n", + " a_student_0.add_grade(grade(k,value=float(class_data[k][student_i])))\n", "\n", " a_grade_book.add_student(a_student_0)\n", " " @@ -578,7 +1475,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.0" + "version": "3.7.2" } }, "nbformat": 4, From 81e7291989cc27f306c1df0055509adc3bb34e1a Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 1 May 2020 01:19:09 -0500 Subject: [PATCH 20/24] completed exercise 4 --- Labs/Lab-7/Data1401-Grades.csv | 34 +++--- Labs/Lab-7/Lab-7.ipynb | 182 ++++++++++++++++++++++----------- 2 files changed, 139 insertions(+), 77 deletions(-) diff --git a/Labs/Lab-7/Data1401-Grades.csv b/Labs/Lab-7/Data1401-Grades.csv index ad6e2f6..126bd44 100644 --- a/Labs/Lab-7/Data1401-Grades.csv +++ b/Labs/Lab-7/Data1401-Grades.csv @@ -1,17 +1,17 @@ -l1_n,l1_1,12_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15 -1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0 -1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 -1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0 
-1,10,7,10,10,3,9.5,10,10,9.5,14,10,10,10,8,5,10,5,10,3,0,10,3,10,8,11,10,10,10,10,10,10,0,0,10,5,0,1,10,15,9,9,10,9,7,9,0,0,10,10,9,5,10,8,10 -1,10,7,10,10,9.5,0,10,10,0,14,9.5,0,0,10,0,10,5,10,7,0,10,6,10,0,11,10,10,6,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,5,0,7,0,3,3,3,0,3,0,0 -1,10,7,10,10,10,9.5,10,10,9.5,14,5,9.5,9.5,8,10,10,8,10,8,0,5,6,0,0,11,0,10,10,10,0,5,0,0,0,0,0,1,9.5,15,9,9,10,9,9,10,7,0,9,9,9,0,5,0,0 -1,10,7,10,10,0,5,10,10,9.5,14,9.5,10,10,8,10,8,9,0,0,0,0,0,0,0,11,0,10,10,0,0,10,0,0,0,0,0,1,10,15,9,9,10,9,0,0,0,0,0,0,0,0,0,0,0 -1,10,7,10,10,10,9.5,10,10,9.5,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,3,3,0,0,5,0,0,1,10,15,9,9,10,0,10,0,7,5,9,9,9,0,0,0,0 -1,10,7,0,10,9.5,0,10,10,0,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,5,3,0,3,10,7,0,1,9.5,15,9,9,10,5,10,0,9,9,9,9,9,10,5,0,0 -1,10,7,10,10,0,10,10,10,10,14,10,6,10,0,0,0,0,0,0,0,0,0,0,0,11,10,10,0,7,0,0,0,0,0,0,0,1,9.5,15,9,9,10,9,5,9,7,9,10,10,10,5,10,5,0 -1,10,7,10,10,0,0,10,10,7,14,10,10,10,10,7,10,6,3,10,10,10,10,10,10,11,10,10,10,10,10,5,10,10,10,10,10,1,0,15,9,9,9,9,9,10,9,9,10,10,10,10,10,5,10 -1,10,7,10,10,9.5,9.5,10,10,9.5,14,9.5,10,10,10,8,10,8,10,10,7,5,0,0,0,11,10,10,10,10,5,6,0,0,0,0,0,1,10,15,9,9,10,9,8,9,7,9,10,10,10,10,0,0,0 -1,10,7,10,10,5,9.5,10,10,9.5,14,5,9,9,10,7,10,10,10,10,7,10,3,5,10,11,0,0,0,0,0,0,0,0,0,0,0,1,10,15,9,9,9,8,7,10,0,9,10,9,10,9,5,0,0 -1,10,7,10,10,9.5,0,10,10,0,14,9.5,10,10,10,10,10,10,10,0,0,10,5,10,10,11,0,10,10,0,0,5,0,0,0,0,0,1,0,15,9,9,10,0,8,9,7,9,10,10,10,10,10,0,0 -1,10,7,10,10,9.5,9,10,10,9.5,14,10,10,10,10,10,10,9,10,3,0,3,3,5,2,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,9,9,10,5,5,0,0,10,10,10,10,0,10,5,10 -1,10,7,10,10,3,7,10,10,9,14,10,10,10,10,0,10,9,10,7,7,3,7,5,8,11,10,10,10,8,5,3,0,0,7,0,0,1,9.5,15,9,9,10,10,7,10,10,10,10,10,10,10,9,8,2 
+l1_n,l1_1,l2_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15 +1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0 +1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0 +1,10,7,10,10,3,9.5,10,10,9.5,14,10,10,10,8,5,10,5,10,3,0,10,3,10,8,11,10,10,10,10,10,10,0,0,10,5,0,1,10,15,9,9,10,9,7,9,0,0,10,10,9,5,10,8,10 +1,10,7,10,10,9.5,0,10,10,0,14,9.5,0,0,10,0,10,5,10,7,0,10,6,10,0,11,10,10,6,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,5,0,7,0,3,3,3,0,3,0,0 +1,10,7,10,10,10,9.5,10,10,9.5,14,5,9.5,9.5,8,10,10,8,10,8,0,5,6,0,0,11,0,10,10,10,0,5,0,0,0,0,0,1,9.5,15,9,9,10,9,9,10,7,0,9,9,9,0,5,0,0 +1,10,7,10,10,0,5,10,10,9.5,14,9.5,10,10,8,10,8,9,0,0,0,0,0,0,0,11,0,10,10,0,0,10,0,0,0,0,0,1,10,15,9,9,10,9,0,0,0,0,0,0,0,0,0,0,0 +1,10,7,10,10,10,9.5,10,10,9.5,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,3,3,0,0,5,0,0,1,10,15,9,9,10,0,10,0,7,5,9,9,9,0,0,0,0 +1,10,7,0,10,9.5,0,10,10,0,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,5,3,0,3,10,7,0,1,9.5,15,9,9,10,5,10,0,9,9,9,9,9,10,5,0,0 +1,10,7,10,10,0,10,10,10,10,14,10,6,10,0,0,0,0,0,0,0,0,0,0,0,11,10,10,0,7,0,0,0,0,0,0,0,1,9.5,15,9,9,10,9,5,9,7,9,10,10,10,5,10,5,0 +1,10,7,10,10,0,0,10,10,7,14,10,10,10,10,7,10,6,3,10,10,10,10,10,10,11,10,10,10,10,10,5,10,10,10,10,10,1,0,15,9,9,9,9,9,10,9,9,10,10,10,10,10,5,10 +1,10,7,10,10,9.5,9.5,10,10,9.5,14,9.5,10,10,10,8,10,8,10,10,7,5,0,0,0,11,10,10,10,10,5,6,0,0,0,0,0,1,10,15,9,9,10,9,8,9,7,9,10,10,10,10,0,0,0 
+1,10,7,10,10,5,9.5,10,10,9.5,14,5,9,9,10,7,10,10,10,10,7,10,3,5,10,11,0,0,0,0,0,0,0,0,0,0,0,1,10,15,9,9,9,8,7,10,0,9,10,9,10,9,5,0,0 +1,10,7,10,10,9.5,0,10,10,0,14,9.5,10,10,10,10,10,10,10,0,0,10,5,10,10,11,0,10,10,0,0,5,0,0,0,0,0,1,0,15,9,9,10,0,8,9,7,9,10,10,10,10,10,0,0 +1,10,7,10,10,9.5,9,10,10,9.5,14,10,10,10,10,10,10,9,10,3,0,3,3,5,2,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,9,9,10,5,5,0,0,10,10,10,10,0,10,5,10 +1,10,7,10,10,3,7,10,10,9,14,10,10,10,10,0,10,9,10,7,7,3,7,5,8,11,10,10,10,8,5,3,0,0,7,0,0,1,9.5,15,9,9,10,10,7,10,10,10,10,10,10,10,9,8,2 diff --git a/Labs/Lab-7/Lab-7.ipynb b/Labs/Lab-7/Lab-7.ipynb index a44062c..0b7b296 100644 --- a/Labs/Lab-7/Lab-7.ipynb +++ b/Labs/Lab-7/Lab-7.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 318, "metadata": {}, "outputs": [], "source": [ @@ -41,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 319, "metadata": {}, "outputs": [], "source": [ @@ -115,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 320, "metadata": {}, "outputs": [], "source": [ @@ -129,7 +129,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 321, "metadata": {}, "outputs": [], "source": [ @@ -147,7 +147,10 @@ " # New method to access data\n", " def __getitem__(self,key):\n", " return self.__data[key]\n", - " \n", + " \n", + " def data(self):\n", + " return self.__data\n", + " \n", " # New method to add data\n", " def __setitem__(self, key, value):\n", " self.__data[key] = value\n", @@ -181,7 +184,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 322, "metadata": {}, "outputs": [], "source": [ @@ -236,17 +239,21 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 323, "metadata": {}, "outputs": [], "source": [ "class mean_std_calculator(calculator):\n", - " def __init__(self):\n", + " def __init__(self,grade_name):\n", + " self.__grade_name = grade_name\n", " 
calculator.__init__(self,\"Mean and Standard Deviation Calculator\")\n", " \n", - " def apply(self,a_grade_book,grade_name,**kwargs):\n", + " def apply(self,a_grade_book,grade_name=None,**kwargs):\n", + " if not grade_name:\n", + " grade_name = self.__grade_name\n", + " \n", " grades=list()\n", - " for k,a_student in a_grade_book.get_students().iteritems():\n", + " for k,a_student in a_grade_book.get_students().items():\n", " grades.append(a_student[grade_name].value())\n", " \n", " a_grade_book[grade_name+\" Mean\"] = np.mean(grades)\n", @@ -266,14 +273,14 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 324, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "l1_n,l1_1,12_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15\n", + "l1_n,l1_1,l2_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15\n", "1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0\n", "1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\n", "1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0\n", @@ -316,7 +323,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 325, "metadata": {}, "outputs": [ { @@ -324,7 +331,7 @@ "text/plain": [ "[{'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '0',\n", " 'l2_2': '10',\n", " 'l2_3': '10',\n", @@ 
-379,7 +386,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '0',\n", " 'l2_2': '0',\n", " 'l2_3': '0',\n", @@ -434,7 +441,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '0',\n", " 'l2_2': '0',\n", " 'l2_3': '0',\n", @@ -489,7 +496,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '3',\n", @@ -544,7 +551,7 @@ " 'e1_15': '10'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '9.5',\n", @@ -599,7 +606,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '10',\n", @@ -654,7 +661,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '0',\n", @@ -709,7 +716,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '10',\n", @@ -764,7 +771,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '0',\n", " 'l2_2': '10',\n", " 'l2_3': '9.5',\n", @@ -819,7 +826,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '0',\n", @@ -874,7 +881,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '0',\n", @@ -929,7 +936,7 @@ " 'e1_15': '10'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': 
'9.5',\n", @@ -984,7 +991,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '5',\n", @@ -1039,7 +1046,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '9.5',\n", @@ -1094,7 +1101,7 @@ " 'e1_15': '0'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '9.5',\n", @@ -1149,7 +1156,7 @@ " 'e1_15': '10'},\n", " {'l1_n': '1',\n", " 'l1_1': '10',\n", - " '12_n': '7',\n", + " 'l2_n': '7',\n", " 'l2_1': '10',\n", " 'l2_2': '10',\n", " 'l2_3': '3',\n", @@ -1204,7 +1211,7 @@ " 'e1_15': '2'}]" ] }, - "execution_count": 41, + "execution_count": 325, "metadata": {}, "output_type": "execute_result" } @@ -1224,7 +1231,7 @@ " for i, item in enumerate(items):\n", " row[columns[i]] = item\n", " data.append(row)\n", - " \n", + " f.close()\n", " return data\n", "\n", "cvs_reader(\"Data1401-Grades.csv\")" @@ -1241,23 +1248,10 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 326, "metadata": {}, - "outputs": [ - { - "ename": "TypeError", - "evalue": "list indices must be integers or slices, not str", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 8\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 9\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m \u001b[1;32min\u001b[0m 
\u001b[0mclass_data\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mstudent_i\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mkeys\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 10\u001b[1;33m \u001b[0ma_student_0\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd_grade\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mgrade\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mvalue\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfloat\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mclass_data\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mstudent_i\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 11\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd_student\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0ma_student_0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mTypeError\u001b[0m: list indices must be integers or slices, not str" - ] - } - ], + "outputs": [], "source": [ - "import pandas as pd\n", "class_data=cvs_reader(\"Data1401-Grades.csv\")\n", "\n", "a_grade_book=grade_book(\"Data 1401\")\n", @@ -1266,7 +1260,7 @@ " a_student_0=student(\"Student\",str(student_i),student_i)\n", "\n", " for k in class_data[student_i].keys():\n", - " a_student_0.add_grade(grade(k,value=float(class_data[k][student_i])))\n", + " a_student_0.add_grade(grade(k,value=float(class_data[student_i][k])))\n", "\n", " a_grade_book.add_student(a_student_0)\n", " " @@ -1283,7 +1277,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 327, "metadata": {}, "outputs": [], "source": [ @@ -1296,38 +1290,106 @@ " def apply(self,a_student):\n", " raise NotImplementedError\n", "\n", - 
"class grade_summer(summary_calculator):\n", + "class grade_summer(calculator):\n", " def __init__(self,prefix,n):\n", " self.__prefix=prefix\n", " self.__n=n\n", - " summary_calculator.__init__(self,\"Sum Grades\")\n", + " calculator.__init__(self,\"Sum Grades\")\n", " \n", - " def apply(self,a_student):\n", - " labels=[self.__prefix+str(x) for x in range(1,self.__n)]\n", + " def apply(self,a_gradebook,**kwargs):\n", + " labels=[self.__prefix + str(x) for x in range(1,self.__n)]\n", " \n", - " grade_sum=0.\n", - " for label in labels:\n", - " grade_sum+=a_student[label].value()\n", + " for k,a_student in a_grade_book.get_students().items(): \n", + "\n", + " grade_sum=0.\n", + " for label in labels:\n", + " grade_sum+=a_student[label].value()\n", "\n", - " a_student.add_grade(grade(self.__prefix+\"sum\",value=grade_sum))" + " a_student.add_grade(grade(self.__prefix+\"sum\",value=grade_sum),**kwargs)" + ] + }, + { + "cell_type": "code", + "execution_count": 329, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['e1_', 'l1_', 'l2_', 'l3_', 'l4_', 'q1_']\n" + ] + } + ], + "source": [ + "a_grade_book=grade_book(\"Data 1401\")\n", + "\n", + "for student_i in range(len(class_data)):\n", + " a_student_0=student(\"Student\",str(student_i),student_i)\n", + "\n", + " for k in class_data[student_i].keys():\n", + " a_student_0.add_grade(grade(k,value=float(class_data[student_i][k])))\n", + "\n", + " a_grade_book.add_student(a_student_0)\n", + "#print(class_data)\n", + "prefixes=sorted(list(set([k.split(\"_\")[0] + \"_\" for k in class_data[0].keys()])))\n", + "#labels=[self.__prefix+str(x) for x in range(1,self.__n)]\n", + "print(prefixes)\n", + "for j, prefix in enumerate(prefixes):\n", + " a_grade_book.apply_calculator(grade_summer(prefix, int(list(a_grade_book.get_students().values())[j][prefix+\"n\"].value())+1))" + ] + }, + { + "cell_type": "code", + "execution_count": 330, + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "e1_ [35.0, 0, 20.0, 115.0, 24.0, 95.0, 37.0, 77.0, 103.0, 117.0, 138.0, 110.0, 104.0, 111.0, 103.0, 134.0]\n", + "l1_ [10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0]\n", + "l2_ [58.0, 0, 0, 62.0, 49.5, 69.0, 54.5, 69.0, 39.5, 60.0, 47.0, 68.5, 64.0, 49.5, 68.0, 59.0]\n", + "l3_ [9.0, 0, 71.0, 102.0, 77.5, 89.0, 64.5, 40.0, 40.0, 26.0, 126.0, 97.5, 115.0, 114.5, 95.0, 106.0]\n", + "l4_ [0, 0, 0, 75.0, 26.0, 35.0, 30.0, 51.0, 68.0, 27.0, 105.0, 51.0, 0, 25.0, 0, 53.0]\n", + "q1_ [9.5, 0, 5.0, 10.0, 0, 9.5, 10.0, 10.0, 9.5, 9.5, 0, 10.0, 10.0, 0, 0, 9.5]\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " print(prefix,list(a_student[prefix+\"sum\"].value() for k,a_student in a_grade_book.get_students().items()))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Curving Grades\n", + "# Curving Grades\n", "\n", "*Exercise 4:* Use the `mean_std_calculator` above to calculate the mean and standard deviation for every lab, quiz, and exam in the class. 
Add a new print function to the `grade_book` class to print out such information in a nice way, and use this function to show your results.\n" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 331, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'e1_sum Mean': 82.6875, 'e1_sum STD': 42.937045121316864, 'l1_sum Mean': 10.0, 'l1_sum STD': 0.0, 'l2_sum Mean': 51.09375, 'l2_sum STD': 21.05663401252679, 'l3_sum Mean': 73.3125, 'l3_sum STD': 38.301792957379945, 'l4_sum Mean': 34.125, 'l4_sum STD': 30.421774685248064, 'q1_sum Mean': 6.40625, 'q1_sum STD': 4.469405546322688}\n" + ] + } + ], "source": [ - "# Your solution here" + "# Your solution here\n", + "prefixes=sorted(list(set([k.split(\"_\")[0] + \"_\" for k in class_data[0].keys()])))\n", + "for prefix in prefixes:\n", + " a_grade_book.apply_calculator(mean_std_calculator(grade_name=prefix+\"sum\"))\n", + "print(a_grade_book.data())" ] }, { From 4805f35f467682bbceb09fd07b099112366a3b24 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 1 May 2020 02:04:35 -0500 Subject: [PATCH 21/24] completed exercise 5 --- Labs/Lab-7/Lab-7.ipynb | 1258 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 1219 insertions(+), 39 deletions(-) diff --git a/Labs/Lab-7/Lab-7.ipynb b/Labs/Lab-7/Lab-7.ipynb index 0b7b296..117b0b2 100644 --- a/Labs/Lab-7/Lab-7.ipynb +++ b/Labs/Lab-7/Lab-7.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 318, + "execution_count": 393, "metadata": {}, "outputs": [], "source": [ @@ -41,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": 319, + "execution_count": 394, "metadata": {}, "outputs": [], "source": [ @@ -115,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 320, + "execution_count": 395, "metadata": {}, "outputs": [], "source": [ @@ -129,7 +129,7 @@ }, { "cell_type": "code", - "execution_count": 321, + "execution_count": 396, "metadata": {}, "outputs": [], 
"source": [ @@ -151,6 +151,16 @@ " def data(self):\n", " return self.__data\n", " \n", + " def print_data(self):\n", + " for k,v in self.__data.items():\n", + " print (k,\":\",v)\n", + " \n", + " def print_students(self):\n", + " for k,a_student in self.__students.items():\n", + " print (k, a_student.name())\n", + " a_student.print_grades()\n", + " print (\"_______________________________________\")\n", + " \n", " # New method to add data\n", " def __setitem__(self, key, value):\n", " self.__data[key] = value\n", @@ -184,7 +194,7 @@ }, { "cell_type": "code", - "execution_count": 322, + "execution_count": 437, "metadata": {}, "outputs": [], "source": [ @@ -220,7 +230,7 @@ " grade_name=self.__grade_name\n", " \n", " \n", - " for k,a_student in a_grade_book.get_students().iteritems():\n", + " for k,a_student in a_grade_book.get_students().items():\n", " a_grade=a_student[grade_name]\n", "\n", " if not a_grade.numerical():\n", @@ -239,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 323, + "execution_count": 398, "metadata": {}, "outputs": [], "source": [ @@ -273,7 +283,7 @@ }, { "cell_type": "code", - "execution_count": 324, + "execution_count": 399, "metadata": {}, "outputs": [ { @@ -323,7 +333,7 @@ }, { "cell_type": "code", - "execution_count": 325, + "execution_count": 400, "metadata": {}, "outputs": [ { @@ -1211,7 +1221,7 @@ " 'e1_15': '2'}]" ] }, - "execution_count": 325, + "execution_count": 400, "metadata": {}, "output_type": "execute_result" } @@ -1248,7 +1258,7 @@ }, { "cell_type": "code", - "execution_count": 326, + "execution_count": 401, "metadata": {}, "outputs": [], "source": [ @@ -1277,7 +1287,7 @@ }, { "cell_type": "code", - "execution_count": 327, + "execution_count": 402, "metadata": {}, "outputs": [], "source": [ @@ -1310,7 +1320,7 @@ }, { "cell_type": "code", - "execution_count": 329, + "execution_count": 403, "metadata": {}, "outputs": [ { @@ -1341,7 +1351,7 @@ }, { "cell_type": "code", - "execution_count": 330, + "execution_count": 404, 
"metadata": {}, "outputs": [ { @@ -1373,14 +1383,25 @@ }, { "cell_type": "code", - "execution_count": 331, + "execution_count": 405, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "{'e1_sum Mean': 82.6875, 'e1_sum STD': 42.937045121316864, 'l1_sum Mean': 10.0, 'l1_sum STD': 0.0, 'l2_sum Mean': 51.09375, 'l2_sum STD': 21.05663401252679, 'l3_sum Mean': 73.3125, 'l3_sum STD': 38.301792957379945, 'l4_sum Mean': 34.125, 'l4_sum STD': 30.421774685248064, 'q1_sum Mean': 6.40625, 'q1_sum STD': 4.469405546322688}\n" + "e1_sum Mean : 82.6875\n", + "e1_sum STD : 42.937045121316864\n", + "l1_sum Mean : 10.0\n", + "l1_sum STD : 0.0\n", + "l2_sum Mean : 51.09375\n", + "l2_sum STD : 21.05663401252679\n", + "l3_sum Mean : 73.3125\n", + "l3_sum STD : 38.301792957379945\n", + "l4_sum Mean : 34.125\n", + "l4_sum STD : 30.421774685248064\n", + "q1_sum Mean : 6.40625\n", + "q1_sum STD : 4.469405546322688\n" ] } ], @@ -1389,7 +1410,7 @@ "prefixes=sorted(list(set([k.split(\"_\")[0] + \"_\" for k in class_data[0].keys()])))\n", "for prefix in prefixes:\n", " a_grade_book.apply_calculator(mean_std_calculator(grade_name=prefix+\"sum\"))\n", - "print(a_grade_book.data())" + "a_grade_book.print_data()" ] }, { @@ -1401,11 +1422,11 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 428, "metadata": {}, "outputs": [], "source": [ - "class curved_letter_grade(grade_calculator):\n", + "class curved_letter_grade(calculator):\n", " __grades_definition=[ (.97,\"A+\"),\n", " (.93,\"A\"),\n", " (.9,\"A-\"),\n", @@ -1429,33 +1450,1192 @@ " self.__mean=mean\n", " self.__std=std\n", " self.__grade_name=grade_name\n", - " grade_calculator.__init__(self,\n", + " calculator.__init__(self,\n", " \"Curved Percent Based Grade Calculator \"+self.__grade_name+ \\\n", " \" Mean=\"+str(self.__mean)+\\\n", " \" STD=\"+str(self.__std)+\\\n", " \" Max=\"+str(self.__max_grade))\n", " \n", "\n", - " def apply(self,a_grade):\n", - " if not 
isinstance(a_grade,grade):\n", - " print (self.name()+ \" Error: Did not get an proper grade as input.\")\n", - " raise Exception\n", - " if not a_grade.numerical():\n", - " print (self.name()+ \" Error: Did not get a numerical grade as input.\")\n", - " raise Exception\n", + " def apply(self,a_grade_book,grade_name=None,**kwargs):\n", + " if grade_name:\n", + " pass\n", + " else:\n", + " grade_name=self.__grade_name\n", " \n", - " # Rescale the grade\n", - " percent=a_grade.value()/self.__max_grade\n", - " shift_to_zero=percent-(self.__mean/self.__max_grade)\n", - " scale_std=0.1*shift_to_zero/(self.__std/self.__max_grade)\n", - " scaled_percent=scale_std+0.8\n", - " \n", - " for i,v in enumerate(self.__grades_definition):\n", - " if scaled_percent>=v[0]:\n", - " break\n", - " \n", - " return grade(self.__grade_name,value=self.__grades_definition[i][1])\n", - " " + " for k,a_student in a_grade_book.get_students().items():\n", + " a_grade = a_student[grade_name]\n", + "\n", + " if not a_grade.numerical():\n", + " print(self.name()+\"Error: Did not get a numerical grade as input.\")\n", + " raise Exception\n", + "\n", + " # Rescale the grade\n", + " scaled_percent=1\n", + " if self.__std != 0:\n", + " percent=a_grade.value()/self.__max_grade\n", + " shift_to_zero=percent-(self.__mean/self.__max_grade)\n", + " scale_std=0.1*shift_to_zero/(self.__std/self.__max_grade)\n", + " scaled_percent=scale_std+0.8\n", + "\n", + " for i,v in enumerate(self.__grades_definition):\n", + " if scaled_percent>=v[0]:\n", + " break\n", + "\n", + " overwrite = kwargs[\"overwrite\"] if \"overwrite\" in kwargs else False\n", + " #g = grade(grade_name+\" Letter\",value=self.__grades_definition[i][1])\n", + " a_student.add_grade(grade(grade_name+\" Letter\",value=self.__grades_definition[i][1]),overwrite=overwrite)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 436, + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'dict' object has no attribute 
'iteritems'", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mprefix\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mprefixes\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[0mgrade_name\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mprefix\u001b[0m\u001b[1;33m+\u001b[0m\u001b[1;34m\"sum\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply_calculator\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0muncurved_letter_grade_percent\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mgrade_name\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmax_grade\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m100\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[1;32m\u001b[0m in \u001b[0;36mapply_calculator\u001b[1;34m(self, a_calculator, **kwargs)\u001b[0m\n\u001b[0;32m 53\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 54\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0mapply_calculator\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ma_calculator\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 55\u001b[1;33m \u001b[0ma_calculator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 56\u001b[0m 
\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;32m\u001b[0m in \u001b[0;36mapply\u001b[1;34m(self, a_grade_book, grade_name, **kwargs)\u001b[0m\n\u001b[0;32m 31\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 33\u001b[1;33m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ma_student\u001b[0m \u001b[1;32min\u001b[0m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mget_students\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0miteritems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 34\u001b[0m \u001b[0ma_grade\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0ma_student\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mgrade_name\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 35\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;31mAttributeError\u001b[0m: 'dict' object has no attribute 'iteritems'" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " grade_name = prefix+\"sum\"\n", + " a_grade_book.apply_calculator(uncurved_letter_grade_percent(grade_name,max_grade=100))" + ] + }, + { + "cell_type": "code", + "execution_count": 431, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 Student 0 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 8.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", 
+ "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 35.0\n", + "l1_sum: 10.0\n", + "l2_sum: 58.0\n", + "l3_sum: 9.0\n", + "l4_sum: 0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "1 Student 1 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: C-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + 
"l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + 
"e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: A\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + 
"l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 
64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 
10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: A-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: 
C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + "l4_sum: 105.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", 
+ "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "13 
Student 13 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + 
"e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " grade_name = prefix+\"sum\"\n", + " 
a_grade_book.apply_calculator(curved_letter_grade(grade_name,\n", + " a_grade_book[grade_name+\" Mean\"],\n", + " a_grade_book[grade_name+\" STD\"]),\n", + " overwrite=True)\n", + "a_grade_book.print_students()" ] }, { From 9f399c1ba311f71817b6c22ab4969248d6c0c878 Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 1 May 2020 03:06:15 -0500 Subject: [PATCH 22/24] exercise 6 in progress --- Labs/Lab-7/Lab-7.ipynb | 2708 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 2555 insertions(+), 153 deletions(-) diff --git a/Labs/Lab-7/Lab-7.ipynb b/Labs/Lab-7/Lab-7.ipynb index 117b0b2..4a2a368 100644 --- a/Labs/Lab-7/Lab-7.ipynb +++ b/Labs/Lab-7/Lab-7.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 393, + "execution_count": 789, "metadata": {}, "outputs": [], "source": [ @@ -41,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": 394, + "execution_count": 790, "metadata": {}, "outputs": [], "source": [ @@ -101,6 +101,9 @@ " print (self.name()+\" Error Adding Grade \"+a_grade.name()+\". 
Grade already exists.\")\n", " raise Exception\n", "\n", + " def grades(self):\n", + " return self.__grades\n", + " \n", " def id_number(self):\n", " return self.__id_number\n", " \n", @@ -115,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 395, + "execution_count": 791, "metadata": {}, "outputs": [], "source": [ @@ -129,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 396, + "execution_count": 792, "metadata": {}, "outputs": [], "source": [ @@ -194,7 +197,7 @@ }, { "cell_type": "code", - "execution_count": 437, + "execution_count": 793, "metadata": {}, "outputs": [], "source": [ @@ -249,7 +252,7 @@ }, { "cell_type": "code", - "execution_count": 398, + "execution_count": 794, "metadata": {}, "outputs": [], "source": [ @@ -283,7 +286,7 @@ }, { "cell_type": "code", - "execution_count": 399, + "execution_count": 795, "metadata": {}, "outputs": [ { @@ -333,7 +336,7 @@ }, { "cell_type": "code", - "execution_count": 400, + "execution_count": 796, "metadata": {}, "outputs": [ { @@ -1221,7 +1224,7 @@ " 'e1_15': '2'}]" ] }, - "execution_count": 400, + "execution_count": 796, "metadata": {}, "output_type": "execute_result" } @@ -1258,7 +1261,7 @@ }, { "cell_type": "code", - "execution_count": 401, + "execution_count": 797, "metadata": {}, "outputs": [], "source": [ @@ -1287,7 +1290,7 @@ }, { "cell_type": "code", - "execution_count": 402, + "execution_count": 798, "metadata": {}, "outputs": [], "source": [ @@ -1320,7 +1323,7 @@ }, { "cell_type": "code", - "execution_count": 403, + "execution_count": 799, "metadata": {}, "outputs": [ { @@ -1351,7 +1354,7 @@ }, { "cell_type": "code", - "execution_count": 404, + "execution_count": 800, "metadata": {}, "outputs": [ { @@ -1383,7 +1386,7 @@ }, { "cell_type": "code", - "execution_count": 405, + "execution_count": 801, "metadata": {}, "outputs": [ { @@ -1422,7 +1425,7 @@ }, { "cell_type": "code", - "execution_count": 428, + "execution_count": 802, "metadata": {}, "outputs": [], "source": [ @@ -1489,32 +1492,7 @@ 
}, { "cell_type": "code", - "execution_count": 436, - "metadata": {}, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "'dict' object has no attribute 'iteritems'", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mprefix\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mprefixes\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[0mgrade_name\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mprefix\u001b[0m\u001b[1;33m+\u001b[0m\u001b[1;34m\"sum\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply_calculator\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0muncurved_letter_grade_percent\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mgrade_name\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmax_grade\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m100\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[1;32m\u001b[0m in \u001b[0;36mapply_calculator\u001b[1;34m(self, a_calculator, **kwargs)\u001b[0m\n\u001b[0;32m 53\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 54\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0mapply_calculator\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ma_calculator\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 55\u001b[1;33m 
\u001b[0ma_calculator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 56\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m\u001b[0m in \u001b[0;36mapply\u001b[1;34m(self, a_grade_book, grade_name, **kwargs)\u001b[0m\n\u001b[0;32m 31\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 33\u001b[1;33m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0ma_student\u001b[0m \u001b[1;32min\u001b[0m \u001b[0ma_grade_book\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mget_students\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0miteritems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 34\u001b[0m \u001b[0ma_grade\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0ma_student\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mgrade_name\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 35\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mAttributeError\u001b[0m: 'dict' object has no attribute 'iteritems'" - ] - } - ], - "source": [ - "for prefix in prefixes:\n", - " grade_name = prefix+\"sum\"\n", - " a_grade_book.apply_calculator(uncurved_letter_grade_percent(grade_name,max_grade=100))" - ] - }, - { - "cell_type": "code", - "execution_count": 431, + "execution_count": 803, "metadata": {}, "outputs": [ { @@ -1583,12 +1561,12 @@ "l3_sum: 9.0\n", "l4_sum: 0\n", "q1_sum: 9.5\n", - "e1_sum Letter: C+\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B\n", - "l3_sum Letter: C\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: B\n", + "e1_sum Letter: F-\n", + "l1_sum 
Letter: F-\n", + "l2_sum Letter: F+\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "1 Student 1 Student Data\n", "l1_n: 1.0\n", @@ -1652,12 +1630,12 @@ "l3_sum: 0\n", "l4_sum: 0\n", "q1_sum: 0\n", - "e1_sum Letter: C-\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: F\n", - "l3_sum Letter: C-\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: C\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "2 Student 2 Student Data\n", "l1_n: 1.0\n", @@ -1721,12 +1699,12 @@ "l3_sum: 71.0\n", "l4_sum: 0\n", "q1_sum: 5.0\n", - "e1_sum Letter: C\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: F\n", - "l3_sum Letter: C+\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: C\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "3 Student 3 Student Data\n", "l1_n: 1.0\n", @@ -1790,12 +1768,12 @@ "l3_sum: 102.0\n", "l4_sum: 75.0\n", "q1_sum: 10.0\n", - "e1_sum Letter: B+\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B\n", - "l3_sum Letter: B+\n", - "l4_sum Letter: A\n", - "q1_sum Letter: B+\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C-\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: C\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "4 Student 4 Student Data\n", "l1_n: 1.0\n", @@ -1859,12 +1837,12 @@ "l3_sum: 77.5\n", "l4_sum: 26.0\n", "q1_sum: 0\n", - "e1_sum Letter: C\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: C+\n", - "l3_sum Letter: B-\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: C\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", 
"_______________________________________\n", "5 Student 5 Student Data\n", "l1_n: 1.0\n", @@ -1928,12 +1906,12 @@ "l3_sum: 89.0\n", "l4_sum: 35.0\n", "q1_sum: 9.5\n", - "e1_sum Letter: B-\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B+\n", - "l3_sum Letter: B\n", - "l4_sum Letter: B-\n", - "q1_sum Letter: B\n", + "e1_sum Letter: A\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "6 Student 6 Student Data\n", "l1_n: 1.0\n", @@ -1997,12 +1975,12 @@ "l3_sum: 64.5\n", "l4_sum: 30.0\n", "q1_sum: 10.0\n", - "e1_sum Letter: C+\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B-\n", - "l3_sum Letter: C+\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: B+\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "7 Student 7 Student Data\n", "l1_n: 1.0\n", @@ -2067,11 +2045,11 @@ "l4_sum: 51.0\n", "q1_sum: 10.0\n", "e1_sum Letter: C+\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B+\n", - "l3_sum Letter: C-\n", - "l4_sum Letter: B\n", - "q1_sum Letter: B+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "8 Student 8 Student Data\n", "l1_n: 1.0\n", @@ -2135,12 +2113,12 @@ "l3_sum: 40.0\n", "l4_sum: 68.0\n", "q1_sum: 9.5\n", - "e1_sum Letter: B\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: C\n", - "l3_sum Letter: C-\n", - "l4_sum Letter: A-\n", - "q1_sum Letter: B\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "9 Student 9 Student Data\n", "l1_n: 1.0\n", @@ -2204,12 +2182,12 @@ "l3_sum: 26.0\n", "l4_sum: 27.0\n", "q1_sum: 9.5\n", - "e1_sum 
Letter: B+\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B\n", - "l3_sum Letter: C+\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: B\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "10 Student 10 Student Data\n", "l1_n: 1.0\n", @@ -2273,12 +2251,12 @@ "l3_sum: 126.0\n", "l4_sum: 105.0\n", "q1_sum: 0\n", - "e1_sum Letter: A-\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: C+\n", - "l3_sum Letter: A\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: A+\n", "l4_sum Letter: A+\n", - "q1_sum Letter: C\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "11 Student 11 Student Data\n", "l1_n: 1.0\n", @@ -2342,12 +2320,12 @@ "l3_sum: 97.5\n", "l4_sum: 51.0\n", "q1_sum: 10.0\n", - "e1_sum Letter: B\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B+\n", - "l3_sum Letter: B\n", - "l4_sum Letter: B\n", - "q1_sum Letter: B+\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "12 Student 12 Student Data\n", "l1_n: 1.0\n", @@ -2411,12 +2389,12 @@ "l3_sum: 115.0\n", "l4_sum: 0\n", "q1_sum: 10.0\n", - "e1_sum Letter: B\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B\n", - "l3_sum Letter: A-\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: B+\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "13 Student 13 Student Data\n", "l1_n: 1.0\n", @@ -2480,12 +2458,12 @@ "l3_sum: 114.5\n", "l4_sum: 25.0\n", "q1_sum: 0\n", - "e1_sum Letter: B\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: C+\n", - "l3_sum Letter: A-\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: C\n", + "e1_sum Letter: 
A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "14 Student 14 Student Data\n", "l1_n: 1.0\n", @@ -2549,12 +2527,12 @@ "l3_sum: 95.0\n", "l4_sum: 0\n", "q1_sum: 0\n", - "e1_sum Letter: B\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B+\n", - "l3_sum Letter: B\n", - "l4_sum Letter: C+\n", - "q1_sum Letter: C\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", "_______________________________________\n", "15 Student 15 Student Data\n", "l1_n: 1.0\n", @@ -2600,7 +2578,13 @@ "e1_1: 9.0\n", "e1_2: 9.0\n", "e1_3: 10.0\n", - "e1_4: 10.0\n", + "e1_4: 10.0\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "e1_5: 7.0\n", "e1_6: 10.0\n", "e1_7: 10.0\n", @@ -2618,12 +2602,12 @@ "l3_sum: 106.0\n", "l4_sum: 53.0\n", "q1_sum: 9.5\n", - "e1_sum Letter: A-\n", - "l1_sum Letter: A+\n", - "l2_sum Letter: B\n", - "l3_sum Letter: B+\n", - "l4_sum Letter: B\n", - "q1_sum Letter: B\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F+\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F\n", + "q1_sum Letter: F-\n", "_______________________________________\n" ] } @@ -2631,29 +2615,2447 @@ "source": [ "for prefix in prefixes:\n", " grade_name = prefix+\"sum\"\n", - " a_grade_book.apply_calculator(curved_letter_grade(grade_name,\n", - " a_grade_book[grade_name+\" Mean\"],\n", - " a_grade_book[grade_name+\" STD\"]),\n", - " overwrite=True)\n", + " a_grade_book.apply_calculator(uncurved_letter_grade_percent(grade_name,max_grade=100))\n", "a_grade_book.print_students()" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Final Course Grade\n", - "\n", - "*Exercise 6:* Write a new calculator that sums grades with a prefix, as in the `grade_summer` calculator, but drops `n` lowest grades. 
Apply the algorithm to drop the lowest lab grade in the data.\n" - ] - }, { "cell_type": "code", - "execution_count": null, + "execution_count": 804, "metadata": {}, - "outputs": [], - "source": [ - "# Your solution here" + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 Student 0 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 8.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 35.0\n", + "l1_sum: 10.0\n", + "l2_sum: 58.0\n", + "l3_sum: 9.0\n", + "l4_sum: 0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "1 Student 1 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + 
"l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: C-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + 
"l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: A\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + 
"e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + 
"l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + 
"e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: A-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + 
"l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + 
"l4_sum: 105.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 
10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "13 Student 13 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum 
Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + 
"l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " grade_name = prefix+\"sum\"\n", + " a_grade_book.apply_calculator(curved_letter_grade(grade_name,\n", + " a_grade_book[grade_name+\" Mean\"],\n", + " a_grade_book[grade_name+\" STD\"]),\n", + " overwrite=True)\n", + "a_grade_book.print_students()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Final Course Grade\n", + "\n", + "*Exercise 6:* Write a new calculator that sums grades with a prefix, as in the `grade_summer` calculator, but drops `n` lowest grades. 
Apply the algorithm to drop the lowest lab grade in the data.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 787, + "metadata": {}, + "outputs": [], + "source": [ + "# Your solution here\n", + "class grade_dropper(calculator):\n", + " def __init__(self,prefix,n):\n", + " self.__prefix=prefix\n", + " self.__n=n\n", + " calculator.__init__(self,\"Final Course Grades\")\n", + " \n", + " def apply(self,a_gradebook,**kwargs):\n", + " for k,a_student in a_grade_book.get_students().items():\n", + " labels=list(filter(lambda x: \n", + " self.__prefix in x, \n", + " a_student.grades().keys()))\n", + " print(labels)\n", + " grade_sum=0.\n", + " grades=list()\n", + " for label in labels:\n", + " if '_' in label and isinstance(a_student[label].value(),(int,float)):\n", + " grades.append(a_student[label].value())\n", + " grades = sorted(grades)\n", + " if len(grades) > self.__n:\n", + " grade_sum = sum(grades[self.__n:])\n", + " else:\n", + " grade_sum = sum(grades)\n", + "# grade_sum = sum(grades)\n", + "\n", + " a_student.add_grade(grade(self.__prefix+\" dropped sum\",value=grade_sum),**kwargs)" + ] + }, + { + "cell_type": "code", + "execution_count": 805, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['l1_sum', 'l2_sum', 'l3_sum', 'l4_sum', 'l1_sum Letter', 'l2_sum Letter', 'l3_sum Letter', 'l4_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 
'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "0 Student 0 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 8.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + 
"l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 35.0\n", + "l1_sum: 10.0\n", + "l2_sum: 58.0\n", + "l3_sum: 9.0\n", + "l4_sum: 0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 58.0\n", + "l3_sum dropped sum: 9.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "1 Student 1 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: 
C-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 0\n", + "l3_sum dropped sum: 0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 0\n", + "l3_sum dropped sum: 71.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + 
"l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: A\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 62.0\n", + "l3_sum dropped sum: 102.0\n", + "l4_sum dropped sum: 75.0\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 
3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 49.5\n", + "l3_sum dropped sum: 77.5\n", + "l4_sum dropped sum: 26.0\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B-\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 69.0\n", + "l3_sum dropped sum: 89.0\n", + "l4_sum dropped sum: 35.0\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + 
"l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 54.5\n", + "l3_sum dropped sum: 64.5\n", + "l4_sum dropped sum: 30.0\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + 
"e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 69.0\n", + "l3_sum dropped sum: 40.0\n", + "l4_sum dropped sum: 51.0\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: A-\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 39.5\n", + "l3_sum 
dropped sum: 40.0\n", + "l4_sum dropped sum: 68.0\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 60.0\n", + "l3_sum dropped sum: 26.0\n", + "l4_sum dropped sum: 27.0\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + 
"l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + "l4_sum: 105.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 47.0\n", + "l3_sum dropped sum: 126.0\n", + "l4_sum dropped sum: 105.0\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 
68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 68.5\n", + "l3_sum dropped sum: 97.5\n", + "l4_sum dropped sum: 51.0\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 64.0\n", + "l3_sum dropped sum: 115.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "13 Student 13 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + 
"l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 49.5\n", + "l3_sum dropped sum: 114.5\n", + "l4_sum dropped sum: 25.0\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 
5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 68.0\n", + "l3_sum dropped sum: 95.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 59.0\n", + "l3_sum dropped sum: 106.0\n", + 
"l4_sum dropped sum: 53.0\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "# a_grade_book=grade_book(\"Data 1401\")\n", + "\n", + "# for student_i in range(len(class_data)):\n", + "# a_student_0=student(\"Student\",str(student_i),student_i)\n", + "\n", + "# for k in class_data[student_i].keys():\n", + "# a_student_0.add_grade(grade(k,value=float(class_data[student_i][k])))\n", + "\n", + "# a_grade_book.add_student(a_student_0)\n", + "lab_prefixes = list(filter(lambda x: 'l' in x and 'sum' in x, list(a_grade_book.get_students().values())[0].grades().keys()))\n", + "print(lab_prefixes)\n", + "\n", + "for prefix in lab_prefixes:\n", + " if ' ' not in prefix:\n", + " a_grade_book.apply_calculator(grade_dropper(prefix,1), overwrite=True)\n", + "a_grade_book.print_students()" ] }, { From 14958e2e9d17c0aba629c02ccb7cede558a7700e Mon Sep 17 00:00:00 2001 From: Samson Nguyen Date: Fri, 8 May 2020 13:17:13 -0500 Subject: [PATCH 23/24] made copy of lab 7 for grading --- Labs/Lab-7/Lab-7-Copy1.ipynb | 5127 ++++++++++++++++++++++++++++++++++ 1 file changed, 5127 insertions(+) create mode 100644 Labs/Lab-7/Lab-7-Copy1.ipynb diff --git a/Labs/Lab-7/Lab-7-Copy1.ipynb b/Labs/Lab-7/Lab-7-Copy1.ipynb new file mode 100644 index 0000000..4a2a368 --- /dev/null +++ b/Labs/Lab-7/Lab-7-Copy1.ipynb @@ -0,0 +1,5127 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Lab 7\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github//afarbin/DATA1401-Spring-2020/blob/master/Labs/Lab-7/Lab-7.ipynb)\n", + "\n", + "Here are the \"Gradebook\" classes from lecture. For this lab, you will use these classes and are encouraged to modify them as you need." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 789, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import math\n", + "\n", + "# Create some virtual classes\n", + "class base:\n", + " __name=\"\"\n", + " \n", + " def __init__(self,name):\n", + " self.__name=name\n", + "\n", + " def name(self):\n", + " return self.__name\n", + "\n", + "class data(base):\n", + " def __init__(self,name):\n", + " base.__init__(self,name)\n", + " \n", + "class alg(base):\n", + " def __init__(self,name):\n", + " base.__init__(self,name)" + ] + }, + { + "cell_type": "code", + "execution_count": 790, + "metadata": {}, + "outputs": [], + "source": [ + "class grade(data):\n", + " __value=0\n", + " __numerical=True\n", + " __gradebook_name=str()\n", + " __letter_grades=[\"F-\",\"F\",\"F+\",\"D-\",\"D\",\"D+\",\"C-\",\"C\",\"C+\",\"B-\",\"B\",\"B+\",\"A-\",\"A\",\"A+\"]\n", + " \n", + " def __init__(self,name,numerical=True,value=None):\n", + " if value:\n", + " if isinstance(value,(int,float)):\n", + " self.__numerical=True\n", + " elif isinstance(value,str):\n", + " self.__numerical=False\n", + " self.set(value)\n", + " else: \n", + " self.__numerical=numerical\n", + " self.__gradebook_name=name\n", + " data.__init__(self,name+\" Grade Algorithm\") \n", + "\n", + " def set(self,value):\n", + " if isinstance(value,(int,float)) and self.__numerical:\n", + " self.__value=value\n", + " elif isinstance(value,str) and not self.__numerical:\n", + " if value in self.__letter_grades:\n", + " self.__value=value\n", + " else:\n", + " print (self.name()+\" Error: Bad Grade.\")\n", + " raise Exception\n", + " \n", + " def value(self):\n", + " return self.__value\n", + " \n", + " def numerical(self):\n", + " return self.__numerical\n", + " \n", + " def gradebook_name(self):\n", + " return self.__gradebook_name\n", + " \n", + " def __str__(self):\n", + " return self.__gradebook_name+\": \"+str(self.__value)\n", + "\n", + "class student(data):\n", + " 
__id_number=0\n", + " __grades=dict()\n", + " \n", + " def __init__(self,first_name, last_name,id_number):\n", + " self.__id_number=id_number\n", + " self.__grades=dict()\n", + " data.__init__(self,first_name+\" \"+last_name+\" Student Data\")\n", + "\n", + " def add_grade(self,a_grade,overwrite=False):\n", + " if overwrite or not a_grade.gradebook_name() in self.__grades:\n", + " self.__grades[a_grade.gradebook_name()]=a_grade\n", + " else:\n", + " print (self.name()+\" Error Adding Grade \"+a_grade.name()+\". Grade already exists.\")\n", + " raise Exception\n", + "\n", + " def grades(self):\n", + " return self.__grades\n", + " \n", + " def id_number(self):\n", + " return self.__id_number\n", + " \n", + " def __getitem__(self,key):\n", + " return self.__grades[key]\n", + " \n", + " def print_grades(self):\n", + " for grade in self.__grades:\n", + " print (self.__grades[grade])\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": 791, + "metadata": {}, + "outputs": [], + "source": [ + "class calculator(alg): \n", + " def __init__(self,name):\n", + " alg.__init__(self,name)\n", + "\n", + " def apply(self,a_grade_book):\n", + " raise NotImplementedError\n" + ] + }, + { + "cell_type": "code", + "execution_count": 792, + "metadata": {}, + "outputs": [], + "source": [ + "class grade_book(data):\n", + " # New member class to hold arbitrary data associated with the class\n", + "\n", + " __data=dict()\n", + " __students=dict()\n", + " \n", + " def __init__(self,name):\n", + " data.__init__(self,name+\" Course Grade Book\")\n", + " self.__students=dict()\n", + " self.__data=dict()\n", + " \n", + " # New method to access data\n", + " def __getitem__(self,key):\n", + " return self.__data[key]\n", + " \n", + " def data(self):\n", + " return self.__data\n", + " \n", + " def print_data(self):\n", + " for k,v in self.__data.items():\n", + " print (k,\":\",v)\n", + " \n", + " def print_students(self):\n", + " for k,a_student in self.__students.items():\n", + " 
print (k, a_student.name())\n", + " a_student.print_grades()\n", + " print (\"_______________________________________\")\n", + " \n", + " # New method to add data\n", + " def __setitem__(self, key, value):\n", + " self.__data[key] = value\n", + " \n", + " def add_student(self,a_student):\n", + " self.__students[a_student.id_number()]=a_student\n", + "\n", + " # New method to allow iterating over students\n", + " def get_students(self):\n", + " return self.__students\n", + " \n", + " def assign_grade(self,key,a_grade):\n", + " the_student=None\n", + " try:\n", + " the_student=self.__students[key]\n", + " except:\n", + " for id in self.__students:\n", + " if key == self.__students[id].name():\n", + " the_student=self.__students[id]\n", + " break\n", + " if the_student:\n", + " the_student.add_grade(a_grade)\n", + " else:\n", + " print (self.name()+\" Error: Did not find student.\")\n", + " \n", + " def apply_calculator(self,a_calculator,**kwargs):\n", + " a_calculator.apply(self,**kwargs)\n", + " \n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 793, + "metadata": {}, + "outputs": [], + "source": [ + "class uncurved_letter_grade_percent(calculator):\n", + " __grades_definition=[ (.97,\"A+\"),\n", + " (.93,\"A\"),\n", + " (.9,\"A-\"),\n", + " (.87,\"B+\"),\n", + " (.83,\"B\"),\n", + " (.8,\"B-\"),\n", + " (.77,\"C+\"),\n", + " (.73,\"C\"),\n", + " (.7,\"C-\"),\n", + " (.67,\"C+\"),\n", + " (.63,\"C\"),\n", + " (.6,\"C-\"),\n", + " (.57,\"F+\"),\n", + " (.53,\"F\"),\n", + " (0.,\"F-\")]\n", + " __max_grade=100.\n", + " __grade_name=str()\n", + " \n", + " def __init__(self,grade_name,max_grade=100.):\n", + " self.__max_grade=max_grade\n", + " self.__grade_name=grade_name\n", + " calculator.__init__(self,\n", + " \"Uncurved Percent Based Grade Calculator \"+self.__grade_name+\" Max=\"+str(self.__max_grade))\n", + " \n", + " def apply(self,a_grade_book,grade_name=None,**kwargs):\n", + " if grade_name:\n", + " pass\n", + " else:\n", + " 
grade_name=self.__grade_name\n", + " \n", + " \n", + " for k,a_student in a_grade_book.get_students().items():\n", + " a_grade=a_student[grade_name]\n", + "\n", + " if not a_grade.numerical():\n", + " print (self.name()+ \" Error: Did not get a numerical grade as input.\")\n", + " raise Exception\n", + " \n", + " percent=a_grade.value()/self.__max_grade\n", + " \n", + " for i,v in enumerate(self.__grades_definition):\n", + " if percent>=v[0]:\n", + " break\n", + " \n", + " a_student.add_grade(grade(grade_name+\" Letter\",value=self.__grades_definition[i][1]))\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 794, + "metadata": {}, + "outputs": [], + "source": [ + "class mean_std_calculator(calculator):\n", + " def __init__(self,grade_name):\n", + " self.__grade_name = grade_name\n", + " calculator.__init__(self,\"Mean and Standard Deviation Calculator\")\n", + " \n", + " def apply(self,a_grade_book,grade_name=None,**kwargs):\n", + " if not grade_name:\n", + " grade_name = self.__grade_name\n", + " \n", + " grades=list()\n", + " for k,a_student in a_grade_book.get_students().items():\n", + " grades.append(a_student[grade_name].value())\n", + " \n", + " a_grade_book[grade_name+\" Mean\"] = np.mean(grades)\n", + " a_grade_book[grade_name+\" STD\"] = math.sqrt(np.var(grades))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "## CSV Reader\n", + "\n", + "*Exercise 1*: The data for a class are stored in a \"camma separated values\" (CSV) file name `Data1401-Grades.csv` in the directory of this lab. 
You can see the contents using the `cat` shell command:" + ] + }, + { + "cell_type": "code", + "execution_count": 795, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "l1_n,l1_1,l2_n,l2_1,l2_2,l2_3,l2_4,l2_5,l2_6,l2_7,l3_n,l3_1,l3_2,l3_3,l3_4,l3_5,l3_6,l3_7,l3_8,l3_9,l3_10,l3_11,l3_12,l3_13,l3_14,l4_n,l4_1,l4_2,l4_3,l4_4,l4_5,l4_6,l4_7,l4_8,l4_9,l4_10,l4_11,q1_n,q1_1,e1_n,e1_1,e1_2,e1_3,e1_4,e1_5,e1_6,e1_7,e1_8,e1_9,e1_10,e1_11,e1_12,e1_13,e1_14,e1_15\n", + "1,10,7,0,10,10,8,10,10,10,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,9.5,15,9,9,0,9,8,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,0,0,0,0,0,0,0,14,9,10,10,10,7,10,3,6,3,3,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,1,5,15,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,10,10,3,9.5,10,10,9.5,14,10,10,10,8,5,10,5,10,3,0,10,3,10,8,11,10,10,10,10,10,10,0,0,10,5,0,1,10,15,9,9,10,9,7,9,0,0,10,10,9,5,10,8,10\n", + "1,10,7,10,10,9.5,0,10,10,0,14,9.5,0,0,10,0,10,5,10,7,0,10,6,10,0,11,10,10,6,0,0,0,0,0,0,0,0,1,0,15,0,0,0,0,5,0,7,0,3,3,3,0,3,0,0\n", + "1,10,7,10,10,10,9.5,10,10,9.5,14,5,9.5,9.5,8,10,10,8,10,8,0,5,6,0,0,11,0,10,10,10,0,5,0,0,0,0,0,1,9.5,15,9,9,10,9,9,10,7,0,9,9,9,0,5,0,0\n", + "1,10,7,10,10,0,5,10,10,9.5,14,9.5,10,10,8,10,8,9,0,0,0,0,0,0,0,11,0,10,10,0,0,10,0,0,0,0,0,1,10,15,9,9,10,9,0,0,0,0,0,0,0,0,0,0,0\n", + "1,10,7,10,10,10,9.5,10,10,9.5,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,3,3,0,0,5,0,0,1,10,15,9,9,10,0,10,0,7,5,9,9,9,0,0,0,0\n", + "1,10,7,0,10,9.5,0,10,10,0,14,10,10,10,10,0,0,0,0,0,0,0,0,0,0,11,10,10,10,10,5,3,0,3,10,7,0,1,9.5,15,9,9,10,5,10,0,9,9,9,9,9,10,5,0,0\n", + "1,10,7,10,10,0,10,10,10,10,14,10,6,10,0,0,0,0,0,0,0,0,0,0,0,11,10,10,0,7,0,0,0,0,0,0,0,1,9.5,15,9,9,10,9,5,9,7,9,10,10,10,5,10,5,0\n", + 
"1,10,7,10,10,0,0,10,10,7,14,10,10,10,10,7,10,6,3,10,10,10,10,10,10,11,10,10,10,10,10,5,10,10,10,10,10,1,0,15,9,9,9,9,9,10,9,9,10,10,10,10,10,5,10\n", + "1,10,7,10,10,9.5,9.5,10,10,9.5,14,9.5,10,10,10,8,10,8,10,10,7,5,0,0,0,11,10,10,10,10,5,6,0,0,0,0,0,1,10,15,9,9,10,9,8,9,7,9,10,10,10,10,0,0,0\n", + "1,10,7,10,10,5,9.5,10,10,9.5,14,5,9,9,10,7,10,10,10,10,7,10,3,5,10,11,0,0,0,0,0,0,0,0,0,0,0,1,10,15,9,9,9,8,7,10,0,9,10,9,10,9,5,0,0\n", + "1,10,7,10,10,9.5,0,10,10,0,14,9.5,10,10,10,10,10,10,10,0,0,10,5,10,10,11,0,10,10,0,0,5,0,0,0,0,0,1,0,15,9,9,10,0,8,9,7,9,10,10,10,10,10,0,0\n", + "1,10,7,10,10,9.5,9,10,10,9.5,14,10,10,10,10,10,10,9,10,3,0,3,3,5,2,11,0,0,0,0,0,0,0,0,0,0,0,1,0,15,9,9,10,5,5,0,0,10,10,10,10,0,10,5,10\n", + "1,10,7,10,10,3,7,10,10,9,14,10,10,10,10,0,10,9,10,7,7,3,7,5,8,11,10,10,10,8,5,3,0,0,7,0,0,1,9.5,15,9,9,10,10,7,10,10,10,10,10,10,10,9,8,2\n" + ] + } + ], + "source": [ + "!cat Data1401-Grades.csv " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You will note that the first line has the names of the \"columns\" of data, and that subsequent lines (or \"rows\") have the data for each student, separated by cammas.\n", + "\n", + "Recalling that in lecture we created a file reader, create a CSV reader function that takes a filename as input and returns data structure(s) that store the data in the file. Note that you are not allowed to use a library. The point here is for *you* to write the CSV reader. Some options for your data structures (pick one):\n", + "\n", + "* A list of dictionaries, where each element of the list is corresponds to a row of data and the dictionaries are keyed by the column name. For example `data[5][\"l3_5\"]` corresponds to the 6th student's grade on lab 3 question 5.\n", + "\n", + "* A list of lists (i.e. 
a 2-D array or matrix) and a dictionary, where each element of the \"matrix\" corresponds to a a specific grade for a specific student and the dictionary maps the name of the column to the column index. For example `data[5][column_names[\"l1_5\"]]` corresponds to the 6th student's grade on lab 3 question 5.\n", + "\n", + "* A dictionary of lists, where each element of the dictionary corresponds to a column of data and the lists contain the data in that column. For example `data[\"l3_5\"][5]` corresponds to the 6th student's grade on lab 3 question 5.\n", + "\n", + "* (Extra Credit) A class that simultaneously supports all of the above methods." + ] + }, + { + "cell_type": "code", + "execution_count": 796, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '8',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '10',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '0',\n", + " 'e1_4': '9',\n", + " 'e1_5': '8',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': 
'10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '0',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '0',\n", + " 'l2_6': '0',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '0',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '0',\n", + " 'e1_2': '0',\n", + " 'e1_3': '0',\n", + " 'e1_4': '0',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '0',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '0',\n", + " 'l2_6': '0',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '3',\n", + " 'l3_8': '6',\n", + " 'l3_9': '3',\n", + " 'l3_10': '3',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '5',\n", + " 'e1_n': '15',\n", + " 
'e1_1': '5',\n", + " 'e1_2': '5',\n", + " 'e1_3': '5',\n", + " 'e1_4': '5',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '3',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '8',\n", + " 'l3_5': '5',\n", + " 'l3_6': '10',\n", + " 'l3_7': '5',\n", + " 'l3_8': '10',\n", + " 'l3_9': '3',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", + " 'l3_12': '3',\n", + " 'l3_13': '10',\n", + " 'l3_14': '8',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '10',\n", + " 'l4_6': '10',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '10',\n", + " 'l4_10': '5',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '7',\n", + " 'e1_6': '9',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '9',\n", + " 'e1_12': '5',\n", + " 'e1_13': '10',\n", + " 'e1_14': '8',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '0',\n", + " 'l3_3': '0',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '10',\n", + " 'l3_7': '5',\n", + " 'l3_8': '10',\n", + " 'l3_9': '7',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", 
+ " 'l3_12': '6',\n", + " 'l3_13': '10',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '6',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '0',\n", + " 'e1_2': '0',\n", + " 'e1_3': '0',\n", + " 'e1_4': '0',\n", + " 'e1_5': '5',\n", + " 'e1_6': '0',\n", + " 'e1_7': '7',\n", + " 'e1_8': '0',\n", + " 'e1_9': '3',\n", + " 'e1_10': '3',\n", + " 'e1_11': '3',\n", + " 'e1_12': '0',\n", + " 'e1_13': '3',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '5',\n", + " 'l3_2': '9.5',\n", + " 'l3_3': '9.5',\n", + " 'l3_4': '8',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '8',\n", + " 'l3_8': '10',\n", + " 'l3_9': '8',\n", + " 'l3_10': '0',\n", + " 'l3_11': '5',\n", + " 'l3_12': '6',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '0',\n", + " 'l4_6': '5',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '9',\n", + " 'e1_6': '10',\n", + " 'e1_7': '7',\n", + " 'e1_8': '0',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '0',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 
'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '8',\n", + " 'l3_5': '10',\n", + " 'l3_6': '8',\n", + " 'l3_7': '9',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '10',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '0',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '0',\n", + " 'e1_9': '0',\n", + " 'e1_10': '0',\n", + " 'e1_11': '0',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '10',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '3',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '5',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 
'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '0',\n", + " 'e1_5': '10',\n", + " 'e1_6': '0',\n", + " 'e1_7': '7',\n", + " 'e1_8': '5',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '0',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '0',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '5',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '3',\n", + " 'l4_9': '10',\n", + " 'l4_10': '7',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '5',\n", + " 'e1_5': '10',\n", + " 'e1_6': '0',\n", + " 'e1_7': '9',\n", + " 'e1_8': '9',\n", + " 'e1_9': '9',\n", + " 'e1_10': '9',\n", + " 'e1_11': '9',\n", + " 'e1_12': '10',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '10',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '10',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '6',\n", + " 'l3_3': '10',\n", + " 'l3_4': '0',\n", + " 'l3_5': '0',\n", + " 'l3_6': '0',\n", + " 'l3_7': '0',\n", + " 'l3_8': '0',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '0',\n", + " 'l3_12': '0',\n", + " 
'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '0',\n", + " 'l4_4': '7',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '5',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '5',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '0',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '7',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '6',\n", + " 'l3_8': '3',\n", + " 'l3_9': '10',\n", + " 'l3_10': '10',\n", + " 'l3_11': '10',\n", + " 'l3_12': '10',\n", + " 'l3_13': '10',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '10',\n", + " 'l4_6': '5',\n", + " 'l4_7': '10',\n", + " 'l4_8': '10',\n", + " 'l4_9': '10',\n", + " 'l4_10': '10',\n", + " 'l4_11': '10',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '9',\n", + " 'e1_4': '9',\n", + " 'e1_5': '9',\n", + " 'e1_6': '10',\n", + " 'e1_7': '9',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': 
'10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '8',\n", + " 'l3_6': '10',\n", + " 'l3_7': '8',\n", + " 'l3_8': '10',\n", + " 'l3_9': '10',\n", + " 'l3_10': '7',\n", + " 'l3_11': '5',\n", + " 'l3_12': '0',\n", + " 'l3_13': '0',\n", + " 'l3_14': '0',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '10',\n", + " 'l4_5': '5',\n", + " 'l4_6': '6',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '9',\n", + " 'e1_5': '8',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '0',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '5',\n", + " 'l2_4': '9.5',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '5',\n", + " 'l3_2': '9',\n", + " 'l3_3': '9',\n", + " 'l3_4': '10',\n", + " 'l3_5': '7',\n", + " 'l3_6': '10',\n", + " 'l3_7': '10',\n", + " 'l3_8': '10',\n", + " 'l3_9': '10',\n", + " 'l3_10': '7',\n", + " 'l3_11': '10',\n", + " 'l3_12': '3',\n", + " 'l3_13': '5',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '10',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 
'e1_2': '9',\n", + " 'e1_3': '9',\n", + " 'e1_4': '8',\n", + " 'e1_5': '7',\n", + " 'e1_6': '10',\n", + " 'e1_7': '0',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '9',\n", + " 'e1_11': '10',\n", + " 'e1_12': '9',\n", + " 'e1_13': '5',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '0',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '0',\n", + " 'l3_n': '14',\n", + " 'l3_1': '9.5',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '10',\n", + " 'l3_8': '10',\n", + " 'l3_9': '0',\n", + " 'l3_10': '0',\n", + " 'l3_11': '10',\n", + " 'l3_12': '5',\n", + " 'l3_13': '10',\n", + " 'l3_14': '10',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '5',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '0',\n", + " 'e1_5': '8',\n", + " 'e1_6': '9',\n", + " 'e1_7': '7',\n", + " 'e1_8': '9',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '10',\n", + " 'e1_14': '0',\n", + " 'e1_15': '0'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '9.5',\n", + " 'l2_4': '9',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9.5',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '10',\n", + " 'l3_6': '10',\n", + " 'l3_7': '9',\n", + " 'l3_8': '10',\n", + " 'l3_9': '3',\n", + " 'l3_10': '0',\n", + " 'l3_11': '3',\n", + " 'l3_12': 
'3',\n", + " 'l3_13': '5',\n", + " 'l3_14': '2',\n", + " 'l4_n': '11',\n", + " 'l4_1': '0',\n", + " 'l4_2': '0',\n", + " 'l4_3': '0',\n", + " 'l4_4': '0',\n", + " 'l4_5': '0',\n", + " 'l4_6': '0',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '0',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '0',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '5',\n", + " 'e1_5': '5',\n", + " 'e1_6': '0',\n", + " 'e1_7': '0',\n", + " 'e1_8': '10',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '0',\n", + " 'e1_13': '10',\n", + " 'e1_14': '5',\n", + " 'e1_15': '10'},\n", + " {'l1_n': '1',\n", + " 'l1_1': '10',\n", + " 'l2_n': '7',\n", + " 'l2_1': '10',\n", + " 'l2_2': '10',\n", + " 'l2_3': '3',\n", + " 'l2_4': '7',\n", + " 'l2_5': '10',\n", + " 'l2_6': '10',\n", + " 'l2_7': '9',\n", + " 'l3_n': '14',\n", + " 'l3_1': '10',\n", + " 'l3_2': '10',\n", + " 'l3_3': '10',\n", + " 'l3_4': '10',\n", + " 'l3_5': '0',\n", + " 'l3_6': '10',\n", + " 'l3_7': '9',\n", + " 'l3_8': '10',\n", + " 'l3_9': '7',\n", + " 'l3_10': '7',\n", + " 'l3_11': '3',\n", + " 'l3_12': '7',\n", + " 'l3_13': '5',\n", + " 'l3_14': '8',\n", + " 'l4_n': '11',\n", + " 'l4_1': '10',\n", + " 'l4_2': '10',\n", + " 'l4_3': '10',\n", + " 'l4_4': '8',\n", + " 'l4_5': '5',\n", + " 'l4_6': '3',\n", + " 'l4_7': '0',\n", + " 'l4_8': '0',\n", + " 'l4_9': '7',\n", + " 'l4_10': '0',\n", + " 'l4_11': '0',\n", + " 'q1_n': '1',\n", + " 'q1_1': '9.5',\n", + " 'e1_n': '15',\n", + " 'e1_1': '9',\n", + " 'e1_2': '9',\n", + " 'e1_3': '10',\n", + " 'e1_4': '10',\n", + " 'e1_5': '7',\n", + " 'e1_6': '10',\n", + " 'e1_7': '10',\n", + " 'e1_8': '10',\n", + " 'e1_9': '10',\n", + " 'e1_10': '10',\n", + " 'e1_11': '10',\n", + " 'e1_12': '10',\n", + " 'e1_13': '9',\n", + " 'e1_14': '8',\n", + " 'e1_15': '2'}]" + ] + }, + "execution_count": 796, + "metadata": {}, + "output_type": "execute_result" + } + ], + 
def cvs_reader(filename):
    """Hand-rolled CSV reader (the exercise forbids using a library).

    The file's first line supplies the column names; each subsequent line
    becomes one dict mapping column name -> string value.  Returns the rows
    as a list of dicts, e.g. data[5]["l3_5"] is the 6th student's grade on
    lab 3 question 5.
    """
    with open(filename, "r") as infile:
        header = infile.readline().rstrip().split(",")
        records = []
        for raw_line in infile:
            fields = raw_line.rstrip().split(",")
            # Index into header so a row with extra fields still raises,
            # exactly as before.
            records.append({header[i]: field for i, field in enumerate(fields)})
    return records
# Note this is the OLD design... you will need to modify it.

class summary_calculator(alg):
    """Abstract base (OLD design) for algorithms that summarize grades.

    Subclasses implement apply() on a single student.
    """

    def __init__(self, name):
        alg.__init__(self, name)

    def apply(self, a_student):
        raise NotImplementedError


class grade_summer(calculator):
    """Sums the grades "<prefix>1" ... "<prefix>(n-1)" for every student in
    a grade book and stores the total as a new grade named "<prefix>sum".

    Note: the range is exclusive of `n`, so callers pass n+1 to include the
    n-th question's grade.
    """

    def __init__(self, prefix, n):
        self.__prefix = prefix
        self.__n = n
        calculator.__init__(self, "Sum Grades")

    def apply(self, a_gradebook, **kwargs):
        """Attach a "<prefix>sum" grade to every student of `a_gradebook`."""
        # Labels of the individual grades to accumulate.
        labels = [self.__prefix + str(x) for x in range(1, self.__n)]

        # BUG FIX: the original looped over the *global* `a_grade_book`
        # instead of the `a_gradebook` argument, silently ignoring the grade
        # book passed in.
        for k, a_student in a_gradebook.get_students().items():
            grade_sum = 0.
            for label in labels:
                grade_sum += a_student[label].value()

            a_student.add_grade(grade(self.__prefix + "sum", value=grade_sum),
                                **kwargs)
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "e1_ [35.0, 0, 20.0, 115.0, 24.0, 95.0, 37.0, 77.0, 103.0, 117.0, 138.0, 110.0, 104.0, 111.0, 103.0, 134.0]\n", + "l1_ [10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0]\n", + "l2_ [58.0, 0, 0, 62.0, 49.5, 69.0, 54.5, 69.0, 39.5, 60.0, 47.0, 68.5, 64.0, 49.5, 68.0, 59.0]\n", + "l3_ [9.0, 0, 71.0, 102.0, 77.5, 89.0, 64.5, 40.0, 40.0, 26.0, 126.0, 97.5, 115.0, 114.5, 95.0, 106.0]\n", + "l4_ [0, 0, 0, 75.0, 26.0, 35.0, 30.0, 51.0, 68.0, 27.0, 105.0, 51.0, 0, 25.0, 0, 53.0]\n", + "q1_ [9.5, 0, 5.0, 10.0, 0, 9.5, 10.0, 10.0, 9.5, 9.5, 0, 10.0, 10.0, 0, 0, 9.5]\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " print(prefix,list(a_student[prefix+\"sum\"].value() for k,a_student in a_grade_book.get_students().items()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Curving Grades\n", + "\n", + "*Exercise 4:* Use the `mean_std_calculator` above to calculate the mean and standard deviation for every lab, quiz, and exam in the class. 
Add a new print function to the `grade_book` class to print out such information in a nice way, and use this function to show your results.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 801, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "e1_sum Mean : 82.6875\n", + "e1_sum STD : 42.937045121316864\n", + "l1_sum Mean : 10.0\n", + "l1_sum STD : 0.0\n", + "l2_sum Mean : 51.09375\n", + "l2_sum STD : 21.05663401252679\n", + "l3_sum Mean : 73.3125\n", + "l3_sum STD : 38.301792957379945\n", + "l4_sum Mean : 34.125\n", + "l4_sum STD : 30.421774685248064\n", + "q1_sum Mean : 6.40625\n", + "q1_sum STD : 4.469405546322688\n" + ] + } + ], + "source": [ + "# Your solution here\n", + "prefixes=sorted(list(set([k.split(\"_\")[0] + \"_\" for k in class_data[0].keys()])))\n", + "for prefix in prefixes:\n", + " a_grade_book.apply_calculator(mean_std_calculator(grade_name=prefix+\"sum\"))\n", + "a_grade_book.print_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "*Exercise 5:* In lecture we will change the design of our algorithms classes and then update the `uncurved_letter_grade_percent` calculator. Do the same for the `curved_letter_grade` calculator below and by curving all the lab, quiz, and exam grades." 
class curved_letter_grade(calculator):
    """Assigns a curved letter grade based on how far a student's score sits
    from the class mean, measured in standard deviations.

    The raw percentage is rescaled so the class mean maps to 0.8 (a B-) and
    each standard deviation is worth 0.1; the rescaled value is then looked
    up in the letter table below.
    """

    # (lower bound, letter) pairs, highest first; the first bound that is
    # <= the scaled percent wins.  BUG FIX: the .67/.63/.60 tiers repeated
    # "C+"/"C"/"C-" (an apparent copy-paste slip); they are now the D tiers
    # so every bracket maps to a distinct letter.
    __grades_definition = [(.97, "A+"),
                           (.93, "A"),
                           (.9, "A-"),
                           (.87, "B+"),
                           (.83, "B"),
                           (.8, "B-"),
                           (.77, "C+"),
                           (.73, "C"),
                           (.7, "C-"),
                           (.67, "D+"),
                           (.63, "D"),
                           (.6, "D-"),
                           (.57, "F+"),
                           (.53, "F"),
                           (0., "F-")]
    __max_grade = 100.
    __grade_name = str()

    def __init__(self, grade_name, mean, std, max_grade=100.):
        self.__max_grade = max_grade
        self.__mean = mean
        self.__std = std
        self.__grade_name = grade_name
        calculator.__init__(self,
                            "Curved Percent Based Grade Calculator " + self.__grade_name +
                            " Mean=" + str(self.__mean) +
                            " STD=" + str(self.__std) +
                            " Max=" + str(self.__max_grade))

    def apply(self, a_grade_book, grade_name=None, **kwargs):
        """Attach a "<grade_name> Letter" grade to every student.

        Falls back to the grade name given at construction when `grade_name`
        is not supplied.  Raises if the source grade is not numerical.
        """
        if not grade_name:
            grade_name = self.__grade_name

        for k, a_student in a_grade_book.get_students().items():
            a_grade = a_student[grade_name]

            if not a_grade.numerical():
                # BUG FIX: added the missing space after the calculator name
                # so the message reads "... Error: ..." instead of
                # "...Error: ...".
                print(self.name() + " Error: Did not get a numerical grade as input.")
                raise Exception

            # Rescale the grade: mean -> 0.8, one standard deviation -> 0.1.
            # When the class std is zero the scaled percent is forced to 1,
            # i.e. every student receives the top letter.
            scaled_percent = 1
            if self.__std != 0:
                percent = a_grade.value() / self.__max_grade
                shift_to_zero = percent - (self.__mean / self.__max_grade)
                scale_std = 0.1 * shift_to_zero / (self.__std / self.__max_grade)
                scaled_percent = scale_std + 0.8

            # Find the first (highest) bracket the scaled percent clears.
            for i, v in enumerate(self.__grades_definition):
                if scaled_percent >= v[0]:
                    break

            overwrite = kwargs.get("overwrite", False)
            a_student.add_grade(grade(grade_name + " Letter",
                                      value=self.__grades_definition[i][1]),
                                overwrite=overwrite)
"l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 
7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C-\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: C\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", 
+ "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 
14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: F-\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 
0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 
0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C-\n", + "l3_sum Letter: F-\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + "l4_sum: 105.0\n", + 
"q1_sum: 0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 10.0\n", + "l4_n: 
11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "13 Student 13 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F-\n", + "l3_sum Letter: A+\n", + 
"l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: F-\n", + "q1_sum Letter: F-\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 
0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A+\n", + "l1_sum Letter: F-\n", + "l2_sum Letter: F+\n", + "l3_sum Letter: A+\n", + "l4_sum Letter: F\n", + "q1_sum Letter: F-\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " grade_name = prefix+\"sum\"\n", + " a_grade_book.apply_calculator(uncurved_letter_grade_percent(grade_name,max_grade=100))\n", + "a_grade_book.print_students()" + ] + }, + { + "cell_type": "code", + "execution_count": 804, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 Student 0 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 8.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + 
"e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 35.0\n", + "l1_sum: 10.0\n", + "l2_sum: 58.0\n", + "l3_sum: 9.0\n", + "l4_sum: 0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "1 Student 1 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: C-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 
0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: 
B+\n", + "l4_sum Letter: A\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 
0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 
10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 
9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: A-\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + 
"l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + "l4_sum: 105.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 
10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "_______________________________________\n", + "13 Student 13 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 
10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + 
"e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "for prefix in prefixes:\n", + " grade_name = prefix+\"sum\"\n", + " a_grade_book.apply_calculator(curved_letter_grade(grade_name,\n", + " a_grade_book[grade_name+\" Mean\"],\n", + " a_grade_book[grade_name+\" STD\"]),\n", + " overwrite=True)\n", + "a_grade_book.print_students()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": 
[ + "## Final Course Grade\n", + "\n", + "*Exercise 6:* Write a new calculator that sums grades with a prefix, as in the `grade_summer` calculator, but drops `n` lowest grades. Apply the algorithm to drop the lowest lab grade in the data.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 787, + "metadata": {}, + "outputs": [], + "source": [ + "# Your solution here\n", + "class grade_dropper(calculator):\n", + " def __init__(self,prefix,n):\n", + " self.__prefix=prefix\n", + " self.__n=n\n", + " calculator.__init__(self,\"Final Course Grades\")\n", + " \n", + " def apply(self,a_gradebook,**kwargs):\n", + " for k,a_student in a_grade_book.get_students().items():\n", + " labels=list(filter(lambda x: \n", + " self.__prefix in x, \n", + " a_student.grades().keys()))\n", + " print(labels)\n", + " grade_sum=0.\n", + " grades=list()\n", + " for label in labels:\n", + " if '_' in label and isinstance(a_student[label].value(),(int,float)):\n", + " grades.append(a_student[label].value())\n", + " grades = sorted(grades)\n", + " if len(grades) > self.__n:\n", + " grade_sum = sum(grades[self.__n:])\n", + " else:\n", + " grade_sum = sum(grades)\n", + "# grade_sum = sum(grades)\n", + "\n", + " a_student.add_grade(grade(self.__prefix+\" dropped sum\",value=grade_sum),**kwargs)" + ] + }, + { + "cell_type": "code", + "execution_count": 805, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['l1_sum', 'l2_sum', 'l3_sum', 'l4_sum', 'l1_sum Letter', 'l2_sum Letter', 'l3_sum Letter', 'l4_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 
'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l1_sum', 'l1_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l2_sum', 'l2_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l3_sum', 'l3_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "['l4_sum', 'l4_sum Letter']\n", + "0 Student 0 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 8.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + 
"l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 35.0\n", + "l1_sum: 10.0\n", + "l2_sum: 58.0\n", + "l3_sum: 9.0\n", + "l4_sum: 0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 58.0\n", + "l3_sum dropped sum: 9.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "1 Student 1 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 0\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + "e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", 
+ "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: C-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 0\n", + "l3_sum dropped sum: 0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "2 Student 2 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 0\n", + "l2_6: 0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 3.0\n", + "l3_8: 6.0\n", + "l3_9: 3.0\n", + "l3_10: 3.0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 5.0\n", + "e1_n: 15.0\n", + "e1_1: 5.0\n", + "e1_2: 5.0\n", + "e1_3: 5.0\n", + "e1_4: 5.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 20.0\n", + "l1_sum: 10.0\n", + "l2_sum: 0\n", + "l3_sum: 71.0\n", + "l4_sum: 0\n", + "q1_sum: 5.0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: F\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 0\n", + "l3_sum dropped sum: 71.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "3 Student 3 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 
10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 5.0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 10.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 10.0\n", + "l4_10: 5.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 7.0\n", + "e1_6: 9.0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 9.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 8.0\n", + "e1_15: 10.0\n", + "e1_sum: 115.0\n", + "l1_sum: 10.0\n", + "l2_sum: 62.0\n", + "l3_sum: 102.0\n", + "l4_sum: 75.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: A\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 62.0\n", + "l3_sum dropped sum: 102.0\n", + "l4_sum dropped sum: 75.0\n", + "_______________________________________\n", + "4 Student 4 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 0\n", + "l3_3: 0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 5.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 6.0\n", + "l3_13: 10.0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 6.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 0\n", + "e1_2: 0\n", + 
"e1_3: 0\n", + "e1_4: 0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 3.0\n", + "e1_10: 3.0\n", + "e1_11: 3.0\n", + "e1_12: 0\n", + "e1_13: 3.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 24.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 77.5\n", + "l4_sum: 26.0\n", + "q1_sum: 0\n", + "e1_sum Letter: C\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: B-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 49.5\n", + "l3_sum dropped sum: 77.5\n", + "l4_sum dropped sum: 26.0\n", + "_______________________________________\n", + "5 Student 5 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.5\n", + "l3_3: 9.5\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 8.0\n", + "l3_10: 0\n", + "l3_11: 5.0\n", + "l3_12: 6.0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 7.0\n", + "e1_8: 0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 95.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 89.0\n", + "l4_sum: 35.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B-\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 69.0\n", + "l3_sum dropped sum: 89.0\n", + "l4_sum 
dropped sum: 35.0\n", + "_______________________________________\n", + "6 Student 6 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 5.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 8.0\n", + "l3_5: 10.0\n", + "l3_6: 8.0\n", + "l3_7: 9.0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 10.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 0\n", + "e1_9: 0\n", + "e1_10: 0\n", + "e1_11: 0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 37.0\n", + "l1_sum: 10.0\n", + "l2_sum: 54.5\n", + "l3_sum: 64.5\n", + "l4_sum: 30.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B-\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 54.5\n", + "l3_sum dropped sum: 64.5\n", + "l4_sum dropped sum: 30.0\n", + "_______________________________________\n", + "7 Student 7 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 10.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", 
+ "l4_5: 3.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 5.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 7.0\n", + "e1_8: 5.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 77.0\n", + "l1_sum: 10.0\n", + "l2_sum: 69.0\n", + "l3_sum: 40.0\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: C+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 69.0\n", + "l3_sum dropped sum: 40.0\n", + "l4_sum dropped sum: 51.0\n", + "_______________________________________\n", + "8 Student 8 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 3.0\n", + "l4_9: 10.0\n", + "l4_10: 7.0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 10.0\n", + "e1_6: 0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 9.0\n", + "e1_10: 9.0\n", + "e1_11: 9.0\n", + "e1_12: 10.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 39.5\n", + "l3_sum: 40.0\n", + "l4_sum: 68.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + 
"l2_sum Letter: C\n", + "l3_sum Letter: C-\n", + "l4_sum Letter: A-\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 39.5\n", + "l3_sum dropped sum: 40.0\n", + "l4_sum dropped sum: 68.0\n", + "_______________________________________\n", + "9 Student 9 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 10.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 10.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 6.0\n", + "l3_3: 10.0\n", + "l3_4: 0\n", + "l3_5: 0\n", + "l3_6: 0\n", + "l3_7: 0\n", + "l3_8: 0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 0\n", + "l4_4: 7.0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 5.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 5.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 0\n", + "e1_sum: 117.0\n", + "l1_sum: 10.0\n", + "l2_sum: 60.0\n", + "l3_sum: 26.0\n", + "l4_sum: 27.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: B+\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: C+\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 60.0\n", + "l3_sum dropped sum: 26.0\n", + "l4_sum dropped sum: 27.0\n", + "_______________________________________\n", + "10 Student 10 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 0\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 7.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 
10.0\n", + "l3_7: 6.0\n", + "l3_8: 3.0\n", + "l3_9: 10.0\n", + "l3_10: 10.0\n", + "l3_11: 10.0\n", + "l3_12: 10.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 10.0\n", + "l4_6: 5.0\n", + "l4_7: 10.0\n", + "l4_8: 10.0\n", + "l4_9: 10.0\n", + "l4_10: 10.0\n", + "l4_11: 10.0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 9.0\n", + "e1_5: 9.0\n", + "e1_6: 10.0\n", + "e1_7: 9.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 138.0\n", + "l1_sum: 10.0\n", + "l2_sum: 47.0\n", + "l3_sum: 126.0\n", + "l4_sum: 105.0\n", + "q1_sum: 0\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A\n", + "l4_sum Letter: A+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 47.0\n", + "l3_sum dropped sum: 126.0\n", + "l4_sum dropped sum: 105.0\n", + "_______________________________________\n", + "11 Student 11 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 8.0\n", + "l3_6: 10.0\n", + "l3_7: 8.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 5.0\n", + "l3_12: 0\n", + "l3_13: 0\n", + "l3_14: 0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 10.0\n", + "l4_5: 5.0\n", + "l4_6: 6.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 9.0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + 
"e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 110.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.5\n", + "l3_sum: 97.5\n", + "l4_sum: 51.0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 68.5\n", + "l3_sum dropped sum: 97.5\n", + "l4_sum dropped sum: 51.0\n", + "_______________________________________\n", + "12 Student 12 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 5.0\n", + "l2_4: 9.5\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 5.0\n", + "l3_2: 9.0\n", + "l3_3: 9.0\n", + "l3_4: 10.0\n", + "l3_5: 7.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 10.0\n", + "l3_10: 7.0\n", + "l3_11: 10.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 10.0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 9.0\n", + "e1_4: 8.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 9.0\n", + "e1_11: 10.0\n", + "e1_12: 9.0\n", + "e1_13: 5.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 104.0\n", + "l1_sum: 10.0\n", + "l2_sum: 64.0\n", + "l3_sum: 115.0\n", + "l4_sum: 0\n", + "q1_sum: 10.0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: B+\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 64.0\n", + "l3_sum dropped sum: 115.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "13 Student 13 
Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 0\n", + "l3_n: 14.0\n", + "l3_1: 9.5\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 10.0\n", + "l3_8: 10.0\n", + "l3_9: 0\n", + "l3_10: 0\n", + "l3_11: 10.0\n", + "l3_12: 5.0\n", + "l3_13: 10.0\n", + "l3_14: 10.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 5.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 0\n", + "e1_5: 8.0\n", + "e1_6: 9.0\n", + "e1_7: 7.0\n", + "e1_8: 9.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 10.0\n", + "e1_14: 0\n", + "e1_15: 0\n", + "e1_sum: 111.0\n", + "l1_sum: 10.0\n", + "l2_sum: 49.5\n", + "l3_sum: 114.5\n", + "l4_sum: 25.0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: C+\n", + "l3_sum Letter: A-\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 49.5\n", + "l3_sum dropped sum: 114.5\n", + "l4_sum dropped sum: 25.0\n", + "_______________________________________\n", + "14 Student 14 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 9.5\n", + "l2_4: 9.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.5\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 10.0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 3.0\n", + "l3_10: 0\n", + "l3_11: 3.0\n", + "l3_12: 3.0\n", + "l3_13: 5.0\n", + "l3_14: 2.0\n", + "l4_n: 11.0\n", + "l4_1: 0\n", + "l4_2: 0\n", + "l4_3: 0\n", + "l4_4: 0\n", + "l4_5: 0\n", + "l4_6: 0\n", + "l4_7: 0\n", 
+ "l4_8: 0\n", + "l4_9: 0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 0\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 5.0\n", + "e1_5: 5.0\n", + "e1_6: 0\n", + "e1_7: 0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 0\n", + "e1_13: 10.0\n", + "e1_14: 5.0\n", + "e1_15: 10.0\n", + "e1_sum: 103.0\n", + "l1_sum: 10.0\n", + "l2_sum: 68.0\n", + "l3_sum: 95.0\n", + "l4_sum: 0\n", + "q1_sum: 0\n", + "e1_sum Letter: B\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: B+\n", + "l3_sum Letter: B\n", + "l4_sum Letter: C+\n", + "q1_sum Letter: C\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 68.0\n", + "l3_sum dropped sum: 95.0\n", + "l4_sum dropped sum: 0\n", + "_______________________________________\n", + "15 Student 15 Student Data\n", + "l1_n: 1.0\n", + "l1_1: 10.0\n", + "l2_n: 7.0\n", + "l2_1: 10.0\n", + "l2_2: 10.0\n", + "l2_3: 3.0\n", + "l2_4: 7.0\n", + "l2_5: 10.0\n", + "l2_6: 10.0\n", + "l2_7: 9.0\n", + "l3_n: 14.0\n", + "l3_1: 10.0\n", + "l3_2: 10.0\n", + "l3_3: 10.0\n", + "l3_4: 10.0\n", + "l3_5: 0\n", + "l3_6: 10.0\n", + "l3_7: 9.0\n", + "l3_8: 10.0\n", + "l3_9: 7.0\n", + "l3_10: 7.0\n", + "l3_11: 3.0\n", + "l3_12: 7.0\n", + "l3_13: 5.0\n", + "l3_14: 8.0\n", + "l4_n: 11.0\n", + "l4_1: 10.0\n", + "l4_2: 10.0\n", + "l4_3: 10.0\n", + "l4_4: 8.0\n", + "l4_5: 5.0\n", + "l4_6: 3.0\n", + "l4_7: 0\n", + "l4_8: 0\n", + "l4_9: 7.0\n", + "l4_10: 0\n", + "l4_11: 0\n", + "q1_n: 1.0\n", + "q1_1: 9.5\n", + "e1_n: 15.0\n", + "e1_1: 9.0\n", + "e1_2: 9.0\n", + "e1_3: 10.0\n", + "e1_4: 10.0\n", + "e1_5: 7.0\n", + "e1_6: 10.0\n", + "e1_7: 10.0\n", + "e1_8: 10.0\n", + "e1_9: 10.0\n", + "e1_10: 10.0\n", + "e1_11: 10.0\n", + "e1_12: 10.0\n", + "e1_13: 9.0\n", + "e1_14: 8.0\n", + "e1_15: 2.0\n", + "e1_sum: 134.0\n", + "l1_sum: 10.0\n", + "l2_sum: 59.0\n", + "l3_sum: 106.0\n", + "l4_sum: 53.0\n", + "q1_sum: 9.5\n", + "e1_sum Letter: A-\n", + "l1_sum Letter: A+\n", + "l2_sum Letter: 
B\n", + "l3_sum Letter: B+\n", + "l4_sum Letter: B\n", + "q1_sum Letter: B\n", + "l1_sum dropped sum: 10.0\n", + "l2_sum dropped sum: 59.0\n", + "l3_sum dropped sum: 106.0\n", + "l4_sum dropped sum: 53.0\n", + "_______________________________________\n" + ] + } + ], + "source": [ + "# a_grade_book=grade_book(\"Data 1401\")\n", + "\n", + "# for student_i in range(len(class_data)):\n", + "# a_student_0=student(\"Student\",str(student_i),student_i)\n", + "\n", + "# for k in class_data[student_i].keys():\n", + "# a_student_0.add_grade(grade(k,value=float(class_data[student_i][k])))\n", + "\n", + "# a_grade_book.add_student(a_student_0)\n", + "lab_prefixes = list(filter(lambda x: 'l' in x and 'sum' in x, list(a_grade_book.get_students().values())[0].grades().keys()))\n", + "print(lab_prefixes)\n", + "\n", + "for prefix in lab_prefixes:\n", + " if ' ' not in prefix:\n", + " a_grade_book.apply_calculator(grade_dropper(prefix,1), overwrite=True)\n", + "a_grade_book.print_students()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "*Exercise 7*: Write a new calculator that creates a new letter grade based on a weighted average of letter grades, by assigning the following numerical values to letter grades:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "GradeMap={\"A+\":12,\n", + " \"A\":11,\n", + " \"A-\":10,\n", + " \"B+\":9,\n", + " \"B\":8,\n", + " \"B-\":7,\n", + " \"C+\":6,\n", + " \"C\":5,\n", + " \"C-\":4,\n", + " \"D+\":3,\n", + " \"D\":2,\n", + " \"D-\":1,\n", + " \"F\":0}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Test you calculator by applying the weights from the syllabus of this course and computing everyone's grade in the course." 
class Canvas:
    """A fixed-size character raster: a 2-D list of single characters.

    Note the (row, col) indexing of set_pixel/get_pixel; h_line varies the
    FIRST index and v_line the SECOND — callers (Rectangle etc.) rely on
    this orientation, so it is preserved as-is.
    """

    def __init__(self, width, height):
        self.width = width
        self.height = height
        # height rows of width blanks each
        self.data = [[' '] * width for i in range(height)]

    def set_pixel(self, row, col, char='*'):
        self.data[row][col] = char

    def get_pixel(self, row, col):
        return self.data[row][col]

    def h_line(self, x, y, w, **kargs):
        # paints w cells starting at (x, y), varying the first index
        for i in range(x, x + w):
            self.set_pixel(i, y, **kargs)

    def v_line(self, x, y, h, **kargs):
        # paints h cells starting at (x, y), varying the second index
        for i in range(y, y + h):
            self.set_pixel(x, i, **kargs)

    def line(self, x1, y1, x2, y2, **kargs):
        """Rasterize the segment (x1, y1)-(x2, y2).

        Fixes vs. the original:
          * horizontal case with x2 > x1 started the run at x2 instead of x1;
          * vertical case compared `y2 > y2` (always False), so vertical
            lines with y2 > y1 were silently never drawn — now `y2 > y1`.
        """
        slope = 0
        if x2 != x1:
            slope = (y2 - y1) / (x2 - x1)
        if (slope > 0 or slope < 0) and y1 != y2 and x1 != x2:
            if x1 < x2:
                for x in range(x1, x2):
                    y = y1 + int((x - x1) * slope)
                    self.set_pixel(x, y, **kargs)
            elif x2 < x1:
                for x in range(x1, x2, -1):
                    y = y2 + int((x - x2) * slope)
                    self.set_pixel(x, y, **kargs)
        elif y2 == y1:
            if x1 > x2:
                self.h_line(x2, y2, x1 - x2, **kargs)
            elif x2 > x1:
                self.h_line(x1, y1, x2 - x1, **kargs)   # was h_line(x2, y2, ...)
        elif x2 == x1:
            if y1 > y2:
                self.v_line(x2, y2, y1 - y2, **kargs)
            elif y2 > y1:                               # was `y2 > y2` (dead branch)
                self.v_line(x1, y1, y2 - y1, **kargs)

    def display(self):
        """Print the raster, one row per line."""
        print("\n".join(["".join(row) for row in self.data]))


class Shape:
    """Base shape: stores an optional name and painter kwargs (e.g. char='*')."""

    def __init__(self, name="", **kwargs):
        self.name = name      # empty string means "auto-name me" in RasterDrawing
        self.kwargs = kwargs  # forwarded to Canvas painting calls

    def paint(self, canvas):
        pass


class Rectangle(Shape):
    """Axis-aligned rectangle outline anchored at (x, y) with size w x h."""

    def __init__(self, x, y, w, h, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y
        self.w = w
        self.h = h

    def paint(self, canvas):
        # NOTE(review): the (x+w, y+h) corner is never painted — kept as-is to
        # preserve the notebook's recorded output; confirm whether intended.
        canvas.h_line(self.x, self.y, self.w, **self.kwargs)
        canvas.h_line(self.x, self.y + self.h, self.w, **self.kwargs)
        canvas.v_line(self.x, self.y, self.h, **self.kwargs)
        canvas.v_line(self.x + self.w, self.y, self.h, **self.kwargs)


class Square(Rectangle):
    """Rectangle with equal sides."""

    def __init__(self, x, y, size, **kwargs):
        Rectangle.__init__(self, x, y, size, size, **kwargs)


class Line(Shape):
    """Straight segment between (x1, y1) and (x2, y2)."""

    def __init__(self, x1, y1, x2, y2, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2

    def paint(self, canvas):
        # fix: forward kwargs so char=... is honoured (original dropped them)
        canvas.line(self.x1, self.y1, self.x2, self.y2, **self.kwargs)


class CompoundShape(Shape):
    """A group of shapes painted in order."""

    def __init__(self, shapes):
        self.shapes = shapes

    def paint(self, canvas):
        for s in self.shapes:
            s.paint(canvas)


class RasterDrawing:
    """Ordered collection of named shapes that can be painted onto a Canvas."""

    def __init__(self):
        self.shapes = dict()       # name -> shape
        self.shape_names = list()  # preserves insertion/paint order

    def add_shape(self, shape):
        # unnamed shapes get an auto-generated unique name
        if shape.name == "":
            shape.name = self.assign_name()
        self.shapes[shape.name] = shape
        self.shape_names.append(shape.name)

    def paint(self, canvas):
        for shape_name in self.shape_names:
            self.shapes[shape_name].paint(canvas)

    def assign_name(self):
        """Return the first unused name shape_0, shape_1, ...

        The original never incremented i inside the loop, so adding a second
        auto-named shape hung forever.
        """
        name_base = "shape"
        name = name_base + "_0"
        i = 1
        while name in self.shapes:
            name = name_base + "_" + str(i)
            i += 1
        return name
class Point(Shape):
    """A single pixel located at (x, y)."""

    def __init__(self, x, y, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y

    def paint(self, canvas):
        canvas.set_pixel(self.x, self.y, **self.kwargs)


class Triangle(Shape):
    """Three vertices joined pairwise by straight segments."""

    def __init__(self, x1, y1, x2, y2, x3, y3, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2
        self.x3 = x3
        self.y3 = y3

    def paint(self, canvas):
        # draw each edge: vertex i to vertex i+1, wrapping back to the first
        corners = [(self.x1, self.y1), (self.x2, self.y2), (self.x3, self.y3)]
        for (ax, ay), (bx, by) in zip(corners, corners[1:] + corners[:1]):
            canvas.line(ax, ay, bx, by, **self.kwargs)
import numpy as np
import math


class Arc(Shape):
    """Elliptical arc centred at (x, y) with radii rx and ry; angles in radians.

    If start/stop are omitted (or equal) the full ellipse is traced; when
    rx == ry that ellipse is a circle.
    """

    def __init__(self, x, y, rx, ry, start=None, stop=None, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y
        self.rx = rx
        self.ry = ry
        self.start = start
        self.stop = stop

    def paint(self, canvas):
        # idiom fix: `is not None` instead of `!= None`; the original's two
        # branches differed only in the swept angle range, so share one loop.
        if self.start is not None and self.stop is not None and self.start != self.stop:
            begin, end = self.start, self.stop
        else:
            begin, end = 0, 2 * math.pi
        dt = (end - begin) / 360  # 360 samples across the swept angle
        for theta in np.arange(begin, end, dt):
            px = int(self.rx * math.cos(theta) + self.x)
            py = int(self.ry * math.sin(theta) + self.y)
            canvas.set_pixel(px, py, **self.kwargs)


class Oval(Arc):
    """Full ellipse: an Arc with no start/stop angles."""

    def __init__(self, x, y, rx, ry, **kwargs):
        Arc.__init__(self, x, y, rx, ry, start=None, stop=None, **kwargs)


class Circle(Arc):
    """Full circle: an Arc with equal radii and no start/stop angles."""

    def __init__(self, x, y, r, **kwargs):
        Arc.__init__(self, x, y, r, r, start=None, stop=None, **kwargs)
\n", + " 33 33 44 44 \n", + " 3 3 4 4 \n", + " 3 3 4 4 \n", + " 3 33 4 44 \n", + " 3 3 4 4 \n", + " 33 33 44 44 \n", + " 33 33 44 44 \n", + " 333333333333 444444444444 \n", + " 3 4 \n", + " \n", + " \n", + " \n", + " \n", + " 555555 666666 \n", + " 5 5 6 6 \n", + " 5 5 6 6 \n", + " 5 5 6 6 \n", + " 5 5 6 6 \n", + " 5 55 6 66 \n", + " 5 5 6 6 \n", + " 5 5 6 6 \n", + " 5 5 6 6 \n", + " 555555 666666 \n", + " 5 6 \n", + " \n", + " \n", + " \n", + " \n" + ] + } + ], + "source": [ + "c1=Canvas(70,50)\n", + "# normal arc\n", + "# draws (part of) an oval but looks like a circle\n", + "a1=Arc(10,15,5,10,0,1.5*math.pi, char=\"1\")\n", + "a1.paint(c1)\n", + "# arc w/ same start and end\n", + "a2=Arc(10,40,5,10,math.pi,math.pi, char=\"2\")\n", + "a2.paint(c1)\n", + "# arc w/o start and end\n", + "# draws an oval but looks like a circle\n", + "a3=Arc(25,15,5,10,char=\"3\")\n", + "a3.paint(c1)\n", + "o1=Oval(25,40,5,10,char=\"4\")\n", + "o1.paint(c1)\n", + "# arc with same rx and ry\n", + "# draws a circle but looks like an oval\n", + "a4=Arc(40,15,5,5,char=\"5\")\n", + "a4.paint(c1)\n", + "cr1=Circle(40,40,5,char=\"6\")\n", + "cr1.paint(c1)\n", + "c1.display()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "3. Use your classes to create a `RasterDrawing` that draws a happy face." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 111111111111 \n", + " 111 111 \n", + " 11 11 \n", + " 11 11 \n", + " 11 11 \n", + " 1 444444 1 \n", + " 1 44 1 \n", + " 11 44 11 \n", + " 1 4 22222222 1 \n", + "11 4 2 2 11 \n", + "1 44 2 22 1 \n", + "1 4 22222222 1 \n", + "1 4 2 1 \n", + "1 4 1 \n", + "1 4 1 \n", + "1 4 11\n", + "1 4 1 \n", + "1 4 1 \n", + "1 4 33333333 1 \n", + "1 44 3 3 1 \n", + "11 4 3 33 11 \n", + " 1 4 33333333 1 \n", + " 11 44 3 11 \n", + " 1 44 1 \n", + " 1 44444 1 \n", + " 11 11 \n", + " 11 11 \n", + " 11 11 \n", + " 111 111 \n", + " 111111111111 \n", + " 1 \n" + ] + } + ], + "source": [ + "c1=Canvas(31,31)\n", + "rd=RasterDrawing()\n", + "\n", + "rd.add_shape(Circle(15,15,15,char=\"1\",name=\"head\"))\n", + "rd.add_shape(Oval(10,18,2,4,char=\"2\",name=\"left eye\"))\n", + "rd.add_shape(Oval(20,18,2,4,char=\"3\",name=\"right eye\"))\n", + "rd.add_shape(Arc(15,15,10,10,math.pi,2*math.pi,char=\"4\",name=\"mouth\"))\n", + "\n", + "rd.paint(c1)\n", + "\n", + "c1.display()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "4. Add to the `Shape` base class a `__str__()` method. Overwrite the method in each shape to generate a string of the python code necessary to reinstantiate the object. 
import numpy as np
import math


class Shape:
    """Base drawing shape.

    Subclasses override paint() and __str__(); __str__ returns the Python
    expression that reinstantiates the object, e.g. "Square(5,5,20,char='^')",
    so drawings can be saved as text and rebuilt with eval().
    """

    def __init__(self, name="", **kwargs):
        self.name = name      # empty string means "auto-name me" in RasterDrawing
        self.kwargs = kwargs  # forwarded to Canvas painting calls (e.g. char='^')

    def paint(self, canvas):
        pass

    def __str__(self):
        pass

    def _as_call(self, cls_name, args):
        """Format "cls_name(arg,...,key=value,...,name='...')".

        Shared by every subclass's __str__ (the original copy-pasted this loop
        nine times). Also fixes a crash: the original concatenated non-string
        kwarg values directly (TypeError); they are now rendered with str().
        """
        text = cls_name + "(" + ",".join(str(a) for a in args)
        for key, value in self.kwargs.items():
            if isinstance(value, str):
                text += "," + key + "='" + value + "'"
            else:
                text += "," + key + "=" + str(value)
        if self.name != "":
            text += ",name='" + self.name + "'"
        return text + ")"


class Rectangle(Shape):
    """Axis-aligned rectangle outline anchored at (x, y) with size w x h."""

    def __init__(self, x, y, w, h, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y
        self.w = w
        self.h = h

    def paint(self, canvas):
        canvas.h_line(self.x, self.y, self.w, **self.kwargs)
        canvas.h_line(self.x, self.y + self.h, self.w, **self.kwargs)
        canvas.v_line(self.x, self.y, self.h, **self.kwargs)
        canvas.v_line(self.x + self.w, self.y, self.h, **self.kwargs)

    def __str__(self):
        return self._as_call('Rectangle', (self.x, self.y, self.w, self.h))


class Square(Rectangle):
    """Rectangle with equal sides."""

    def __init__(self, x, y, size, **kwargs):
        Rectangle.__init__(self, x, y, size, size, **kwargs)

    def __str__(self):
        return self._as_call('Square', (self.x, self.y, self.w))


class Line(Shape):
    """Straight segment between (x1, y1) and (x2, y2)."""

    def __init__(self, x1, y1, x2, y2, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2

    def paint(self, canvas):
        # fix: forward kwargs so char=... is honoured (original dropped them)
        canvas.line(self.x1, self.y1, self.x2, self.y2, **self.kwargs)

    def __str__(self):
        return self._as_call('Line', (self.x1, self.y1, self.x2, self.y2))


class CompoundShape(Shape):
    """A group of shapes painted in order."""

    def __init__(self, shapes, **kwargs):
        # fix: initialise the base class so compounds have name/kwargs and can
        # be added to a RasterDrawing (original left self.name undefined)
        Shape.__init__(self, **kwargs)
        self.shapes = shapes

    def paint(self, canvas):
        for s in self.shapes:
            s.paint(canvas)

    def __str__(self):
        return 'CompoundShape([' + ','.join(str(s) for s in self.shapes) + '])'


class Point(Shape):
    """A single pixel located at (x, y)."""

    def __init__(self, x, y, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y

    def paint(self, canvas):
        canvas.set_pixel(self.x, self.y, **self.kwargs)

    def __str__(self):
        return self._as_call('Point', (self.x, self.y))


class Triangle(Shape):
    """Three vertices joined pairwise by straight segments."""

    def __init__(self, x1, y1, x2, y2, x3, y3, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2
        self.x3 = x3
        self.y3 = y3

    def paint(self, canvas):
        canvas.line(self.x1, self.y1, self.x2, self.y2, **self.kwargs)
        canvas.line(self.x2, self.y2, self.x3, self.y3, **self.kwargs)
        canvas.line(self.x3, self.y3, self.x1, self.y1, **self.kwargs)

    def __str__(self):
        return self._as_call(
            'Triangle', (self.x1, self.y1, self.x2, self.y2, self.x3, self.y3))


class Arc(Shape):
    """Elliptical arc centred at (x, y) with radii rx and ry; angles in radians.

    If start/stop are omitted (or equal) the full ellipse is traced.
    """

    def __init__(self, x, y, rx, ry, start=None, stop=None, **kwargs):
        Shape.__init__(self, **kwargs)
        self.x = x
        self.y = y
        self.rx = rx
        self.ry = ry
        self.start = start
        self.stop = stop

    def paint(self, canvas):
        # idiom fix: `is not None` instead of `!= None`; shared sampling loop
        if self.start is not None and self.stop is not None and self.start != self.stop:
            begin, end = self.start, self.stop
        else:
            begin, end = 0, 2 * math.pi
        dt = (end - begin) / 360  # 360 samples across the swept angle
        for theta in np.arange(begin, end, dt):
            px = int(self.rx * math.cos(theta) + self.x)
            py = int(self.ry * math.sin(theta) + self.y)
            canvas.set_pixel(px, py, **self.kwargs)

    def __str__(self):
        # start/stop are emitted positionally, even when None, matching the
        # original output format (eval round-trips because Arc accepts them)
        return self._as_call(
            'Arc', (self.x, self.y, self.rx, self.ry, self.start, self.stop))


class Oval(Arc):
    """Full ellipse: an Arc with no start/stop angles."""

    def __init__(self, x, y, rx, ry, **kwargs):
        Arc.__init__(self, x, y, rx, ry, start=None, stop=None, **kwargs)

    def __str__(self):
        return self._as_call('Oval', (self.x, self.y, self.rx, self.ry))


class Circle(Arc):
    """Full circle: an Arc with equal radii and no start/stop angles."""

    def __init__(self, x, y, r, **kwargs):
        Arc.__init__(self, x, y, r, r, start=None, stop=None, **kwargs)

    def __str__(self):
        return self._as_call('Circle', (self.x, self.y, self.rx))
"Point(2,1,char='4')\n", + "Triangle(6,5,4,3,2,1,char='5')\n", + "Arc(6,5,4,3,0,3.141592653589793,char='6')\n", + "Oval(4,3,2,1,char='7')\n", + "Circle(3,2,1,char='8')\n" + ] + } + ], + "source": [ + "r1=Rectangle(4,3,2,1,char='1',name='r1')\n", + "s1=Square(3,2,1,char='2')\n", + "l1=Line(4,3,2,1,char='3')\n", + "compound=CompoundShape([r1,s1,l1])\n", + "p1=Point(2,1,char='4')\n", + "t1=Triangle(6,5,4,3,2,1,char='5')\n", + "a1=Arc(6,5,4,3,0,math.pi,char='6')\n", + "o1=Oval(4,3,2,1,char='7')\n", + "c1=Circle(3,2,1,char='8')\n", + "print(str(r1))\n", + "print(str(s1))\n", + "print(str(l1))\n", + "print(str(compound))\n", + "print(str(p1))\n", + "print(str(t1))\n", + "print(str(a1))\n", + "print(str(o1))\n", + "print(str(c1))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "5. Add to `RasterDrawing` two functions, `save(filename)` and `load(filename)`. The save function writes the `__str__()` of all of the shapes in the drawing to a file (one shape per line). The load function, reads the file, and instantiates each object using the python `eval(expression)` function, and adds each shape to the drawing, thereby recreating a \"saved\" raster drawing. Use this functionality to save and load your happy face.\n", + "\n", + " `eval` takes a string that contains a fragment of a python code and executes it. 
class RasterDrawing:
    """Ordered collection of named shapes; round-trips itself through a text file."""

    def __init__(self):
        self.shapes = dict()       # name -> shape
        self.shape_names = list()  # preserves insertion/paint order

    def add_shape(self, shape):
        # unnamed shapes get an auto-generated unique name
        if shape.name == "":
            shape.name = self.assign_name()
        self.shapes[shape.name] = shape
        self.shape_names.append(shape.name)

    def paint(self, canvas):
        for shape_name in self.shape_names:
            self.shapes[shape_name].paint(canvas)

    def assign_name(self):
        """Return the first unused name shape_0, shape_1, ...

        The original never incremented i inside the loop, so adding a second
        auto-named shape hung forever.
        """
        name_base = "shape"
        name = name_base + "_0"
        i = 1
        while name in self.shapes:
            name = name_base + "_" + str(i)
            i += 1
        return name

    def save(self, filename):
        """Write one reconstructing expression (str(shape)) per line.

        Uses a context manager so the file is closed even on error.
        """
        with open(filename, 'w') as sf:
            sf.write("\n".join(str(self.shapes[n]) for n in self.shape_names))

    def load(self, filename):
        """Rebuild shapes by eval-ing each saved line.

        WARNING: eval executes arbitrary Python — only load files produced by
        save() from a trusted source.
        """
        with open(filename, 'r') as sf:
            for line in sf:
                self.add_shape(eval(line))
\n", + "1 44 2 22 1 \n", + "1 4 22222222 1 \n", + "1 4 2 1 \n", + "1 4 1 \n", + "1 4 1 \n", + "1 4 11\n", + "1 4 1 \n", + "1 4 1 \n", + "1 4 33333333 1 \n", + "1 44 3 3 1 \n", + "11 4 3 33 11 \n", + " 1 4 33333333 1 \n", + " 11 44 3 11 \n", + " 1 44 1 \n", + " 1 44444 1 \n", + " 11 11 \n", + " 11 11 \n", + " 11 11 \n", + " 111 111 \n", + " 111111111111 \n", + " 1 \n" + ] + } + ], + "source": [ + "rd=RasterDrawing()\n", + "\n", + "rd.add_shape(Circle(15,15,15,char=\"1\",name=\"head\"))\n", + "rd.add_shape(Oval(10,18,2,4,char=\"2\",name=\"left eye\"))\n", + "rd.add_shape(Oval(20,18,2,4,char=\"3\",name=\"right eye\"))\n", + "rd.add_shape(Arc(15,15,10,10,math.pi,2*math.pi,char=\"4\",name=\"mouth\"))\n", + "\n", + "rd.save(\"savedRasterDrawing\")\n", + "del rd\n", + "rd = RasterDrawing()\n", + "rd.load(\"savedRasterDrawing\")\n", + "\n", + "c1=Canvas(31,31)\n", + "\n", + "rd.paint(c1)\n", + "\n", + "c1.display()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}