Browse Source

Add softmax & ce

fetches/feikei/master
Shuhui Bu 6 years ago
parent
commit
f5b544079a
23 changed files with 20263 additions and 193 deletions
  1. +4
    -5
      0_numpy_matplotlib_scipy_sympy/matplotlib_ani1.ipynb
  2. +2
    -2
      0_numpy_matplotlib_scipy_sympy/matplotlib_ani2.ipynb
  3. +322
    -0
      0_numpy_matplotlib_scipy_sympy/notebook.tex
  4. +4
    -0
      1_kmeans/README.md
  5. +165
    -1
      1_knn/knn_classification.ipynb
  6. +109
    -1
      1_knn/knn_classification.py
  7. +400
    -0
      dataset_circle.csv
  8. +7
    -21
      nn/Perceptron.ipynb
  9. +43
    -99
      nn/mlp_bp.ipynb
  10. +37
    -52
      nn/mlp_bp.py
  11. +176
    -0
      nn/softmax_ce.ipynb
  12. +146
    -0
      nn/softmax_ce.py
  13. +9426
    -0
      references/Matplotlib.ipynb
  14. +11
    -1
      references/References.md
  15. +1353
    -0
      references/SciPy.ipynb
  16. +1069
    -0
      references/Scikit-learn.ipynb
  17. +5481
    -0
      references/Seaborn.ipynb
  18. +781
    -0
      references/Statsmodels.ipynb
  19. +400
    -0
      tips/dataset_circles.csv
  20. +55
    -3
      tips/datasets.ipynb
  21. +32
    -3
      tips/datasets.py
  22. +159
    -5
      tips/notebook_tips.ipynb
  23. +81
    -0
      tips/notebook_tips.py

+ 4
- 5
0_numpy_matplotlib_scipy_sympy/matplotlib_ani1.ipynb View File

@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 1,
"metadata": {},
"outputs": [
{
@@ -49,7 +49,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 2,
"metadata": {},
"outputs": [
{
@@ -523,7 +523,7 @@
"<IPython.core.display.HTML object>"
]
},
"execution_count": 4,
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
@@ -567,8 +567,7 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
},
"main_language": "python"
}
},
"nbformat": 4,
"nbformat_minor": 2


+ 2
- 2
0_numpy_matplotlib_scipy_sympy/matplotlib_ani2.ipynb
File diff suppressed because it is too large
View File


+ 322
- 0
0_numpy_matplotlib_scipy_sympy/notebook.tex View File

@@ -0,0 +1,322 @@

% Default to the notebook output style



% Inherit from the specified cell style.




\documentclass[11pt]{article}

\usepackage[T1]{fontenc}
% Nicer default font (+ math font) than Computer Modern for most use cases
\usepackage{mathpazo}

% Basic figure setup, for now with no caption control since it's done
% automatically by Pandoc (which extracts ![](path) syntax from Markdown).
\usepackage{graphicx}
% We will generate all images so they have a width \maxwidth. This means
% that they will get their normal width if they fit onto the page, but
% are scaled down if they would overflow the margins.
\makeatletter
\def\maxwidth{\ifdim\Gin@nat@width>\linewidth\linewidth
\else\Gin@nat@width\fi}
\makeatother
\let\Oldincludegraphics\includegraphics
% Set max figure width to be 80% of text width, for now hardcoded.
\renewcommand{\includegraphics}[1]{\Oldincludegraphics[width=.8\maxwidth]{#1}}
% Ensure that by default, figures have no caption (until we provide a
% proper Figure object with a Caption API and a way to capture that
% in the conversion process - todo).
\usepackage{caption}
\DeclareCaptionLabelFormat{nolabel}{}
\captionsetup{labelformat=nolabel}

\usepackage{adjustbox} % Used to constrain images to a maximum size
\usepackage{xcolor} % Allow colors to be defined
\usepackage{enumerate} % Needed for markdown enumerations to work
\usepackage{geometry} % Used to adjust the document margins
\usepackage{amsmath} % Equations
\usepackage{amssymb} % Equations
\usepackage{textcomp} % defines textquotesingle
% Hack from http://tex.stackexchange.com/a/47451/13684:
\AtBeginDocument{%
\def\PYZsq{\textquotesingle}% Upright quotes in Pygmentized code
}
\usepackage{upquote} % Upright quotes for verbatim code
\usepackage{eurosym} % defines \euro
\usepackage[mathletters]{ucs} % Extended unicode (utf-8) support
\usepackage[utf8x]{inputenc} % Allow utf-8 characters in the tex document
\usepackage{fancyvrb} % verbatim replacement that allows latex
\usepackage{grffile} % extends the file name processing of package graphics
% to support a larger range
% The hyperref package gives us a pdf with properly built
% internal navigation ('pdf bookmarks' for the table of contents,
% internal cross-reference links, web links for URLs, etc.)
\usepackage{hyperref}
\usepackage{longtable} % longtable support required by pandoc >1.10
\usepackage{booktabs} % table support for pandoc > 1.12.2
\usepackage[inline]{enumitem} % IRkernel/repr support (it uses the enumerate* environment)
\usepackage[normalem]{ulem} % ulem is needed to support strikethroughs (\sout)
% normalem makes italics be italics, not underlines

% Colors for the hyperref package
\definecolor{urlcolor}{rgb}{0,.145,.698}
\definecolor{linkcolor}{rgb}{.71,0.21,0.01}
\definecolor{citecolor}{rgb}{.12,.54,.11}

% ANSI colors
\definecolor{ansi-black}{HTML}{3E424D}
\definecolor{ansi-black-intense}{HTML}{282C36}
\definecolor{ansi-red}{HTML}{E75C58}
\definecolor{ansi-red-intense}{HTML}{B22B31}
\definecolor{ansi-green}{HTML}{00A250}
\definecolor{ansi-green-intense}{HTML}{007427}
\definecolor{ansi-yellow}{HTML}{DDB62B}
\definecolor{ansi-yellow-intense}{HTML}{B27D12}
\definecolor{ansi-blue}{HTML}{208FFB}
\definecolor{ansi-blue-intense}{HTML}{0065CA}
\definecolor{ansi-magenta}{HTML}{D160C4}
\definecolor{ansi-magenta-intense}{HTML}{A03196}
\definecolor{ansi-cyan}{HTML}{60C6C8}
\definecolor{ansi-cyan-intense}{HTML}{258F8F}
\definecolor{ansi-white}{HTML}{C5C1B4}
\definecolor{ansi-white-intense}{HTML}{A1A6B2}

% commands and environments needed by pandoc snippets
% extracted from the output of `pandoc -s`
\providecommand{\tightlist}{%
\setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}}
\DefineVerbatimEnvironment{Highlighting}{Verbatim}{commandchars=\\\{\}}
% Add ',fontsize=\small' for more characters per line
\newenvironment{Shaded}{}{}
\newcommand{\KeywordTok}[1]{\textcolor[rgb]{0.00,0.44,0.13}{\textbf{{#1}}}}
\newcommand{\DataTypeTok}[1]{\textcolor[rgb]{0.56,0.13,0.00}{{#1}}}
\newcommand{\DecValTok}[1]{\textcolor[rgb]{0.25,0.63,0.44}{{#1}}}
\newcommand{\BaseNTok}[1]{\textcolor[rgb]{0.25,0.63,0.44}{{#1}}}
\newcommand{\FloatTok}[1]{\textcolor[rgb]{0.25,0.63,0.44}{{#1}}}
\newcommand{\CharTok}[1]{\textcolor[rgb]{0.25,0.44,0.63}{{#1}}}
\newcommand{\StringTok}[1]{\textcolor[rgb]{0.25,0.44,0.63}{{#1}}}
\newcommand{\CommentTok}[1]{\textcolor[rgb]{0.38,0.63,0.69}{\textit{{#1}}}}
\newcommand{\OtherTok}[1]{\textcolor[rgb]{0.00,0.44,0.13}{{#1}}}
\newcommand{\AlertTok}[1]{\textcolor[rgb]{1.00,0.00,0.00}{\textbf{{#1}}}}
\newcommand{\FunctionTok}[1]{\textcolor[rgb]{0.02,0.16,0.49}{{#1}}}
\newcommand{\RegionMarkerTok}[1]{{#1}}
\newcommand{\ErrorTok}[1]{\textcolor[rgb]{1.00,0.00,0.00}{\textbf{{#1}}}}
\newcommand{\NormalTok}[1]{{#1}}
% Additional commands for more recent versions of Pandoc
\newcommand{\ConstantTok}[1]{\textcolor[rgb]{0.53,0.00,0.00}{{#1}}}
\newcommand{\SpecialCharTok}[1]{\textcolor[rgb]{0.25,0.44,0.63}{{#1}}}
\newcommand{\VerbatimStringTok}[1]{\textcolor[rgb]{0.25,0.44,0.63}{{#1}}}
\newcommand{\SpecialStringTok}[1]{\textcolor[rgb]{0.73,0.40,0.53}{{#1}}}
\newcommand{\ImportTok}[1]{{#1}}
\newcommand{\DocumentationTok}[1]{\textcolor[rgb]{0.73,0.13,0.13}{\textit{{#1}}}}
\newcommand{\AnnotationTok}[1]{\textcolor[rgb]{0.38,0.63,0.69}{\textbf{\textit{{#1}}}}}
\newcommand{\CommentVarTok}[1]{\textcolor[rgb]{0.38,0.63,0.69}{\textbf{\textit{{#1}}}}}
\newcommand{\VariableTok}[1]{\textcolor[rgb]{0.10,0.09,0.49}{{#1}}}
\newcommand{\ControlFlowTok}[1]{\textcolor[rgb]{0.00,0.44,0.13}{\textbf{{#1}}}}
\newcommand{\OperatorTok}[1]{\textcolor[rgb]{0.40,0.40,0.40}{{#1}}}
\newcommand{\BuiltInTok}[1]{{#1}}
\newcommand{\ExtensionTok}[1]{{#1}}
\newcommand{\PreprocessorTok}[1]{\textcolor[rgb]{0.74,0.48,0.00}{{#1}}}
\newcommand{\AttributeTok}[1]{\textcolor[rgb]{0.49,0.56,0.16}{{#1}}}
\newcommand{\InformationTok}[1]{\textcolor[rgb]{0.38,0.63,0.69}{\textbf{\textit{{#1}}}}}
\newcommand{\WarningTok}[1]{\textcolor[rgb]{0.38,0.63,0.69}{\textbf{\textit{{#1}}}}}
% Define a nice break command that doesn't care if a line doesn't already
% exist.
\def\br{\hspace*{\fill} \\* }
% Math Jax compatability definitions
\def\gt{>}
\def\lt{<}
% Document parameters
\title{matplotlib\_ani2}

% Pygments definitions
\makeatletter
\def\PY@reset{\let\PY@it=\relax \let\PY@bf=\relax%
\let\PY@ul=\relax \let\PY@tc=\relax%
\let\PY@bc=\relax \let\PY@ff=\relax}
\def\PY@tok#1{\csname PY@tok@#1\endcsname}
\def\PY@toks#1+{\ifx\relax#1\empty\else%
\PY@tok{#1}\expandafter\PY@toks\fi}
\def\PY@do#1{\PY@bc{\PY@tc{\PY@ul{%
\PY@it{\PY@bf{\PY@ff{#1}}}}}}}
\def\PY#1#2{\PY@reset\PY@toks#1+\relax+\PY@do{#2}}

\expandafter\def\csname PY@tok@gi\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.63,0.00}{##1}}}
\expandafter\def\csname PY@tok@ni\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.60,0.60,0.60}{##1}}}
\expandafter\def\csname PY@tok@ow\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.67,0.13,1.00}{##1}}}
\expandafter\def\csname PY@tok@dl\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@vi\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@nl\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.63,0.63,0.00}{##1}}}
\expandafter\def\csname PY@tok@mb\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@m\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@mo\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@sa\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@nb\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@ss\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@kp\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@sx\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@kn\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@mh\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@gu\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.50,0.00,0.50}{##1}}}
\expandafter\def\csname PY@tok@sc\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@c1\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@gh\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,0.50}{##1}}}
\expandafter\def\csname PY@tok@gt\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.27,0.87}{##1}}}
\expandafter\def\csname PY@tok@vm\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@nc\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,1.00}{##1}}}
\expandafter\def\csname PY@tok@sh\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@gp\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,0.50}{##1}}}
\expandafter\def\csname PY@tok@nv\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@w\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.73,0.73}{##1}}}
\expandafter\def\csname PY@tok@ne\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.82,0.25,0.23}{##1}}}
\expandafter\def\csname PY@tok@gd\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.63,0.00,0.00}{##1}}}
\expandafter\def\csname PY@tok@cm\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@k\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@bp\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@gr\endcsname{\def\PY@tc##1{\textcolor[rgb]{1.00,0.00,0.00}{##1}}}
\expandafter\def\csname PY@tok@sb\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@cp\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.74,0.48,0.00}{##1}}}
\expandafter\def\csname PY@tok@mi\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@mf\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@il\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@nf\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,1.00}{##1}}}
\expandafter\def\csname PY@tok@se\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.73,0.40,0.13}{##1}}}
\expandafter\def\csname PY@tok@nt\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@si\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.73,0.40,0.53}{##1}}}
\expandafter\def\csname PY@tok@nd\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.67,0.13,1.00}{##1}}}
\expandafter\def\csname PY@tok@cpf\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@cs\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@vc\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@sd\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@err\endcsname{\def\PY@bc##1{\setlength{\fboxsep}{0pt}\fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{\strut ##1}}}
\expandafter\def\csname PY@tok@na\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.49,0.56,0.16}{##1}}}
\expandafter\def\csname PY@tok@s1\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@ch\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@s\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@fm\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,1.00}{##1}}}
\expandafter\def\csname PY@tok@c\endcsname{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.25,0.50,0.50}{##1}}}
\expandafter\def\csname PY@tok@kd\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@ge\endcsname{\let\PY@it=\textit}
\expandafter\def\csname PY@tok@kr\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@vg\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.10,0.09,0.49}{##1}}}
\expandafter\def\csname PY@tok@go\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.53,0.53,0.53}{##1}}}
\expandafter\def\csname PY@tok@no\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.53,0.00,0.00}{##1}}}
\expandafter\def\csname PY@tok@o\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}}
\expandafter\def\csname PY@tok@kc\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.50,0.00}{##1}}}
\expandafter\def\csname PY@tok@gs\endcsname{\let\PY@bf=\textbf}
\expandafter\def\csname PY@tok@sr\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.40,0.53}{##1}}}
\expandafter\def\csname PY@tok@nn\endcsname{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,1.00}{##1}}}
\expandafter\def\csname PY@tok@s2\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.73,0.13,0.13}{##1}}}
\expandafter\def\csname PY@tok@kt\endcsname{\def\PY@tc##1{\textcolor[rgb]{0.69,0.00,0.25}{##1}}}

\def\PYZbs{\char`\\}
\def\PYZus{\char`\_}
\def\PYZob{\char`\{}
\def\PYZcb{\char`\}}
\def\PYZca{\char`\^}
\def\PYZam{\char`\&}
\def\PYZlt{\char`\<}
\def\PYZgt{\char`\>}
\def\PYZsh{\char`\#}
\def\PYZpc{\char`\%}
\def\PYZdl{\char`\$}
\def\PYZhy{\char`\-}
\def\PYZsq{\char`\'}
\def\PYZdq{\char`\"}
\def\PYZti{\char`\~}
% for compatibility with earlier versions
\def\PYZat{@}
\def\PYZlb{[}
\def\PYZrb{]}
\makeatother


% Exact colors from NB
\definecolor{incolor}{rgb}{0.0, 0.0, 0.5}
\definecolor{outcolor}{rgb}{0.545, 0.0, 0.0}



% Prevent overflowing lines due to hard-to-break entities
\sloppy
% Setup hyperref package
\hypersetup{
breaklinks=true, % so long urls are correctly broken across lines
colorlinks=true,
urlcolor=urlcolor,
linkcolor=linkcolor,
citecolor=citecolor,
}
% Slightly bigger margins than the latex defaults
\geometry{verbose,tmargin=1in,bmargin=1in,lmargin=1in,rmargin=1in}

\begin{document}
\maketitle

\begin{Verbatim}[commandchars=\\\{\}]
{\color{incolor}In [{\color{incolor}1}]:} \PY{c+c1}{\PYZsh{}\PYZpc{}matplotlib nbagg}
\PY{o}{\PYZpc{}}\PY{k}{matplotlib} nbagg
\PY{k+kn}{import} \PY{n+nn}{numpy} \PY{k}{as} \PY{n+nn}{np}
\PY{k+kn}{import} \PY{n+nn}{matplotlib}\PY{n+nn}{.}\PY{n+nn}{pyplot} \PY{k}{as} \PY{n+nn}{plt}
\PY{k+kn}{import} \PY{n+nn}{matplotlib}\PY{n+nn}{.}\PY{n+nn}{animation} \PY{k}{as} \PY{n+nn}{animation}
\PY{n}{fig} \PY{o}{=} \PY{n}{plt}\PY{o}{.}\PY{n}{figure}\PY{p}{(}\PY{p}{)}
\PY{n}{x} \PY{o}{=} \PY{n}{np}\PY{o}{.}\PY{n}{arange}\PY{p}{(}\PY{l+m+mi}{0}\PY{p}{,} \PY{l+m+mi}{10}\PY{p}{,} \PY{l+m+mf}{0.1}\PY{p}{)}
\PY{n}{ims} \PY{o}{=} \PY{p}{[}\PY{p}{]}
\PY{k}{for} \PY{n}{a} \PY{o+ow}{in} \PY{n+nb}{range}\PY{p}{(}\PY{l+m+mi}{50}\PY{p}{)}\PY{p}{:}
\PY{n}{y} \PY{o}{=} \PY{n}{np}\PY{o}{.}\PY{n}{sin}\PY{p}{(}\PY{n}{x} \PY{o}{\PYZhy{}} \PY{n}{a}\PY{p}{)}
\PY{n}{im} \PY{o}{=} \PY{n}{plt}\PY{o}{.}\PY{n}{plot}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{,} \PY{l+s+s2}{\PYZdq{}}\PY{l+s+s2}{r}\PY{l+s+s2}{\PYZdq{}}\PY{p}{)}
\PY{n}{ims}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{im}\PY{p}{)}
\PY{n}{ani} \PY{o}{=} \PY{n}{animation}\PY{o}{.}\PY{n}{ArtistAnimation}\PY{p}{(}\PY{n}{fig}\PY{p}{,} \PY{n}{ims}\PY{p}{)}
\PY{n}{plt}\PY{o}{.}\PY{n}{show}\PY{p}{(}\PY{p}{)}
\end{Verbatim}


\begin{verbatim}
<IPython.core.display.Javascript object>
\end{verbatim}

\begin{verbatim}
<IPython.core.display.HTML object>
\end{verbatim}


% Add a bibliography block to the postdoc
\end{document}

+ 4
- 0
1_kmeans/README.md View File

@@ -0,0 +1,4 @@


## References
* [如何使用 Keras 实现无监督聚类](http://m.sohu.com/a/236221126_717210)

+ 165
- 1
1_knn/knn_classification.ipynb
File diff suppressed because it is too large
View File


+ 109
- 1
1_knn/knn_classification.py View File

@@ -48,6 +48,114 @@
# ## Program

# +
import numpy as np
import operator

class KNN(object):

def __init__(self, k=3):
self.k = k

def fit(self, x, y):
self.x = x
self.y = y

def _square_distance(self, v1, v2):
return np.sum(np.square(v1-v2))

def _vote(self, ys):
ys_unique = np.unique(ys)
vote_dict = {}
for y in ys:
if y not in vote_dict.keys():
vote_dict[y] = 1
else:
vote_dict[y] += 1
sorted_vote_dict = sorted(vote_dict.items(), key=operator.itemgetter(1), reverse=True)
return sorted_vote_dict[0][0]

def predict(self, x):
y_pred = []
for i in range(len(x)):
dist_arr = [self._square_distance(x[i], self.x[j]) for j in range(len(self.x))]
sorted_index = np.argsort(dist_arr)
top_k_index = sorted_index[:self.k]
y_pred.append(self._vote(ys=self.y[top_k_index]))
return np.array(y_pred)

def score(self, y_true=None, y_pred=None):
if y_true is None and y_pred is None:
y_pred = self.predict(self.x)
y_true = self.y
score = 0.0
for i in range(len(y_true)):
if y_true[i] == y_pred[i]:
score += 1
score /= len(y_true)
return score

# +
# %matplotlib inline

import numpy as np
import matplotlib.pyplot as plt

# data generation
np.random.seed(314)
data_size_1 = 300
x1_1 = np.random.normal(loc=5.0, scale=1.0, size=data_size_1)
x2_1 = np.random.normal(loc=4.0, scale=1.0, size=data_size_1)
y_1 = [0 for _ in range(data_size_1)]

data_size_2 = 400
x1_2 = np.random.normal(loc=10.0, scale=2.0, size=data_size_2)
x2_2 = np.random.normal(loc=8.0, scale=2.0, size=data_size_2)
y_2 = [1 for _ in range(data_size_2)]

x1 = np.concatenate((x1_1, x1_2), axis=0)
x2 = np.concatenate((x2_1, x2_2), axis=0)
x = np.hstack((x1.reshape(-1,1), x2.reshape(-1,1)))
y = np.concatenate((y_1, y_2), axis=0)

data_size_all = data_size_1+data_size_2
shuffled_index = np.random.permutation(data_size_all)
x = x[shuffled_index]
y = y[shuffled_index]

split_index = int(data_size_all*0.7)
x_train = x[:split_index]
y_train = y[:split_index]
x_test = x[split_index:]
y_test = y[split_index:]

# visualize data
plt.scatter(x_train[:,0], x_train[:,1], c=y_train, marker='.')
plt.title("train data")
plt.show()
plt.scatter(x_test[:,0], x_test[:,1], c=y_test, marker='.')
plt.title("test data")
plt.show()



# +
# data preprocessing
x_train = (x_train - np.min(x_train, axis=0)) / (np.max(x_train, axis=0) - np.min(x_train, axis=0))
x_test = (x_test - np.min(x_test, axis=0)) / (np.max(x_test, axis=0) - np.min(x_test, axis=0))

# knn classifier
clf = KNN(k=3)
clf.fit(x_train, y_train)

print('train accuracy: {:.3}'.format(clf.score()))

y_test_pred = clf.predict(x_test)
print('test accuracy: {:.3}'.format(clf.score(y_test, y_test_pred)))
# -

# ## sklearn program

# +
% matplotlib inline

import matplotlib.pyplot as plt
@@ -95,4 +203,4 @@ print('LogisticRegression score: %f' % logistic.fit(X_train, y_train).score(X_te

# ## References
# * [Digits Classification Exercise](http://scikit-learn.org/stable/auto_examples/exercises/plot_digits_classification_exercise.html)
#
# * [knn算法的原理与实现](https://zhuanlan.zhihu.com/p/36549000)

+ 400
- 0
dataset_circle.csv View File

@@ -0,0 +1,400 @@
-4.998874451622919324e+00,4.727671430051504586e+00,0.000000000000000000e+00
3.280980164418858092e+00,1.135719744099867690e+01,0.000000000000000000e+00
-3.989307577792735593e+00,-7.472125124436091781e+00,0.000000000000000000e+00
-2.845588117474840750e+00,-1.110207598677712149e+01,0.000000000000000000e+00
-4.736524057786282604e+00,-9.232347813516641466e+00,0.000000000000000000e+00
-2.997596049424991360e+00,1.045323111011670036e+01,0.000000000000000000e+00
-7.808569372236178197e+00,6.841137640119772101e+00,0.000000000000000000e+00
4.719962553632155000e+00,-5.946625096098887120e+00,0.000000000000000000e+00
-8.270798405113193752e+00,1.027792682624399490e+01,0.000000000000000000e+00
4.232628995836200114e-01,-1.335784821899873975e+01,0.000000000000000000e+00
4.893887192102374328e+00,5.486359338973567645e+00,0.000000000000000000e+00
1.953078035901100407e+00,-1.100069524628494300e+01,0.000000000000000000e+00
-9.639984273015738125e+00,-2.957480661280089684e+00,0.000000000000000000e+00
-9.937439807849466789e+00,-8.269310082917247229e+00,0.000000000000000000e+00
2.204734939814951833e+00,-6.357260901975879008e+00,0.000000000000000000e+00
1.274217189544729223e+01,1.841748008125298375e+00,0.000000000000000000e+00
5.672242663876837732e+00,2.330635774766193880e+00,0.000000000000000000e+00
-7.338456675417745600e+00,1.882312145323686847e+00,0.000000000000000000e+00
-6.947515164920790021e+00,-3.658284293516441021e+00,0.000000000000000000e+00
7.088187308453501423e+00,6.263413958397093140e+00,0.000000000000000000e+00
-1.039295836199625001e+01,-2.962603920087568010e+00,0.000000000000000000e+00
5.002046571685252019e+00,-3.644062366457090807e+00,0.000000000000000000e+00
8.067419960052502503e+00,-8.443685884446271217e+00,0.000000000000000000e+00
1.549263958387518736e+00,1.360545642744239636e+01,0.000000000000000000e+00
-1.010119265227723773e+01,-2.928519634527351201e+00,0.000000000000000000e+00
4.048415111200645455e+00,1.042218191509591740e+01,0.000000000000000000e+00
-5.880502261005368103e-01,-9.966963114333513118e+00,0.000000000000000000e+00
-9.471879751092165733e+00,7.246321559295248349e+00,0.000000000000000000e+00
-5.527486155535233259e+00,-4.540557681865953654e+00,0.000000000000000000e+00
4.659675372099115087e+00,8.202590460146153006e+00,0.000000000000000000e+00
1.269023913615559707e+01,-5.416295582522442587e+00,0.000000000000000000e+00
-2.724914602202719749e+00,1.086205270915533028e+01,0.000000000000000000e+00
3.492062621369136366e+00,4.941165814379694154e+00,0.000000000000000000e+00
-1.136297094437039767e+01,-4.418309032965443528e+00,0.000000000000000000e+00
-6.610445400535019722e+00,-4.723850967144393564e+00,0.000000000000000000e+00
2.945875049475957130e+00,5.421645755190175997e+00,0.000000000000000000e+00
8.441152467024686246e+00,-5.733949763032617497e+00,0.000000000000000000e+00
-5.337748568820175343e+00,-2.894641349143626474e+00,0.000000000000000000e+00
-5.638657441343185361e+00,6.371912940018310323e+00,0.000000000000000000e+00
5.219543579234116493e+00,8.655948521272765817e+00,0.000000000000000000e+00
1.137404405700222121e+01,-7.990877338269509744e-01,0.000000000000000000e+00
-1.102521847936960953e+01,-4.463209620181475934e+00,0.000000000000000000e+00
-4.426607752755420400e+00,-1.107485746787114422e+01,0.000000000000000000e+00
8.947435686205526650e+00,-2.652579411969618217e+00,0.000000000000000000e+00
-5.804551086860440634e+00,1.651285937035928297e+00,0.000000000000000000e+00
8.022634490760131243e+00,-3.328905071455355191e+00,0.000000000000000000e+00
9.797034373218822978e+00,-2.398353279354699996e+00,0.000000000000000000e+00
4.101084418772985352e+00,-7.770073879622331425e+00,0.000000000000000000e+00
-1.097037941212336420e+01,2.691560463736894171e+00,0.000000000000000000e+00
-8.493626604162708205e+00,4.028866188921357505e+00,0.000000000000000000e+00
-1.808727426346540934e+00,6.373654750199562180e+00,0.000000000000000000e+00
-1.134703599947220809e+00,1.372231898814027851e+01,0.000000000000000000e+00
-9.244121242406452055e+00,-5.781992459050565358e+00,0.000000000000000000e+00
-5.759426131918345071e+00,7.205510239047915988e+00,0.000000000000000000e+00
3.771520479958048622e+00,8.565896745900721143e+00,0.000000000000000000e+00
-4.727725223740230076e-01,8.226683007907888978e+00,0.000000000000000000e+00
-1.308436146490704033e+01,2.159635187474379103e-01,0.000000000000000000e+00
1.316991133438216721e+01,-8.577077144100451189e-01,0.000000000000000000e+00
3.762476688091432209e+00,6.329839531959273735e+00,0.000000000000000000e+00
1.168475182553070368e+00,7.772573409341900330e+00,0.000000000000000000e+00
-1.152090613329429480e+01,-9.232442486441853058e-01,0.000000000000000000e+00
4.893098057671737777e+00,7.230378368552453416e+00,0.000000000000000000e+00
4.147691247562475425e+00,-4.945226037571873512e+00,0.000000000000000000e+00
-7.615234893199520627e-01,-1.065069682138725682e+01,0.000000000000000000e+00
4.593746251898513044e+00,-3.971878474045261065e+00,0.000000000000000000e+00
8.332067227771284834e+00,-1.081294914826669462e+01,0.000000000000000000e+00
-1.281878411831117059e+00,6.151739151774008540e+00,0.000000000000000000e+00
1.549020274643722273e+00,8.475746236493849395e+00,0.000000000000000000e+00
-1.312835432164462723e+01,2.895890781775360079e+00,0.000000000000000000e+00
-6.879674869024260175e+00,-6.005871464642620339e+00,0.000000000000000000e+00
7.120206196569053958e+00,4.267492581920733663e+00,0.000000000000000000e+00
-3.676623883394837478e+00,-8.327347687527096198e+00,0.000000000000000000e+00
9.950339284853116695e+00,-1.780878495316860821e+00,0.000000000000000000e+00
-1.044089542954608696e+01,-3.512136861383201580e+00,0.000000000000000000e+00
-7.587104265706257067e+00,3.587490533294854345e+00,0.000000000000000000e+00
6.643156192343808719e+00,7.106449819699371950e+00,0.000000000000000000e+00
7.477250685200753644e+00,-2.444484379289254328e+00,0.000000000000000000e+00
4.528212169753472516e+00,8.752891696925013676e+00,0.000000000000000000e+00
-5.910210499914843041e+00,-3.866998159937307111e+00,0.000000000000000000e+00
1.096992004135587395e+00,-1.000681800365598662e+01,0.000000000000000000e+00
1.151043259372304028e+01,-3.110479095656271564e+00,0.000000000000000000e+00
8.994094857683874622e+00,-1.028508523539741981e+00,0.000000000000000000e+00
4.575858771396155156e+00,-4.844852238217020712e+00,0.000000000000000000e+00
-6.506141747505763462e+00,-2.082806902950740113e+00,0.000000000000000000e+00
3.632653413182652002e+00,-1.250445496171638560e+01,0.000000000000000000e+00
9.015220625644202457e+00,1.356044413526625192e+00,0.000000000000000000e+00
-7.909640480474056545e+00,-2.443660342431176691e+00,0.000000000000000000e+00
-1.093311130711816626e+00,-6.805586551288273611e+00,0.000000000000000000e+00
5.468924148717472455e+00,3.047967598152591773e+00,0.000000000000000000e+00
-1.700676425797274849e+00,-7.473472989986249537e+00,0.000000000000000000e+00
-8.153665412746766705e+00,-5.307423884028198202e+00,0.000000000000000000e+00
-1.019420405459193368e+01,7.026817204704700615e+00,0.000000000000000000e+00
1.216291369840269532e+01,-2.095522204463549087e+00,0.000000000000000000e+00
-7.232498227553167958e+00,3.445633673154797627e-01,0.000000000000000000e+00
1.281345232435039350e+01,-2.067891205217199158e+00,0.000000000000000000e+00
9.142160522178615523e+00,-3.160438550644457945e-02,0.000000000000000000e+00
-9.816955137377885166e+00,2.799259632922912466e-01,0.000000000000000000e+00
4.962398314995689064e-01,-6.135477824292228100e+00,0.000000000000000000e+00
9.671712713236194858e+00,8.370515179846837128e+00,0.000000000000000000e+00
-9.043606859436660983e+00,-6.141375555264566799e+00,0.000000000000000000e+00
7.471351966036366976e-01,7.518586880851066745e+00,0.000000000000000000e+00
8.799592298693627024e-01,-7.514435376619853280e+00,0.000000000000000000e+00
4.941070586263158759e+00,1.157869751130275837e+01,0.000000000000000000e+00
-9.570150782095817377e+00,3.896718735961908209e+00,0.000000000000000000e+00
-6.314182668230160722e+00,-4.111016220677472965e+00,0.000000000000000000e+00
-1.339481700536117970e+01,-1.135787832288092769e+00,0.000000000000000000e+00
-3.057627345888538795e+00,-7.906936284528271131e+00,0.000000000000000000e+00
1.275489779531916001e+00,-9.288886973141030623e+00,0.000000000000000000e+00
3.280352073779522648e+00,-7.709132013091286595e+00,0.000000000000000000e+00
7.258123743653465354e+00,-5.330867587024999743e+00,0.000000000000000000e+00
4.887062318871694622e+00,5.317808164698954343e+00,0.000000000000000000e+00
1.006412918815497726e+01,7.663376319592393848e+00,0.000000000000000000e+00
2.146177544892221789e+00,6.797864555657393559e+00,0.000000000000000000e+00
6.570671589026503945e+00,-1.126975629116406807e+01,0.000000000000000000e+00
-6.977889121586767551e+00,-2.331833796503289147e+00,0.000000000000000000e+00
-5.690598708789069704e+00,-5.597086784482708133e+00,0.000000000000000000e+00
1.605452239709390216e+00,6.464934684315307933e+00,0.000000000000000000e+00
3.470972688576804410e-01,-1.327648582649006315e+01,0.000000000000000000e+00
-7.381144288304149370e+00,-5.156481056563678500e+00,0.000000000000000000e+00
5.113305246413767158e+00,-4.310394641097014201e+00,0.000000000000000000e+00
-2.447502200939821293e+00,7.939592051454749111e+00,0.000000000000000000e+00
-3.404168412622622153e+00,-6.832636952280327414e+00,0.000000000000000000e+00
1.286861594932697983e+01,1.553966184507693526e+00,0.000000000000000000e+00
5.278860659561945390e+00,-6.430051460428339638e+00,0.000000000000000000e+00
6.953973998717738247e+00,1.220882956250419982e+00,0.000000000000000000e+00
1.078929647857148177e+00,-1.140119287293926220e+01,0.000000000000000000e+00
1.021467857395671608e+01,8.816393293395096364e+00,0.000000000000000000e+00
-6.500414765761497016e+00,1.097869475334080747e+01,0.000000000000000000e+00
-4.075595160844034837e+00,-1.022701785461823221e+01,0.000000000000000000e+00
-1.261518294439356325e+01,2.352661249397130838e+00,0.000000000000000000e+00
-2.092561453918301950e+00,5.649800285934039934e+00,0.000000000000000000e+00
-8.395172554026888889e+00,-7.355336662870007203e+00,0.000000000000000000e+00
1.011894580495321350e+01,2.863287385767578463e+00,0.000000000000000000e+00
6.440098975491926225e+00,5.407937306238225439e+00,0.000000000000000000e+00
1.009390009280021516e+01,-2.697672104953574124e+00,0.000000000000000000e+00
-1.095974568393570259e+01,-2.320042133815701568e+00,0.000000000000000000e+00
-5.435323914030652404e+00,3.608832268612751637e+00,0.000000000000000000e+00
-8.181796103407455334e+00,-7.169572693476187197e+00,0.000000000000000000e+00
-6.335249672604938986e+00,-5.851649364801576603e+00,0.000000000000000000e+00
4.725674889291250125e+00,-9.044469885217635508e+00,0.000000000000000000e+00
-4.174975972142638270e+00,6.469189355518081719e+00,0.000000000000000000e+00
-1.768292579955591748e+00,7.895050617972255047e+00,0.000000000000000000e+00
-6.743092747917891927e+00,-9.674488283785420251e+00,0.000000000000000000e+00
-6.051758168185039644e+00,-1.384417078025135472e+00,0.000000000000000000e+00
5.912571656871686621e+00,-1.019552467938347640e+01,0.000000000000000000e+00
-1.073662372628176698e+01,-1.094455982713069986e+00,0.000000000000000000e+00
1.920758615137001968e+00,1.060249196062641985e+01,0.000000000000000000e+00
9.492285482622786930e+00,7.457027651813548097e+00,0.000000000000000000e+00
-1.178837417661738307e+01,-8.983810015540972804e-01,0.000000000000000000e+00
6.616842638292316003e+00,-1.145307128001671515e+01,0.000000000000000000e+00
2.344242967842403491e+00,-1.364175232190520859e+01,0.000000000000000000e+00
7.219096291116632536e+00,-7.103202178555553026e+00,0.000000000000000000e+00
-1.137473544795068570e+01,1.042017704631689634e+00,0.000000000000000000e+00
-2.210492883793041541e-01,-1.071494952087631880e+01,0.000000000000000000e+00
4.999402767739136166e+00,-5.249282848981218663e+00,0.000000000000000000e+00
3.939925020599769123e+00,-1.014326764954310889e+01,0.000000000000000000e+00
1.967794675705813345e+00,1.035465999425737138e+01,0.000000000000000000e+00
-9.418445435815625544e+00,8.979913989156472098e+00,0.000000000000000000e+00
6.793123870613265503e+00,4.092999160136503889e+00,0.000000000000000000e+00
6.637993214199637393e+00,1.116046204208427639e+01,0.000000000000000000e+00
-6.015608161107520502e+00,8.315042120086134636e-01,0.000000000000000000e+00
-9.790667573649734834e+00,-4.261491078660923471e+00,0.000000000000000000e+00
3.008684550809284541e+00,5.953626021472883778e+00,0.000000000000000000e+00
4.616311169846965434e+00,7.725254513146059487e+00,0.000000000000000000e+00
-2.667078671430157755e+00,-6.347007249127237571e+00,0.000000000000000000e+00
-1.748592270990125819e+00,-1.315744297437313826e+01,0.000000000000000000e+00
5.955252033533550815e+00,-1.251728734987527325e+01,0.000000000000000000e+00
-2.612533043388519438e+00,7.133476364554665494e+00,0.000000000000000000e+00
1.017407247111040647e+01,4.247446167669684414e+00,0.000000000000000000e+00
6.914675549487817818e+00,4.127373010422411781e+00,0.000000000000000000e+00
6.670636317220846934e+00,-5.641487655987883265e+00,0.000000000000000000e+00
1.062615644813485893e+01,6.244424228562758472e+00,0.000000000000000000e+00
-6.169791600437800838e+00,1.249748440142003092e-01,0.000000000000000000e+00
-7.001930132629896608e-01,1.300760470713475492e+01,0.000000000000000000e+00
-6.558321900875884403e+00,-9.204213866524561638e+00,0.000000000000000000e+00
8.011083353997189960e+00,-6.049473945953777410e+00,0.000000000000000000e+00
-1.074271129719204154e+01,-7.633397329542090048e+00,0.000000000000000000e+00
-3.984045586971812103e+00,1.324391982032081394e+01,0.000000000000000000e+00
5.827998652509109867e+00,-3.571306546452413855e+00,0.000000000000000000e+00
-8.273398101851878295e+00,-1.313277757722923100e+00,0.000000000000000000e+00
-9.089064650206521989e+00,2.386460797472218065e+00,0.000000000000000000e+00
-6.117237918351078108e+00,-8.230725593702798548e+00,0.000000000000000000e+00
-9.310494051367378177e+00,2.892141468064257204e+00,0.000000000000000000e+00
-5.420112423056186124e+00,8.342523568701238901e+00,0.000000000000000000e+00
-1.251767095133966379e+01,6.603965812677897729e-01,0.000000000000000000e+00
8.875409740472520737e-01,7.898399394405656970e+00,0.000000000000000000e+00
-1.225465649316174144e+01,-3.777333531207176076e+00,0.000000000000000000e+00
6.587892908968072447e+00,1.127849124532697722e+01,0.000000000000000000e+00
-5.980199324871170674e+00,7.456243975428951565e+00,0.000000000000000000e+00
1.035424594893476957e+01,-4.484098136654492528e+00,0.000000000000000000e+00
-7.117476147145587184e+00,-2.819048408111779480e+00,0.000000000000000000e+00
1.990785706505008301e+00,1.137971089662191915e+01,0.000000000000000000e+00
-6.629990058126231212e+00,5.812540358407785046e-01,0.000000000000000000e+00
6.010091945071859953e+00,1.134266418530905440e+01,0.000000000000000000e+00
1.563540700463055710e+00,6.582003958354652795e+00,0.000000000000000000e+00
8.471557189942037880e+00,-1.050544525914208371e+01,0.000000000000000000e+00
-6.598986063527275014e+00,5.693582969311762554e+00,0.000000000000000000e+00
8.433219563542468933e+00,1.067163321165924827e+01,0.000000000000000000e+00
-1.011285137769216469e+00,-1.219545555001986692e+01,0.000000000000000000e+00
1.256727699257306696e+01,4.797924561371654129e+00,0.000000000000000000e+00
-1.276828523536011062e+01,1.485427257184312921e+01,1.000000000000000000e+00
2.200700553643587298e+01,-5.109980858152146865e+00,1.000000000000000000e+00
-9.015832319879871548e+00,1.357709172587320268e+01,1.000000000000000000e+00
-2.064308479232057536e+01,7.683059398107904947e+00,1.000000000000000000e+00
-1.305339087233159923e+01,-1.377770214506358393e+01,1.000000000000000000e+00
-1.058198473565134279e+01,-1.579210776112037529e+01,1.000000000000000000e+00
2.159085522882962405e+00,-2.214526860726335045e+01,1.000000000000000000e+00
-1.595555611141086949e+01,3.847374159362159318e+00,1.000000000000000000e+00
5.755892792673394709e+00,1.792719959809483399e+01,1.000000000000000000e+00
-4.094655249023293919e+00,1.865674575281047964e+01,1.000000000000000000e+00
1.504545607498590343e+01,1.322977627698143444e+01,1.000000000000000000e+00
1.335157567066028683e+01,-1.583800419058930764e+01,1.000000000000000000e+00
2.090538629015262995e+00,2.071601112544702517e+01,1.000000000000000000e+00
2.155108918382654792e+01,-6.819970876788357117e+00,1.000000000000000000e+00
-3.361885513320386654e+00,1.988966673833889232e+01,1.000000000000000000e+00
-5.238828304327662444e+00,1.735666652802654752e+01,1.000000000000000000e+00
1.456437564020385089e+01,-1.256431224170169259e+01,1.000000000000000000e+00
-4.279457049842544158e+00,2.024711507538967226e+01,1.000000000000000000e+00
4.132355925210051129e-01,-2.210281011416533303e+01,1.000000000000000000e+00
-6.173948251655802189e+00,1.953225177250030242e+01,1.000000000000000000e+00
1.459888528231329197e+01,1.196973099346357117e+01,1.000000000000000000e+00
1.236830105347152120e+01,1.329171622622360971e+01,1.000000000000000000e+00
-2.055370904116432484e+01,-1.157598573224092497e+01,1.000000000000000000e+00
-1.781228608768412158e+01,1.407428361796575444e+01,1.000000000000000000e+00
-2.613358396594463340e+00,2.212832006401573182e+01,1.000000000000000000e+00
-4.631552015612522055e+00,1.952227419708583156e+01,1.000000000000000000e+00
-4.054582766052930998e-01,-2.315968441188986660e+01,1.000000000000000000e+00
-2.130849784623066512e+01,-8.360471456003718771e+00,1.000000000000000000e+00
-2.096708169676974975e+01,3.065404221271932350e+00,1.000000000000000000e+00
1.012814990232144829e+01,-1.710688782558848686e+01,1.000000000000000000e+00
-1.886228044460812825e+01,1.456662412183641386e+01,1.000000000000000000e+00
2.054570922725679338e+01,1.016686516490931425e+01,1.000000000000000000e+00
-1.781356955563831335e+01,1.105794464820366940e+01,1.000000000000000000e+00
-1.882094142469163600e+01,9.571019884213677997e+00,1.000000000000000000e+00
1.686939636143700838e+01,1.151714783349403781e+01,1.000000000000000000e+00
-1.466159371937605904e+01,1.869999120055744513e+01,1.000000000000000000e+00
-2.095519182127504010e+01,-7.810257975450833889e+00,1.000000000000000000e+00
1.204168603507365276e+01,1.753305348385429241e+01,1.000000000000000000e+00
-1.309312824479403226e+01,-1.703502335051060967e+01,1.000000000000000000e+00
6.968530985563967661e+00,1.579289053832121326e+01,1.000000000000000000e+00
2.121143805124364690e+01,5.234705357933253644e-01,1.000000000000000000e+00
-1.136778824386126097e+01,1.202252461886451584e+01,1.000000000000000000e+00
-1.490653743586005042e+01,-1.065453467874318783e+01,1.000000000000000000e+00
5.930951893554082588e+00,1.966376798287095795e+01,1.000000000000000000e+00
-7.657447289550228797e-01,-2.365440412500549883e+01,1.000000000000000000e+00
1.713217863539989239e+01,-1.331740399435540567e+00,1.000000000000000000e+00
-2.094869365061085631e+01,8.666992258263725546e+00,1.000000000000000000e+00
1.120784009336820120e+01,1.524895954078855453e+01,1.000000000000000000e+00
1.544574850389397014e+01,1.622432746966154227e+01,1.000000000000000000e+00
2.203909906040028233e+01,-1.495473776473204897e+00,1.000000000000000000e+00
3.406917884280199260e+00,-1.700768942915551918e+01,1.000000000000000000e+00
2.243181081456949499e+01,-7.241367089152824121e+00,1.000000000000000000e+00
1.397256447919322397e+01,-8.970692032560323881e+00,1.000000000000000000e+00
1.630338607375047033e+01,-3.235311185947623791e+00,1.000000000000000000e+00
2.007865587243750838e+01,-1.045532713058078045e+01,1.000000000000000000e+00
1.674891793061967959e+01,1.693356669413548587e+01,1.000000000000000000e+00
-2.132668396060672933e+01,5.783349878735465355e-01,1.000000000000000000e+00
-1.522396895393467275e+01,8.202953318116103176e+00,1.000000000000000000e+00
-2.108761813887401360e+01,8.254358941623493706e+00,1.000000000000000000e+00
-1.158593976432672967e+01,-1.126428988269355536e+01,1.000000000000000000e+00
2.193240760554629887e+01,5.235388163497050051e+00,1.000000000000000000e+00
-1.249042635798417855e+01,-1.597925733075420851e+01,1.000000000000000000e+00
-1.327153600302076875e+01,1.886118178564102976e+01,1.000000000000000000e+00
-2.716096832198759969e-01,-2.053111234162230048e+01,1.000000000000000000e+00
1.870977268496434931e+01,-6.020128277491814117e+00,1.000000000000000000e+00
9.066026373681397743e+00,1.719833327737538653e+01,1.000000000000000000e+00
9.754901942293498607e+00,-2.150786139557243359e+01,1.000000000000000000e+00
5.225986557643799379e+00,-1.851730741620698595e+01,1.000000000000000000e+00
-2.221334423653201995e+01,-5.783879325424583939e+00,1.000000000000000000e+00
1.025710983522760777e+01,1.632071028313165328e+01,1.000000000000000000e+00
9.453048857471646471e+00,1.907210084750916224e+01,1.000000000000000000e+00
2.356904492282887631e+01,-1.535820206309244096e+00,1.000000000000000000e+00
9.596073309462024525e+00,-1.408881719444052649e+01,1.000000000000000000e+00
1.533715082237675809e+01,5.218041165941144754e+00,1.000000000000000000e+00
1.597941654644196952e+01,-3.199722811957190327e+00,1.000000000000000000e+00
7.710684179679512529e-02,-1.945941496300915929e+01,1.000000000000000000e+00
-1.635173025955581849e+01,-8.961664441019049576e+00,1.000000000000000000e+00
-1.726785969753059646e+01,1.110928181418177374e+01,1.000000000000000000e+00
-1.976827267544182476e+01,-5.801616282561767868e+00,1.000000000000000000e+00
1.720673118238611465e+01,1.380754320853958106e+01,1.000000000000000000e+00
-1.778215790803131213e+01,7.481340069022668793e+00,1.000000000000000000e+00
-1.537533888505376112e+01,1.354781942828487828e+01,1.000000000000000000e+00
-1.874647681158858603e+01,-2.086018881879410980e+00,1.000000000000000000e+00
-1.841548344237031642e+01,-1.199413211513814126e+01,1.000000000000000000e+00
1.851772480994277359e+01,6.850430369669953556e+00,1.000000000000000000e+00
-5.199648154333016414e-01,1.699131830698892642e+01,1.000000000000000000e+00
-1.746074179128081028e+01,-2.130885407114637697e+00,1.000000000000000000e+00
2.074980260021153100e+01,-5.338439795355067297e+00,1.000000000000000000e+00
2.241520803513171245e+01,-5.683938977401072457e+00,1.000000000000000000e+00
-2.178559334548606685e+01,2.504910822961404993e+00,1.000000000000000000e+00
-1.414508041707358643e+01,-8.101768631586185876e+00,1.000000000000000000e+00
-1.862002290242275748e+01,8.616856625267361736e+00,1.000000000000000000e+00
-1.031203821148930544e+01,1.591904818440532132e+01,1.000000000000000000e+00
-1.629685765485212201e+01,-2.382938031825091674e+00,1.000000000000000000e+00
-1.132562067695731933e+01,-1.843466755631968468e+01,1.000000000000000000e+00
-1.452496608920110432e+01,-8.510516214915753608e+00,1.000000000000000000e+00
1.640941344250320810e+01,-2.349967766929355051e+00,1.000000000000000000e+00
1.895801817811362611e+01,1.279532674704763950e+00,1.000000000000000000e+00
1.685898478310680559e+01,-8.585000421609902954e-01,1.000000000000000000e+00
-2.355323719417258133e+01,-1.867342377437736234e+00,1.000000000000000000e+00
1.031000052146675472e+01,1.345785980751973554e+01,1.000000000000000000e+00
1.561726855543554215e+01,5.254893355492839646e+00,1.000000000000000000e+00
-9.814663453158873452e+00,1.967051263382919757e+01,1.000000000000000000e+00
-1.386540805399906162e+01,-1.203587962799444178e+01,1.000000000000000000e+00
-1.301101935751681005e+01,-9.983142649640374344e+00,1.000000000000000000e+00
-1.171509236657193576e+01,1.496626280438744061e+01,1.000000000000000000e+00
1.167693745437675368e+01,1.207543933812325498e+01,1.000000000000000000e+00
-1.567005104152508821e+01,-7.694244936710157745e+00,1.000000000000000000e+00
7.594297107755436649e+00,1.938724680764088504e+01,1.000000000000000000e+00
-1.166557231242479098e+01,-1.525600786631990324e+01,1.000000000000000000e+00
1.802869488426354394e+01,9.660607718670391364e+00,1.000000000000000000e+00
1.674728765281428622e+01,1.538616827684373156e+01,1.000000000000000000e+00
2.329746424154684803e+01,-2.516275632552658070e+00,1.000000000000000000e+00
2.795203593960862598e+00,1.718978437221393207e+01,1.000000000000000000e+00
7.664639447149939500e+00,-1.937731464290527583e+01,1.000000000000000000e+00
1.723841704768033978e+01,-4.493031017537385097e+00,1.000000000000000000e+00
-1.349355288713674561e+01,-1.460321966984954400e+01,1.000000000000000000e+00
1.813211287699012431e+01,-1.168412345696741284e+01,1.000000000000000000e+00
-1.875536871361635960e+01,-1.433926831336835583e+01,1.000000000000000000e+00
1.777486246180380647e+01,-8.965987135167223343e+00,1.000000000000000000e+00
2.070525330083701832e+01,7.680707460288881627e+00,1.000000000000000000e+00
-1.045274253331607817e+01,1.410796217549621190e+01,1.000000000000000000e+00
-1.671227927663892387e+01,1.483176419058371920e+01,1.000000000000000000e+00
-2.028412579269172511e+01,-1.164033040208841463e+01,1.000000000000000000e+00
1.484901399967457891e+01,-7.403971040192215192e+00,1.000000000000000000e+00
-1.403252746950399477e+01,1.489423762483883174e+01,1.000000000000000000e+00
1.649330199788062146e+01,-5.027199259991481206e+00,1.000000000000000000e+00
1.628299339557154823e+01,9.776129193439311749e+00,1.000000000000000000e+00
-1.471821765988692832e+01,-1.274770175500240121e+01,1.000000000000000000e+00
-2.265205038433772344e+01,-5.273292845865867662e+00,1.000000000000000000e+00
-9.444264631545278732e+00,1.515886911263916303e+01,1.000000000000000000e+00
4.043266122186780720e+00,-2.286775294044096185e+01,1.000000000000000000e+00
-5.022929194249878826e+00,1.829883608569098641e+01,1.000000000000000000e+00
6.010132267597817490e-01,-2.269799887835729635e+01,1.000000000000000000e+00
1.094309352500864918e+01,1.683686507570662272e+01,1.000000000000000000e+00
-7.244448775989381417e+00,-1.527115111705389516e+01,1.000000000000000000e+00
-1.983678351590502942e+01,8.445929871272308986e+00,1.000000000000000000e+00
2.039685192868745744e+01,-6.904852343871127340e-01,1.000000000000000000e+00
-5.563240229470832965e+00,-1.838760877307888109e+01,1.000000000000000000e+00
1.567973312694492627e+01,-8.216260946740296944e+00,1.000000000000000000e+00
-1.699926940884898485e+01,1.304248754658484977e+01,1.000000000000000000e+00
2.146067659012629036e+01,7.972298139149521568e+00,1.000000000000000000e+00
-1.714946669785888389e+01,-1.239205977614868992e+01,1.000000000000000000e+00
-9.133159500043866785e+00,1.316527462303929852e+01,1.000000000000000000e+00
1.949542885121009661e+01,-4.755821190359950101e+00,1.000000000000000000e+00
1.758784465262350949e+01,-3.024247953202296557e+00,1.000000000000000000e+00
2.316183669546501012e+00,1.792581821698398059e+01,1.000000000000000000e+00
-3.874403224320596806e+00,1.719563605293987507e+01,1.000000000000000000e+00
1.673479038677152531e+01,1.681597401314791185e+01,1.000000000000000000e+00
1.768737536825787160e+01,-2.544767122923435876e-01,1.000000000000000000e+00
1.487046462239019107e+01,6.413010657495455291e+00,1.000000000000000000e+00
1.947951286015037198e+01,4.722271365689197253e+00,1.000000000000000000e+00
-1.524706902936355313e+01,-1.029427503048526127e+01,1.000000000000000000e+00
1.379481431270261460e+01,1.628169717477686618e+01,1.000000000000000000e+00
1.362924150830941450e+01,1.825248836436263034e+01,1.000000000000000000e+00
2.109537520551469214e+01,3.085311315878485505e+00,1.000000000000000000e+00
-1.940133588519703167e+01,-6.955817321976740963e+00,1.000000000000000000e+00
1.487080455326461070e+01,-9.285178375103486204e+00,1.000000000000000000e+00
1.116213775484291482e+01,-1.556849098986591784e+01,1.000000000000000000e+00
-1.001729175779743208e+01,-1.621479619750725831e+01,1.000000000000000000e+00
-8.365844374272201067e-01,2.160080821044751787e+01,1.000000000000000000e+00
-9.823513218967070415e+00,2.164210202731340615e+01,1.000000000000000000e+00
-1.504989138139720239e+01,-7.404669774650498582e+00,1.000000000000000000e+00
-1.860669148147666618e+01,-6.592957807040463969e+00,1.000000000000000000e+00
1.691309845875895590e+01,1.244606399846592737e+01,1.000000000000000000e+00
8.147214228509078282e+00,1.573722042505914587e+01,1.000000000000000000e+00
-1.263493833040107184e+01,-1.567137247846872583e+01,1.000000000000000000e+00
-1.256934220439505978e+01,-1.892833280789729145e+01,1.000000000000000000e+00
-1.001072282682530901e+00,2.241726582770620979e+01,1.000000000000000000e+00
1.736803339613195973e+01,8.958774078558725762e+00,1.000000000000000000e+00
-2.131033698401920518e+01,7.763011871574468259e+00,1.000000000000000000e+00
1.777700005694999419e+01,1.565687443388428335e+01,1.000000000000000000e+00
8.156406264876757461e+00,-1.538203333947896567e+01,1.000000000000000000e+00
1.535416930857535789e+01,1.073219100543042437e+01,1.000000000000000000e+00
1.393351876337050577e+01,1.437531766552787182e+01,1.000000000000000000e+00
-1.751198379986455578e+01,-5.165020091763743437e+00,1.000000000000000000e+00
1.240237820320886897e+01,-1.666745796346258501e+01,1.000000000000000000e+00
1.685374655644983477e+01,-1.672715126431511479e+01,1.000000000000000000e+00
-1.936918689493436219e+01,1.144643638294396659e+01,1.000000000000000000e+00
8.623293458190159910e+00,2.096192697864922749e+01,1.000000000000000000e+00
1.799856010395983930e+01,1.462766791697583280e+01,1.000000000000000000e+00
-7.349573717105968740e+00,-1.742430182240346781e+01,1.000000000000000000e+00
-9.343126549158796479e+00,1.688611527095216758e+01,1.000000000000000000e+00
1.787661108095623774e+01,5.328860327519521434e+00,1.000000000000000000e+00
1.789659771680197053e+01,3.845264879970244021e+00,1.000000000000000000e+00
-4.489666551565596464e+00,1.722137369702340592e+01,1.000000000000000000e+00
2.040698314659437074e+01,-5.531992448574443166e+00,1.000000000000000000e+00
-9.691908581855285476e+00,1.902948549559508251e+01,1.000000000000000000e+00
-7.410224453076865281e+00,2.158595909934987489e+01,1.000000000000000000e+00
7.689774304376260972e+00,-1.859797310491102351e+01,1.000000000000000000e+00
-4.494550632492879672e+00,-2.198983720648939766e+01,1.000000000000000000e+00
1.560586620340868080e+01,-1.430903591152745769e+01,1.000000000000000000e+00
-1.650051567374045902e+01,-1.021013700144491487e+01,1.000000000000000000e+00
7.464899787745848059e+00,-1.477802584368157213e+01,1.000000000000000000e+00
1.608677005761695256e+01,2.497804016041496045e+00,1.000000000000000000e+00
-1.832329726415541771e+01,1.562469368109879131e+00,1.000000000000000000e+00
1.488204674808572658e+00,-1.597126700925981524e+01,1.000000000000000000e+00
-1.711747104872819891e+00,1.907425139135742498e+01,1.000000000000000000e+00
-1.846386149004602117e+01,-1.188235505038806572e+01,1.000000000000000000e+00
1.990171207746382009e+01,-1.313517862727534080e+01,1.000000000000000000e+00

+ 7
- 21
nn/Perceptron.ipynb View File

@@ -141,23 +141,11 @@
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"update weight and bias: 1.5 4.0 0.5\n",
"update weight and bias: -0.5 3.5 0.0\n",
"update weight and bias: -2.0 3.0 -0.5\n",
"w = [-2.0, 3.0]\n",
"b = -0.5\n",
"[ 1 1 1 1 -1 -1 -1 -1]\n",
"[1, 1, 1, 1, -1, -1, -1, -1]\n"
]
}
],
"execution_count": null,
"metadata": {
"lines_to_end_of_cell_marker": 2
},
"outputs": [],
"source": [
"import random\n",
"import numpy as np\n",
@@ -212,8 +200,7 @@
"y_pred = perceptron_pred(train_data, w, b)\n",
"\n",
"print(train_data[:, 2])\n",
"print(y_pred)\n",
"\n"
"print(y_pred)"
]
},
{
@@ -244,8 +231,7 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
},
"main_language": "python"
}
},
"nbformat": 4,
"nbformat_minor": 2


+ 43
- 99
nn/mlp_bp.ipynb View File

@@ -14,6 +14,7 @@
"## 神经元\n",
"\n",
"神经元和感知器本质上是一样的,只不过我们说感知器的时候,它的激活函数是阶跃函数;而当我们说神经元时,激活函数往往选择为sigmoid函数或tanh函数。如下图所示:\n",
"\n",
"![neuron](images/neuron.gif)\n",
"\n",
"计算一个神经元的输出的方法和计算一个感知器的输出是一样的。假设神经元的输入是向量$\\vec{x}$,权重向量是$\\vec{w}$(偏置项是$w_0$),激活函数是sigmoid函数,则其输出y:\n",
@@ -31,13 +32,15 @@
"$$\n",
"\n",
"sigmoid函数是一个非线性函数,值域是(0,1)。函数图像如下图所示\n",
"\n",
"![sigmod_function](images/sigmod.jpg)\n",
"\n",
"sigmoid函数的导数是:\n",
"$$\n",
"y = sigmod(x) \\ \\ \\ \\ \\ \\ (1) \\\\\n",
"y' = y(1-y)\n",
"$$\n",
"\\begin{eqnarray}\n",
"y & = & sigmoid(x) \tag{1} \\\n",
"y' & = & y(1-y)\n",
"\\end{eqnarray}\n",
"\n",
"可以看到,sigmoid函数的导数非常有趣,它可以用sigmoid函数自身来表示。这样,一旦计算出sigmoid函数的值,计算它的导数的值就非常方便。\n",
"\n"
]
@@ -83,11 +86,13 @@
"为了计算节点4的输出值,我们必须先得到其所有上游节点(也就是节点1、2、3)的输出值。节点1、2、3是输入层的节点,所以,他们的输出值就是输入向量$\\vec{x}$本身。按照上图画出的对应关系,可以看到节点1、2、3的输出值分别是$x_1$,$x_2$,$x_3$。我们要求输入向量的维度和输入层神经元个数相同,而输入向量的某个元素对应到哪个输入节点是可以自由决定的,你偏非要把$x_1$赋值给节点2也是完全没有问题的,但这样除了把自己弄晕之外,并没有什么价值。\n",
"\n",
"一旦我们有了节点1、2、3的输出值,我们就可以根据式1计算节点4的输出值$a_4$:\n",
"\n",
"![eqn_3_4](images/eqn_3_4.png)\n",
"\n",
"上式的$w_{4b}$是节点4的偏置项,图中没有画出来。而$w_{41}$,$w_{42}$,$w_{43}$分别为节点1、2、3到节点4连接的权重,在给权重$w_{ji}$编号时,我们把目标节点的编号$j$放在前面,把源节点的编号$i$放在后面。\n",
"\n",
"同样,我们可以继续计算出节点5、6、7的输出值$a_5$,$a_6$,$a_7$。这样,隐藏层的4个节点的输出值就计算完成了,我们就可以接着计算输出层的节点8的输出值$y_1$:\n",
"\n",
"![eqn_5_6](images/eqn_5_6.png)\n",
"\n",
"同理,我们还可以计算出$y_2$的值。这样输出层所有节点的输出值计算完毕,我们就得到了在输入向量$\\vec{x} = (x_1, x_2, x_3)^T$时,神经网络的输出向量$\\vec{y} = (y_1, y_2)^T$。这里我们也看到,输出向量的维度和输出层神经元个数相同。\n",
@@ -103,6 +108,7 @@
"神经网络的计算如果用矩阵来表示会很方便(当然逼格也更高),我们先来看看隐藏层的矩阵表示。\n",
"\n",
"首先我们把隐藏层4个节点的计算依次排列出来:\n",
"\n",
"![eqn_hidden_units](images/eqn_hidden_units.png)\n",
"\n",
"接着,定义网络的输入向量$\\vec{x}$和隐藏层每个节点的权重向量$\\vec{w}$。令\n",
@@ -114,17 +120,21 @@
"![eqn_13_16](images/eqn_13_16.png)\n",
"\n",
"现在,我们把上述计算$a_4$, $a_5$,$a_6$,$a_7$的四个式子写到一个矩阵里面,每个式子作为矩阵的一行,就可以利用矩阵来表示它们的计算了。令\n",
"\n",
"![eqn_matrix1](images/eqn_matrix1.png)\n",
"\n",
"带入前面的一组式子,得到\n",
"\n",
"![formular_2](images/formular_2.png)\n",
"\n",
"在式2中,$f$是激活函数,在本例中是$sigmod$函数;$W$是某一层的权重矩阵;$\\vec{x}$是某层的输入向量;$\\vec{a}$是某层的输出向量。式2说明神经网络的每一层的作用实际上就是先将输入向量左乘一个数组进行线性变换,得到一个新的向量,然后再对这个向量逐元素应用一个激活函数。\n",
"\n",
"每一层的算法都是一样的。比如,对于包含一个输入层,一个输出层和三个隐藏层的神经网络,我们假设其权重矩阵分别为$W_1$,$W_2$,$W_3$,$W_4$,每个隐藏层的输出分别是$\\vec{a}_1$,$\\vec{a}_2$,$\\vec{a}_3$,神经网络的输入为$\\vec{x}$,神经网络的输出为$\\vec{y}$,如下图所示:\n",
"\n",
"![nn_parameters_demo](images/nn_parameters_demo.png)\n",
"\n",
"则每一层的输出向量的计算可以表示为:\n",
"\n",
"![eqn_17_20](images/eqn_17_20.png)\n",
"\n",
"\n",
@@ -146,17 +156,21 @@
"按照机器学习的通用套路,我们先确定神经网络的目标函数,然后用随机梯度下降优化算法去求目标函数最小值时的参数值。\n",
"\n",
"我们取网络所有输出层节点的误差平方和作为目标函数:\n",
"\n",
"![bp_loss](images/bp_loss.png)\n",
"\n",
"其中,$E_d$表示是样本$d$的误差。\n",
"\n",
"然后,使用随机梯度下降算法对目标函数进行优化:\n",
"\n",
"![bp_weight_update](images/bp_weight_update.png)\n",
"\n",
"随机梯度下降算法也就是需要求出误差$E_d$对于每个权重$w_{ji}$的偏导数(也就是梯度),怎么求呢?\n",
"\n",
"![nn3](images/nn3.png)\n",
"\n",
"观察上图,我们发现权重$w_{ji}$仅能通过影响节点$j$的输入值影响网络的其它部分,设$net_j$是节点$j$的加权输入,即\n",
"\n",
"![eqn_21_22](images/eqn_21_22.png)\n",
"\n",
"$E_d$是$net_j$的函数,而$net_j$是$w_{ji}$的函数。根据链式求导法则,可以得到:\n",
@@ -179,22 +193,28 @@
"![nn3](images/nn3.png)\n",
"\n",
"对于输出层来说,$net_j$仅能通过节点$j$的输出值$y_j$来影响网络其它部分,也就是说$E_d$是$y_j$的函数,而$y_j$是$net_j$的函数,其中$y_j = sigmod(net_j)$。所以我们可以再次使用链式求导法则:\n",
"\n",
"![eqn_26](images/eqn_26.png)\n",
"\n",
"考虑上式第一项:\n",
"\n",
"![eqn_27_29](images/eqn_27_29.png)\n",
"\n",
"\n",
"考虑上式第二项:\n",
"\n",
"![eqn_30_31](images/eqn_30_31.png)\n",
"\n",
"将第一项和第二项带入,得到:\n",
"\n",
"![eqn_ed_net_j.png](images/eqn_ed_net_j.png)\n",
"\n",
"如果令$\\delta_j = - \\frac{\\partial E_d}{\\partial net_j}$,也就是一个节点的误差项$\\delta$是网络误差对这个节点输入的偏导数的相反数。带入上式,得到:\n",
"\n",
"![eqn_delta_j.png](images/eqn_delta_j.png)\n",
"\n",
"将上述推导带入随机梯度下降公式,得到:\n",
"\n",
"![eqn_32_34.png](images/eqn_32_34.png)\n"
]
},
@@ -209,9 +229,11 @@
"![nn3](images/nn3.png)\n",
"\n",
"首先,我们需要定义节点$j$的所有直接下游节点的集合$Downstream(j)$。例如,对于节点4来说,它的直接下游节点是节点8、节点9。可以看到$net_j$只能通过影响$Downstream(j)$再影响$E_d$。设$net_k$是节点$j$的下游节点的输入,则$E_d$是$net_k$的函数,而$net_k$是$net_j$的函数。因为$net_k$有多个,我们应用全导数公式,可以做出如下推导:\n",
"\n",
"![eqn_35_40](images/eqn_35_40.png)\n",
"\n",
"因为$\\delta_j = - \\frac{\\partial E_d}{\\partial net_j}$,带入上式得到:\n",
"\n",
"![eqn_delta_hidden.png](images/eqn_delta_hidden.png)\n",
"\n",
"\n",
@@ -233,28 +255,38 @@
"然后,我们按照下面的方法计算出每个节点的误差项$\\delta_i$:\n",
"\n",
"* **对于输出层节点$i$**\n",
"\n",
"![formular_3.png](images/formular_3.png)\n",
"\n",
"其中,$\\delta_i$是节点$i$的误差项,$y_i$是节点$i$的输出值,$t_i$是样本对应于节点$i$的目标值。举个例子,根据上图,对于输出层节点8来说,它的输出值是$y_1$,而样本的目标值是$t_1$,带入上面的公式得到节点8的误差项应该是:\n",
"\n",
"![forumlar_delta8.png](images/forumlar_delta8.png)\n",
"\n",
"* **对于隐藏层节点**\n",
"\n",
"![formular_4.png](images/formular_4.png)\n",
"\n",
"其中,$a_i$是节点$i$的输出值,$w_{ki}$是节点$i$到它的下一层节点$k$的连接的权重,$\\delta_k$是节点$i$的下一层节点$k$的误差项。例如,对于隐藏层节点4来说,计算方法如下:\n",
"\n",
"![forumlar_delta4.png](images/forumlar_delta4.png)\n",
"\n",
"\n",
"\n",
"最后,更新每个连接上的权值:\n",
"\n",
"![formular_5.png](images/formular_5.png)\n",
"\n",
"其中,$w_{ji}$是节点$i$到节点$j$的权重,$\\eta$是一个成为学习速率的常数,$\\delta_j$是节点$j$的误差项,$x_{ji}$是节点$i$传递给节点$j$的输入。例如,权重$w_{84}$的更新方法如下:\n",
"\n",
"![eqn_w84_update.png](images/eqn_w84_update.png)\n",
"\n",
"类似的,权重$w_{41}$的更新方法如下:\n",
"\n",
"![eqn_w41_update.png](images/eqn_w41_update.png)\n",
"\n",
"\n",
"偏置项的输入值永远为1。例如,节点4的偏置项$w_{4b}$应该按照下面的方法计算:\n",
"\n",
"![eqn_w4b_update.png](images/eqn_w4b_update.png)\n",
"\n",
"我们已经介绍了神经网络每个节点误差项的计算和权重更新方法。显然,计算一个节点的误差项,需要先计算每个与其相连的下一层节点的误差项。这就要求误差项的计算顺序必须是从输出层开始,然后反向依次计算每个隐藏层的误差项,直到与输入层相连的那个隐藏层。这就是反向传播算法的名字的含义。当所有节点的误差项计算完毕后,我们就可以根据式5来更新所有的权重。\n",
@@ -959,13 +991,7 @@
"epoch [ 578] L = 38.565342, acc = 0.850000\n",
"epoch [ 579] L = 38.565098, acc = 0.850000\n",
"epoch [ 580] L = 38.564855, acc = 0.850000\n",
"epoch [ 581] L = 38.564613, acc = 0.850000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch [ 581] L = 38.564613, acc = 0.850000\n",
"epoch [ 582] L = 38.564371, acc = 0.850000\n",
"epoch [ 583] L = 38.564130, acc = 0.850000\n",
"epoch [ 584] L = 38.563891, acc = 0.850000\n",
@@ -1559,13 +1585,7 @@
"epoch [1172] L = 38.480196, acc = 0.845000\n",
"epoch [1173] L = 38.480101, acc = 0.845000\n",
"epoch [1174] L = 38.480005, acc = 0.845000\n",
"epoch [1175] L = 38.479909, acc = 0.845000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch [1175] L = 38.479909, acc = 0.845000\n",
"epoch [1176] L = 38.479814, acc = 0.845000\n",
"epoch [1177] L = 38.479719, acc = 0.845000\n",
"epoch [1178] L = 38.479623, acc = 0.845000\n",
@@ -2192,13 +2212,7 @@
"epoch [1799] L = 38.432669, acc = 0.845000\n",
"epoch [1800] L = 38.432608, acc = 0.845000\n",
"epoch [1801] L = 38.432546, acc = 0.845000\n",
"epoch [1802] L = 38.432485, acc = 0.845000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch [1802] L = 38.432485, acc = 0.845000\n",
"epoch [1803] L = 38.432423, acc = 0.845000\n",
"epoch [1804] L = 38.432362, acc = 0.845000\n",
"epoch [1805] L = 38.432301, acc = 0.845000\n",
@@ -3229,13 +3243,7 @@
"L = 37.861420, acc = 0.865000\n",
"L = 37.857369, acc = 0.865000\n",
"L = 37.853295, acc = 0.865000\n",
"L = 37.849197, acc = 0.865000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"L = 37.849197, acc = 0.865000\n",
"L = 37.845075, acc = 0.865000\n",
"L = 37.840929, acc = 0.865000\n",
"L = 37.836759, acc = 0.865000\n",
@@ -3846,13 +3854,7 @@
"L = 25.844314, acc = 0.925000\n",
"L = 25.817979, acc = 0.925000\n",
"L = 25.791672, acc = 0.925000\n",
"L = 25.765391, acc = 0.925000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"L = 25.765391, acc = 0.925000\n",
"L = 25.739137, acc = 0.925000\n",
"L = 25.712911, acc = 0.930000\n",
"L = 25.686712, acc = 0.935000\n",
@@ -4467,13 +4469,7 @@
"L = 15.309885, acc = 0.960000\n",
"L = 15.301054, acc = 0.960000\n",
"L = 15.292240, acc = 0.960000\n",
"L = 15.283445, acc = 0.960000\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"L = 15.283445, acc = 0.960000\n",
"L = 15.274667, acc = 0.960000\n",
"L = 15.265907, acc = 0.960000\n",
"L = 15.257165, acc = 0.960000\n",
@@ -4761,64 +4757,12 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Softmax & 交叉熵代价函数\n",
"\n",
"softmax经常被添加在分类任务的神经网络中的输出层,神经网络的反向传播中关键的步骤就是求导,从这个过程也可以更深刻地理解反向传播的过程,还可以对梯度传播的问题有更多的思考。\n",
"\n",
"### softmax 函数\n",
"\n",
"softmax(柔性最大值)函数,一般在神经网络中, softmax可以作为分类任务的输出层。其实可以认为softmax输出的是几个类别选择的概率,比如我有一个分类任务,要分为三个类,softmax函数可以根据它们相对的大小,输出三个类别选取的概率,并且概率和为1。\n",
"\n",
"softmax函数的公式是这种形式:\n",
"![softmax](images/softmax.png)\n",
"\n",
"* $S_i$是经过softmax的类别概率输出\n",
"* $z_k$是神经元的输出\n",
"\n",
"更形象的如下图表示:\n",
"![softmax_demo](images/softmax_demo.png)\n",
"softmax直白来说就是将原来输出是3,1,-3通过softmax函数一作用,就映射成为(0,1)的值,而这些值的累和为1(满足概率的性质),那么我们就可以将它理解成概率,在最后选取输出结点的时候,我们就可以选取概率最大(也就是值对应最大的)结点,作为我们的预测目标!\n",
"\n",
"\n",
"\n",
"首先是神经元的输出,一个神经元如下图:\n",
"![softmax_neuron](images/softmax_neuron.png)\n",
"\n",
"神经元的输出设为:\n",
"![softmax_neuron_output_eqn.png](images/softmax_neuron_output_eqn.png)\n",
"其中$W_{ij}$是第$i$个神经元的第$j$个权重,$b$是偏置。$z_i$表示该网络的第$i$个输出。\n",
"\n",
"给这个输出加上一个softmax函数,那就变成了这样:\n",
"![softmax_neuron_output2_eqn.png](images/softmax_neuron_output2_eqn.png)\n",
"$a_i$代表softmax的第$i$个输出值,右侧套用了softmax函数。\n",
"\n",
"\n",
"### 损失函数 loss function\n",
"\n",
"在神经网络反向传播中,要求一个损失函数,这个损失函数其实表示的是真实值与网络的估计值的误差,知道误差了,才能知道怎样去修改网络中的权重。\n",
"\n",
"损失函数可以有很多形式,这里用的是交叉熵函数,主要是由于这个求导结果比较简单,易于计算,并且交叉熵解决某些损失函数学习缓慢的问题。交叉熵的函数是这样的:\n",
"\n",
"![cross_entropy_loss](images/cross_entropy_loss.png)\n",
"\n",
"其中$y_i$表示真实的分类结果。\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## References\n",
"* 反向传播算法\n",
" * [零基础入门深度学习(3) - 神经网络和反向传播算法](https://www.zybuluo.com/hanbingtao/note/476663)\n",
" * [Neural Network Using Python and Numpy](https://www.python-course.eu/neural_networks_with_python_numpy.php)\n",
" * http://www.cedar.buffalo.edu/%7Esrihari/CSE574/Chap5/Chap5.3-BackProp.pdf\n",
" * https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/\n",
"* Softmax & 交叉熵\n",
" * [交叉熵代价函数(作用及公式推导)](https://blog.csdn.net/u014313009/article/details/51043064)\n",
" * [手打例子一步一步带你看懂softmax函数以及相关求导过程](https://www.jianshu.com/p/ffa51250ba2e)\n",
" * [简单易懂的softmax交叉熵损失函数求导](https://www.jianshu.com/p/c02a1fbffad6)"
" * https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/\n"
]
}
],


+ 37
- 52
nn/mlp_bp.py View File

@@ -24,6 +24,7 @@
# ## 神经元
#
# 神经元和感知器本质上是一样的,只不过我们说感知器的时候,它的激活函数是阶跃函数;而当我们说神经元时,激活函数往往选择为sigmoid函数或tanh函数。如下图所示:
#
# ![neuron](images/neuron.gif)
#
# 计算一个神经元的输出的方法和计算一个感知器的输出是一样的。假设神经元的输入是向量$\vec{x}$,权重向量是$\vec{w}$(偏置项是$w_0$),激活函数是sigmoid函数,则其输出y:
@@ -41,13 +42,15 @@
# $$
#
# sigmoid函数是一个非线性函数,值域是(0,1)。函数图像如下图所示
#
# ![sigmod_function](images/sigmod.jpg)
#
# sigmoid函数的导数是:
# $$
# y = sigmod(x) \ \ \ \ \ \ (1) \\
# y' = y(1-y)
# $$
# \begin{eqnarray}
# y & = & sigmod(x) \tag{1} \\
# y' & = & y(1-y)
# \end{eqnarray}
#
# 可以看到,sigmoid函数的导数非常有趣,它可以用sigmoid函数自身来表示。这样,一旦计算出sigmoid函数的值,计算它的导数的值就非常方便。
#
#
@@ -85,11 +88,13 @@
# 为了计算节点4的输出值,我们必须先得到其所有上游节点(也就是节点1、2、3)的输出值。节点1、2、3是输入层的节点,所以,他们的输出值就是输入向量$\vec{x}$本身。按照上图画出的对应关系,可以看到节点1、2、3的输出值分别是$x_1$,$x_2$,$x_3$。我们要求输入向量的维度和输入层神经元个数相同,而输入向量的某个元素对应到哪个输入节点是可以自由决定的,你偏非要把$x_1$赋值给节点2也是完全没有问题的,但这样除了把自己弄晕之外,并没有什么价值。
#
# 一旦我们有了节点1、2、3的输出值,我们就可以根据式1计算节点4的输出值$a_4$:
#
# ![eqn_3_4](images/eqn_3_4.png)
#
# 上式的$w_{4b}$是节点4的偏置项,图中没有画出来。而$w_{41}$,$w_{42}$,$w_{43}$分别为节点1、2、3到节点4连接的权重,在给权重$w_{ji}$编号时,我们把目标节点的编号$j$放在前面,把源节点的编号$i$放在后面。
#
# 同样,我们可以继续计算出节点5、6、7的输出值$a_5$,$a_6$,$a_7$。这样,隐藏层的4个节点的输出值就计算完成了,我们就可以接着计算输出层的节点8的输出值$y_1$:
#
# ![eqn_5_6](images/eqn_5_6.png)
#
# 同理,我们还可以计算出$y_2$的值。这样输出层所有节点的输出值计算完毕,我们就得到了在输入向量$\vec{x} = (x_1, x_2, x_3)^T$时,神经网络的输出向量$\vec{y} = (y_1, y_2)^T$。这里我们也看到,输出向量的维度和输出层神经元个数相同。
@@ -101,6 +106,7 @@
# 神经网络的计算如果用矩阵来表示会很方便(当然逼格也更高),我们先来看看隐藏层的矩阵表示。
#
# 首先我们把隐藏层4个节点的计算依次排列出来:
#
# ![eqn_hidden_units](images/eqn_hidden_units.png)
#
# 接着,定义网络的输入向量$\vec{x}$和隐藏层每个节点的权重向量$\vec{w}$。令
@@ -112,17 +118,21 @@
# ![eqn_13_16](images/eqn_13_16.png)
#
# 现在,我们把上述计算$a_4$, $a_5$,$a_6$,$a_7$的四个式子写到一个矩阵里面,每个式子作为矩阵的一行,就可以利用矩阵来表示它们的计算了。令
#
# ![eqn_matrix1](images/eqn_matrix1.png)
#
# 带入前面的一组式子,得到
#
# ![formular_2](images/formular_2.png)
#
# 在式2中,$f$是激活函数,在本例中是$sigmod$函数;$W$是某一层的权重矩阵;$\vec{x}$是某层的输入向量;$\vec{a}$是某层的输出向量。式2说明神经网络的每一层的作用实际上就是先将输入向量左乘一个数组进行线性变换,得到一个新的向量,然后再对这个向量逐元素应用一个激活函数。
#
# 每一层的算法都是一样的。比如,对于包含一个输入层,一个输出层和三个隐藏层的神经网络,我们假设其权重矩阵分别为$W_1$,$W_2$,$W_3$,$W_4$,每个隐藏层的输出分别是$\vec{a}_1$,$\vec{a}_2$,$\vec{a}_3$,神经网络的输入为$\vec{x}$,神经网络的输出为$\vec{y}$,如下图所示:
#
# ![nn_parameters_demo](images/nn_parameters_demo.png)
#
# 则每一层的输出向量的计算可以表示为:
#
# ![eqn_17_20](images/eqn_17_20.png)
#
#
@@ -140,17 +150,21 @@
# 按照机器学习的通用套路,我们先确定神经网络的目标函数,然后用随机梯度下降优化算法去求目标函数最小值时的参数值。
#
# 我们取网络所有输出层节点的误差平方和作为目标函数:
#
# ![bp_loss](images/bp_loss.png)
#
# 其中,$E_d$表示是样本$d$的误差。
#
# 然后,使用随机梯度下降算法对目标函数进行优化:
#
# ![bp_weight_update](images/bp_weight_update.png)
#
# 随机梯度下降算法也就是需要求出误差$E_d$对于每个权重$w_{ji}$的偏导数(也就是梯度),怎么求呢?
#
# ![nn3](images/nn3.png)
#
# 观察上图,我们发现权重$w_{ji}$仅能通过影响节点$j$的输入值影响网络的其它部分,设$net_j$是节点$j$的加权输入,即
#
# ![eqn_21_22](images/eqn_21_22.png)
#
# $E_d$是$net_j$的函数,而$net_j$是$w_{ji}$的函数。根据链式求导法则,可以得到:
@@ -169,22 +183,28 @@
# ![nn3](images/nn3.png)
#
# 对于输出层来说,$net_j$仅能通过节点$j$的输出值$y_j$来影响网络其它部分,也就是说$E_d$是$y_j$的函数,而$y_j$是$net_j$的函数,其中$y_j = sigmod(net_j)$。所以我们可以再次使用链式求导法则:
#
# ![eqn_26](images/eqn_26.png)
#
# 考虑上式第一项:
#
# ![eqn_27_29](images/eqn_27_29.png)
#
#
# 考虑上式第二项:
#
# ![eqn_30_31](images/eqn_30_31.png)
#
# 将第一项和第二项带入,得到:
#
# ![eqn_ed_net_j.png](images/eqn_ed_net_j.png)
#
# 如果令$\delta_j = - \frac{\partial E_d}{\partial net_j}$,也就是一个节点的误差项$\delta$是网络误差对这个节点输入的偏导数的相反数。带入上式,得到:
#
# ![eqn_delta_j.png](images/eqn_delta_j.png)
#
# 将上述推导带入随机梯度下降公式,得到:
#
# ![eqn_32_34.png](images/eqn_32_34.png)
#

@@ -195,9 +215,11 @@
# ![nn3](images/nn3.png)
#
# 首先,我们需要定义节点$j$的所有直接下游节点的集合$Downstream(j)$。例如,对于节点4来说,它的直接下游节点是节点8、节点9。可以看到$net_j$只能通过影响$Downstream(j)$再影响$E_d$。设$net_k$是节点$j$的下游节点的输入,则$E_d$是$net_k$的函数,而$net_k$是$net_j$的函数。因为$net_k$有多个,我们应用全导数公式,可以做出如下推导:
#
# ![eqn_35_40](images/eqn_35_40.png)
#
# 因为$\delta_j = - \frac{\partial E_d}{\partial net_j}$,带入上式得到:
#
# ![eqn_delta_hidden.png](images/eqn_delta_hidden.png)
#
#
@@ -215,28 +237,38 @@
# 然后,我们按照下面的方法计算出每个节点的误差项$\delta_i$:
#
# * **对于输出层节点$i$**
#
# ![formular_3.png](images/formular_3.png)
#
# 其中,$\delta_i$是节点$i$的误差项,$y_i$是节点$i$的输出值,$t_i$是样本对应于节点$i$的目标值。举个例子,根据上图,对于输出层节点8来说,它的输出值是$y_1$,而样本的目标值是$t_1$,带入上面的公式得到节点8的误差项应该是:
#
# ![forumlar_delta8.png](images/forumlar_delta8.png)
#
# * **对于隐藏层节点**
#
# ![formular_4.png](images/formular_4.png)
#
# 其中,$a_i$是节点$i$的输出值,$w_{ki}$是节点$i$到它的下一层节点$k$的连接的权重,$\delta_k$是节点$i$的下一层节点$k$的误差项。例如,对于隐藏层节点4来说,计算方法如下:
#
# ![forumlar_delta4.png](images/forumlar_delta4.png)
#
#
#
# 最后,更新每个连接上的权值:
#
# ![formular_5.png](images/formular_5.png)
#
# 其中,$w_{ji}$是节点$i$到节点$j$的权重,$\eta$是一个成为学习速率的常数,$\delta_j$是节点$j$的误差项,$x_{ji}$是节点$i$传递给节点$j$的输入。例如,权重$w_{84}$的更新方法如下:
#
# ![eqn_w84_update.png](images/eqn_w84_update.png)
#
# 类似的,权重$w_{41}$的更新方法如下:
#
# ![eqn_w41_update.png](images/eqn_w41_update.png)
#
#
# 偏置项的输入值永远为1。例如,节点4的偏置项$w_{4b}$应该按照下面的方法计算:
#
# ![eqn_w4b_update.png](images/eqn_w4b_update.png)
#
# 我们已经介绍了神经网络每个节点误差项的计算和权重更新方法。显然,计算一个节点的误差项,需要先计算每个与其相连的下一层节点的误差项。这就要求误差项的计算顺序必须是从输出层开始,然后反向依次计算每个隐藏层的误差项,直到与输入层相连的那个隐藏层。这就是反向传播算法的名字的含义。当所有节点的误差项计算完毕后,我们就可以根据式5来更新所有的权重。
@@ -516,57 +548,10 @@ print(y_res[1:10, :])
# 3. 如何能让神经网络更快的训练好?
# 4. 如何抽象,让神经网络的类支持更多的类型的层

# ## Softmax & 交叉熵代价函数
#
# softmax经常被添加在分类任务的神经网络中的输出层,神经网络的反向传播中关键的步骤就是求导,从这个过程也可以更深刻地理解反向传播的过程,还可以对梯度传播的问题有更多的思考。
#
# ### softmax 函数
#
# softmax(柔性最大值)函数,一般在神经网络中, softmax可以作为分类任务的输出层。其实可以认为softmax输出的是几个类别选择的概率,比如我有一个分类任务,要分为三个类,softmax函数可以根据它们相对的大小,输出三个类别选取的概率,并且概率和为1。
#
# softmax函数的公式是这种形式:
# ![softmax](images/softmax.png)
#
# * $S_i$是经过softmax的类别概率输出
# * $z_k$是神经元的输出
#
# 更形象的如下图表示:
# ![softmax_demo](images/softmax_demo.png)
# softmax直白来说就是将原来输出是3,1,-3通过softmax函数一作用,就映射成为(0,1)的值,而这些值的累和为1(满足概率的性质),那么我们就可以将它理解成概率,在最后选取输出结点的时候,我们就可以选取概率最大(也就是值对应最大的)结点,作为我们的预测目标!
#
#
#
# 首先是神经元的输出,一个神经元如下图:
# ![softmax_neuron](images/softmax_neuron.png)
#
# 神经元的输出设为:
# ![softmax_neuron_output_eqn.png](images/softmax_neuron_output_eqn.png)
# 其中$W_{ij}$是第$i$个神经元的第$j$个权重,$b$是偏置。$z_i$表示该网络的第$i$个输出。
#
# 给这个输出加上一个softmax函数,那就变成了这样:
# ![softmax_neuron_output2_eqn.png](images/softmax_neuron_output2_eqn.png)
# $a_i$代表softmax的第$i$个输出值,右侧套用了softmax函数。
#
#
# ### 损失函数 loss function
#
# 在神经网络反向传播中,要求一个损失函数,这个损失函数其实表示的是真实值与网络的估计值的误差,知道误差了,才能知道怎样去修改网络中的权重。
#
# 损失函数可以有很多形式,这里用的是交叉熵函数,主要是由于这个求导结果比较简单,易于计算,并且交叉熵解决某些损失函数学习缓慢的问题。交叉熵的函数是这样的:
#
# ![cross_entropy_loss](images/cross_entropy_loss.png)
#
# 其中$y_i$表示真实的分类结果。
#
#

# ## References
# * 反向传播算法
# * [零基础入门深度学习(3) - 神经网络和反向传播算法](https://www.zybuluo.com/hanbingtao/note/476663)
# * [Neural Network Using Python and Numpy](https://www.python-course.eu/neural_networks_with_python_numpy.php)
# * http://www.cedar.buffalo.edu/%7Esrihari/CSE574/Chap5/Chap5.3-BackProp.pdf
# * https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/
# * Softmax & 交叉熵
# * [交叉熵代价函数(作用及公式推导)](https://blog.csdn.net/u014313009/article/details/51043064)
# * [手打例子一步一步带你看懂softmax函数以及相关求导过程](https://www.jianshu.com/p/ffa51250ba2e)
# * [简单易懂的softmax交叉熵损失函数求导](https://www.jianshu.com/p/c02a1fbffad6)
#

+ 176
- 0
nn/softmax_ce.ipynb View File

@@ -0,0 +1,176 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Softmax & 交叉熵代价函数\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"softmax经常被添加在分类任务的神经网络中的输出层,神经网络的反向传播中关键的步骤就是求导,从这个过程也可以更深刻地理解反向传播的过程,还可以对梯度传播的问题有更多的思考。\n",
"\n",
"## softmax 函数\n",
"\n",
"softmax(柔性最大值)函数,一般在神经网络中, softmax可以作为分类任务的输出层。其实可以认为softmax输出的是几个类别选择的概率,比如我有一个分类任务,要分为三个类,softmax函数可以根据它们相对的大小,输出三个类别选取的概率,并且概率和为1。\n",
"\n",
"softmax函数的公式是这种形式:\n",
"\n",
"$$\n",
"S_i = \\frac{e^{z_i}}{\\sum_k e^{z_k}}\n",
"$$\n",
"\n",
"* $S_i$是经过softmax的类别概率输出\n",
"* $z_k$是神经元的输出\n",
"\n",
"\n",
"更形象的如下图表示:\n",
"\n",
"![softmax_demo](images/softmax_demo.png)\n",
"\n",
"softmax直白来说就是将原来输出是$[3,1,-3]$通过softmax函数一作用,就映射成为(0,1)的值,而这些值的累和为1(满足概率的性质),那么我们就可以将它理解成概率,在最后选取输出结点的时候,我们就可以选取概率最大(也就是值对应最大的)结点,作为我们的预测目标!\n",
"\n",
"\n",
"\n",
"首先是神经元的输出,一个神经元如下图:\n",
"\n",
"![softmax_neuron](images/softmax_neuron.png)\n",
"\n",
"神经元的输出设为:\n",
"\n",
"$$\n",
"z_i = \\sum_{j} w_{ij} x_{j} + b\n",
"$$\n",
"\n",
"其中$w_{ij}$是第$i$个神经元的第$j$个权重,$b$是偏置。$z_i$表示该网络的第$i$个输出。\n",
"\n",
"给这个输出加上一个softmax函数,那就变成了这样:\n",
"\n",
"$$\n",
"a_i = \\frac{e^{z_i}}{\\sum_k e^{z_k}}\n",
"$$\n",
"\n",
"$a_i$代表softmax的第$i$个输出值,右侧套用了softmax函数。\n",
"\n",
"\n",
"### 损失函数 loss function\n",
"\n",
"在神经网络反向传播中,要求一个损失函数,这个损失函数其实表示的是真实值与网络的估计值的误差,知道误差了,才能知道怎样去修改网络中的权重。\n",
"\n",
"损失函数可以有很多形式,这里用的是交叉熵函数,主要是由于这个求导结果比较简单,易于计算,并且交叉熵解决某些损失函数学习缓慢的问题。**[交叉熵函数](https://blog.csdn.net/u014313009/article/details/51043064)**是这样的:\n",
"\n",
"$$\n",
"C = - \\sum_i y_i \\ln a_i\n",
"$$\n",
"\n",
"其中$y_i$表示真实的分类结果。\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 推导过程\n",
"\n",
"首先,我们要明确一下我们要求什么,我们要求的是我们的$loss$对于神经元输出($z_i$)的梯度,即:\n",
"\n",
"$$\n",
"\\frac{\\partial C}{\\partial z_i}\n",
"$$\n",
"\n",
"根据复合函数求导法则:\n",
"\n",
"$$\n",
"\\frac{\\partial C}{\\partial z_i} = \\sum_j \\frac{\\partial C}{\\partial a_j} \\frac{\\partial a_j}{\\partial z_i}\n",
"$$\n",
"\n",
"有个人可能有疑问了,这里为什么是$a_j$而不是$a_i$,这里要看一下$softmax$的公式了,因为$softmax$公式的特性,它的分母包含了所有神经元的输出,所以,对于不等于i的其他输出里面,也包含着$z_i$,所有的$a$都要纳入到计算范围中,并且后面的计算可以看到需要分为$i = j$和$i \\ne j$两种情况求导。\n",
"\n",
"### 针对$a_j$的偏导\n",
"\n",
"$$\n",
"\\frac{\\partial C}{\\partial a_j} = \\frac{\\partial (-\\sum_j y_j \\ln a_j)}{\\partial a_j} = -\\sum_j y_j \\frac{1}{a_j}\n",
"$$\n",
"\n",
"### 针对$z_i$的偏导\n",
"\n",
"如果 $i=j$ :\n",
"\n",
"\\begin{eqnarray}\n",
"\\frac{\\partial a_i}{\\partial z_i} & = & \\frac{\\partial (\\frac{e^{z_i}}{\\sum_k e^{z_k}})}{\\partial z_i} \\\\\n",
" & = & \\frac{\\sum_k e^{z_k} e^{z_i} - (e^{z_i})^2}{(\\sum_k e^{z_k})^2} \\\\\n",
" & = & (\\frac{e^{z_i}}{\\sum_k e^{z_k}} ) (1 - \\frac{e^{z_i}}{\\sum_k e^{z_k}} ) \\\\\n",
" & = & a_i (1 - a_i)\n",
"\\end{eqnarray}\n",
"\n",
"如果 $i \\ne j$:\n",
"\\begin{eqnarray}\n",
"\\frac{\\partial a_j}{\\partial z_i} & = & \\frac{\\partial (\\frac{e^{z_j}}{\\sum_k e^{z_k}})}{\\partial z_i} \\\\\n",
" & = & \\frac{0 \\cdot \\sum_k e^{z_k} - e^{z_j} \\cdot e^{z_i} }{(\\sum_k e^{z_k})^2} \\\\\n",
" & = & - \\frac{e^{z_j}}{\\sum_k e^{z_k}} \\cdot \\frac{e^{z_i}}{\\sum_k e^{z_k}} \\\\\n",
" & = & -a_j a_i\n",
"\\end{eqnarray}\n",
"\n",
"当u,v都是变量的函数时的导数推导公式:\n",
"$$\n",
"(\\frac{u}{v})' = \\frac{u'v - uv'}{v^2} \n",
"$$\n",
"\n",
"### 整体的推导\n",
"\n",
"\\begin{eqnarray}\n",
"\\frac{\\partial C}{\\partial z_i} & = & (-\\sum_j y_j \\frac{1}{a_j} ) \\frac{\\partial a_j}{\\partial z_i} \\\\\n",
" & = & - \\frac{y_i}{a_i} a_i ( 1 - a_i) + \\sum_{j \\ne i} \\frac{y_j}{a_j} a_i a_j \\\\\n",
" & = & -y_i + y_i a_i + \\sum_{j \\ne i} y_j a_i \\\\\n",
" & = & -y_i + a_i \\sum_{j} y_j\n",
"\\end{eqnarray}"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 问题\n",
"如何将本节所讲的softmax,交叉熵代价函数应用到上节所讲的方法中?"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## References\n",
"\n",
"* Softmax & 交叉熵\n",
" * [交叉熵代价函数(作用及公式推导)](https://blog.csdn.net/u014313009/article/details/51043064)\n",
" * [手打例子一步一步带你看懂softmax函数以及相关求导过程](https://www.jianshu.com/p/ffa51250ba2e)\n",
" * [简单易懂的softmax交叉熵损失函数求导](https://www.jianshu.com/p/c02a1fbffad6)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
},
"main_language": "python"
},
"nbformat": 4,
"nbformat_minor": 2
}

+ 146
- 0
nn/softmax_ce.py View File

@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext_format_version: '1.2'
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# language_info:
# codemirror_mode:
# name: ipython
# version: 3
# file_extension: .py
# mimetype: text/x-python
# name: python
# nbconvert_exporter: python
# pygments_lexer: ipython3
# version: 3.5.2
# ---

# # Softmax & 交叉熵代价函数
#

# softmax经常被添加在分类任务的神经网络中的输出层,神经网络的反向传播中关键的步骤就是求导,从这个过程也可以更深刻地理解反向传播的过程,还可以对梯度传播的问题有更多的思考。
#
# ## softmax 函数
#
# softmax(柔性最大值)函数,一般在神经网络中, softmax可以作为分类任务的输出层。其实可以认为softmax输出的是几个类别选择的概率,比如我有一个分类任务,要分为三个类,softmax函数可以根据它们相对的大小,输出三个类别选取的概率,并且概率和为1。
#
# softmax函数的公式是这种形式:
#
# $$
# S_i = \frac{e^{z_i}}{\sum_k e^{z_k}}
# $$
#
# * $S_i$是经过softmax的类别概率输出
# * $z_k$是神经元的输出
#
#
# 更形象的如下图表示:
#
# ![softmax_demo](images/softmax_demo.png)
#
# softmax直白来说就是将原来输出是$[3,1,-3]$通过softmax函数一作用,就映射成为(0,1)的值,而这些值的累和为1(满足概率的性质),那么我们就可以将它理解成概率,在最后选取输出结点的时候,我们就可以选取概率最大(也就是值对应最大的)结点,作为我们的预测目标!
#
#
#
# 首先是神经元的输出,一个神经元如下图:
#
# ![softmax_neuron](images/softmax_neuron.png)
#
# 神经元的输出设为:
#
# $$
# z_i = \sum_{j} w_{ij} x_{j} + b
# $$
#
# 其中$w_{ij}$是第$i$个神经元的第$j$个权重,$b$是偏置。$z_i$表示该网络的第$i$个输出。
#
# 给这个输出加上一个softmax函数,那就变成了这样:
#
# $$
# a_i = \frac{e^{z_i}}{\sum_k e^{z_k}}
# $$
#
# $a_i$代表softmax的第$i$个输出值,右侧套用了softmax函数。
#
#
# ### 损失函数 loss function
#
# 在神经网络反向传播中,要求一个损失函数,这个损失函数其实表示的是真实值与网络的估计值的误差,知道误差了,才能知道怎样去修改网络中的权重。
#
# 损失函数可以有很多形式,这里用的是交叉熵函数,主要是由于这个求导结果比较简单,易于计算,并且交叉熵解决某些损失函数学习缓慢的问题。**[交叉熵函数](https://blog.csdn.net/u014313009/article/details/51043064)**是这样的:
#
# $$
# C = - \sum_i y_i \ln a_i
# $$
#
# 其中$y_i$表示真实的分类结果。
#
#

# ## 推导过程
#
# 首先,我们要明确一下我们要求什么,我们要求的是我们的$loss$对于神经元输出($z_i$)的梯度,即:
#
# $$
# \frac{\partial C}{\partial z_i}
# $$
#
# 根据复合函数求导法则:
#
# $$
# \frac{\partial C}{\partial z_i} = \sum_j \frac{\partial C}{\partial a_j} \frac{\partial a_j}{\partial z_i}
# $$
#
# 有个人可能有疑问了,这里为什么是$a_j$而不是$a_i$,这里要看一下$softmax$的公式了,因为$softmax$公式的特性,它的分母包含了所有神经元的输出,所以,对于不等于i的其他输出里面,也包含着$z_i$,所有的$a$都要纳入到计算范围中,并且后面的计算可以看到需要分为$i = j$和$i \ne j$两种情况求导。
#
# ### 针对$a_j$的偏导
#
# $$
# \frac{\partial C}{\partial a_j} = \frac{\partial (-\sum_j y_j \ln a_j)}{\partial a_j} = -\sum_j y_j \frac{1}{a_j}
# $$
#
# ### 针对$z_i$的偏导
#
# 如果 $i=j$ :
#
# \begin{eqnarray}
# \frac{\partial a_i}{\partial z_i} & = & \frac{\partial (\frac{e^{z_i}}{\sum_k e^{z_k}})}{\partial z_i} \\
# & = & \frac{\sum_k e^{z_k} e^{z_i} - (e^{z_i})^2}{(\sum_k e^{z_k})^2} \\
# & = & (\frac{e^{z_i}}{\sum_k e^{z_k}} ) (1 - \frac{e^{z_i}}{\sum_k e^{z_k}} ) \\
# & = & a_i (1 - a_i)
# \end{eqnarray}
#
# 如果 $i \ne j$:
# \begin{eqnarray}
# \frac{\partial a_j}{\partial z_i} & = & \frac{\partial (\frac{e^{z_j}}{\sum_k e^{z_k}})}{\partial z_i} \\
# & = & \frac{0 \cdot \sum_k e^{z_k} - e^{z_j} \cdot e^{z_i} }{(\sum_k e^{z_k})^2} \\
# & = & - \frac{e^{z_j}}{\sum_k e^{z_k}} \cdot \frac{e^{z_i}}{\sum_k e^{z_k}} \\
# & = & -a_j a_i
# \end{eqnarray}
#
# 当u,v都是变量的函数时的导数推导公式:
# $$
# (\frac{u}{v})' = \frac{u'v - uv'}{v^2}
# $$
#
# ### 整体的推导
#
# \begin{eqnarray}
# \frac{\partial C}{\partial z_i} & = & (-\sum_j y_j \frac{1}{a_j} ) \frac{\partial a_j}{\partial z_i} \\
# & = & - \frac{y_i}{a_i} a_i ( 1 - a_i) + \sum_{j \ne i} \frac{y_j}{a_j} a_i a_j \\
# & = & -y_i + y_i a_i + \sum_{j \ne i} y_j a_i \\
# & = & -y_i + a_i \sum_{j} y_j
# \end{eqnarray}

# ## 问题
# 如何将本节所讲的softmax,交叉熵代价函数应用到上节所讲的方法中?

# ## References
#
# * Softmax & 交叉熵
# * [交叉熵代价函数(作用及公式推导)](https://blog.csdn.net/u014313009/article/details/51043064)
# * [手打例子一步一步带你看懂softmax函数以及相关求导过程](https://www.jianshu.com/p/ffa51250ba2e)
# * [简单易懂的softmax交叉熵损失函数求导](https://www.jianshu.com/p/c02a1fbffad6)

+ 9426
- 0
references/Matplotlib.ipynb
File diff suppressed because it is too large
View File


+ 11
- 1
references/References.md View File

@@ -43,4 +43,14 @@ http://localhost:8889/notebooks/machineLearning/notebooks/01%20-%20Model%20Selec

* https://medium.com/@UdacityINDIA/how-to-build-your-first-neural-network-with-python-6819c7f65dbf
* https://enlight.nyc/projects/neural-network/
* https://www.python-course.eu/neural_networks_with_python_numpy.php
* https://www.python-course.eu/neural_networks_with_python_numpy.php


## k-Means
* [如何使用 Keras 实现无监督聚类](http://m.sohu.com/a/236221126_717210)

## AutoEncoder (自编码/非监督学习)
* https://morvanzhou.github.io/tutorials/machine-learning/torch/4-04-autoencoder/
* https://github.com/MorvanZhou/PyTorch-Tutorial/blob/master/tutorial-contents/404_autoencoder.py
* pytorch AutoEncoder 自编码 https://www.jianshu.com/p/f0929f427d03
* Adversarial Autoencoders (with Pytorch) https://blog.paperspace.com/adversarial-autoencoders-with-pytorch/

+ 1353
- 0
references/SciPy.ipynb
File diff suppressed because it is too large
View File


+ 1069
- 0
references/Scikit-learn.ipynb
File diff suppressed because it is too large
View File


+ 5481
- 0
references/Seaborn.ipynb
File diff suppressed because it is too large
View File


+ 781
- 0
references/Statsmodels.ipynb View File

@@ -0,0 +1,781 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Statsmodels"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Statsmodels is a Python module that allows users to explore data, estimate statistical models, and perform statistical tests. An extensive list of descriptive statistics, statistical tests, plotting functions, and result statistics are available for different types of data and each estimator.\n",
"\n",
"Library documentation: <a>http://statsmodels.sourceforge.net/</a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Linear Regression Models"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# needed to display the graphs\n",
"%matplotlib inline\n",
"from pylab import *"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import numpy as np\n",
"import pandas as pd\n",
"import statsmodels.api as sm\n",
"from statsmodels.sandbox.regression.predstd import wls_prediction_std\n",
"np.random.seed(9876789)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# create some artificial data\n",
"nsample = 100\n",
"x = np.linspace(0, 10, 100)\n",
"X = np.column_stack((x, x**2))\n",
"beta = np.array([1, 0.1, 10])\n",
"e = np.random.normal(size=nsample)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# add column of 1s for intercept\n",
"X = sm.add_constant(X)\n",
"y = np.dot(X, beta) + e"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" OLS Regression Results \n",
"==============================================================================\n",
"Dep. Variable: y R-squared: 1.000\n",
"Model: OLS Adj. R-squared: 1.000\n",
"Method: Least Squares F-statistic: 4.020e+06\n",
"Date: Sun, 16 Nov 2014 Prob (F-statistic): 2.83e-239\n",
"Time: 20:59:31 Log-Likelihood: -146.51\n",
"No. Observations: 100 AIC: 299.0\n",
"Df Residuals: 97 BIC: 306.8\n",
"Df Model: 2 \n",
"==============================================================================\n",
" coef std err t P>|t| [95.0% Conf. Int.]\n",
"------------------------------------------------------------------------------\n",
"const 1.3423 0.313 4.292 0.000 0.722 1.963\n",
"x1 -0.0402 0.145 -0.278 0.781 -0.327 0.247\n",
"x2 10.0103 0.014 715.745 0.000 9.982 10.038\n",
"==============================================================================\n",
"Omnibus: 2.042 Durbin-Watson: 2.274\n",
"Prob(Omnibus): 0.360 Jarque-Bera (JB): 1.875\n",
"Skew: 0.234 Prob(JB): 0.392\n",
"Kurtosis: 2.519 Cond. No. 144.\n",
"==============================================================================\n"
]
}
],
"source": [
"# fit model and print the summary\n",
"model = sm.OLS(y, X)\n",
"results = model.fit()\n",
"print(results.summary())"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"('Parameters: ', array([ 1.34233516, -0.04024948, 10.01025357]))\n",
"('R2: ', 0.9999879365025871)\n"
]
}
],
"source": [
"# individual results parameters can be accessed\n",
"print('Parameters: ', results.params)\n",
"print('R2: ', results.rsquared)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" OLS Regression Results \n",
"==============================================================================\n",
"Dep. Variable: y R-squared: 0.933\n",
"Model: OLS Adj. R-squared: 0.928\n",
"Method: Least Squares F-statistic: 211.8\n",
"Date: Sun, 16 Nov 2014 Prob (F-statistic): 6.30e-27\n",
"Time: 20:59:31 Log-Likelihood: -34.438\n",
"No. Observations: 50 AIC: 76.88\n",
"Df Residuals: 46 BIC: 84.52\n",
"Df Model: 3 \n",
"==============================================================================\n",
" coef std err t P>|t| [95.0% Conf. Int.]\n",
"------------------------------------------------------------------------------\n",
"x1 0.4687 0.026 17.751 0.000 0.416 0.522\n",
"x2 0.4836 0.104 4.659 0.000 0.275 0.693\n",
"x3 -0.0174 0.002 -7.507 0.000 -0.022 -0.013\n",
"const 5.2058 0.171 30.405 0.000 4.861 5.550\n",
"==============================================================================\n",
"Omnibus: 0.655 Durbin-Watson: 2.896\n",
"Prob(Omnibus): 0.721 Jarque-Bera (JB): 0.360\n",
"Skew: 0.207 Prob(JB): 0.835\n",
"Kurtosis: 3.026 Cond. No. 221.\n",
"==============================================================================\n"
]
}
],
"source": [
"# example with non-linear relationship\n",
"nsample = 50\n",
"sig = 0.5\n",
"x = np.linspace(0, 20, nsample)\n",
"X = np.column_stack((x, np.sin(x), (x-5)**2, np.ones(nsample)))\n",
"beta = [0.5, 0.5, -0.02, 5.]\n",
"\n",
"y_true = np.dot(X, beta)\n",
"y = y_true + sig * np.random.normal(size=nsample)\n",
"\n",
"res = sm.OLS(y, X).fit()\n",
"print(res.summary())"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"('Parameters: ', array([ 0.46872448, 0.48360119, -0.01740479, 5.20584496]))\n",
"('Standard errors: ', array([ 0.02640602, 0.10380518, 0.00231847, 0.17121765]))\n",
"('Predicted values: ', array([ 4.77072516, 5.22213464, 5.63620761, 5.98658823,\n",
" 6.25643234, 6.44117491, 6.54928009, 6.60085051,\n",
" 6.62432454, 6.6518039 , 6.71377946, 6.83412169,\n",
" 7.02615877, 7.29048685, 7.61487206, 7.97626054,\n",
" 8.34456611, 8.68761335, 8.97642389, 9.18997755,\n",
" 9.31866582, 9.36587056, 9.34740836, 9.28893189,\n",
" 9.22171529, 9.17751587, 9.1833565 , 9.25708583,\n",
" 9.40444579, 9.61812821, 9.87897556, 10.15912843,\n",
" 10.42660281, 10.65054491, 10.8063004 , 10.87946503,\n",
" 10.86825119, 10.78378163, 10.64826203, 10.49133265,\n",
" 10.34519853, 10.23933827, 10.19566084, 10.22490593,\n",
" 10.32487947, 10.48081414, 10.66779556, 10.85485568,\n",
" 11.01006072, 11.10575781]))\n"
]
}
],
"source": [
"# look at some quantities of interest\n",
"print('Parameters: ', res.params)\n",
"print('Standard errors: ', res.bse)\n",
"print('Predicted values: ', res.predict())"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.legend.Legend at 0x1788c9e8>"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": [
"iVBORw0KGgoAAAANSUhEUgAAAeEAAAFrCAYAAADmYTGwAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
"AAALEgAACxIB0t1+/AAAIABJREFUeJzs3XlYlGX3wPHv4I4a4G6mabhlmWlWrkEpoKllC5Zptkr2\n",
"lmhvb+WaltavzUqt1zTNViw18y1IRTNcstTMJVeUcs8dAVkFzu+PWxBk1GGYFc7nuuYCZp55nnsG\n",
"5cy9nWMREZRSSinlej7uboBSSilVVmkQVkoppdxEg7BSSinlJhqElVJKKTfRIKyUUkq5iQZhpZRS\n",
"yk3KO+vEFotF9z4ppZQqU0TEUpzjnRaEAXQPsvcaP34848ePd3czlB30d+fd9PfnvSyWYsVfQIej\n",
"lVJKKbfRIKyUUkq5iQZhZVVwcLC7m6DspL8776a/v7LF4qx5W4vFIjonrJRSqqywWCyetTDLGnsm\n",
"rssC/cCilFJlj8uDMGjAuZB+MFFKqbJJ54SVUkopN9EgrJRSSrmJBmGllFLKTTQIX8Kjjz7K2LFj\n",
"3d0MpZRSpZQG4UuwWCw2LZoKDg5m1qxZLmiRUkqp0kSD8GXYspJbVzcrpZSyh8cE4ZiYlYSFjSE4\n",
"eDxhYWOIiVnp8nNs3LiRdu3accUVV/Dggw+SkZEBwOnTp+nduzd16tShRo0a9OnTh0OHDgEwevRo\n",
"Vq1axbPPPkv16tWJjIwEYNiwYTRq1Ag/Pz/at2/P6tWri/16lFJKlXIi4pSbOXVR1u6Pjl4hgYGj\n",
"BCT/Fhg4SqKjV1g9hzUlPUdmZqY0atRI3n//fcnOzpb58+dLhQoVZOzYsXLy5ElZsGCBpKenS0pK\n",
"ioSHh0vfvn3znxscHCyzZs0qdL4vv/xSTp06JTk5OTJp0iSpV6+eZGZm2vyeKKWU8i7n/pYXL1YW\n",
"9wk2n7gYQTg0dHSh4Jl3CwsbY/OLL+k5VqxYIVdeeWWh+zp16iRjx44tcuzGjRslICAg/+fg4GCZ\n",
"OXPmJc8fEBAgW7ZssfqYBmGllPJ+9gRhjxiOzsy0nrgrI6Ocy85x+PBhGjRoUOi+q6++GoD09HSe\n",
"euopGjdujJ+fH0FBQSQlJRWaL75wXvidd96hVatW+Pv7ExAQQFJSEidOnLD59SillCr9PCIIV6qU\n",
"bfX+ypVzXHaO+vXr58/z5tm3bx8iwjvvvEN8fDzr1q0jKSmJFStWFOzxFwnAq1at4u2332bevHmc\n",
"Pn2axMRE/Pz8NF2nUkqpQjwiCEdGhhIYOLrQfYGBoxg6NMRl5+jUqRPly5dnypQpnD17lgULFrB+\n",
"/XoAzpw5Q5UqVfDz8+PUqVO88sorhZ5bt25dEhIS8n9OSUmhfPny1KpVi6ysLF599VWSk5Ntfi1K\n",
"KaXKBo8Iwr163cbkyWGEhY0lKGg8YWFjmTy5B7163eayc1SoUIEFCxbw6aefUrNmTebOnct9992H\n",
"xWJh+PDhpKenU6tWLTp16kTPnj0L9X6HDRvG/PnzqVGjBsOHD6dHjx706NGD5s2b07hxY6pUqUKj\n",
"Ro2K/b4opZQq3VxeT/hcvUWnXNNb6XuilFLez556wh7RE1ZKKaXKIg3CSimllJtoEFZKKaXcRIOw\n",
"Ukop5SYahJVSSil75eRAdDTceaddT9cgrJRSShWXCLzzDjRtCq++Cg88YNdpdIuSB9D3RCmlvND7\n",
"70PnznDzzYB9W5QuGYQtFssnQC/gmIi0Pnff20BvIAtIAB4TkSQrz9UgbCN9T5RSyvs5Y5/wbKDH\n",
"BffFAteJSBsgHhhZnAsqpZRSXmPtWnjtNaed/pJBWERWAYkX3LdURHLP/bgWuMpJbXOpatWqUb16\n",
"dapXr46Pjw++vr75P8+ZM8fdzVNKKeVKyckwdCj07QtNmjjtMtbr/9nucaBURKgzZ87kf9+kSRNm\n",
"zZrFHXfcUeS47Oxsypcv6dumlFLKY333HURGQlgYbNsGNWo47VJ2RxOLxTIayBKRqIsdM378+Pzv\n",
"g4ODCQ4OtvdybhMXF8fAgQOJjIzkvffeIyQkhG7dujFz5kxWrVqVf5yPjw979uzhmmuuITMzk9Gj\n",
"RzNv3jwyMzO55557eO+996hcubIbX4lSSqnLmj4d3nsPvvwSgoIueWhcXBxxcXElupxdQdhisTwK\n",
"3Al0u9RxBYOwNzt69CiJiYns37+fnJwcvv7660seP2LECP7++282b95M+fLleeihh3j11Vd5/fXX\n",
"XdRipZRSdhkwAB59FCpVuuyhF3YuLyxza4tiB2GLxdIDeAEIEpGMYl/RC/n4+PDKK69QoUIFKlSo\n",
"cMljRYSPP/6YLVu24O/vD8DIkSMZMGCABmGlPFBMzEqmTIklM7M8lSplExkZWqwyqohAQgLUrg1+\n",
"fkUf/+wzSEkBf38ICDBfmzc3xyvPU62aSy93ySBssVjmAEFALYvFcgAYh1kNXRFYeq6m7q8i8i9H\n",
"NchSrMXdF+fIHT+1a9emYsWKNh17/Phx0tLSuOmmmwq0RcjNzb3Es5RS7hATs5Jhw5aQkHB+9WtC\n",
"wmiASwfixERYvhxiY2HpUkhPh7lzoWvXosempMDOneY5p0+br9u3m1W3LVo4+iUpW4nAyZNQq5Zb\n",
"m3HJICwi/a3c/YmT2nLums48u30sF3wyqFq1Kmlpafk/HzlyJP/7WrVqUaVKFbZv3079+vVd1kal\n",
"VPFNmRJbKAADJCS8xtSpYy8ehN96CyZONEkaQkPNCtrrrrt4D+LZZ4vel5YGVaqUsPXKbqdOwZNP\n",
"QsWKcJnpRWfTtJV2aNOmDdu2bWPz5s1kZGQUmvv28fFh8ODBDB8+nOPHjwNw6NAhYmNj3dRapdTF\n",
"ZGZa74dkZJS7+JOeeAKOH4dFi+C55+D664s/hOfra/05e/bAgw/CwoWQUSZm+1xvxQq48UZo3NhM\n",
"FbiZBmEbXNgTbt68OS+//DLdu3enRYsWdO3atdAxb775Jk2bNqVDhw74+fkREhJCfHy8q5utlLqM\n",
"SpWyrd5fuXLOxYflata0adGOXWrXhjvugMmT4Zpr4MMPISvLOdcqa7KzYexY8yFn+nR4913n/R6L\n",
"QXNHewB9T5RyD2tzwoGBo/hsyFV0njMLZs6Etm3d07g//oAxY8z88Zw50LGje9pRWvz4o/lw89ln\n",
"UK+eUy7h8NzRJWyMBmEb6XuilPvExKxk6tSlZGSUo64lmXfK76Dh9i3w9tvQv7/jVovaa/VqU6nH\n",
"SYGjTBFx6u9Tg7CX0vdEKTfLzoZp00xJukcegZdfhiuucHerlJexJwhr/kWllDpzBlauNIt2WrVy\n",
"d2tss2mT2f5kbVtUWXf0KNSt6+5W2EQXZimllL8/zJvnPQEYzArtgQMhPBz27nV3azxDZia89JKZ\n",
"P8/MdHdrbKJBWCmlvFFIiEkCcsMN0L69WcRVoBBNmbN1K9x6q3lPfvvNI1Y+20LnhD2AvidKuVhu\n",
"LviUoj7IwYMwYgT8+Sds3Fi6Xtvl5OaaVc+vvw5vvAGPP+7QxVfFSWuqc8JKKXUp2dkmwUb9+jBq\n",
"lLtb4zhXXWWq/pw4UbYCMJgPIIsWmd5vYKBDT213WtNi0J6wB9D3RCkXSEkxiRqyssz877kCK0pd\n",
"TFjYGGJjJ1q5fyyLF08ocr89PeEy9pFJKVUmHThgVhE3aGCSNpSlAJybC2++aRZyqWKxK61pMWkQ\n",
"vsCnn35K69atqVq1KvXr1+df//oXSUlJgKmP/PDDD1t93urVq+nUqRP+/v7UrFmTLl268Pvvv7uy\n",
"6Uopa7ZtM6tlBw406QovU4601ElPh/37oWVLMwR/6pS7W2SfPXvMPm4XjhpeMq2pg2gQLmDSpEmM\n",
"GDGCSZMmkZyczG+//ca+ffsICQnh7NmzF31ecnIyvXv3ZtiwYSQmJnLo0CHGjRtHJS9ZnadUqXbl\n",
"lTBjBvznP+7PfuUOVauaHNR//GHmjJs3h/HjTVlFb3D6NDz/PHToYKoeubAsbGRkKIGBowvdFxg4\n",
"iqFDQxx2DZ0TPic5OZkGDRowe/Zs7r///vz7U1NTadKkCW+++Sb79+9nz549fPHFF4We+/vvvxMS\n",
"EkJiYqJd1/bU90QpVQr99RdMmGAKRVxkZM8jZGebkYtXX4W77zZtdkMCjoJpTStXzmHo0BCHro7W\n",
"IHzO4sWL6dOnD5mZmfhcsLrw0UcfJSsrixYtWlgNwikpKTRp0oTevXvz4IMPcuuttxIQEGDztT31\n",
"PVFKKbeZNg3mzzfVjtq0cXdrbOL9C7MiIiA4GO680/6hEjvPceLECWrVqlUkAAPUr1+fEydOXPS5\n",
"1atXZ/Xq1VgsFgYPHkydOnW4++67OXbsmD2vQCml3OPMGZg9G86tg3GriAhYtsxrArC9PCsIx8eb\n",
"3K2LFkFAgJm/ybuNH2/9OePHFz7u44/PnyMiwuZL16pVixMnTpBrZb7h8OHD1KpV65LPb9myJbNn\n",
"z+bAgQNs3bqVw4cPM3z4cJuvr5RygDVrIDLS3a3wXqdOwQ8/QKNGcO+9pieanu6868XHw8SJZuj5\n",
"QuXKlYk5fM8Kwr6+5mv79pCYaFbB5d0uFYQLHtez5/lzzJhh86U7duxIpUqV+Pbbbwvdf+bMGRYv\n",
"Xkz37t1tPleLFi145JFH2Lp1q83PUUqV0Jo1Zu7wzjvd3RLv1agRLFgA+/ZBnz5mTvbKK83QsCPk\n",
"5pqV2h98YFJMBgWZwJ+W5pjzeyHPCsJRUSYZ+dKl9u/js/Mcfn5+jBs3jqFDh7JkyRLOnj3L3r17\n",
"6devHw0bNmTgwIGICLm5uWRmZpKRkUFGRgaZmZns2rWLd999l0OHDgFw4MAB5syZQ0ctwq2Ua/z6\n",
"qwnAX3wBPXq4uzXez98fHnvM/B3dsQNCQ60ft2ABvPWWSX6yfj0cOwa7d8PJk9aP79fPBN9ff4VX\n",
"XjH7t9991+vLRsbErCQsbIx9TxYRp9zMqYu62P2eYtasWXL99ddLlSpVpG7dujJkyBA5ffq0iIiM\n",
"Hz9eLBZLoVvDhg3l0KFD0q9fP2nQoIFUrVpVGjRoIEOGDJGUlBSbrunp74lSHm3NGpHatUV+/NHd\n",
"LSl7Fi0S+fe/Re65R6RtW5GaNUWuuUbkhx+sH5+T49r2uUB09AoJDBx1bigWkWLGSl0d7QH0PVGq\n",
"BPr2Nes/dBhauUHh1JZawEEpVdZ8912ZWMCjPNPFUlvaSoOwUsq7eUgALk7JO1V6XCy1pa00CCul\n",
"VAm5ouSd8kyRkaEkJIzmxYTjPGXH8z1rdbRSSl3Kvn1wiTzu7jJlSmyhAAyQkPAaU6cudVOLlKv0\n",
"urklC0PSGFDuM7uer0FYKeUd4uOhc2dYtcrdLSnCFSXvlIfZssXkpWjenOtTjlP1xtZ2nUaHo5VS\n",
"nu+vv6B7d7O39I473N2aIlxR8k55GH9/eOQRk1WsalWTJrkYNQPyaE9YKeXZ9u2Dbt1g5Eh44gl3\n",
"t8YqV5S8U25y9qz1mgSNGsGDD5oADHYnmHLLPmFVlO4TVsqKw4fhtttg6FAYNszdrbmk4pS8U15A\n",
"xGQEe+EF08P94w9zf3g4zJ1r9SleUcpQKaVsduaMKSjQv7+7W6LKko0bYfhwk9f6vfdMas1Fi0xN\n",
"ggIpkXNyYN06iI42ty1bNAgrpZTLJSfD6tWwciWkpEDNmlCrlrld+H3Vqh6ztVldKDER/vMfiIkx\n",
"6w+eeALKlzdD0BERMGMGyT7+LFligu6PP0K9etC7t7l16aIZs5RSyukSE80i7RUrzG3nTrj5Zngn\n",
"OYJ6yfGk4cvHt0eRGr+W+MQ0jqf6cjTFl3+Sq/Jc5hsE+v6Df8Pq+C6Mom4LO4vVKMerVAkaNoRd\n",
"u8DPL//uJIs/n3aeyw/3w9q10KWLCbqvvAKNG5fsktoTVkopG8TFwcKFJuju2QMh7U4yoN5PdEpf\n",
"Rs0XnqBi11vN4p0VK8wTwsOhWTPYvt2U6ktNJenIMSr99TeVxaym/rbcfczoNp+BA00K7OrV3fby\n",
"yq6ICLP9zdfXVOErsMAqPd1UXXz7bQgJMb/S7t2hWjXrp7JnTlh7wkopz7Bhg6lb+/HHHjVeu327\n",
"GaEctDqCl6r/wRskUvGa6vhs+hu6djV/nQMbmoML1kSfMaPQH/S8rFpTZCN3soh1tOfVBk0Ja7OD\n",
"uXOv5dlnhMeDEug+pCmhoVChwsXbVNZTZDr09cfHn//gFBEBc+dy9izMmgUTJkCHDuYDWKtWDmt+\n",
"YcUtu2TrDS3Pp5SyVVycKUe4cKG7W5Lv2DGRp582zXr/fZGcrkFyrl6dSHCwSGZm0SclJoqEh5uv\n",
"FwgNHS0g4keifE24+JEoIBIWNkZERE5sOyIpflfK31VaypZybWR/3Zsks3vPIucqXDrP3AIDR0l0\n",
"9ApnvA351wwNHS1BQeMkNHS0U69lS1tK/PrXrBHp2VMkI8N8BZH27SXnZKJ89ZVIYKBISIjIunXF\n",
"axt2lDLUIKyUcq/oaJFatUSWLXN3S0REJD1d5M03TWncYcNETp4890CBP9bWguzlBAWNKxQ48m5B\n",
"QePOH5STI7J2rWTVb5h/wN7mIZKRcf6QvGB+4S0vmDuaO4L+pZTo9WdmiowcKVK3rsi335r7EhMl\n",
"NzxcfoxKlNatRW69VWT5cvvaZk8Q1mQdSin3mTPHrECNjjYJOWwUE7OSsLAxBAePJyxsDDExK0vc\n",
"FBGz/fPaa2HPsr38ujyd99+HGjXM9e7NaMXPtVtxb/UgYn7ZUuzz25RVy8cHbrmFCjdeD0BWvUbE\n",
"VOhLq1Ywb55poyNTZNryPnpaXmy7X/+ff8Itt8DWrbB5M9x7LwC/7/Gn88G5vPi6PxMnwq+/wu23\n",
"O7rVl1DcqG3rDe0JK6UuJTdXZNAgkT//LNbTnNEzW7dOpGNHkY6tU+TvAaNEatQQWbnSodezfp6R\n",
"1s9zwbD2smUiN94o0qGDyC23fOSQnrCtr8umHrwL2dUT3rPHjLZ88on5dyciaWkiL7wgUqeOyOzZ\n",
"ItnZJW8bOhytlCrtHD0cO22aSN3aObLyiU8l98orRQYOFDlwwCnXi45eIWFhYyQoaJyEhY0pViDP\n",
"yRH57DOR2rXT5QrfjdKWDZcP5pdg6+ty9fD35RTrw8zgwSJBQWYqYc+e/Lvj4kSaNhXp10/k6FHH\n",
"tc2eIKyro5VSXsVRw7Fnz5qkSL8tTeFgblPKz02H66+HqVMLrWp25PBvr1632b2K18cHBg2C8PDK\n",
"jHvoFC8v7EOWpTwHrwjgn9c+JKyY57X1deXVyy04JG3yYvco/otwgLz3b+rUsQVShPaw/r4WXPk8\n",
"ciTJM+fy0kvw/ffw4YdmW5i7aRBWSrmGiEO2HjmiYtHJk9Cvn8nNsHxdNcp3qQ3btpkJwXPbVBx5\n",
"PUeqUgXe+u4OsjrcRLW1q6iRlMw1//43hHUuVhEBW19XsYKei1zyw0xmpvnFQqEtY7H3z+DJ6yE0\n",
"1Pyq7ay34HjF7TrbekOHo5VSeb7+WuTeex1yqmINR1qxbZvZgvKf/xSYB7zEyueSXs9pzrU5KfBG\n",
"mV9tkCRWrivHf9ll89M99nWVxDffiDRsKHL8uPk5MVEy7gqXweGJ0qSJ8xfgY8dwtGbMUko5T2Ym\n",
"/PvfsHixWd7brp1DTmtvxaKYGHjsMZMB6ZFHCjxQIDewtS6SR1ZIKtDmtIr+THtmK29Ft+KNt3x4\n",
"9FHbBh088nXZ49QpePZZU+no88/hlluIjl7J6NGH2bmzF/XqbeHdd4X77uvi1GZoFSWllOf4+2+T\n",
"5+/qq+GTTwrl4nU1ERN4Z76XwtcfnqTdvY3d1hZn2rgRnnzSVN776CNo2tTdLXKBJUvMNrf77oP/\n",
"+z/w9eXzz3/j6aevIC3tfJqrwMDRTJ4c5tQPGfYEYd0nrJQLOWN/q0fauRNuvRUefhjmz3drAM7I\n",
"ML3e1Z//xbYrOtJu+5dua4uztW1rCgz07GnSLW66JQK5sa1JrZlXjL60iIiAzp3h/vvNKqvJk5Eq\n",
"vsyaBU8+2bpQAAb37m2+FF2YpZSL5OUOLrjKNCFhNIB3DgFeSvPmsHy5WW3sRqdPw513Qo9KPzP2\n",
"eH8sL4+Ff/3LrW1ytvLl4fnn4Z57IOWmeCynN5kHgoNNhM5btOTt4uNhzRrz/Vdf8fcNdxMRYUam\n",
"b7jhczZseLrIU+xZ0e5s2hNWykU8LfOQU/n4uD0AJyaaijdf7O3C2NWhWK5uBAMGeFRxCGe65hq4\n",
"oYNZHbyz/HX8ebweOc1awhdfmGr03u7cymdp356P2s3g5pvN73vtWqhZ85DVp1y48tsTRqY0CCtV\n",
"Ujt3wnvvwdNPm9SL115rqut89lmhw/L2ZVYgCzi/XsITP53bTAT++svdrSji1CnzB3lg83Vck7gB\n",
"S3Y2rF9vhjDLEMucKAgP58qE1XzWfzF9kz/nn5enkRsxxN1NK56DB2HiRPPvLU9UFMk9wunhs5Sv\n",
"Yvz55Rd46SUzEhAZGUpg4OhCpzB7m0Pyf84bmYqNnciKFeOJjZ3IsGFLXB+Ii7uc2tYbukVJlTYp\n",
"KdbvX7JEZOhQkalTRWJjRbZuFVmxQmTHjkKH5WUeGs0ESaWKrKCrvM4IGdv2gQJVArxIXJzJ9di1\n",
"a34qQE9w4oRJ8fj88+eaFRJy0e1HZc22bSK3B+dK51anZNUqd7fGBqmpIuPHmzSio0eLZGWJiEhS\n",
"ksjYsSYT5QcfmGxiF7pcdjJnZAJD01Yq5WC5uaakSmioyJ13luhUBfdlVidJuhMrUwI6y/E2N4lU\n",
"ry4yZ46DGu1kGzaIhIWJNGki8vnnjkm66yDHj4u0aWNyAud/LrhEecGyKDfX/FNr0MCk7k4dUCC1\n",
"46lT7m6ekZsrEhVl9vz26yfy998iYioPTp5siiA9/LDIvn32X8IZObHtCcK6MEspa3JzTW67N94w\n",
"k4svvggDB5bolBdmHipXOYdrhr5OrV63QXa2uVnz44/QoAG0bm3mWt3pjTdgyhQYM8bshalY0b3t\n",
"KeDECTMb0LOn2amSP/Xr718oA1ZZZ7HAgw9Cr16maP3G9+LpnH0utWOjRjBiBDz6qPk35y7dusG6\n",
"dWZdwfTp5F7hT9SXMHYstGoFsbFwww0lu4THZEIrbtS29Yb2hJU369lT5KabRObPd39P78UXRZo3\n",
"F/H3F+nd2xS7/fVXkbNnXd+W/ftFzpxx/XUv49gxkdbX58prkUc8aWTcKyR3MZm3/ijfXj4Z+JOk\n",
"DXpKJCBApFcvke++yx8CdqmuXfO7poe7hEubNqaC1AoHJvNyRsYwNGOWUg5y+DDUr+9ZK2mPHIHV\n",
"q2HlSvjlF/O1atWix2Vl2ddDzciAVatM8oPMTFPIoBhiYlYyZUosmZnlqVQpm8jIUJdsvTp2DLrd\n",
"IUyr9gKda+7AEhPj9Gt6A5t/H+cyb8X/ZwaTZvkzdy48dHcqo1vM58pFs0yWs/ffN4va4uPNquSo\n",
"qJIlX87NNf/W5s2Dd98t+u/1zjth0SJ2VGvPI/WXMvJNf/r2dfx/R0dnDLMnWYf2hJUqTU6fFqlY\n",
"0czXdu8uMmSIyNtvmzqq1hw7ZuYDbzo3L92pk8irr4r8/nuxLuuMGr+2OHJEpNW1ubLmlmGS2769\n",
"58xpullJfh/Hjpm1UHXqmGUQy2PPmtGFoKDzJwsPN2sloqNFEhKsr4y60I8/irz1lkirViKVKolU\n",
"q2b+rRUYWTl2TGTGDJG7gxLl+8rh8tnkRLcM+NgL7QkrVUx580551VZKg6ws2LcPEhJgzx7ztUoV\n",
"eP31osdmZpqEB1WrQrNmJt+hHcLCxhAbO9HK/WNZvHiCXee8nGPH4PagXGZXG8otPr+bHrzHlMax\n",
"nyNGFBzx+0hPh6++Mh3VSpXgh5w7uerPRdC+PSxdaubZFyyAHTvg+HGS6jUg7chxksr5klypCokf\n",
"TiHsgV7nT/j662bi/n//O7+tLTycw+/P5bvvTGK1jRuhRw+TgbJ3b/PP1ptoT1ip4pg2zXzc37DB\n",
"3S3xes5YaXopJ0+KzAt4UpKq1jdz5Xv3OuU6ruaoEQVH/j5yckRiYkQeCEuUBeXDpXXDROnXT2TS\n",
"JJHVq0XS0kQWzVskdzd4TLbRMv9iMVVbWm/3uepP/zRsL2G3Joq/v8jAgSILF5pzeTN0dbRSNsjM\n",
"hMhIM7/6yy9lJMu9c7lypWlysuktRfnu5orEf8ydL7xQKlZAXzyr2thi9YYd+fvw8TFTtHfe6U9u\n",
"7lxa7TZZqdauNVPD27dD+fKtSUnpQQR30oqdrKM9D6UuJeDZrSxcCElJZuo5KQnOHo/i5YoRLO06\n",
"g8gB/nTrVnoyadrjkkHYYrF8AvQCjolI63P31QC+Aa4G9gL9RKSUZQZXpdY//5iE73XqwG+/QfXq\n",
"7m5RqRAZGUpCwuhCAcRkKOrh0OukppphyptugsCavnAIMzw6Y4ZDr+MueVnVLlTcrGrO+n34+ECL\n",
"FuY2aJC5Lz0dOnVazKZNT/AQUUwngqeYQRL+1PRJ4+abTf0OPz8zW+Dn50/TpnPpW8H6Ndy1wM9d\n",
"LtcTng1MBT4vcN8IYKmIvGWxWF469/MIJ7VPKcd66y0IDTUbDt2957YUuXAPtFlp2sOhfzwzMqBv\n",
"X5MT+cMPwZIcdckawN7IUT1YV/w+8lSpAnXq/A1AEv48yPkRiWbNVhEREWrzucpUkZM8lxuvBhoD\n",
"fxb4eSdQ99z39YCdF3me8wfglSou3UTqlbKyRPr0McmTvGm1bHE5Y++qKziq3c5IJelKuGhOuK6I\n",
"HD33/VGgbsk+BijlQp6071fZJDvbFD/q8/dkHv28G+XLu7c6kzO5sgfrSI5qt6OG471JiRZmiYhY\n",
"LJaL7kMaP358/vfBwcEEBweX5HJKqTImNxeeeAK6bX6XJ3Km4VM33N1NcrpevW7z+KBrjSPa7TGp\n",
"JG0Ut3QpcV9+Cbt3m5sdLrtP2GKxNAZ+kPMLs3YCwSJyxGKx1Ad+FpGWVp4nlzu3Uk61ZIlZtFOz\n",
"prtbouwgAv/6FzRf9iHDsifhs3IFNGxYrHOUtUU+3s7anHBg4CgmT/aw0YCcHHjgAbNfumVLk7C8\n",
"Z08sHToUe5+wPT3h74FHgDfPfV1oxzmUcq5ly8zyzZ9+0iDshUTgP/+BqxbPZFj2m3YH4DK3yMfL\n",
"ec1wfLm9yTSZAAAgAElEQVRy8NhjMG0a1K5dolNdsidssVjmAEFALcz878vA/4C5QCMusUVJe8LK\n",
"bdavNyVivv0WunZ1d2tUMYnA2jYRVN2zietz/8Tyy2qzJ6mY3JHFS5Uyhw6ZZfmBgTYdbk/GrEvu\n",
"0RCR/iJypYhUFJGGIjJbRE6JSHcRaS4iodYCsFJus2sX3HUXzJypAdgL5eaaPCoV/o6ndfp6LJkZ\n",
"8Oabdp2rLC7yUQ5y5AgMH27qJa5e7dRLacYsVXocP25SKb32mgnEymFcMbeamwtDhsDWrfBuB19Y\n",
"RokScbh9kc+2baZbX6EClC9vvlaoYBLFlNMPAh7pxAmTS2DWLHj4YfM7rFfPqZfUIKxKj5o14eOP\n",
"oXt3d7ekVHHF3GpOjlkF/fffZj1dhZySJ+JwSRavEyegWjWoXLnoY8OHm5KYZ8+afVZnz5rbpk1O\n",
"/8Ou7HD2LNx6K4SFwebNcNVVLrmsVlFSygbeusrWUyryXEp2Ngx6WMg5+A+fLL7Saolkezm6Xizp\n",
"6WZ4culSs/gvIQFiY80f75JIS4MRI0w2t+BgE9iV66WmWq/RbSN75oS1J6zUZXjrKltHtduZc6tZ\n",
"WfBQf6HfplHcd+UayvnGAY5LqOLQPbcTJ8Ibb0CbNhASAlOmmOBb4SJJkIsjOxsaNID33oP+/c0w\n",
"/H33mRW4jvxU4mYe/2HWHe91cVNs2XpD01aqUsJbU+k5qt3Oev0ZGSJ39c6R76+JlJy27USOHy/R\n",
"+Zzu4EGRpCTnX+fMGZHoaJH77hN57DHnX89FHFWmscTOnhX57DNTo9HBsCNtpWawV94rOhoOHnT6\n",
"Zbx1la0jK/IEBo4udJ+ZWw2xu23p6XDPXTkM3fYUveqsx2f5T1Crlt3nc4kGDeCKK5x/napVzRa7\n",
"+fNLTXUouFSZxqWua8TmzWb04osvTE1MD6DD0co7bdxohupWrnT6pdy+ytZOnlqRJyUF+t4tjNv7\n",
"OF0a7ccnOtZz5kDPnjXb2+6+G6680t2tMauqrTl50uuS0Lj1w2xmpplOmD7dTCk89pjH5JHXIKy8\n",
"T2KimS/74AO49lqnX85VtXIdzZHtdtTc6u7dsOmWCKZViafZlWewzPnBMwKwCCxcaBZHNWpk5nw9\n",
"1ZkzcN110K0bjBwJ13tHQQu3fZg9eNAseGve3KxM94QPVwXo6mjlXXJzzR7gpk3h/fdddlmHr7J1\n",
"EU9qd3Q0PP44bPIP5srdK8yd4eEwd+6ln+hs//xjyjSdPAlvv23+YHu65GSTMvHdd+Gee2DChBKn\n",
"T3Q2t+WFzskxK9h79HB679ee1dEahJV3mTgRFi+Gn392zKpU5XS5uebXNmMGzJsHHSfcCYsWmRXA\n",
"S5favQ/YIdLToVUrMzw5erT3JdFITIRXXoGvvjI3D/8A4UkfCp1Bg7Aq/SZPNr0nDxtSUtYlJZk9\n",
"wFUO7eG96GbUrw+cPl3iRBwOdeBAsYtDeJzt26FGDU0C4mYahJVSHmPHDhhwVwoz5QlubHQKn5+W\n",
"esxiGFUKbd4Mzz9vpjdq1HBLExxewEEpbxcTs5KwsDEEB48nLGwMMTHOX02tYMECeKzTLpan3Uq7\n",
"2/3w+TFaA7A77N1rhiNKs+xsM98REmLyPQcEuLtFxaKro1Wp5a2ZrrxZTg6MGwfHpn/HKstTVHjl\n",
"dXjySXc3y1i2zJSl693b3S1xne++M1Woxo83v4eLbXnyVjt2wCOPmGmNDRu8clpBe8LKs2Vl2f1U\n",
"dyQHOHXK/C3Yvdssts3x7K3EDhUbCwvrRNB/akc+Sh1IhXlzPCMAi5jKOA8/XKpSQNrkuefMIrhv\n",
"voEbbzTVMUqLkydNnu0nnjCvywsDMGhPWHmyzZvhwQfN3r5KlYr9dGcmB8jJMYF282Zz27LFfE1K\n",
"gmuuMVs5T50yP1evbqaoCt5q1zZ16jt1gmbN7Bup9ZQ8vBs3wosvwv79sLpePLW3/2YemD7d7GV1\n",
"p8xMGDTIlGdat85r/1CXSNu2sHw5fP89PPss3HwzREW5u1UlV7Mm7NzpdcPPF9IgrDxTSopZBT1u\n",
"nF0BGByfHGD1avjsM/OZYPt2sxC1XeuzBDfYTd+WW2nW+E/8o7/AsueEiay//0xOdX+SksxOklOn\n",
"4Oy6jTSeMYpyq49xZH4t7iv/Df+k+9OxI3TsaILyzTdfvsPmCUPt+/bBmDFmlPfll02nt8LdvrCd\n",
"EtUBdpi0NLOHtnp1k1nNWrnBssJiMVnAevY0BZtLCy8PwIAWcFAeKDdXZMAAkSefLNFprCeMH1ns\n",
"hPFxcSK33y4SVW2w7L8mSE526ClJ+xLNg6+8ItKsmcg994i8/LJIq1bnLxYeXvRkH30kEhBw/piA\n",
"AEl55F+y5N2t8u9/i3TsKOLrK9Kuncizz4rMny9y4kTR07izqMTJkyLPPy9ys98uie3+piQnF3gw\n",
"MdG87sREu88fHb1CQkNHS1DQOAkNHW1/gv8//hCJiDAJ+5X3OnFC5PPP3d0Km2BHAQcNwsrzfPKJ\n",
"CWapqSU+VXT0CgkLGyNBQeMkLGyMzX/Qc3NFfvpJ5LbbRNo0SZLokaslp2tQ0QCbm1v4iT17msfb\n",
"t794IMo7pl07kUWLRCZNEtm+Pf/hjAyRNWtENt06WLYEdJUl5XpK19aJ8vzzIj/+KJKSIhIUNM5q\n",
"EA4KGlf8N8lGZ86IvPWWSKsa/0hcqyGSE1BT5PXXHVqNxmMq7ZQ1OTkiM2aU6MOTw509KzJ1qkjt\n",
"2iJDhxb9v+aBNAgr75eYKFKvnsiff7rl8rm5IrGxIl26iNza5Khs7jNKcmvWNN1SWwKsLT1BW3uL\n",
"QUH5kSjtqmbyTf/vpE/nk1Ktmoi//16n94Szs0XWrRN57TUzEvBluYflaJVGklu+gsjTT9tVevBy\n",
"vVxvLRvp9U6fFunf34zSPP20yLZt7mtLVpbIggUi110ncscdbvtbYA8Nwqp0sDb+6mS5uaZT2rGj\n",
"yB2Be2VnyLOSGxAgMmSISEKCOcgBQ63Fkhf0W7YUGTNGJDRUpHp1yWndRt4Yt1b8/eNkOoPlZ4Ik\n",
"hp5Sv8pv0q/fPpkzx/zdysoq/iUTEsyI+f33i9SoYQYkIiNFfvhBJOfqJkVHAorBll6uO3r4qoBD\n",
"h8y0St26It27m7kYVxs3znwK/u47r+j9FmRPENaMWarMS0szuxw2bTLrwPp99yA+Vzcy2zvq13df\n",
"w6yldzx71iysaduWmJiVNHz4YW5I3A/A1ubd+bt5OGtTWrH04LVsOVSTZs1MkR0/P7OiOyfH5DbI\n",
"+77gfVv/FK5K2UFI2xM0Gngb3bqZErr57ixZzuewsDHExk60cv9YFi+eYPMxVh08CL//Dn37FqtN\n",
"6iIyM03mKT8/UzDFlXJzwcc7d8/akzFLV0erMu3AAfN3++3kCILqxVPuc1+TCN8TVl36+xetMFSh\n",
"gtlywrlV0B2ug0X7oX17ro/6L9f/3//R58RsJh7fjlxRmTMVW5G5JonMCleQU8mXFU9FkVPdn3Ll\n",
"4IqTf9FuzouUyz2L/99/UDnpGJbyPljaPweDrKywjooqUc5nW7aM2VV+ce9esxXq6aeL3SZ1EZUq\n",
"mX3VF7Nkidlnd911xd+9kJ4OP/xgVqx/8EHRx700ANtLg7Aqs9asgfvvNx3e26Pjsaw8V17vqafc\n",
"X17PVhcGxk8+MfeLYPnnH6pv30714cNhm3ltD6+KOP/aEgOgwQMmsI/9C04ehGxgzx7r17L2oaAY\n",
"bNkylre9aurUsQUq7Vyi1N3u3dC9O7zwgtkDq1xj8WIzGpKQAC1bmg+GbduaalQX1ocWMVujDh6E\n",
"I0dMGcbbbjOZrry41+swxR2/tvWGzgkrW2RluWXeadYskZ7+a2T5zHPzvbYsuvJWxVmx7cTX76gt\n",
"Y/l27RK58kqRjz92bEOV7VJTRX77TWTaNLMdLC3N+nE//yyyZYvIsWMOXU3vadA5YeV1XnzR5H/9\n",
"/nuXJPjPzob/PC8EzPkvo3Nfpfy8r+H22z2vvJ4j2fLaXPT6HVZPNifHpGF87jl4/HHHN1QpO2gp\n",
"Q+VdFi+GwYNN3sNatZx+uVOnYND9aQzfOYTgGpsp/78FEBjo9OsqJzl50qQuVMpDaClD5T0OHzbz\n",
"R19+6ZIAvH073N/uL6Zv7cQdwbmUX/erBmBvpwFYlQLaE1aul5Njan8GBZk9QU7200/Qvz8svHMG\n",
"nW7KNAt4tLatUsrBdIuS8g7798NVV5ns/062fj0c6hVBfMt4/I/5wsNRGoC9UXZ26auFqxTaE1al\n",
"2O7dZifEnzWDqXVuiw7h4d6z/UgZp0+bkZMPP4RbbnF3a5S6KJ0TVuqcI0cgLAwmTIBajXzNnZ5Q\n",
"Xk8VT0oK9OgBnTubGo9KlTLaE1alTnIyjGi7hLZhdRj837ale/tRaZaaapI8XHcd/Pe/Oo2gPJ5u\n",
"UVJlXmYmvNRxJRN23Ee1n77H0qmju5uk7JGeDn36QMOGMGuWZlVSXkGHo5VnioqCd991+mVycmBc\n",
"r995Zfv9+C6cowHYm+3dCy1awMyZGoBVqaY9YeVc27ebrUjLlkGbNk67jAi8PmAbT8/vRrWvplMx\n",
"/G6nXcsVYmJWMmVKLJmZ5alUKZvIyFD7MksppVxGtygpz3LmDNx3H7z1llMDMMA7r6Ty+PyeVP5w\n",
"UqkIwMOGLSlUSSghYTSABmKlShkd51HOIWJSUnbqZDJjOdEnn8B/P6uKz8/L8R08wKnXcoUpU2IL\n",
"BWCAhITXmDp1qZtapJRyFu0JK+f45BNTmOHXX516mX09Imi5PJ4/O/hS7boop17LVWypu1uqnD5t\n",
"pivuv9/dLVHK5bQnrJzj7rth4UKoUsVpl/jrLzj4Uzydzq6g2qpFZhtSKWBL3d1S49Qp6NbNFHdW\n",
"qgzSIKyco1YtaNzYaafPyDDJr65sWvoScURGhhIYOLrQfYGBoxg6NMQp14uJWUlY2BiCg8cTFjaG\n",
"mJiVTrlOESdOmADcrRtMmuSaayrlYXQ4WnmlqD5z6FG9EY2XRcFTpSsRR97iq6lTxxaou9vDKYuy\n",
"3LYI7Ngx6N4deveG117TRByqzNItSsrrLJ74O7eM60mFNSuofmsrdzfHq4WFjSE2dqKV+8eyePEE\n",
"51349ttNYu/x4zUAq1JDtygp95k/H/r2dXqlm91rjnPduPtIems6TTQAl5jbFoF98w3UqePcayjl\n",
"BXROWJXc1KkwdqzJ9etEqUnZnAx9kBNhA2jy/L1OvVZZ4bZFYBqAlQI0CKuS+v57+L//gx9/BD8/\n",
"p11GBFZ0GomvXwVu/N6xw6RuW5jkAVy9CEwpVZgORyv7bdgATzwBMTHQpIlDT31h2saWLQeRktqT\n",
"qb+NxFLecUOlZT07ldMXgZ08CdHR8MgjjjmfUqWMLsxS9tm/Hzp2hA8+gHvuceiprQVGH59UPvxw\n",
"O0OGOLamrNsWJpUFf/5p9ov362dGS3QBlirltIqSch0/P5gyxeEBGKynbczNrcrChd87/FplLjuV\n",
"q3z7LdxxB0ycCG+8oQFYqYvQ4WhlHz8/U5zBCVwZGMtUdipXyM2FV16B2bNh8WK46SZ3t0gpj6Y9\n",
"YeVx8gLjV/TnZ4KJ4U78OO2UwKgLkxwsNRW2boX16zUAK2UDnRNWHicmZiVfPvoNs0/MpDJZAPxY\n",
"tSXyzfRCC4YcVXM3JmYlU6cuLbAwKaR45zlzBjZtMgvV/vgDnn8ebrih6HH//S8cPGjyaV9xBVx/\n",
"PbRrBwEBxW6zUsrz2DMnrEFYXd7Zs2b+NzISKlRw+uUy9h0lKfBGjlaoxg0Ze9h1RX32zviYsAd6\n",
"5R9jbfFWYOBoJk8Oc92q5kmT4OOPzSK11q1NQL3pJujTB+rWLXr8N99AQgKkpZnKQZs3m+C9YoV5\n",
"rrcR0blepQrQIKwcLz3dVErw8YG5c6FyZedeLzeXXU16sKXKrdz/6/NYLpIX2iNWNW/caN6XVq2g\n",
"QgX7euY5OSaQ+ViZGXr6aWjRwqR4bN3a+jHukJFhErQsWgQ//aSBWKlzNG2lcqyUFLjrLqhfHz77\n",
"zCW94N1PvknikQzu2D8OS0B5E/itcMniLRGIizNznEOHFn28bdv8b+3eb1zuIu0VMbmV4+Jg2jRT\n",
"cSgoyATkZ591T+ATMb35kSPNcPu0aRqAlSohD/lorTzOqVOmyk2zZvDFFy4JwCf3p5L15TdkzY6i\n",
"Zt1Lfz506qrm3FyTCaxTJxgyxKbqTNa2VSUkvMbUqUvta4PFAv37w/TpsGsXbNliVqMfPeqewPf2\n",
"23DVVfDOO2bl8//+Z3rpSqkS0Z6wsu7ll6FrV/PH1wV/9EUg4rmqNH7mDyY9dPnPhpGRoSQkjL5g\n",
"TngUQ4f2KFlD5s6FCRNMIYpRo+Deey/eWy3A6T3zBg1gwICLP756NUREwI03Qps257/Wq1e861xs\n",
"njckxOwJDwzU3q9SDqRBWFn37rum9+uiP7izZ8Pu3fDVV7YNzjgt3eL+/eaDR1hYsV672/cb33IL\n",
"REWdX+z15pvm+z594NNPix6/apWZawfzOi0W2L7dZEGbMqXo8Tfe6NTmK1VW2b0wy2KxjAQGArnA\n",
"n8BjIpJZ4HFdmFUG2bM4ac8e87f/55/Nrh1vZH219igmT3ZgHubiEjErsatWLfpYQgKsWWOOybs1\n",
"aQIdOkClSq5vq1KlgMsWZlkslsbAYOBaEcm0WCzfAA8Cn9lzPlU62LM46exZGDgQxozx3gAMLiiE\n",
"YA+LxXoABjOsHBjo2vYopYqwqydssVhqAL8CHYAU4DtgsogsK3CM9oS9QUbG+T3AJdx+VOxtQykp\n",
"LL5/Ju/JcBYttrh2B86KFWa4vVMnF15UKVWauayAg4icAiYB+4HDwOmCAVh5iS1bzFzi2rWQmXn5\n",
"4y+jWIuTRDh+3xCOr9rBp5+5MADn5pqKPg88YPZAK6WUG9k7HB0IDAcaA0nAPIvFMkBEvip43Pjx\n",
"4/O/Dw4OJjg42N52KkfKzYX33jPVbd5+29R6dcACrOIsTsruEkzVNb8T2qYzdaucBi6/DajETp2C\n",
"QYPM1/XroWFD519TKVVqxcXFERcXV6Jz2Dsc/QAQIiJPnvv5YaCDiDxT4BgdjvZEZ86YBBxZWfD5\n",
"53DNNQ47tc2Lk7ZuJfuGtpSXc0E7PPyiSTkcZv16c5177zUrh12w71kpVba4MmPWTmCsxWKpAmQA\n",
"3YF1dp5LuVLVqiYBxX332bT/tThsWpx05gxHg+7mGM1ozQ6TF/q+RwhzaEusOHnS5Hp2UvlFpZSy\n",
"R0m2KL0IPILZovQH8KSInC3wuPaEVRFRkxez4bmNzJKnmU4ETzGDWoFvu7bwglJKOYEWcFCFHTsG\n",
"deq4uxX5srOhdu19nD59dZHHXFp4QSmlnMBlq6OVh/vlF7j7blNWLyPD3a3JN3EigPXFWw4tvKCU\n",
"Ul5Cg3BpkVd0oHNnswK4Rw+T+N/ZpQdttHo1fPQRtGkzz+rjDk3vuHgx/PCD486nlFJOormjS4sx\n",
"YyA2Fl56yeaiAy4hwulTuQwcWI6PPwYfn04cPOiEwgt5li0zH0L+9z/HnE8ppZxI54S9zcWq3KSn\n",
"m16vh1W4kY+mE/vOFn7o8SEffGDui4lZydSpSwusoA5xzKKsFSvg/vvh229NLV6llHIhXZhVGmVm\n",
"moxWP/9sbj4+sHy5u1tlm02byOgaQnj91czd3IIqVZx4rTVroG9f+PpruOMOJ15IKaWs0yDsjS7W\n",
"sz11ypTT27kTWraE2283ty5doHp117ezuJKTybrhJp499SqRa/o7tzhDejq0amUmncOcvuNYKaWs\n",
"0iDsybKz4bXX4PhxcztxAg4dMl+PHy8aiHNy4PffoUUL8HdBSkdHEiGn34MsXBHAkXEf8cwzl39K\n",
"iSUlgZ+fCy6klFLWuTJjlgIzVLx7tymGvmOH+Rofb4aPK1YsfGy5cmYFc/PmZgVz7dpQvz40amS9\n",
"J1yuHNx6q2teh6OFhpId9yuN/Ttz70MuygutAVgp5YW0J1wSV18NVaqYodBrrz3/tU0bz1md7AaJ\n",
"NwQR8OdK84Mr8kIrpZQH0J6wM5w4AeXLWx8S/uuvMh1srTlyBLbHV+UOgPbtYcYMdzdJKaU8libr\n",
"sCY93fTe7roLmjaFlSutH6cBuJCsLLNDaP1zUaYHvHSpc+azp08325CUUsrL6XB0Qbt2wVtvwXff\n",
"mZSPDz8M99zjHauRPcC//gWHD8OCBWYnlVPExcEDD5gtSYGBTrqIUkoVnw5Hl9TZs2ZO99VXoUED\n",
"d7fGe0ydylfZD7B8eR3WrXNiAN63D/r3h6++0gCslCoVtCesSuajj0h/431an/mN6NX+tGzppOuk\n",
"ppo90oMGwXPPOekiSillP90nrFxr2TJyHhpIcLnVvDC9KXfd5cRrPfGE2Wv96acel5pTKaVAg7Bt\n",
"RGDWLPjtN5g5092t8V67diFduxJZdy417wtm/HgnXy8+3uyp9pCqUEopdSENwpeTkAAREZCcbALx\n",
"DTe4u0XeKSkJ2rfnq4Yj+KbaEyxc6MR5YKWU8hIahC9GBCZPNlXlR4yA4cPN3l9VLDExK5kyJZas\n",
"jHK02JtDdM4otm3z1WRVSimFro6+uG++gc8/N0PQTZu6uzVeKSZmJcOGLeHFhOM0J540fFndYDKr\n",
"V3d2TBlCpZQqg8pGTzgnB9LSdL9vCYSFjSE2diI/E0wwKwD4hnBmh7Vg8eIJjr/gzz9DcLAuwlJK\n",
"eQ17esJlYyavXDkNwJcQE7OSsLAxBAePJyxsDDExRTOEZWaaQZM0fAFYR3ueYgYZGU7IGvbVVzBk\n",
"iMlcppRSpVjZGI5WF5U3zJyQ8Fr+fQkJowHMMHNODrz8MnVzTUB8iCimE8FTzCAJfypXznFsg3bv\n",
"NnP2S5eCr69jz62UUh6m9PWEz56FxER3t8JrTJkSWygAAyQkvMbUqUvNvtxBg8j99TeOZQyjatVt\n",
"JOHPg8wlCX8CA0cxdGiI4xqTkQH9+sErr8CNNzruvEop5aFKV084KwsefBCuuQbeecfdrfEKecPM\n",
"FzqbBvTvT27KGR4OiKaqbxW+HLWKjz4aS0ZGOSpXzmHo0B6OXZT1wgsmHeXTTzvunEop5cFKTxDO\n",
"yDAlfCpWhNdfd3drvEalStlF7qtIJhN3zkX8WvB4wEJOnKrE//4HlSt3pW/frs5pSFoaHDigGbGU\n",
"UmVK6RiOTkuDu++GatXMdqSKFd3dIq8RGRlKYODowvfVvo+rm9UnosZ89h+txMKFLkhU5esLCxc6\n",
"p/ShUkp5KO/fopSRAXfeCVddBZ98okk47BATs5KpU5fmDzM/+0x3Yn7owrad5Vi0CKpWdXcLlVLK\n",
"85XNjFnZ2TBnDjz0kNmKpOwTEQHx8UgVX0Y0imLlFn9iY3Vnl1JK2apsBmHlGMHBsMIk4VgaEM7N\n",
"f83VkWGllCoGTdahiic93axI/vtvpIrZk7u1Snvab5jh/AB8+DA884zJ662UUmWUBuGyavNmuPlm\n",
"2L+flHL+DK4aRaxfOPW2LCWgiZMjcE4ODBgAdevqSmilVJnmfUE4OxuOH3d3K7xXbi5MmgTdu8NL\n",
"L/Hn6K9pHxIA/v50OTyXWk1dMAY9caKpfTh69OWPVUqpUsz7lhKPGgVHjpiqSKp4RKBPH0hKQtau\n",
"Y3ZcE168A959FwYNclEb4uLgo4/gjz90IZ1SqszzriA8fz7Mmwe//+7uljhdXu3ezMzyVKqUTWRk\n",
"qP3Zqc6tfMbXF557jtRbbudfkeVZvx5WroRWrRzb9os6fhwGDjQJOerXd9FFlVLKc3lPEN6xw6Qz\n",
"XLIEatZ0d2uc6rJFFYorPj5/5XNSbjU6HQihfXtYv97Fe4CrVYMPPoCwMBdeVCmlPJd3zAknJ8M9\n",
"98Bbb0G7du5ujdNdsqiCLZKTTW8zb+XxuWpEJ5q0p936GTz/vHnY5Uk4qlSBvn1dfFGllPJc3tET\n",
"XrwYbr8dHnus0N0OHbL1IBcrqnBh7d4LX/9L/Vpzx9bf4LPPIDTU5NKuVo2kaVHE3x7B0z4z+O5n\n",
"f264wRWvQiml1OV4RxDu1w/Cwwvd5fAhWw9iragCUKh2b97rfzHhODexgSs5ROWlSSTc05fATZug\n",
"USNOnIDJb8K0af707TuXn9/TDFhKKeVJvGM4GorsJy3xkO0FYmJWEhY2huDg8YSFjSEmZqXdTS0p\n",
"a0UVLqzdm/f6mxPPTfxBfY6yXHryTGpTjlRsxAsvQPPmcPQorF0LM2e6IQCLQGqqiy+qlFLewzt6\n",
"wlbYOmRrC1t71a4a/s4759SpY6mUnEHbjP0MDCxP062/wrnH8l5/Gma+dx3teYJPqLZ1F61amUXI\n",
"mzdDw4YOb57t/vtfsyVp3jw3NkIppTyX1wZhW4ZsbXXxXvXY/IDo8OHvJ580K779/CAqqnAJvy1b\n",
"6DVmGL127oSsLAgIgLDBcNv56+S9/oeIYjoRPMUMkvAnoEIW27dDvXrkt9st8+YbN8L48fDrr86/\n",
"llJKeSsRccrNnNpOv/wi8vPPlzwkOnqFBAaOEjPmaW6BgSMlOnpFsS8XFDSu0HnybkFB4/KPCQ0d\n",
"bfWYsLAxRU84eLBIUJBIz54iiYki+/aJ9OsnEhws0qqVSM2ahU8SHl74+cnJIhs2iHToYPWY7GyR\n",
"SZP+kBo1lhQ6TePGrxZ6/dbfo1F2vUfFkpws0qyZSFSUc6+jlFIe5FzcK1as9LyecGoqPPwwTJ58\n",
"ycMKDtnm1cEdOrSHXb08W3rV1oa/AzhFy3/2m5RTmzZBYiL88EOhfblERMDHH5stVnXqnL898ohZ\n",
"9d2+PcyYUfjE1aubrVgBAQBI+/bseWEGsR/CsmVmhPeqq9rStetBDhz4Al/fA1Stms7QoSGFXr8t\n",
"PXyHEzH7uYOCoH9/51xDKaVKCc8LwiNGQJcu0Lv3ZQ/t1es2hwSTyMhQEhJGFwpYZiFUj/yf8wL1\n",
"dCJoyQ7a8zs5lGP//gD4q7oJOm3bmoPP7cvND7B+fvDgg4UvOmeOCdAzZhQais7Kgn37ICEBDtwe\n",
"RdblSHMAABLgSURBVJvtETx2eAbJ9/oTEmIWiU+bljfcfBXw8EVflyPnzW0WFwdbt8KaNc67hlJK\n",
"lRKeFYTj4uC77+DPP116WVt61XmBunlCPLexGoAfq7ZAvpzBdRd+EIiKshpgwXQUExNh3z5//npg\n",
"LgkzTMDNux0+DA0aQGAgBAb6c/bFuXzXHZo1K37BIUfOm9vs9tvhl1/OfxBRSil1URZxUj1Xi8Ui\n",
"xTr3mTNwww0wdSr06uWUNhXLoUPwySem2lDHjoBZ5FTn0ce5+UQCu66oz94ZHxP2QNG2JifD7t2w\n",
"d+/52759578HaNLE3EywPX+7+mqoUMExL8HaYrLAwFFMnmzfsL1SSqmLs1gsiEixukueE4Q3bzY9\n",
"yDffdEp7bJKdDYsWmTnc1avhgQfg3/823dA8p08X6eXu3286f6tXm6979kDTptC4cdHb1Vebp7mq\n",
"jG5MzEqmTl1aoIcfogFYKaWcwLuDsLvt2AG33GKi49VXm0VTDRoUOSwnx0x55gXc1ashI8NMY3fu\n",
"bL62bQsVK178UqU13aZSSpVl9gRhz5oTdqerr4ZrroEtW0yUfe45mDs3/+GTJ82CqA8/hCuuMME2\n",
"JMRshS3OfG2pS7f5/vumKtK117q7JUop5XW8J22ls/n6nu/5Ftg2tGcPPPOMGV7+6y9YuhR27YJZ\n",
"s0w9iebNize07Oh0m241d64JwqW8tKRSSjmLBuGCoqIgPByJXcov2/y55x6zJsvfH7ZvN+u0rr++\n",
"ZJdwy7YhZ9i0yXw6WbjQ7HtWSilVbO4Lwikp8Pbb52veusqGDdCnD6SnF3kou5o/88Ln0rGnP4MG\n",
"mYXRe/fCa69B/fqOubxbtg052vHjpi7whx/CjTe6uzVKKeW13BeER482i6FctUw4KwvGjYOePU0m\n",
"p8qVCz28YQNcd50ZXX3xRZP06plnHF/43pYKSR4tN9dkDBkwwJSYVEopZTf3rI5ev970Rrdvhxo1\n",
"nHL9Qo4eNePIOTmm57ZgQf72IhGzNXnCBNOxc0Vc8fptQ8uXQ3Aw+OhshlJK5fGOLUrZ2XDzzfD8\n",
"86benrOlpprrpaWZjBlgenJz55KYCI8/DgcOwDffmGQZSimllD3sCcKu78pMnWpW0w4Y4JrrVa1q\n",
"Vjq3amV+Prfy+bffzH7eRo3Mfl8NwEoppVzN9UF40yZT7N1Vc8FgNvWeW/mcu2Qp78z05667zPzv\n",
"5MlQqZLrmqKUUkrlKVMZs06cgEcfNV+//tqkkXS0UpUNSwRefRXuuut8hSillFJWuTRjlsVi8Qdm\n",
"AtcBAjwuIr/Zez6HEbHay169Gh56yFQUfO01xxVJKKhUZcMSgZEjTfrOZ591d2uUUqpUKslw9GTg\n",
"RxG5FrgB2OGYJpXAgQNm1e7p04XujomBe+81aSffess5ARhKUTasvAC8ZAn89JNmxFJKKSexKwhb\n",
"LBY/oKuIfAIgItkikuTQlhXX/v0mAN99d6EavitWmCHoH35wfoXEUpENq2AAXrZMA7BSSjmRvcPR\n",
"TYDjFotlNtAG2AAME5G0Ikdu325WPjlz+fG+faaYfGQkDB+ef/eGDWY30vDhW3j55blOn6ctFdmw\n",
"Nm0ywVcDsFJKOZ29Qbg80A54VkTWWyyW94ERwMsFDxr/8sumNu+ttxI8fDjBwcEla601AwfC/Pmm\n",
"AtKjj+bfvXMn9O4NgwdvY/bsb1wyTxsZGUpCwuhC1zLZsHo49DpO1bYt/PYblNcCW0opdSlxcXHE\n",
"xcWV6Bx2rY62WCz1gF9FpMm5n7sAI0Skd4FjRN5/3yT4X77ceVuSrr3WRFzIT8Kxdy/cdpvJghUV\n",
"NYbY2IlFnhYWNpbFiyc4vDlenw1LKaWUXVy2OlpEjlgslgMWi6W5iMQD3YFtRQ6cMMFkwnDmnuAm\n",
"TUwQPpeE48gRU+f3P/+BRx6B2bNdO0/bq9dtGnSVUkrZpCSro4cCX1ksls2Y1dGvFz1iKLRoYdfJ\n",
"Y2JWEhY2huDg8YSFjSEmZqX1A88l4WDpUhLFn7AwM0IdGWkeLhXztM6Qnm7epJ9/dndLlFKqzLJ7\n",
"4k9ENgM3X/KgESPsOnex9tv6+8PcuaSmQq8QuOMOeLnAzHSpmKd1tM2bTdrQ66/XUoRKKeVGHpkx\n",
"KyysePO4mZmmKFODBjBrVtHiPjpPe05ursnT+frr8O67ZsjAlelDlVKqFHNpxixnuuh+23QfM8Q9\n",
"eDDccANgqhMOGADVq5uF2Naq6+k87TmPPQa7d8PatWY1uVJKKbfyyCB8sXncfsfWwuqjJu3VOW++\n",
"acoFL1umu2oua8wYs5BN3yillPIIHlmVPTIylMDA0YXuu7/BIJ44sg6+/RaqVAHg11/N6GpUlFZC\n",
"skmzZhqAlVLKg3jkX+S8oeOpU8eSkVGOepYkZm9dTKUvPs8fRk1KMsPQH30EDRu6s7UeJCsLFiyA\n",
"L780CUwqV3Z3i5RSSl2CRy7MKkQEQkOhQwez7/jcXf37Q40apjRxmffPPzB9OsyYAS1bmnnzPn20\n",
"16uUUi5UahZmFWKxwP/9X6F6tp9+Clu3wvr17muWx3jlFXj/fVOjcelSuO46d7dIKaWUjTy7JxwR\n",
"AfHx4OtrJn79/dm1Czp3hrg4s821VMvMNKuZN2yAq682VaIudPAg+PmZ5eFKKaXcxp6esEcuzMoX\n",
"H29qES5aBBERZGaaYegJE7w0AOfkwJEjJllGfLz1Y775Bpo2hSuuMIH1nnvM609NtX78VVdpAFZK\n",
"KS/l2cPRvr7m67m80CNHmg7hkCHubVax/PijyRx29CicOgUBAVC3rkmU8dJLRY/v1g3atYPatU0P\n",
"V5NpKKVUqeWZw9GpqVC1Kpw+bYakZ8zgxzX+DBliyt3WqOHYttpFxNQxXrvW3Hx9YWLRLF8cOwaH\n",
"D5vAW7u2LpZSSqlSyp7haM8Lwqmp0KYNfP89tGoFmMW/7drB119DUJCDG1pcBw7As8+awAtm1XaH\n",
"DqZhHTu6t21KKaXcpnQE4eHDzbDt558DJt1xjx4mzr36qoMbaY/UVIiONg1q1EiHi5VSSgGlIQj/\n",
"8ospS7h1a/6Y89tvw8KFZn2WS0dyc3PNQqoKFVx4UaWUUt7Ka1ZHW60VnJ4Ojz8OH3yQH4A3bTJB\n",
"OCrKhQH4zBnThmuvhblzXXRRpZRSZZHLe8LWagUHBo7mx1sO0zwnzWzRwXRCO3Uy67KeeMIpTSws\n",
"Lc2Md3/8Mdx+OwwbBl266HCzUkopm3hFT3jKlNhCARggIeE1Rh+pAR9+mH/fxx+bkeDHHnNBo06d\n",
"gptuMouu/vjD5F3u2lUDsFJKKady+X6Zi9UKPp5bHWrVAsyW2rFjYfly6/WBHS4gwORevk1rDiul\n",
"lHIdl/eEL1YruHLlnPzvn///9u4txqrqjuP49y/eay2pNRamNpCxmrbxQoxXWhkTKyg4rQ8qxrTz\n",
"5C0GL4hpO9JCTRBiYsXQxNhgG9JgdYwpYpQqaCf6ogaDBS9YAUWxFktACiTcnNWHfcCRzOCcy569\n",
"z/D9vMyZPfustZKdNb/stfda687sDvj00wepUREGsCRp0A16CPe1V3BraydTpvwEyO5+X34ZZswY\n",
"7JZJkjS4Bn04et9ewYfdOIYR2zaz58gj2DzrQcZPvIhdu+Dmm2HevGzBrIbbtg2mTYOpU+G003Ko\n",
"QJKkgStkitLEiRdx2ZFbOet/H3LOprWMf3IBAPfdl22H296eQ6WvvJJth9jTAy0tOVQgSVJ1ilms\n",
"46OPoLUV9uzJNmdYupQ1m4Zz/vlf7NrXUE89Bddfn718deWVDS5ckqRmWjHr6qth9Gh4/3344x9J\n",
"3xjOhAlwySVw110NbsjixVkAP/tsNg1JkqQc1BLCg7+lz9KlsHw5LFgAxxwDwBNd2UZDt9+eQ327\n",
"d2drPRvAkqSSGdw74d27s3lH998PkyYBsHVrtllSVxeMHZtLUyRJyl35h6N7emDZMrj00v2Hbr01\n",
"WzFy/vxcmiFJ0qAofwgf4PXXYeJEeOstOOGEXJohSdKgaIq1o/f5/HO46SaYM6eBAbxsGbz0UoMK\n",
"kyQpX4WF8Pz52XtZHR0NKvDFF+Haa910QZLUNAoZjt66NVuwasmSbP2MunV3Z9OenngCxo1rQIGS\n",
"JFWnnMPRc+dmc3R7ufdeuPzyBgXwG29kAdzVZQBLkppKvnfC69bBOed8aRmsfYfefBNGjKizkr17\n",
"sylPM2bA5Mn1N1qSpBqV7+3o9nY47zzo7Nx//Kqr4MwzYfr0BlW0Zg2cckqDCpMkqTblC+FTT4WV\n",
"K+Goo4Bsi8LrroPVq+HYY3OpVpKkQpTvmfC8efsDuKcH7rgDZs82gCVJgrxDeO5c+OwzABYuhGHD\n",
"sllEkiQp7xBesgRuuIEdO7LHwg88AIfVU+PevdmYtiRJQ0C+z4QrewX/7sHhvPMOPPZYnYV2dmY7\n",
"MD33nItySJJKpXwvZm3Zwsc7hnPGGdkspVGj6ijw6afhlluygk48sVHNlCSpIcoXwinR0QEjR2Yv\n",
"ZNVs3Tq44AJYtCj7KUlSydQSwofn1RjIRo6ffx7efbeOQnbuzCYXd3YawJKkISXXF7OmToV77oHj\n",
"j6+jkPXrYezYbONhSZKGkFyHo4877t8sXPge7e0X5VKHJEllUbrFOrZvH8HUqc/xzDPu8StJ0oFy\n",
"30Vp7dpZzJu3NO9qJElqOvlvZQjs3DlsMKqRJKmpDEoIH3305wM/eccOmDUrW2xakqQhLNcpSgCt\n",
"rZ1MmTJh4F+YPh02bapzfUtJksov1xAeP/43TJkygYkTB/h29KuvZmtbrlqVZ7MkSSqF3FfMGrDd\n",
"u+Hss+Huu2Hy5FzaJElSXko3Rakqc+Zki0tfc03RLZEkaVCUI4RTgg8/hIcecnckSdIhozzD0ZIk\n",
"NbHmHo6WJOkQYwhLklQQQ1iSpIIUF8IvvACrVxdWvSRJRasrhCNiWESsiIinq/ri9u3Q0QGbN9dT\n",
"vSRJTa3eO+HbgLeB6l6Dnj0bLr4YLrywzuolSWpeNS9bGRHfAS4HZgFTB/zFtWvh4Ydh5cpaq5Yk\n",
"aUio5074AeAuoLrtju68E6ZNg5Ej66hakqTmV9OdcERMAj5NKa2IiLb+zps5c+b+z21tbbS1tsL6\n",
"9fD447VUK0lSaXR3d9Pd3V1XGTWtmBUR9wI/B/YCRwPHA0+mlH7R65y+V8zq6XGbQknSkFPLill1\n",
"L1sZEeOAaSmlKw447rKVkqRDRpHLVpq2kiRVyQ0cJElqgHJu4LBlS7ZVoSRJ+pJ8QzglmDQJFi/O\n",
"tRpJkppRviH86KOwaxdcccVXnytJ0iEm32fCLS3Q1eXylJKkIa98z4Tb2gxgSZL6ke+d8IYN0NKS\n",
"S/mSJJVJIYt1HKQxTlGSJB0yyjccLUmS+mUIS5JUEENYkqSCGMKSJBXEEJYkqSCGsCRJBTGEJUkq\n",
"iCEsSVJBDGFJkgpiCEuSVBBDWJKkghjCkiQVxBCWJKkghrAkSQUxhCVJKoghLElSQQxhSZIKYghL\n",
"klQQQ1h96u7uLroJqpHXrrl5/Q4thrD65D+C5uW1a25ev0OLISxJUkEMYUmSChIppXwKjsinYEmS\n",
"SiqlFNWcn1sIS5Kkg3M4WpKkghjCkiQVJJcQjogJEbE6It6LiF/mUYfyEREfRMTKiFgREa8V3R4d\n",
"XET8KSI2RsSqXse+GRFLI+JfEfF8RAwvso3qXz/Xb2ZEbKj0wRURMaHINqpvEXFyRPwjIt6KiDcj\n",
"4tbK8ar6X8NDOCKGAX8AJgA/AK6NiO83uh7lJgFtKaUxKaVzi26MvtKfyfpab78ClqaUTgVeqPyu\n",
"curr+iXg95U+OCal9PcC2qWvtge4I6X0Q+B84JZK1lXV//K4Ez4XWJNS+iCltAd4DPhpDvUoP1W9\n",
"3afipJReBrYccLgdWFD5vAD42aA2SgPWz/UD+2DppZT+k1J6o/J5O/AO0EKV/S+PEG4BPur1+4bK\n",
"MTWHBCyLiOURcX3RjVFNTkopbax83gicVGRjVJMpEfHPiHjExwnlFxGjgDHAq1TZ//IIYec8Nbex\n",
"KaUxwGVkwys/LrpBql3K5iDaJ5vLQ8Bo4CzgE+D+Ypujg4mI44AngdtSStt6/20g/S+PEP4YOLnX\n",
"7yeT3Q2rCaSUPqn8/C/wN7LHC2ouGyPi2wARMQL4tOD2qAoppU9TBTAf+2BpRcQRZAH8l5TSosrh\n",
"qvpfHiG8HPheRIyKiCOBa4DFOdSjBouIYyPi65XPXwMuBVYd/FsqocVAR+VzB7DoIOeqZCr/uPe5\n",
"EvtgKUVEAI8Ab6eU5vb6U1X9L5cVsyLiMmAuMAx4JKU0u+GVqOEiYjTZ3S/A4cBCr125RcRfgXHA\n",
"t8ieP/0WeAroAr4LfABcnVL6rKg2qn99XL8ZQBvZUHQC3gdu7PWMUSURET8CXgJW8sWQ86+B16ii\n",
"/7lspSRJBXHFLEmSCmIIS5JUEENYkqSCGMKSJBXEEJYkqSCGsCRJBTGEJUkqiCEsSVJB/g/QBf7c\n",
"eU1AMwAAAABJRU5ErkJggg==\n"
],
"text/plain": [
"<matplotlib.figure.Figure at 0x17772400>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# plot the true relationship vs. the prediction\n",
"prstd, iv_l, iv_u = wls_prediction_std(res)\n",
"\n",
"fig, ax = plt.subplots(figsize=(8,6))\n",
"\n",
"ax.plot(x, y, 'o', label=\"data\")\n",
"ax.plot(x, y_true, 'b-', label=\"True\")\n",
"ax.plot(x, res.fittedvalues, 'r--.', label=\"OLS\")\n",
"ax.plot(x, iv_u, 'r--')\n",
"ax.plot(x, iv_l, 'r--')\n",
"ax.legend(loc='best')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Time-Series Analysis"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from statsmodels.tsa.arima_process import arma_generate_sample"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# generate some data\n",
"np.random.seed(12345)\n",
"arparams = np.array([.75, -.25])\n",
"maparams = np.array([.65, .35])"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# set parameters\n",
"arparams = np.r_[1, -arparams]\n",
"maparams = np.r_[1, maparams]\n",
"nobs = 250\n",
"y = arma_generate_sample(arparams, maparams, nobs)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# add some dates information\n",
"dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)\n",
"y = pd.Series(y, index=dates)\n",
"arma_mod = sm.tsa.ARMA(y, order=(2,2))\n",
"arma_res = arma_mod.fit(trend='nc', disp=-1)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" ARMA Model Results \n",
"==============================================================================\n",
"Dep. Variable: y No. Observations: 250\n",
"Model: ARMA(2, 2) Log Likelihood -245.887\n",
"Method: css-mle S.D. of innovations 0.645\n",
"Date: Sun, 16 Nov 2014 AIC 501.773\n",
"Time: 20:59:32 BIC 519.381\n",
"Sample: 01-31-1980 HQIC 508.860\n",
" - 10-31-2000 \n",
"==============================================================================\n",
" coef std err z P>|z| [95.0% Conf. Int.]\n",
"------------------------------------------------------------------------------\n",
"ar.L1.y 0.8411 0.403 2.089 0.038 0.052 1.630\n",
"ar.L2.y -0.2693 0.247 -1.092 0.276 -0.753 0.214\n",
"ma.L1.y 0.5352 0.412 1.299 0.195 -0.273 1.343\n",
"ma.L2.y 0.0157 0.306 0.051 0.959 -0.585 0.616\n",
" Roots \n",
"=============================================================================\n",
" Real Imaginary Modulus Frequency\n",
"-----------------------------------------------------------------------------\n",
"AR.1 1.5618 -1.1289j 1.9271 -0.0996\n",
"AR.2 1.5618 +1.1289j 1.9271 0.0996\n",
"MA.1 -1.9835 +0.0000j 1.9835 0.5000\n",
"MA.2 -32.1812 +0.0000j 32.1812 0.5000\n",
"-----------------------------------------------------------------------------\n"
]
}
],
"source": [
"print(arma_res.summary())"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.9"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

+ 400
- 0
tips/dataset_circles.csv View File

@@ -0,0 +1,400 @@
9.862597006109558251e+00,4.388945920860357397e+00,0.000000000000000000e+00
2.877170945049199346e+00,1.202827750793628248e+01,0.000000000000000000e+00
3.337256239136289437e+00,-5.220370923066885105e+00,0.000000000000000000e+00
6.493095562032711321e+00,-1.113144595320488328e+01,0.000000000000000000e+00
9.160816030038004598e+00,-1.009779300537452151e+01,0.000000000000000000e+00
1.041802753742201126e+01,1.894574992636611332e+00,0.000000000000000000e+00
-9.011386818141570387e+00,7.113786261861553051e+00,0.000000000000000000e+00
8.634194767050646035e+00,6.044937306977867797e-01,0.000000000000000000e+00
-8.310295927199726762e+00,-1.184857079692059001e+00,0.000000000000000000e+00
1.317546095251883154e+00,-1.129096844315353998e+01,0.000000000000000000e+00
9.529257864350787699e+00,1.736339478767750633e+00,0.000000000000000000e+00
-9.437763538148617570e+00,-2.872208646559228029e+00,0.000000000000000000e+00
5.815080874493489205e+00,-8.314022581206840457e+00,0.000000000000000000e+00
-1.288892096908664797e+01,-1.122402541491438477e+00,0.000000000000000000e+00
-7.491945439414098296e+00,-1.046310837836343666e+01,0.000000000000000000e+00
6.971623436910721061e+00,-3.713812314908092826e+00,0.000000000000000000e+00
-3.647715279401396060e+00,-8.566426635858620742e+00,0.000000000000000000e+00
-5.963933774842476332e+00,-5.215555684133276415e+00,0.000000000000000000e+00
5.649964657621771913e+00,-5.369954314422594521e+00,0.000000000000000000e+00
1.172710561955023145e+01,-2.754936843545676695e+00,0.000000000000000000e+00
1.672264154472602238e+00,-1.071807032527019565e+01,0.000000000000000000e+00
-3.443717408141795389e+00,1.250380032886874737e+01,0.000000000000000000e+00
5.863452362896988568e+00,-8.186611721106453743e+00,0.000000000000000000e+00
4.991164283632463672e+00,1.139369901458018042e+01,0.000000000000000000e+00
8.838997710199558711e+00,6.044714573548058967e-01,0.000000000000000000e+00
1.639030475559324795e+00,7.915238715166661443e+00,0.000000000000000000e+00
3.758705824661268569e+00,-8.090773260493815400e+00,0.000000000000000000e+00
-9.394967210155567017e+00,-5.137130325801137154e+00,0.000000000000000000e+00
-6.288288633339392142e+00,1.433786837462863506e+00,0.000000000000000000e+00
-9.604286351326987159e+00,-8.457016591349884038e+00,0.000000000000000000e+00
4.404554230019639860e+00,7.498658012125457795e+00,0.000000000000000000e+00
1.251066075969001723e+01,2.403854004526353894e+00,0.000000000000000000e+00
-6.625366638525115981e+00,-4.034266053666308416e+00,0.000000000000000000e+00
5.745785337741070364e+00,-4.064766044917044852e+00,0.000000000000000000e+00
-7.111819117251069855e+00,2.937469414945817370e+00,0.000000000000000000e+00
1.188200659083437927e+01,-3.459560370844172006e+00,0.000000000000000000e+00
3.763980582232898886e+00,6.079997397875869680e+00,0.000000000000000000e+00
-2.444132686299686608e+00,8.655025186859889530e+00,0.000000000000000000e+00
3.827248176924319178e+00,7.780625034062842005e+00,0.000000000000000000e+00
-2.518060036469188523e-01,1.345001652707794726e+01,0.000000000000000000e+00
-1.062124752956786367e+01,3.788639253029340459e+00,0.000000000000000000e+00
6.914686308755981337e+00,2.517928331085216964e+00,0.000000000000000000e+00
4.819454929706679991e+00,4.305957988922500590e+00,0.000000000000000000e+00
9.308607156157549412e+00,1.018017507427041402e+01,0.000000000000000000e+00
-2.644229094526551638e+00,1.308185169274385018e+01,0.000000000000000000e+00
-8.110820419917877944e-01,1.373132883726502484e+01,0.000000000000000000e+00
-7.336981746187526632e+00,2.633628829714826036e+00,0.000000000000000000e+00
-7.304144561890504761e+00,-4.985228209873077354e+00,0.000000000000000000e+00
-3.942089524730379768e+00,7.429147677138015382e+00,0.000000000000000000e+00
-1.563940960823078541e+00,-1.226771652324130102e+01,0.000000000000000000e+00
7.752633583239491699e+00,1.606933591460645960e+00,0.000000000000000000e+00
-4.644020567309505410e+00,-1.260416337454934776e+01,0.000000000000000000e+00
9.559905872637479263e+00,-2.069167693554651688e+00,0.000000000000000000e+00
6.510524870183760804e+00,1.055846542430648682e+01,0.000000000000000000e+00
7.709822285798955122e+00,2.414004778121223715e+00,0.000000000000000000e+00
-7.112111243945566486e+00,-9.955758248422416301e+00,0.000000000000000000e+00
2.826837256620440275e-01,9.890510484957566462e+00,0.000000000000000000e+00
3.043287967173990527e+00,-8.571990423685743821e+00,0.000000000000000000e+00
9.569832822923251214e+00,-3.349889908766237046e+00,0.000000000000000000e+00
2.825239466651834896e-01,8.493236936762546208e+00,0.000000000000000000e+00
-3.456625845444592837e+00,-1.112489309506036506e+01,0.000000000000000000e+00
4.009169731782363577e+00,1.000783306149301488e+01,0.000000000000000000e+00
4.688796243646584649e+00,1.281843321647803968e+01,0.000000000000000000e+00
-1.998966439837163600e+00,1.351012796539344230e+01,0.000000000000000000e+00
-5.633144883347592113e+00,-2.976004791943957617e+00,0.000000000000000000e+00
-1.071777719431886666e+01,8.746724717809167160e+00,0.000000000000000000e+00
6.933072615037868758e+00,-2.014502580037639490e+00,0.000000000000000000e+00
-3.996300934160016194e+00,-7.669085547264026914e+00,0.000000000000000000e+00
6.057410272621098102e+00,-2.652565982981485160e-01,0.000000000000000000e+00
-1.211110506752794613e+01,3.213092951287801480e+00,0.000000000000000000e+00
-2.660457520427601530e+00,9.541749783643794558e+00,0.000000000000000000e+00
5.677396269346377089e+00,-8.181021821215502143e+00,0.000000000000000000e+00
8.025138318366114021e+00,-1.384302439837393317e+00,0.000000000000000000e+00
7.007870666149849104e-01,-8.332889101745662330e+00,0.000000000000000000e+00
-2.561600053427863344e+00,-9.743204526040658564e+00,0.000000000000000000e+00
-7.680125944110861802e+00,7.501834896431613053e+00,0.000000000000000000e+00
-5.990419838503080463e+00,-1.634517124985244374e+00,0.000000000000000000e+00
-3.408750904602412479e+00,-1.011555207509729293e+01,0.000000000000000000e+00
-7.466990342399484781e+00,4.972154832211821862e+00,0.000000000000000000e+00
-1.385863376104194344e+01,-1.569697443999104047e+00,0.000000000000000000e+00
1.016630480502666245e+01,-5.786895095791383170e+00,0.000000000000000000e+00
2.138339060813499781e+00,9.235897157296232507e+00,0.000000000000000000e+00
9.784654375427050965e+00,3.223452669892223899e+00,0.000000000000000000e+00
-7.160947322329206166e+00,4.315264819978651897e-03,0.000000000000000000e+00
1.069526264501631019e+01,2.146676049265247688e+00,0.000000000000000000e+00
1.352531737671455403e+01,5.341317125800683074e-01,0.000000000000000000e+00
-8.204380562754867157e+00,2.958040506654077695e-01,0.000000000000000000e+00
-7.700927136434255083e+00,3.450173937237375643e+00,0.000000000000000000e+00
-6.162063854688403453e+00,-1.039351804719752614e+01,0.000000000000000000e+00
-7.622924349294661006e+00,-8.395757258940216516e+00,0.000000000000000000e+00
-4.781299924855306749e+00,-4.300113418931317710e+00,0.000000000000000000e+00
1.273936254270339319e+01,4.444056568979243949e+00,0.000000000000000000e+00
-6.212029468907783070e+00,6.412488301479442043e-01,0.000000000000000000e+00
-8.598454550560376219e+00,-9.505732040174901698e+00,0.000000000000000000e+00
-6.551844072080291070e+00,-2.597153824509962572e+00,0.000000000000000000e+00
5.406244662529976486e-01,-1.180744558537000977e+01,0.000000000000000000e+00
-1.223328107698617728e+01,2.534270081704574018e+00,0.000000000000000000e+00
-1.234205194892152413e+01,1.643521855276022237e+00,0.000000000000000000e+00
-9.493836137915996787e+00,9.746672478642727455e+00,0.000000000000000000e+00
-8.420488306810536727e+00,9.741715891244462355e+00,0.000000000000000000e+00
3.814812252424179650e+00,-1.095530939520013547e+01,0.000000000000000000e+00
7.408768607650642934e+00,-1.598252129266272581e+00,0.000000000000000000e+00
-8.954549145432116219e+00,-8.716215395331682103e+00,0.000000000000000000e+00
6.750393096483064959e+00,-8.732504927011085272e+00,0.000000000000000000e+00
-4.786803561513118588e+00,-8.222241820233284315e+00,0.000000000000000000e+00
-7.346628433171869865e+00,-5.200293638609803537e+00,0.000000000000000000e+00
1.416856726047263759e+00,1.122018551024202893e+01,0.000000000000000000e+00
7.209540291555192049e+00,3.896096344772451925e+00,0.000000000000000000e+00
-4.818795573205335359e+00,1.275527076454161524e+01,0.000000000000000000e+00
-3.600791011231857741e+00,-5.685619246095143353e+00,0.000000000000000000e+00
-1.303337576890614358e+01,1.454981134345443294e-01,0.000000000000000000e+00
-5.668879881725418457e+00,4.265559882994224949e+00,0.000000000000000000e+00
-1.336677225502713995e+01,-3.333565427677156023e+00,0.000000000000000000e+00
6.380063124213702253e+00,-8.694515196234624099e+00,0.000000000000000000e+00
7.958495857862243561e+00,5.662910911325223395e+00,0.000000000000000000e+00
4.971861734308737013e+00,-7.650823140952451418e+00,0.000000000000000000e+00
7.413214733587544458e+00,3.314437518815265271e+00,0.000000000000000000e+00
-8.665573289299810966e+00,2.579632707896181820e+00,0.000000000000000000e+00
9.929397321851664016e-01,1.355034933325041813e+01,0.000000000000000000e+00
5.360547077905177815e+00,-6.588248171721820867e+00,0.000000000000000000e+00
5.084186669565729844e+00,3.833783430598022512e+00,0.000000000000000000e+00
1.002670917279956520e+01,7.517106438066921470e+00,0.000000000000000000e+00
-4.992627975094395154e+00,-4.021636687013394607e+00,0.000000000000000000e+00
-1.072877553404850737e+01,3.393035849131368575e+00,0.000000000000000000e+00
-3.801811713125446524e+00,6.401279054144701774e+00,0.000000000000000000e+00
1.050052476588508199e+01,-2.890999409234909390e+00,0.000000000000000000e+00
1.004219115274414342e+01,-4.302461600016681542e-01,0.000000000000000000e+00
8.126291864264146625e+00,-2.062137138477090570e+00,0.000000000000000000e+00
8.532036146140371535e-01,-1.077703138089397683e+01,0.000000000000000000e+00
-7.709349249077324551e+00,-8.402083930252045363e+00,0.000000000000000000e+00
-3.158019423283696003e+00,-8.622525955647978080e+00,0.000000000000000000e+00
4.368821578430113206e+00,-9.280167433647054764e+00,0.000000000000000000e+00
-4.339665761257628596e+00,-9.873795252064214267e+00,0.000000000000000000e+00
-3.975662970089751114e-01,8.778761114131237164e+00,0.000000000000000000e+00
-1.005816644696833784e+01,2.676157423149173376e+00,0.000000000000000000e+00
7.976759622447467279e+00,1.488112982960128061e+00,0.000000000000000000e+00
-1.115846173131786756e+01,2.347405448698627328e+00,0.000000000000000000e+00
1.828795701256797024e+00,7.084758587571729826e+00,0.000000000000000000e+00
-8.182845482922107561e-01,-8.837635696617843095e+00,0.000000000000000000e+00
-1.216592766927463565e+01,4.182020621616657330e+00,0.000000000000000000e+00
7.545136762876549774e+00,9.945150848296330892e+00,0.000000000000000000e+00
1.296725766555409187e+01,2.779679035545373100e+00,0.000000000000000000e+00
-7.649802069554393569e+00,-7.912731844844135054e+00,0.000000000000000000e+00
1.450029305422914083e+00,6.154506368128457972e+00,0.000000000000000000e+00
1.126182752393684439e+01,3.820055905771543081e+00,0.000000000000000000e+00
4.114692346497415443e+00,-1.153298607052224334e+01,0.000000000000000000e+00
-2.985702422723075955e+00,-1.147272148740606212e+01,0.000000000000000000e+00
-8.817811375181157985e+00,-3.921949061901613298e+00,0.000000000000000000e+00
-6.137022608944259439e+00,-3.670673919963554521e+00,0.000000000000000000e+00
4.848399268197606027e+00,-8.253359306310066756e+00,0.000000000000000000e+00
3.710181045897119834e+00,-5.714956939008809300e+00,0.000000000000000000e+00
4.404261556429599267e+00,-5.179156225369094813e+00,0.000000000000000000e+00
8.480423471650485823e+00,5.505935081293400479e+00,0.000000000000000000e+00
2.676682092765721332e+00,6.755568695663916934e+00,0.000000000000000000e+00
1.349602128878756702e+01,-2.760280205910110141e+00,0.000000000000000000e+00
-6.354333628858702454e+00,1.566935815149126388e+00,0.000000000000000000e+00
9.652499414980560744e+00,-6.998124216434617573e+00,0.000000000000000000e+00
9.109090814919925450e+00,-2.885739466264036324e+00,0.000000000000000000e+00
9.424729913805274251e+00,-7.615755732057966298e+00,0.000000000000000000e+00
-2.298718457690322570e+00,-9.940854626658103044e+00,0.000000000000000000e+00
6.943235844908800303e+00,6.566408021322475363e+00,0.000000000000000000e+00
-1.179297227185062624e+01,3.148610988692310375e+00,0.000000000000000000e+00
5.675240778019130516e+00,-1.061068463254039251e+01,0.000000000000000000e+00
-9.296491368779978615e+00,-6.171290820394590426e+00,0.000000000000000000e+00
3.039700743392670379e+00,8.477483981078608011e+00,0.000000000000000000e+00
-8.279794421336692523e+00,-2.803017026074221985e+00,0.000000000000000000e+00
3.278886571257194849e+00,5.347735189350168561e+00,0.000000000000000000e+00
1.863827422845622195e+00,-1.290070719733525451e+01,0.000000000000000000e+00
-1.072330165521738898e+01,-3.802489649366284086e-01,0.000000000000000000e+00
-1.073258780920207656e+00,-6.213274218993801057e+00,0.000000000000000000e+00
-3.426478986566940055e+00,9.716814665952153263e+00,0.000000000000000000e+00
7.202327952088109164e+00,5.121600172933984396e+00,0.000000000000000000e+00
1.047836588896138110e+01,-6.382333863074792113e+00,0.000000000000000000e+00
5.811234604184800645e+00,-8.319298986500051285e+00,0.000000000000000000e+00
6.397475790321884404e+00,9.488321600312806581e+00,0.000000000000000000e+00
1.195201120691531926e+01,-5.297988414364907861e+00,0.000000000000000000e+00
7.727243630985335621e+00,7.010923292377260196e+00,0.000000000000000000e+00
5.752351514067115090e+00,6.369432858600823977e+00,0.000000000000000000e+00
1.044615088095522815e+01,6.462810993168848661e+00,0.000000000000000000e+00
-1.303394241626110395e+00,1.196422018477452553e+01,0.000000000000000000e+00
-8.135030836916312680e+00,1.024645229885290831e+00,0.000000000000000000e+00
1.839659788413228469e-01,-8.364308881586348221e+00,0.000000000000000000e+00
-5.300267485194837924e+00,1.161095589778359027e+01,0.000000000000000000e+00
-7.885508602691148283e+00,7.688147706153779737e+00,0.000000000000000000e+00
9.408419234037580736e+00,1.319904500240061340e+00,0.000000000000000000e+00
-8.887580366717934055e+00,6.848370446358720720e+00,0.000000000000000000e+00
1.484191788366077569e+00,-6.441161925269415001e+00,0.000000000000000000e+00
-1.230709578968634865e+01,-2.439418675346519461e+00,0.000000000000000000e+00
-4.264629326115078278e+00,9.834984899885139242e+00,0.000000000000000000e+00
1.346987501589731018e+01,-2.903988687159458237e+00,0.000000000000000000e+00
3.227677578327154517e+00,-9.940449883403619680e+00,0.000000000000000000e+00
6.497666163166413433e+00,1.081058228256788922e+01,0.000000000000000000e+00
-9.859128189279788401e+00,9.895946349967440270e+00,0.000000000000000000e+00
7.301566174542353238e+00,-8.244922402723963373e+00,0.000000000000000000e+00
-7.580699915211076778e+00,-1.591836124550713549e+00,0.000000000000000000e+00
-9.505532154825969471e+00,-7.032476220821989088e+00,0.000000000000000000e+00
1.340531295051455984e+01,-1.594081706516784713e+00,0.000000000000000000e+00
1.179244973371897842e+01,5.709177596323251613e+00,0.000000000000000000e+00
-6.487131766302675295e+00,2.093036300507789688e-02,0.000000000000000000e+00
3.427985069410309116e+00,-1.187787974618283648e+01,0.000000000000000000e+00
-1.408581976741892205e+01,1.625550970238727899e+01,1.000000000000000000e+00
-1.881043151933305424e+01,-2.621960441323269109e+00,1.000000000000000000e+00
-2.322581527025400305e+01,-2.667811322111456995e+00,1.000000000000000000e+00
-1.773818831811092167e+01,1.385242284258890422e+01,1.000000000000000000e+00
1.387531676297662742e+01,-1.674031384786129451e+01,1.000000000000000000e+00
1.524986377891802292e+01,-1.478608052582741550e+01,1.000000000000000000e+00
2.084600916661210945e+01,-7.837637204688920889e+00,1.000000000000000000e+00
-3.335846368905910797e+00,-1.661355464770509371e+01,1.000000000000000000e+00
-1.531546407337876659e+01,1.389103285498575069e+01,1.000000000000000000e+00
-5.862798124093491658e+00,-1.895705090151678007e+01,1.000000000000000000e+00
1.830567439264983065e+01,1.515095846689039405e+01,1.000000000000000000e+00
-7.641068875385179204e+00,1.491360609394572379e+01,1.000000000000000000e+00
-1.906664659447316978e+01,1.428824374903993499e+00,1.000000000000000000e+00
-7.634730107177073677e+00,2.021852265839935114e+01,1.000000000000000000e+00
-8.946142480307802458e+00,1.834914057483182859e+01,1.000000000000000000e+00
-1.841892096560701120e+01,3.968763401385180689e+00,1.000000000000000000e+00
-1.190606992324205393e+01,-1.480237550559005122e+01,1.000000000000000000e+00
-1.768958485332136732e+01,1.038413696695125665e+01,1.000000000000000000e+00
-8.501571361773706315e+00,-1.717272805437283267e+01,1.000000000000000000e+00
-5.775465720724419327e+00,2.002395286480275516e+01,1.000000000000000000e+00
2.134699125170529044e+01,-7.251398575296883031e+00,1.000000000000000000e+00
1.781413832072523107e+01,7.056121174639669036e+00,1.000000000000000000e+00
-1.649961768692056197e+01,-1.152300038742184007e+01,1.000000000000000000e+00
-2.199533523781454036e+01,-5.240369637901090449e+00,1.000000000000000000e+00
-2.022220219640516348e+01,5.295586840647683147e+00,1.000000000000000000e+00
7.274107302903876082e-01,2.027846444687708782e+01,1.000000000000000000e+00
-1.819994910579366376e+01,-1.401824275971767175e+00,1.000000000000000000e+00
-1.485594191132518915e+01,6.027297875184268960e+00,1.000000000000000000e+00
6.578620336695827575e+00,1.883714939585484771e+01,1.000000000000000000e+00
-1.627021459970769612e+01,-4.918490433325404254e+00,1.000000000000000000e+00
6.466932279519650884e+00,2.083347368624722407e+01,1.000000000000000000e+00
2.114204413327044207e+01,6.294073669553609918e+00,1.000000000000000000e+00
-1.524418191809297873e+01,-1.508795587492252821e+01,1.000000000000000000e+00
2.187864861441643072e+01,-7.572218157049059428e+00,1.000000000000000000e+00
-7.366372618457587285e-01,1.632196831778809454e+01,1.000000000000000000e+00
1.891861621373580959e+01,-9.648249362280228780e+00,1.000000000000000000e+00
-1.313160444416345385e+01,1.632701131849736598e+01,1.000000000000000000e+00
-1.934693546659212160e+01,-1.326870036449994394e+01,1.000000000000000000e+00
2.147968744797048402e+01,6.687445991361102848e+00,1.000000000000000000e+00
1.575066236102569661e+01,9.629654206912119818e+00,1.000000000000000000e+00
6.786805557994117777e+00,1.506120831288941808e+01,1.000000000000000000e+00
1.330151895443178311e+01,1.137041975833180629e+01,1.000000000000000000e+00
2.195150842243708489e+00,-1.760728806651117750e+01,1.000000000000000000e+00
2.200035505632448007e+01,-7.998043075783540345e+00,1.000000000000000000e+00
-1.082657318041693628e+00,2.030545540994938847e+01,1.000000000000000000e+00
-1.775421853022066188e+01,-8.550442106904826645e+00,1.000000000000000000e+00
1.659819141816188548e+01,-1.298883742117128470e+01,1.000000000000000000e+00
-1.855075329425867636e+01,-8.270103260125782896e+00,1.000000000000000000e+00
1.476912249740130001e+01,-1.043225192715721761e+01,1.000000000000000000e+00
2.164518554409495721e+01,3.836524661536647063e+00,1.000000000000000000e+00
-8.078122985740316508e+00,-1.381988479693678151e+01,1.000000000000000000e+00
1.783941024006371734e+01,-1.387961998757027970e+01,1.000000000000000000e+00
2.005823843660326133e+01,-7.994747011677134640e-01,1.000000000000000000e+00
9.751475655640280404e-01,-1.681306880565960427e+01,1.000000000000000000e+00
1.482893081542048463e+01,6.627858565599247243e+00,1.000000000000000000e+00
-1.488228819116111268e+01,-9.884994691517903220e+00,1.000000000000000000e+00
1.858342179644801462e+01,-1.320545419153004296e+01,1.000000000000000000e+00
4.031313603555224390e+00,2.069737823722174141e+01,1.000000000000000000e+00
-1.871196510041764682e+01,6.679055498330213059e-01,1.000000000000000000e+00
-8.187043865690890598e+00,-1.692392184362027052e+01,1.000000000000000000e+00
1.847298008645853074e+01,-1.092935052323345646e+00,1.000000000000000000e+00
-1.217784293794317962e+00,-1.958808310337123615e+01,1.000000000000000000e+00
1.177626505768050968e+01,-1.879290668870535796e+01,1.000000000000000000e+00
7.707126339575352425e+00,-1.829951136840453429e+01,1.000000000000000000e+00
1.505558382543773455e+01,8.480463032162875336e+00,1.000000000000000000e+00
-2.119393794989089130e+01,-9.816193422288213455e+00,1.000000000000000000e+00
4.384412174196432588e+00,-1.705187614748492564e+01,1.000000000000000000e+00
1.904163303190937739e+01,1.025881051909351527e+00,1.000000000000000000e+00
1.201792025011094012e+01,1.740326982067275097e+01,1.000000000000000000e+00
6.457082462408918033e+00,-1.559966047720136650e+01,1.000000000000000000e+00
2.105947251379482665e+01,-4.066524050310797556e+00,1.000000000000000000e+00
1.787357580991723438e+01,-2.135419503673841923e+00,1.000000000000000000e+00
1.955848548793692743e+01,7.401194442349940950e+00,1.000000000000000000e+00
-3.079446174618643006e+00,1.913038719229226814e+01,1.000000000000000000e+00
-9.217172216359992731e+00,1.527217041406997211e+01,1.000000000000000000e+00
8.224705590183873483e+00,2.017261461282702584e+01,1.000000000000000000e+00
-1.749777942338765868e+01,-1.151539351924551902e+01,1.000000000000000000e+00
1.034680190611704020e+01,-1.963754488396148545e+01,1.000000000000000000e+00
1.763095589337911662e+01,1.672278254557209687e+00,1.000000000000000000e+00
6.598337664677481840e+00,-1.977406797872303201e+01,1.000000000000000000e+00
1.042042874765041738e+01,-1.275830591034056560e+01,1.000000000000000000e+00
7.112518493809214526e+00,-1.462910665470060678e+01,1.000000000000000000e+00
-1.513599595923272645e+01,1.521037463861764927e+01,1.000000000000000000e+00
1.900245304385011735e+01,-2.367845735173759625e+00,1.000000000000000000e+00
3.696622127364966315e-01,-2.208322951401586565e+01,1.000000000000000000e+00
-1.674446563614008809e+01,4.548141850778240958e+00,1.000000000000000000e+00
-1.560169366657280854e+01,8.564550568764795813e+00,1.000000000000000000e+00
-1.358193781909342590e+01,-1.097790552661601460e+01,1.000000000000000000e+00
-2.240185634211720611e+01,-5.603200611514385088e+00,1.000000000000000000e+00
1.954452574678360932e+01,1.336835013392262539e+01,1.000000000000000000e+00
-2.858794153315326270e+00,-2.302730339878763388e+01,1.000000000000000000e+00
-2.455911165895627413e+00,-1.989143206333796599e+01,1.000000000000000000e+00
1.584364903763910171e+01,9.646518995433194021e+00,1.000000000000000000e+00
2.050691415187010591e+01,9.107882609707594890e-01,1.000000000000000000e+00
-1.798215621574233225e+01,-3.840035114518217885e+00,1.000000000000000000e+00
-5.218030398042513340e+00,-1.852003062825603408e+01,1.000000000000000000e+00
-1.626195615182393794e+01,7.999579969871203566e+00,1.000000000000000000e+00
-3.885580139627898166e+00,-2.126363182176448419e+01,1.000000000000000000e+00
1.622631703332194064e+01,-9.112886568774294815e+00,1.000000000000000000e+00
3.289233406699478834e+00,-1.578293616309770719e+01,1.000000000000000000e+00
-1.570206197463574149e+01,-8.112347071513182684e+00,1.000000000000000000e+00
-2.110023557641758885e+01,-3.414887568059537859e+00,1.000000000000000000e+00
-1.171737626436313207e+01,1.684019628289640735e+01,1.000000000000000000e+00
-8.229232676902782373e+00,2.025828909402180145e+01,1.000000000000000000e+00
-4.411049605760258174e+00,2.281047539610623431e+01,1.000000000000000000e+00
-1.361908495722199852e+01,-1.162599818090166970e+01,1.000000000000000000e+00
9.069239175919369345e+00,1.525657976920708592e+01,1.000000000000000000e+00
1.371782330866971433e+00,2.222735932935401237e+01,1.000000000000000000e+00
-1.631174401698077858e+01,1.912446542368404945e+00,1.000000000000000000e+00
1.586417992501583818e+01,1.720846638598678169e+01,1.000000000000000000e+00
-6.100250645849026654e+00,2.043729798858883839e+01,1.000000000000000000e+00
-1.514274173120179756e-01,2.396069878730597935e+01,1.000000000000000000e+00
-1.866751544404865371e+01,2.590085678608596620e+00,1.000000000000000000e+00
1.722152128357636514e+01,1.532105592989684695e+01,1.000000000000000000e+00
1.632359579219197698e+01,3.070645410049057666e-01,1.000000000000000000e+00
-8.919678180244551058e+00,-1.406553559794046571e+01,1.000000000000000000e+00
1.798477547300765522e+01,-5.923235847477148042e+00,1.000000000000000000e+00
2.199583586046353645e+01,-6.432991015706561733e+00,1.000000000000000000e+00
-1.210606087628472949e+01,2.056880706205652487e+01,1.000000000000000000e+00
-1.567761440016490049e+01,-3.755054263113921653e+00,1.000000000000000000e+00
-1.181109540423527449e+01,1.897725652574823130e+01,1.000000000000000000e+00
1.526917652603902731e+01,-7.398945499213142263e+00,1.000000000000000000e+00
1.830250107970536533e+01,6.669530402613020215e+00,1.000000000000000000e+00
1.603129138567219059e+01,-6.510980790719353983e+00,1.000000000000000000e+00
-1.087233064207762645e+01,-1.800804546254688887e+01,1.000000000000000000e+00
9.696425322760605425e+00,-1.842533650807427037e+01,1.000000000000000000e+00
-1.097641534077534509e+01,-1.950583983611138450e+01,1.000000000000000000e+00
8.940813979789155042e+00,-1.605524738886916225e+01,1.000000000000000000e+00
6.324123844534630834e+00,2.030224064707331166e+01,1.000000000000000000e+00
-5.771326940266628291e+00,-2.149899821913217934e+01,1.000000000000000000e+00
-1.640349684038727318e+01,8.764427348820412078e+00,1.000000000000000000e+00
-1.196200090186676768e+01,1.233173970011232612e+01,1.000000000000000000e+00
6.672753526427372961e+00,-1.613010582011700222e+01,1.000000000000000000e+00
-8.470705943984732755e+00,1.372223920783005191e+01,1.000000000000000000e+00
3.136527504609972095e+00,-1.574731525821545297e+01,1.000000000000000000e+00
8.779430717828166308e+00,-2.220454785260612240e+01,1.000000000000000000e+00
-1.975899624168256485e+00,1.647258011430297842e+01,1.000000000000000000e+00
1.230965878216300702e+01,1.843755303784963573e+01,1.000000000000000000e+00
1.198961865782439773e+01,1.906339524862348256e+01,1.000000000000000000e+00
-4.369594976406253051e-01,-2.136814521627741215e+01,1.000000000000000000e+00
-1.153596650343871488e+01,-1.746458284267908212e+01,1.000000000000000000e+00
-1.056519752895748177e+01,-1.354953930252310634e+01,1.000000000000000000e+00
-1.592817793746197097e+01,6.187303876972833905e+00,1.000000000000000000e+00
1.093463168032933197e+01,2.132341045954430214e+01,1.000000000000000000e+00
-8.235602869193467512e+00,-1.865641389752591550e+01,1.000000000000000000e+00
-1.454006125251376780e+01,-1.123985805024244300e+01,1.000000000000000000e+00
-2.565505190490253273e+00,-2.340626388938940394e+01,1.000000000000000000e+00
1.665974385519901801e+01,-1.374618648548013411e+00,1.000000000000000000e+00
-1.335309305414386571e+01,-1.796550630458822440e+01,1.000000000000000000e+00
-4.722899374382690141e+00,-1.892603425152083219e+01,1.000000000000000000e+00
-1.489728628889070983e+01,-1.687008779615263876e+01,1.000000000000000000e+00
-1.980491475369140275e+01,6.117387954710057585e+00,1.000000000000000000e+00
1.854552807015078386e+01,5.656321833644797792e+00,1.000000000000000000e+00
-3.308639090088201229e-01,-1.940405230026158279e+01,1.000000000000000000e+00
-2.159815288257910382e+01,4.646469071929907990e+00,1.000000000000000000e+00
-2.943940135645302991e+00,1.736966318086229322e+01,1.000000000000000000e+00
-2.142856753274137915e+01,-9.898798358668107866e+00,1.000000000000000000e+00
9.776476702853923229e+00,1.634405660464478061e+01,1.000000000000000000e+00
-1.177230338354111261e+01,2.016922706826369449e+01,1.000000000000000000e+00
1.906215669925919087e+01,-2.941195507582941918e-02,1.000000000000000000e+00
1.748929608461976670e+01,9.131879119712111859e+00,1.000000000000000000e+00
7.300506941667010530e+00,1.469949163685404514e+01,1.000000000000000000e+00
8.473729594218674777e+00,1.914678500486009227e+01,1.000000000000000000e+00
-1.499523124705943644e+01,-1.030882184404770996e+01,1.000000000000000000e+00
-1.629646418763191207e+01,1.348569901444114372e+00,1.000000000000000000e+00
2.035982484527372804e+01,9.022390579586556214e+00,1.000000000000000000e+00
2.137410489751234266e+01,7.494069355040792857e+00,1.000000000000000000e+00
1.597308023387855869e+01,6.080482802867658521e+00,1.000000000000000000e+00
1.443262597851837548e+01,-1.896843317146863228e+01,1.000000000000000000e+00
-4.315842059365481376e+00,1.705446539686293761e+01,1.000000000000000000e+00
1.319916894392426521e+01,1.805744511264430585e+01,1.000000000000000000e+00
-1.759058978629317949e+01,-1.548474985972600138e+01,1.000000000000000000e+00
2.082462611587753898e+01,6.607463476821727077e+00,1.000000000000000000e+00
-3.363806796273415944e+00,2.343131705141279042e+01,1.000000000000000000e+00
5.954366534199434291e+00,-2.066555709222398107e+01,1.000000000000000000e+00
-2.102278354426128715e+01,8.011343166285978867e+00,1.000000000000000000e+00
7.718584304440787136e+00,-1.437599354633448456e+01,1.000000000000000000e+00
1.385456135098922203e+01,1.074151357564068121e+01,1.000000000000000000e+00
1.994484467906970337e+01,1.235510224107671640e+01,1.000000000000000000e+00
-1.484191764160719273e+01,-1.012110610923690146e+01,1.000000000000000000e+00
3.244618917909689593e+00,2.051327067488236722e+01,1.000000000000000000e+00
-1.938406729755708113e+01,-8.688565304893570485e+00,1.000000000000000000e+00
1.478513249063537671e+01,-8.772792061643697181e+00,1.000000000000000000e+00
1.567437276707519622e+01,-8.946857092930706301e+00,1.000000000000000000e+00
1.282292163424287956e+01,2.002178021613681835e+01,1.000000000000000000e+00
-1.309994103974544366e+01,-1.530540351584944325e+01,1.000000000000000000e+00
2.065284590031502532e+01,8.986394708865578451e+00,1.000000000000000000e+00
-3.799884320062938858e+00,2.100214796945848050e+01,1.000000000000000000e+00
4.185996418896004712e-01,-1.932272879562866308e+01,1.000000000000000000e+00
-8.014643663295325515e-01,-2.145741140803774627e+01,1.000000000000000000e+00
3.867347210815940350e+00,2.090780355591596518e+01,1.000000000000000000e+00
1.799263625130667421e+01,9.927385959331937570e+00,1.000000000000000000e+00
3.886771499111138173e+00,1.791094324980342378e+01,1.000000000000000000e+00
-9.773760806932845213e+00,1.303976848378022702e+01,1.000000000000000000e+00
3.595550202052158473e-01,1.684261645691217524e+01,1.000000000000000000e+00
-2.032489831597553831e+01,1.152621995555932521e+01,1.000000000000000000e+00
-2.195941645981185886e+01,-8.896624415963758636e+00,1.000000000000000000e+00
-3.549846791126267220e+00,-1.636916020825658791e+01,1.000000000000000000e+00
-6.539620984264148396e-01,-2.384220191946839407e+01,1.000000000000000000e+00
-2.261051931966515216e+00,-1.609144845481474206e+01,1.000000000000000000e+00

+ 55
- 3
tips/datasets.ipynb
File diff suppressed because it is too large
View File


+ 32
- 3
tips/datasets.py View File

@@ -128,9 +128,7 @@ plt.show()

# +
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs


from sklearn.datasets import make_blobs

# Generate 3 blobs with 2 classes where the second blob contains
# half positive samples and half negative samples. Probability in this
@@ -145,3 +143,34 @@ plt.figure(figsize=(15, 9))
plt.scatter(X[:, 0], X[:, 1], c=y)
plt.colorbar()
plt.show()
# -

# ## Circles

# +
# %matplotlib inline

import numpy as np
import matplotlib.pyplot as plt

# Reproducibility: seed the RNG so the CSV written below is identical on
# every run (without this, each execution silently produces a new dataset).
np.random.seed(42)

n = 200  # samples per ring


def make_ring(n_samples, radius, noise=4.0):
    """Sample `n_samples` 2-D points on a noisy circle.

    Angles are drawn uniformly in [-pi, pi); radii uniformly in
    [radius - noise, radius + noise].

    Parameters
    ----------
    n_samples : int
        Number of points to generate.
    radius : float
        Mean radius of the ring.
    noise : float, optional
        Half-width of the uniform radial jitter (default 4.0).

    Returns
    -------
    numpy.ndarray
        Array of shape (n_samples, 2) with Cartesian (x, y) coordinates.
    """
    t = (np.random.rand(n_samples, 1) * 2 - 1) * np.pi
    r = radius + (np.random.rand(n_samples, 1) * 2 - 1) * noise
    return np.concatenate((r * np.cos(t), r * np.sin(t)), axis=1)


# Two concentric noisy rings: class 0 (inner, r ~ 10), class 1 (outer, r ~ 20).
x_1 = make_ring(n, 10)
y_1 = np.zeros(n)

x_2 = make_ring(n, 20)
y_2 = np.ones(n)

x = np.concatenate((x_1, x_2), axis=0)
y = np.concatenate((y_1, y_2), axis=0)

plt.scatter(x[:, 0], x[:, 1], c=y)
plt.show()

# Persist as CSV rows of (x0, x1, label) for reuse by other notebooks.
data = np.concatenate((x, y.reshape(-1, 1)), axis=1)
np.savetxt("dataset_circles.csv", data, delimiter=",")

+ 159
- 5
tips/notebook_tips.ipynb View File

@@ -10,7 +10,84 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
"<style>\n",
"\n",
"div.cell { /* Tunes the space between cells */\n",
"margin-top:1em;\n",
"margin-bottom:1em;\n",
"}\n",
"\n",
"div.text_cell_render h1 { /* Main titles bigger, centered */\n",
"font-size: 2.2em;\n",
"line-height:1.4em;\n",
"text-align:center;\n",
"}\n",
"\n",
"div.text_cell_render h2 { /* Parts names nearer from text */\n",
"margin-bottom: -0.4em;\n",
"}\n",
"\n",
"\n",
"div.text_cell_render { /* Customize text cells */\n",
"font-family: 'Times New Roman';\n",
"font-size:1.5em;\n",
"line-height:1.4em;\n",
"padding-left:3em;\n",
"padding-right:3em;\n",
"}\n",
"</style>\n"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from IPython.core.display import HTML\n",
"HTML(\"\"\"\n",
"<style>\n",
"\n",
"div.cell { /* Tunes the space between cells */\n",
"margin-top:1em;\n",
"margin-bottom:1em;\n",
"}\n",
"\n",
"div.text_cell_render h1 { /* Main titles bigger, centered */\n",
"font-size: 2.2em;\n",
"line-height:1.4em;\n",
"text-align:center;\n",
"}\n",
"\n",
"div.text_cell_render h2 { /* Parts names nearer from text */\n",
"margin-bottom: -0.4em;\n",
"}\n",
"\n",
"\n",
"div.text_cell_render { /* Customize text cells */\n",
"font-family: 'Times New Roman';\n",
"font-size:1.5em;\n",
"line-height:1.4em;\n",
"padding-left:3em;\n",
"padding-right:3em;\n",
"}\n",
"</style>\n",
"\"\"\")"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -27,7 +104,7 @@
"<IPython.core.display.Latex object>"
]
},
"execution_count": 2,
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -44,7 +121,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -79,6 +156,34 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"\\begin{align}\n",
"\\nabla \\times \\vec{\\mathbf{B}} -\\, \\frac1c\\, \\frac{\\partial\\vec{\\mathbf{E}}}{\\partial t} & = \\frac{4\\pi}{c}\\vec{\\mathbf{j}} \\\\\n",
"\\nabla \\cdot \\vec{\\mathbf{E}} & = 4 \\pi \\rho \\\\\n",
"\\nabla \\times \\vec{\\mathbf{E}}\\, +\\, \\frac1c\\, \\frac{\\partial\\vec{\\mathbf{B}}}{\\partial t} & = \\vec{\\mathbf{0}} \\\\\n",
"\\nabla \\cdot \\vec{\\mathbf{B}} & = 0\n",
"\\end{align}\n",
"\n",
"\\begin{equation}\n",
"E = F \\cdot s \n",
"\\end{equation}\n",
"\n",
"\\begin{eqnarray}\n",
"F & = & sin(x) \\\\\n",
"G & = & cos(x)\n",
"\\end{eqnarray}\n",
"\n",
"\\begin{align}\n",
" g &= \\int_a^b f(x)dx \\label{eq1} \\\\\n",
" a &= b + c \\label{eq2}\n",
"\\end{align}\n",
"\n",
"See (\\ref{eq1})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Audio\n"
]
},
@@ -201,6 +306,56 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## JupyterLab"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"array([1, 2, 3])\n"
]
}
],
"source": [
"import numpy as np\n",
"from pprint import pprint\n",
"\n",
"pp = pprint\n",
"a = np.array([1, 2, 3])\n",
"pp(a)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### [jupyter-matplotlib](https://github.com/matplotlib/jupyter-matplotlib)\n",
"\n",
"\n",
"```\n",
"# Installing Node.js 5.x on Ubuntu / Debian\n",
"curl -sL https://deb.nodesource.com/setup_5.x | sudo -E bash -\n",
"sudo apt-get install -y nodejs\n",
"\n",
"pip install ipympl\n",
"\n",
"# If using JupyterLab\n",
"# Install nodejs: https://nodejs.org/en/download/\n",
"jupyter labextension install @jupyter-widgets/jupyterlab-manager\n",
"jupyter labextension install jupyter-matplotlib\n",
"```"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## References\n",
"\n",
"* https://nbviewer.jupyter.org/github/ipython/ipython/blob/master/examples/IPython%20Kernel/Index.ipynb"
@@ -224,8 +379,7 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
},
"main_language": "python"
}
},
"nbformat": 4,
"nbformat_minor": 2


+ 81
- 0
tips/notebook_tips.py View File

@@ -21,6 +21,36 @@
#
#

# Restyle the notebook's rendered cells (cell spacing, heading sizes,
# body font) by injecting a CSS <style> block into the page.
from IPython.core.display import HTML

custom_css = """
<style>

div.cell { /* Tunes the space between cells */
margin-top:1em;
margin-bottom:1em;
}

div.text_cell_render h1 { /* Main titles bigger, centered */
font-size: 2.2em;
line-height:1.4em;
text-align:center;
}

div.text_cell_render h2 { /* Parts names nearer from text */
margin-bottom: -0.4em;
}


div.text_cell_render { /* Customize text cells */
font-family: 'Times New Roman';
font-size:1.5em;
line-height:1.4em;
padding-left:3em;
padding-right:3em;
}
</style>
"""

# The HTML object is the cell's last expression, so Jupyter renders it.
HTML(custom_css)

from IPython.display import Latex
Latex(r"""\begin{eqnarray}
\nabla \times \vec{\mathbf{B}} -\, \frac1c\, \frac{\partial\vec{\mathbf{E}}}{\partial t} & = \frac{4\pi}{c}\vec{\mathbf{j}} \\
@@ -37,6 +67,29 @@ Latex(r"""\begin{eqnarray}
\nabla \cdot \vec{\mathbf{B}} & = 0
\end{align}

# \begin{align}
# \nabla \times \vec{\mathbf{B}} -\, \frac1c\, \frac{\partial\vec{\mathbf{E}}}{\partial t} & = \frac{4\pi}{c}\vec{\mathbf{j}} \\
# \nabla \cdot \vec{\mathbf{E}} & = 4 \pi \rho \\
# \nabla \times \vec{\mathbf{E}}\, +\, \frac1c\, \frac{\partial\vec{\mathbf{B}}}{\partial t} & = \vec{\mathbf{0}} \\
# \nabla \cdot \vec{\mathbf{B}} & = 0
# \end{align}
#
# \begin{equation}
# E = F \cdot s
# \end{equation}
#
# \begin{eqnarray}
# F & = & sin(x) \\
# G & = & cos(x)
# \end{eqnarray}
#
# \begin{align}
# g &= \int_a^b f(x)dx \label{eq1} \\
# a &= b + c \label{eq2}
# \end{align}
#
# See (\ref{eq1})

# ## Audio
#

@@ -63,6 +116,34 @@ from IPython.display import IFrame
IFrame('https://jupyter.org', width='100%', height=350)
# -

# ## JupyterLab

# +
import numpy as np
from pprint import pprint

# Short alias: pprint gives a plain-text repr of objects, handy in
# JupyterLab consoles where the rich repr is not always wanted.
pp = pprint

a = np.array([1, 2, 3])
pp(a)

# -

# ### [jupyter-matplotlib](https://github.com/matplotlib/jupyter-matplotlib)
#
#
# ```
# # Installing Node.js 5.x on Ubuntu / Debian
# curl -sL https://deb.nodesource.com/setup_5.x | sudo -E bash -
# sudo apt-get install -y nodejs
#
# pip install ipympl
#
# # If using JupyterLab
# # Install nodejs: https://nodejs.org/en/download/
# jupyter labextension install @jupyter-widgets/jupyterlab-manager
# jupyter labextension install jupyter-matplotlib
# ```

# ## References
#
# * https://nbviewer.jupyter.org/github/ipython/ipython/blob/master/examples/IPython%20Kernel/Index.ipynb

Loading…
Cancel
Save