You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

TODS Official Demo Notebook.ipynb 167 kB

4 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706
  1. {
  2. "cells": [
  3. {
  4. "cell_type": "code",
  5. "execution_count": 1,
  6. "metadata": {},
  7. "outputs": [
  8. {
  9. "data": {
  10. "text/plain": [
  11. "'1.4.1'"
  12. ]
  13. },
  14. "execution_count": 1,
  15. "metadata": {},
  16. "output_type": "execute_result"
  17. }
  18. ],
  19. "source": [
  20. "import scipy\n",
  21. "scipy.__version__"
  22. ]
  23. },
  24. {
  25. "cell_type": "code",
  26. "execution_count": 2,
  27. "metadata": {},
  28. "outputs": [
  29. {
  30. "name": "stdout",
  31. "output_type": "stream",
  32. "text": [
  33. "Python 3.6.10 :: Anaconda, Inc.\r\n"
  34. ]
  35. }
  36. ],
  37. "source": [
  38. "!python -V"
  39. ]
  40. },
  41. {
  42. "cell_type": "markdown",
  43. "metadata": {},
  44. "source": [
  45. "# TODS"
  46. ]
  47. },
  48. {
  49. "cell_type": "markdown",
  50. "metadata": {},
  51. "source": [
  52. "## Introduction Summary"
  53. ]
  54. },
  55. {
  56. "cell_type": "markdown",
  57. "metadata": {},
  58. "source": [
  59. "TODS is a full-stack automated machine learning system for outlier detection on multivariate time-series data. TODS provides exhaustive modules for building machine learning-based outlier detection systems, including: data processing, time series processing, feature analysis (extraction), detection algorithms, and reinforcement module. The functionalities provided via these modules include data preprocessing for general purposes, time series data smoothing/transformation, extracting features from time/frequency domains, various detection algorithms, and involving human expertise to calibrate the system. Three common outlier detection scenarios on time-series data can be performed: point-wise detection (time points as outliers), pattern-wise detection (subsequences as outliers), and system-wise detection (sets of time series as outliers), and a wide-range of corresponding algorithms are provided in TODS. This package is developed by DATA Lab @ Texas A&M University."
  60. ]
  61. },
  62. {
  63. "cell_type": "markdown",
  64. "metadata": {},
  65. "source": [
  66. "## Packages"
  67. ]
  68. },
  69. {
  70. "cell_type": "code",
  71. "execution_count": 3,
  72. "metadata": {},
  73. "outputs": [
  74. {
  75. "name": "stdout",
  76. "output_type": "stream",
  77. "text": [
  78. "Obtaining tods from git+https://github.com/datamllab/tods.git#egg=tods\n",
  79. " Cloning https://github.com/datamllab/tods.git to ./src/tods\n",
  80. " Running command git clone -q https://github.com/datamllab/tods.git '/Users/wangyanghe/Desktop/Research/Tods Notebook/src/tods'\n",
  81. "Requirement already satisfied: Jinja2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (2.11.3)\n",
  82. "Requirement already satisfied: numpy==1.18.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (1.18.2)\n",
  83. "Requirement already satisfied: simplejson==3.12.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (3.12.0)\n",
  84. "Requirement already satisfied: scikit-learn==0.22.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (0.22)\n",
  85. "Requirement already satisfied: statsmodels==0.11.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (0.11.1)\n",
  86. "Requirement already satisfied: PyWavelets>=1.1.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (1.1.1)\n",
  87. "Requirement already satisfied: pillow==7.1.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (7.1.2)\n",
  88. "Requirement already satisfied: tensorflow==2.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (2.2.0)\n",
  89. "Requirement already satisfied: keras in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (2.4.3)\n",
  90. "Requirement already satisfied: pyod in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (0.8.7)\n",
  91. "Requirement already satisfied: nimfa==1.4.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (1.4.0)\n",
  92. "Requirement already satisfied: stumpy==1.4.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (1.4.0)\n",
  93. "Requirement already satisfied: more-itertools==8.5.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tods) (8.5.0)\n",
  94. "Requirement already satisfied: MarkupSafe>=0.23 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from Jinja2->tods) (1.1.1)\n",
  95. "Requirement already satisfied: joblib>=0.11 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from scikit-learn==0.22.0->tods) (1.0.1)\n",
  96. "Requirement already satisfied: scipy>=0.17.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from scikit-learn==0.22.0->tods) (1.4.1)\n",
  97. "Requirement already satisfied: patsy>=0.5 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from statsmodels==0.11.1->tods) (0.5.1)\n",
  98. "Requirement already satisfied: pandas>=0.21 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from statsmodels==0.11.1->tods) (1.0.3)\n",
  99. "Requirement already satisfied: google-pasta>=0.1.8 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (0.2.0)\n",
  100. "Requirement already satisfied: protobuf>=3.8.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (3.15.8)\n",
  101. "Requirement already satisfied: six>=1.12.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.15.0)\n",
  102. "Requirement already satisfied: grpcio>=1.8.6 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.37.0)\n",
  103. "Requirement already satisfied: keras-preprocessing>=1.1.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.1.2)\n",
  104. "Requirement already satisfied: h5py<2.11.0,>=2.10.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (2.10.0)\n",
  105. "Requirement already satisfied: opt-einsum>=2.3.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (3.3.0)\n",
  106. "Requirement already satisfied: termcolor>=1.1.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.1.0)\n",
  107. "Requirement already satisfied: wheel>=0.26; python_version >= \"3\" in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (0.36.2)\n",
  108. "Requirement already satisfied: tensorboard<2.3.0,>=2.2.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (2.2.2)\n",
  109. "Requirement already satisfied: tensorflow-estimator<2.3.0,>=2.2.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (2.2.0)\n",
  110. "Requirement already satisfied: wrapt>=1.11.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.12.1)\n",
  111. "Requirement already satisfied: gast==0.3.3 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (0.3.3)\n",
  112. "Requirement already satisfied: astunparse==1.6.3 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (1.6.3)\n",
  113. "Requirement already satisfied: absl-py>=0.7.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorflow==2.2->tods) (0.12.0)\n",
  114. "Requirement already satisfied: pyyaml in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from keras->tods) (5.4.1)\n",
  115. "Requirement already satisfied: matplotlib in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from pyod->tods) (3.3.4)\n",
  116. "Requirement already satisfied: numba>=0.35 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from pyod->tods) (0.53.1)\n",
  117. "Requirement already satisfied: pytz>=2017.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from pandas>=0.21->statsmodels==0.11.1->tods) (2021.1)\n",
  118. "Requirement already satisfied: python-dateutil>=2.6.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from pandas>=0.21->statsmodels==0.11.1->tods) (2.8.1)\n",
  119. "Requirement already satisfied: requests<3,>=2.21.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (2.23.0)\n",
  120. "Requirement already satisfied: werkzeug>=0.11.15 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (1.0.1)\n",
  121. "Requirement already satisfied: setuptools>=41.0.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (56.0.0)\n",
  122. "Requirement already satisfied: markdown>=2.6.8 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.3.4)\n",
  123. "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (1.8.0)\n",
  124. "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (0.4.4)\n",
  125. "Requirement already satisfied: google-auth<2,>=1.6.3 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (1.28.1)\n",
  126. "Requirement already satisfied: cycler>=0.10 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from matplotlib->pyod->tods) (0.10.0)\n",
  127. "Requirement already satisfied: kiwisolver>=1.0.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from matplotlib->pyod->tods) (1.3.1)\n",
  128. "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from matplotlib->pyod->tods) (2.4.7)\n",
  129. "Requirement already satisfied: llvmlite<0.37,>=0.36.0rc1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from numba>=0.35->pyod->tods) (0.36.0)\n",
  130. "Requirement already satisfied: certifi>=2017.4.17 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (2020.12.5)\n",
  131. "Requirement already satisfied: idna<3,>=2.5 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (2.10)\n",
  132. "Requirement already satisfied: chardet<4,>=3.0.2 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.0.4)\n"
  133. ]
  134. },
  135. {
  136. "name": "stdout",
  137. "output_type": "stream",
  138. "text": [
  139. "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (1.25.11)\n",
  140. "Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.10.0)\n",
  141. "Requirement already satisfied: requests-oauthlib>=0.7.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (1.3.0)\n",
  142. "Requirement already satisfied: rsa<5,>=3.1.4; python_version >= \"3.6\" in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (4.7.2)\n",
  143. "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (4.2.1)\n",
  144. "Requirement already satisfied: pyasn1-modules>=0.2.1 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (0.2.8)\n",
  145. "Requirement already satisfied: zipp>=0.5 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.4.1)\n",
  146. "Requirement already satisfied: typing-extensions>=3.6.4; python_version < \"3.8\" in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.7.4.3)\n",
  147. "Requirement already satisfied: oauthlib>=3.0.0 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (3.1.0)\n",
  148. "Requirement already satisfied: pyasn1>=0.1.3 in /Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages (from rsa<5,>=3.1.4; python_version >= \"3.6\"->google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.2->tods) (0.4.8)\n",
  149. "Installing collected packages: tods\n",
  150. " Attempting uninstall: tods\n",
  151. " Found existing installation: tods 0.0.2\n",
  152. " Uninstalling tods-0.0.2:\n",
  153. " Successfully uninstalled tods-0.0.2\n",
  154. " Running setup.py develop for tods\n",
  155. "Successfully installed tods\n"
  156. ]
  157. }
  158. ],
  159. "source": [
  160. "!pip install -e git+https://github.com/datamllab/tods.git#egg=tods"
  161. ]
  162. },
  163. {
  164. "cell_type": "code",
  165. "execution_count": 4,
  166. "metadata": {},
  167. "outputs": [
  168. {
  169. "name": "stdout",
  170. "output_type": "stream",
  171. "text": [
  172. "/Users/wangyanghe/Desktop/Research/Tods Notebook/src/tods\n"
  173. ]
  174. }
  175. ],
  176. "source": [
  177. "%cd src/tods"
  178. ]
  179. },
  180. {
  181. "cell_type": "code",
  182. "execution_count": 5,
  183. "metadata": {},
  184. "outputs": [
  185. {
  186. "name": "stdout",
  187. "output_type": "stream",
  188. "text": [
  189. "Branch 'wangyanghe' set up to track remote branch 'wangyanghe' from 'origin'.\r\n",
  190. "Switched to a new branch 'wangyanghe'\r\n"
  191. ]
  192. }
  193. ],
  194. "source": [
  195. "!git checkout wangyanghe"
  196. ]
  197. },
  198. {
  199. "cell_type": "code",
  200. "execution_count": 6,
  201. "metadata": {},
  202. "outputs": [
  203. {
  204. "name": "stdout",
  205. "output_type": "stream",
  206. "text": [
  207. "/Users/wangyanghe/Desktop/Research/Tods Notebook/src/tods/examples/sk_examples\n"
  208. ]
  209. }
  210. ],
  211. "source": [
  212. "%cd examples/sk_examples"
  213. ]
  214. },
  215. {
  216. "cell_type": "code",
  217. "execution_count": 7,
  218. "metadata": {},
  219. "outputs": [
  220. {
  221. "name": "stdout",
  222. "output_type": "stream",
  223. "text": [
  224. "500_UCR_Anomaly_robotDOG1_10000_19280_19360.txt\r\n",
  225. "DeepLog_test.py\r\n",
  226. "IsolationForest_test.py\r\n",
  227. "MatrixProfile_test.py\r\n",
  228. "Telemanom_test.py\r\n"
  229. ]
  230. }
  231. ],
  232. "source": [
  233. "!ls"
  234. ]
  235. },
  236. {
  237. "cell_type": "markdown",
  238. "metadata": {},
  239. "source": [
  240. "## Imports"
  241. ]
  242. },
  243. {
  244. "cell_type": "code",
  245. "execution_count": 51,
  246. "metadata": {},
  247. "outputs": [],
  248. "source": [
  249. "import sys\n",
  250. "import argparse\n",
  251. "import os\n",
  252. "import numpy as np\n",
  253. "import pandas as pd\n",
  254. "from sklearn.metrics import precision_recall_curve\n",
  255. "from sklearn.metrics import accuracy_score\n",
  256. "from sklearn.metrics import confusion_matrix\n",
  257. "from sklearn.metrics import classification_report\n",
  258. "import matplotlib.pyplot as plt\n",
  259. "from sklearn import metrics"
  260. ]
  261. },
  262. {
  263. "cell_type": "code",
  264. "execution_count": 11,
  265. "metadata": {},
  266. "outputs": [],
  267. "source": [
  268. "from tods.tods_skinterface.primitiveSKI.detection_algorithm.DeepLog_skinterface import DeepLogSKI"
  269. ]
  270. },
  271. {
  272. "cell_type": "code",
  273. "execution_count": 9,
  274. "metadata": {},
  275. "outputs": [
  276. {
  277. "name": "stderr",
  278. "output_type": "stream",
  279. "text": [
  280. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/deprecation.py:144: FutureWarning: The sklearn.preprocessing.data module is deprecated in version 0.22 and will be removed in version 0.24. The corresponding classes / functions should instead be imported from sklearn.preprocessing. Anything that cannot be imported from sklearn.preprocessing is now part of the private API.\n",
  281. " warnings.warn(message, FutureWarning)\n",
  282. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/deprecation.py:144: FutureWarning: The sklearn.decomposition.truncated_svd module is deprecated in version 0.22 and will be removed in version 0.24. The corresponding classes / functions should instead be imported from sklearn.decomposition. Anything that cannot be imported from sklearn.decomposition is now part of the private API.\n",
  283. " warnings.warn(message, FutureWarning)\n",
  284. "d3m.primitives.tods.detection_algorithm.LSTMODetector: Primitive is not providing a description through its docstring.\n"
  285. ]
  286. }
  287. ],
  288. "source": [
  289. "from tods.tods_skinterface.primitiveSKI.detection_algorithm.Telemanom_skinterface import TelemanomSKI"
  290. ]
  291. },
  292. {
  293. "cell_type": "code",
  294. "execution_count": 69,
  295. "metadata": {},
  296. "outputs": [],
  297. "source": [
  298. "from d3m import index\n",
  299. "from d3m.metadata.base import ArgumentType\n",
  300. "from d3m.metadata.pipeline import Pipeline, PrimitiveStep\n",
  301. "from axolotl.backend.simple import SimpleRunner\n",
  302. "from tods import generate_dataset, generate_problem\n",
  303. "from tods.searcher import BruteForceSearch"
  304. ]
  305. },
  306. {
  307. "cell_type": "code",
  308. "execution_count": 52,
  309. "metadata": {},
  310. "outputs": [],
  311. "source": [
  312. "from tods import generate_dataset, load_pipeline, evaluate_pipeline"
  313. ]
  314. },
  315. {
  316. "cell_type": "markdown",
  317. "metadata": {},
  318. "source": [
  319. "## Dataset"
  320. ]
  321. },
  322. {
  323. "cell_type": "markdown",
  324. "metadata": {},
  325. "source": [
  326. "### UCR Dataset"
  327. ]
  328. },
  329. {
  330. "cell_type": "code",
  331. "execution_count": 13,
  332. "metadata": {},
  333. "outputs": [],
  334. "source": [
  335. "data = np.loadtxt(\"./500_UCR_Anomaly_robotDOG1_10000_19280_19360.txt\")"
  336. ]
  337. },
  338. {
  339. "cell_type": "code",
  340. "execution_count": 14,
  341. "metadata": {},
  342. "outputs": [
  343. {
  344. "name": "stdout",
  345. "output_type": "stream",
  346. "text": [
  347. "shape: (20000,)\n",
  348. "datatype of data: float64\n",
  349. "First 5 rows:\n",
  350. " [0.145299 0.128205 0.094017 0.076923 0.111111]\n"
  351. ]
  352. }
  353. ],
  354. "source": [
  355. "print(\"shape:\", data.shape)\n",
  356. "print(\"datatype of data:\",data.dtype)\n",
  357. "print(\"First 5 rows:\\n\", data[:5])"
  358. ]
  359. },
  360. {
  361. "cell_type": "code",
  362. "execution_count": 15,
  363. "metadata": {},
  364. "outputs": [],
  365. "source": [
  366. "X_train = np.expand_dims(data[:10000], axis=1)\n",
  367. "X_test = np.expand_dims(data[10000:], axis=1)"
  368. ]
  369. },
  370. {
  371. "cell_type": "code",
  372. "execution_count": 16,
  373. "metadata": {},
  374. "outputs": [
  375. {
  376. "name": "stdout",
  377. "output_type": "stream",
  378. "text": [
  379. "First 5 rows train:\n",
  380. " [[0.145299]\n",
  381. " [0.128205]\n",
  382. " [0.094017]\n",
  383. " [0.076923]\n",
  384. " [0.111111]]\n",
  385. "First 5 rows test:\n",
  386. " [[0.076923]\n",
  387. " [0.076923]\n",
  388. " [0.076923]\n",
  389. " [0.094017]\n",
  390. " [0.145299]]\n"
  391. ]
  392. }
  393. ],
  394. "source": [
  395. "print(\"First 5 rows train:\\n\", X_train[:5])\n",
  396. "print(\"First 5 rows test:\\n\", X_test[:5])"
  397. ]
  398. },
  399. {
  400. "cell_type": "markdown",
  401. "metadata": {},
  402. "source": [
  403. "### Yahoo Dataset"
  404. ]
  405. },
  406. {
  407. "cell_type": "code",
  408. "execution_count": 17,
  409. "metadata": {},
  410. "outputs": [],
  411. "source": [
  412. "data_yahoo = pd.read_csv('../../datasets/anomaly/raw_data/yahoo_sub_5.csv')"
  413. ]
  414. },
  415. {
  416. "cell_type": "code",
  417. "execution_count": 18,
  418. "metadata": {},
  419. "outputs": [
  420. {
  421. "name": "stdout",
  422. "output_type": "stream",
  423. "text": [
  424. "shape: (1400, 7)\n",
  425. "First 5 rows:\n",
  426. " timestamp value_0 value_1 value_2 value_3 value_4 anomaly\n",
  427. "0 1 12183 0.000000 3.716667 5 2109 0\n",
  428. "1 2 12715 0.091758 3.610833 60 3229 0\n",
  429. "2 3 12736 0.172297 3.481389 88 3637 0\n",
  430. "3 4 12716 0.226219 3.380278 84 1982 0\n",
  431. "4 5 12739 0.176358 3.193333 111 2751 0\n"
  432. ]
  433. }
  434. ],
  435. "source": [
  436. "print(\"shape:\", data_yahoo.shape)\n",
  437. "print(\"First 5 rows:\\n\", data_yahoo[:5])"
  438. ]
  439. },
  440. {
  441. "cell_type": "markdown",
  442. "metadata": {},
  443. "source": [
  444. "## SK Example 1: DeepLog"
  445. ]
  446. },
  447. {
  448. "cell_type": "code",
  449. "execution_count": 19,
  450. "metadata": {},
  451. "outputs": [
  452. {
  453. "name": "stdout",
  454. "output_type": "stream",
  455. "text": [
  456. "Epoch 1/10\n",
  457. "282/282 [==============================] - 1s 5ms/step - loss: 0.4239 - val_loss: 0.2694\n",
  458. "Epoch 2/10\n",
  459. "282/282 [==============================] - 1s 2ms/step - loss: 0.3344 - val_loss: 0.2818\n",
  460. "Epoch 3/10\n",
  461. "282/282 [==============================] - 1s 2ms/step - loss: 0.3444 - val_loss: 0.2806\n",
  462. "Epoch 4/10\n",
  463. "282/282 [==============================] - 1s 2ms/step - loss: 0.3575 - val_loss: 0.2731\n",
  464. "Epoch 5/10\n",
  465. "282/282 [==============================] - 1s 2ms/step - loss: 0.3364 - val_loss: 0.2783\n",
  466. "Epoch 6/10\n",
  467. "282/282 [==============================] - 1s 2ms/step - loss: 0.3447 - val_loss: 0.2742\n",
  468. "Epoch 7/10\n",
  469. "282/282 [==============================] - 1s 2ms/step - loss: 0.3357 - val_loss: 0.2586\n",
  470. "Epoch 8/10\n",
  471. "282/282 [==============================] - 1s 2ms/step - loss: 0.3392 - val_loss: 0.2804\n",
  472. "Epoch 9/10\n",
  473. "282/282 [==============================] - 1s 2ms/step - loss: 0.3442 - val_loss: 0.2691\n",
  474. "Epoch 10/10\n",
  475. "282/282 [==============================] - 1s 2ms/step - loss: 0.3475 - val_loss: 0.2683\n"
  476. ]
  477. },
  478. {
  479. "name": "stderr",
  480. "output_type": "stream",
  481. "text": [
  482. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  483. " \"argument is ignored.\", FutureWarning)\n",
  484. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  485. " \"argument is ignored.\", FutureWarning)\n",
  486. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  487. " \"argument is ignored.\", FutureWarning)\n"
  488. ]
  489. }
  490. ],
  491. "source": [
  492. "transformer = DeepLogSKI()\n",
  493. "transformer.fit(X_train)\n",
  494. "prediction_labels_train = transformer.predict(X_train)\n",
  495. "prediction_labels_test = transformer.predict(X_test)\n",
  496. "prediction_score = transformer.predict_score(X_test)"
  497. ]
  498. },
  499. {
  500. "cell_type": "code",
  501. "execution_count": 20,
  502. "metadata": {},
  503. "outputs": [
  504. {
  505. "name": "stdout",
  506. "output_type": "stream",
  507. "text": [
  508. "Primitive: d3m.primitives.tods.detection_algorithm.deeplog(hyperparams=Hyperparams({'contamination': 0.1, 'window_size': 1, 'step_size': 1, 'return_subseq_inds': False, 'use_columns': (), 'exclude_columns': (), 'return_result': 'new', 'use_semantic_types': False, 'add_index_columns': False, 'error_on_no_input': True, 'return_semantic_type': 'https://metadata.datadrivendiscovery.org/types/Attribute', 'hidden_size': 64, 'loss': 'mean_squared_error', 'optimizer': 'Adam', 'epochs': 10, 'batch_size': 32, 'dropout_rate': 0.2, 'l2_regularizer': 0.1, 'validation_size': 0.1, 'features': 1, 'stacked_layers': 1, 'preprocessing': True, 'verbose': 1}), random_seed=0)\n",
  509. "Prediction Labels\n",
  510. " [[0]\n",
  511. " [0]\n",
  512. " [0]\n",
  513. " ...\n",
  514. " [0]\n",
  515. " [0]\n",
  516. " [0]]\n",
  517. "Prediction Score\n",
  518. " [[0. ]\n",
  519. " [0.3569443 ]\n",
  520. " [0.3569443 ]\n",
  521. " ...\n",
  522. " [0.77054234]\n",
  523. " [0.4575615 ]\n",
  524. " [0.17499346]]\n"
  525. ]
  526. }
  527. ],
  528. "source": [
  529. "print(\"Primitive: \", transformer.primitive)\n",
  530. "print(\"Prediction Labels\\n\", prediction_labels_test)\n",
  531. "print(\"Prediction Score\\n\", prediction_score)"
  532. ]
  533. },
  534. {
  535. "cell_type": "code",
  536. "execution_count": 21,
  537. "metadata": {},
  538. "outputs": [],
  539. "source": [
  540. "y_true = prediction_labels_train\n",
  541. "y_pred = prediction_labels_test\n",
  542. "precision, recall, thresholds = precision_recall_curve(y_true, y_pred)\n",
  543. "f1_scores = 2*recall*precision/(recall+precision)\n",
  544. "fpr, tpr, threshold = metrics.roc_curve(y_true, y_pred)\n",
  545. "roc_auc = metrics.auc(fpr, tpr)"
  546. ]
  547. },
  548. {
  549. "cell_type": "code",
  550. "execution_count": 22,
  551. "metadata": {},
  552. "outputs": [
  553. {
  554. "name": "stdout",
  555. "output_type": "stream",
  556. "text": [
  557. "Accuracy Score: 0.903\n"
  558. ]
  559. }
  560. ],
  561. "source": [
  562. "print('Accuracy Score: ', accuracy_score(y_true, y_pred))"
  563. ]
  564. },
  565. {
  566. "cell_type": "code",
  567. "execution_count": 23,
  568. "metadata": {},
  569. "outputs": [
  570. {
  571. "data": {
  572. "text/plain": [
  573. "array([[8646, 358],\n",
  574. " [ 612, 384]])"
  575. ]
  576. },
  577. "execution_count": 23,
  578. "metadata": {},
  579. "output_type": "execute_result"
  580. }
  581. ],
  582. "source": [
  583. "confusion_matrix(y_true, y_pred)"
  584. ]
  585. },
  586. {
  587. "cell_type": "code",
  588. "execution_count": 24,
  589. "metadata": {},
  590. "outputs": [
  591. {
  592. "name": "stdout",
  593. "output_type": "stream",
  594. "text": [
  595. " precision recall f1-score support\n",
  596. "\n",
  597. " 0 0.93 0.96 0.95 9004\n",
  598. " 1 0.52 0.39 0.44 996\n",
  599. "\n",
  600. " accuracy 0.90 10000\n",
  601. " macro avg 0.73 0.67 0.69 10000\n",
  602. "weighted avg 0.89 0.90 0.90 10000\n",
  603. "\n"
  604. ]
  605. }
  606. ],
  607. "source": [
  608. "print(classification_report(y_true, y_pred))"
  609. ]
  610. },
  611. {
  612. "cell_type": "code",
  613. "execution_count": 25,
  614. "metadata": {},
  615. "outputs": [
  616. {
  617. "name": "stdout",
  618. "output_type": "stream",
  619. "text": [
  620. "Best threshold: 1\n",
  621. "Best F1-Score: 0.4418872266973533\n"
  622. ]
  623. }
  624. ],
  625. "source": [
  626. "print('Best threshold: ', thresholds[np.argmax(f1_scores)])\n",
  627. "print('Best F1-Score: ', np.max(f1_scores))"
  628. ]
  629. },
  630. {
  631. "cell_type": "code",
  632. "execution_count": 26,
  633. "metadata": {},
  634. "outputs": [
  635. {
  636. "data": {
  637. "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAq5UlEQVR4nO3deZgU1bnH8e/L4owLoAJuLIICCnFBmWhQ476gQQkBEY0acMGIJka9XMk1V40ak7gkamRkE0FUEDUKGoVoRDHeqOCGwAAiCAygEBYVWWR57x+nxgzDLA0z1TUz9fs8zzzTVX26+y2WfqvOOfUec3dERCS96iQdgIiIJEuJQEQk5ZQIRERSTolARCTllAhERFJOiUBEJOWUCEREUk6JQKQcZvaZma03s7Vm9rmZjTSzPYo9f5yZvWZmX5vZl2b2gpl1KPEeDc3sfjNbFL3Pp9F2k+wfkcj2lAhEKnauu+8BdASOAn4NYGadgb8D44EDgNbAR8BbZnZQ1GYX4B/A94AuQEOgM7ASOCarRyFSBtOdxSJlM7PPgCvc/dVo+27ge+7+IzN7E/jY3fuXeM3LwAp3v9TMrgB+Bxzs7muzHL5IRnRFIJIhM2sOnA3MM7PdgOOAp0tpOg44I3p8OjBRSUCqMyUCkYo9b2ZfA4uB5cCtwN6E/z/LSmm/DCjq/29cRhuRakOJQKRiP3b3BsDJwKGEL/nVwFZg/1La7w/8O3q8sow2ItWGEoFIhtz9DWAkcK+7fwP8Czi/lKa9CAPEAK8CZ5nZ7lkJUmQnKBGI7Jj7gTPM7EhgIPAzM/ulmTUws73M7E7CrKDfRu1HE7qUnjWzQ82sjpk1NrP/MbNzEjkCkRKUCER2gLuvAB4DbnH3fwJnAT8hjAMsJEwvPcHdP4nabyQMGM8GXgG+At4ldC+9k/UDECmFpo+KiKScrghERFJOiUBEJOWUCEREUk6JQEQk5eolHcCOatKkibdq1SrpMEREapT33nvv3+7etLTnalwiaNWqFdOmTUs6DBGRGsXMFpb1nLqGRERSTolARCTllAhERFJOiUBEJOWUCEREUi62RGBmI8xsuZnNKON5M7MHzWyemU03s6PjikVERMoW5xXBSMJi3WU5G2gb/fQDHo4xFhERKUNsicDdpwCrymnSDXjMg7eBPc1MKzmJiJRQWAi33AIFBfG8f5I3lDUjLNhRpDDat936rmbWj3DVQMuWLbMSnIhIktzhtdcgPx/Gj4etW2H//aF9+6r/rBoxWOzuQ909z93zmjYt9Q5pEZFaYc0aePDB8IV/+unwxhtw443w6adw9dXxfGaSVwRLgBbFtptH+0REUuejj8LZ/+OPw7p1cOyxMGoU9OoFubnxfnaSiWACcK2ZjQWOBb509+26hUREaquNG+HZZ0MCeOut8IV/0UXQvz906pS9OGJLBGY2BjgZaGJmhcCtQH0Adx8MvAScA8wD1gF944pFRKQ6WbQIhgyB4cNh+XJo0wbuuw/69IG9985+PLElAne/sILnHbgmrs8XEalOtm6FV18NZ/8vvBD2de0K11wTxgLqJDhiW+PKUIuI1CSrV8PIkfDww/DJJ9C0Kdx0E1x1FRx4YNLRBUoEIiIxeP99GDQIxoyB9evhuOPgttugRw/IyUk6um0pEYiIVJENG+Dpp0MCeOcd2G03uOSSMO2zY8ekoyubEoGISCUtWACDB8Mjj8DKlXDIIfDAA3DppbDnnklHVzElAhGRnbB1K0yaFM7+X3opDPZ26xamfp56KpglHWHmlAhERHbAypUwYkS4Apg/H/bdF37zG+jXD5o3Tzq6naNEICKSgalTw9n/2LHhRrATT4S77oLu3WGXXZKOrnKUCEREyrB+ffjiz8+HadNgjz3gssvC4O/hhycdXdVRIhARKWHevND1M2JEuA+gQwd46KEwA6hhw6Sjq3pKBCIiwJYtYdA3Px8mToR69UK3T//+cNJJNWv
wd0cpEYhIqq1YEaZ9Dh4MCxfCAQeEG7+uvDI8TgMlAhFJHXd4++1w9j9uHHz7LZxyCtx7b5gCWr9+0hFmlxKBiKTGN9+Ekg/5+fDBB9CgQZj2efXVYRwgrZQIRKTWmzs3FH179FH48ssw4+fhh+Hii8NMoLRTIhCRWmnzZnjxxXD2/8orobunR49Q9vn442v34O+OUiIQkVrliy/Cgi9DhsDixeFu3zvugCuugP32Szq66kmJQERqPPew1OOgQWHpx02bwmIvDz4YFn+pp2+6cumPR0RqrLVr4YknQvfP9OnQqFHo+vn5z0MFUMmMEoGI1DgFBeHLf9Qo+PrrUOt/2DC48ELYffeko6t5lAhEpEbYtAkmTAjdP5Mnh0JvvXqFO39/8AMN/laGEoGIVGvLlsHQoeFn6dKwzu/vfx+Kv+2zT9LR1Q5KBCJS7bjDlCnh7P+558JU0C5dQhmIc86BunWTjrB2USIQkWrjq69g9OjQ/z9rFuy1F1x3XRj8bdMm6ehqLyUCEUncjBnhy3/06DATqFOnUAL6ggvCAvASLyUCEUnEt9+Gbp/8/NANlJMTvvivuQa+/30N/maTEoGIZFVhYRj4HTYMPv8cWreGu++Gvn2hSZOko0snJQIRiZ07vPZaOPsfPx62bg2Dvv37w1lnafA3aUoEIhKbNWvgscdCApgzBxo3hhtvhKuugoMOSjo6KaJEICJV7qOPwpf/44/DunVw7LHhLuBevSA3N+nopCQlAhGpEhs3hoJv+fmhAFxuLlx0Uej+6dQp6eikPEoEIlIpixaFks/Dh8Py5WG+/333QZ8+sPfeSUcnmagT55ubWRczm2Nm88xsYCnPtzSzyWb2gZlNN7Nz4oxHRKrG1q3w97/Dj38cZv384Q+h3s/EiWEs4IYblARqktiuCMysLjAIOAMoBKaa2QR3n1Ws2W+Ace7+sJl1AF4CWsUVk4hUzurVMHJkWObxk0+gaVO46aYw+HvggUlHJzsrzq6hY4B57j4fwMzGAt2A4onAgYbR40bA0hjjEZGd9P77oe7PmDGwfj0cdxzceiv07BluBJOaLc5E0AxYXGy7EDi2RJvbgL+b2S+A3YHTS3sjM+sH9ANo2bJllQcqItvbsAGefjokgHfeCaUeLr44DP527Jh0dFKVYh0jyMCFwEh3bw6cA4w2s+1icveh7p7n7nlNmzbNepAiabJgQejuad4cLr00dAfdfz8sWRLuCFYSqH3ivCJYArQott082lfc5UAXAHf/l5nlAk2A5THGJSIlbN0KkyaFs/+XXgp1frp1C3V/Tj1VdX9quzgTwVSgrZm1JiSA3sBFJdosAk4DRppZeyAXWBFjTCJSzMqVocrn4MEwfz7suy/cfDP06wctWlT8eqkdYksE7r7ZzK4FJgF1gRHuPtPMbgemufsE4EZgmJldTxg47uPuHldMIhJMnRrO/seODTeC/fCHcNdd0L17WAJS0iXWG8rc/SXClNDi+24p9ngWcHycMYhIsH59+OLPz4dp08Ii7337hsHfww9POjpJku4sFqnl5s0LXT8jRoSB3/bt4aGH4JJLoGHDil8vtZ8SgUgttGVLGPTNzw93+9atG7p9rrkGTjpJg7+yLSUCkVpkxQp45JFwBbBwIey/P9x2G1x5JRxwQNLRSXWlRCBSw7nD22+Hs/9x48ISkKecAvfeG6aA1q+fdIRS3SkRiNRQ33wTSj7k58MHH0CDBmHa59VXQ4cOSUcnNYkSgUgNM3duKPr26KPw5Zdw2GFh+6c/DclAZEcpEYjUAJs3w4svhrP/V16BevVCwbf+/eGEEzT4K5WjRCBSjX3xRVjwZcgQWLw41P+54w644grYb7+ko5PaQolApJpxD0s9DhoUln7ctAlOPx0eeADOPTdcDYhUJf2TEqkm1q6FJ54I3T/Tp0OjRqHr5+qr4ZBDko5OarOME4GZ7ebu6+IMRiSNCgrCl/+oUfD113DkkaHc80UXhTIQInGrMBG
Y2XHAcGAPoKWZHQlc5e794w5OpLbatAkmTAjdP5Mnh0Jv558f7vz9wQ80+CvZlckVwZ+Bs4AJAO7+kZmdGGtUIrXUsmXhbH/oUFi6FFq2DFU/L78c9tkn6egkrTLqGnL3xbbtKcqWeMIRqX3cYcqUcPb/3HNhKuhZZ4UyEOecE+oAiSQpk0SwOOoecjOrD1wHFMQblkjN99VXMHp06P+fNQv22gt++csw+NumTdLRifxHJong58ADhMXolwB/BzQ+IFKGGTPCl//o0WEmUKdOoQT0BReEBeBFqptMEsEh7v7T4jvM7HjgrXhCEql5vv02dPvk54duoJyc8MV/zTXw/e9r8Feqt0wSwV+AozPYJ5I6hYVh4HfYMPj8c2jdGu6+O6z81aRJ0tGJZKbMRGBmnYHjgKZmdkOxpxoS1iAWSSV3eO21cPY/fjxs3RoGffv3D4PAGvyVmqa8K4JdCPcO1AOK1zT8CugZZ1Ai1dGaNfDYYyEBzJkDjRvDjTfCVVfBQQclHZ3IziszEbj7G8AbZjbS3RdmMSaRauWjj8KX/+OPw7p1cOyx4S7gXr0gNzfp6EQqL5MxgnVmdg/wPeC7f/bufmpsUYkkbOPGUPAtPz8UgMvNDSUf+vcPs4BEapNMEsETwFNAV8JU0p8BK+IMSiQpixaFks/Dh8Py5WG+/333QZ8+sPfeSUcnEo9MEkFjd3/EzK4r1l00Ne7ARLJl61Z49dVw9v/CC2Ff167h7P+MM6BOnWTjE4lbJolgU/R7mZn9CFgK6NxIarzVq2HkyLDM4yefQNOmcNNNYfD3wAOTjk4kezJJBHeaWSPgRsL9Aw2BX8UZlEic3n8/1P0ZMwbWr4fjjoNbbw1LP+bkJB2dSPZVmAjc/cXo4ZfAKfDdncUiNcaGDfD00yEBvPNOKPVw8cWh+6djx6SjE0lWeTeU1QV6EWoMTXT3GWbWFfgfYFfgqOyEKLLzFiwIVT4feQRWroR27eD+++FnP4M990w6OpHqobwrgkeAFsC7wINmthTIAwa6+/NZiE1kp2zdCpMmhbP/l14KdX66dQt1f049VXV/REoqLxHkAUe4+1YzywU+Bw5295XZCU1kx6xcGap8Dh4M8+fDvvvCzTdDv37QokXS0YlUX+VNjPvW3bcCuPsGYP6OJgEz62Jmc8xsnpkNLKNNLzObZWYzzezJHXl/EYCpU8M8/2bN4L//O/weOzbcE3DHHUoCIhUp74rgUDObHj024OBo2wB39yPKe+NojGEQcAZQCEw1swnuPqtYm7bAr4Hj3X21mWmxPsnI+vXhyz4/H6ZNC4u89+0bBn8PPzzp6ERqlvISQftKvvcxwDx3nw9gZmOBbsCsYm2uBAa5+2oAd19eyc+UWm7evND1M2JEuA+gfXt46CG45BJo2DDp6ERqpvKKzlW20FwzYHGx7ULg2BJt2gGY2VuE0ta3ufvEkm9kZv2AfgAtW7asZFhS02zZEgZ98/Nh4sRQ5rl79zD4e9JJGvwVqayMFq+P+fPbAicDzYEpZna4u68p3sjdhwJDAfLy8jzLMUpCVqwI0z4HD4aFC2H//eG22+DKK+GAA5KOTqT2iDMRLCFMPy3SPNpXXCHwjrtvAhaY2VxCYlAto5Ryh7ffDmf/48aFJSBPPhnuvTdMAa1fP+kIRWqfjMppmdmuZnbIDr73VKCtmbU2s12A3sCEEm2eJ1wNYGZNCF1F83fwc6QW+OabUPGzU6dQ8mH8+HDmP3MmTJ4cyj8oCYjEo8JEYGbnAh8CE6PtjmZW8gt9O+6+GbgWmAQUAOPcfaaZ3W5m50XNJgErzWwWMBkYoPsU0mXuXLj++jDl88orYdOmUARuyZIwCNyhQ9IRitR+5l5+l7uZvQecCrzu7kdF+z5290Qm6eXl5fm0adOS+GipIps3w4svhu6fV16BevWgR48w+HvCCRr8FYmDmb3n7nmlPZdRGWp3/9K2/d+pAVvZYV98Ebp/hgyBxYuhefNww9c
VV8B++yUdnUh6ZZIIZprZRUDd6AawXwL/F29YUlu4h6UeBw0KSz9u2gSnnQYPPADnnhuuBkQkWZn8N/wFcDOwEXiS0K9/Z5xBSc23di088UTo/pk+HRo1Cnf9/vzncOihSUcnIsVlkggOdfebCclApFwFBeHLf9Qo+PprOPJIGDo0LPy+++5JRycipckkEdxnZvsBzwBPufuMmGOSGmbTJpgwIXT/TJ4Mu+wC558frgA6d9bgr0h1l8kKZadEiaAXMMTMGhISgrqHUm7ZsnC2P3QoLF0KLVvCXXfB5ZfDPiofKFJjZDRU5+6fExanmQz8N3ALGidIJXeYMiWc/T/3XJgKetZZYe7/j34U6gCJSM1SYSIws/bABUAPYCXwFGEhe0mRr76C0aND//+sWWGZx1/+Mgz+tm2bdHQiUhmZXBGMIHz5n+XuS2OOR6qZGTPCl//o0WEm0NFHh0JwvXuHBeBFpObLZIygczYCkerj229Dt09+fugGysmBCy4Ig7/HHKPBX5HapsxEYGbj3L2XmX3MtncSZ7RCmdQ8hYVh4HfYMPj8c2jVCv74R7jsMmjSJOnoRCQu5V0RXBf97pqNQCQZ7mHK56BBoeLn1q1w9tnh7L9LFw3+iqRBeSuULYse9nf3m4o/Z2Z/BG7a/lVSU3z5Zbjp6+GHYfZs2HtvuOGGMPh70EFJRyci2ZTJegRnlLLv7KoORLLjo4/gqqvCCl/XXRfW+R01KnQL3X23koBIGpU3RnA10B84yMymF3uqAfBW3IFJ1dm4MRR8y88PBeByc+HCC0P3T16pRWlFJE3KGyN4EngZ+D0wsNj+r919VaxRSZVYtCiUfB4+HJYvh4MPhvvugz59QleQiAiUnwjc3T8zs2tKPmFmeysZVE9bt8Krr4az/xdeCIPBXbuGRV/OOAPqZLQ4qYikSUVXBF2B9wjTR4vPHndAvcnVyOrVMHJkGPz95BNo2hRuugn69QvTQEVEylLerKGu0e/W2QtHdtT774ez/yefhPXrQ7XPW28Ni73n5CQdnYjUBJnUGjoe+NDdvzGzi4GjgfvdfVHs0UmpNmyAp58OCeDtt0Oph4svhquvhqOOSjo6EalpMukxfhhYZ2ZHEorNfQqMjjUqKdWCBTBwILRoAZdeCqtWwf33w5Il4Y5gJQER2RmZFJ3b7O5uZt2Ah9z9ETO7PO7AJNi6FSZNCmf/f/tbqPPTrVuY+nnaaar7IyKVl0ki+NrMfg1cAvzQzOoA9eMNS1auhEcfDYO/8+fDvvvCzTeHwd8WLZKOTkRqk0wSwQXARcBl7v65mbUE7ok3rPSaOjWc/Y8dG8YCfvhD+N3v4Cc/CUtAiohUtUzKUH9uZk8A3zezrsC77v5Y/KGlx/r18NRTofDbtGlhkfc+fcLg7xGq8SoiMatwsNjMegHvAucT1i1+x8x6xh1YGnz6KQwYAM2bQ9++8M038Je/hPV/H35YSUBEsiOTrqGbge+7+3IAM2sKvAo8E2dgtdWWLfDyy+Hsf+LEUOa5e/cw+HvyyRr8FZHsyyQR1ClKApGVZDbtVIpZsSIs8Th4MCxcCPvvH278uvJKaNYs6ehEJM0ySQQTzWwSMCbavgB4Kb6Qag93eOedcPY/blxYAvLkk+Hee8MU0PqaeyUi1UAmg8UDzOwnwAnRrqHu/ly8YdVs69aFkg/5+fDBB9CgQTjz798fOnRIOjoRkW2Vtx5BW+Be4GDgY+C/3H1JtgKriebODYO8I0fCmjVw2GFh+6c/DclARKQ6Kq+vfwTwItCDUIH0Lzv65mbWxczmmNk8MxtYTrseZuZmVuOWSdm8GZ5/Hs48Ew45BB56CM46C6ZMgenTw9KPSgIiUp2V1zXUwN2HRY/nmNn7O/LGZlYXGERY6rIQmGpmE9x9Vol2DYDrgHd25P2T9sUXYcGXIUNg8eIwBfSOO+CKK2C//ZKOTkQkc+UlglwzO4r/rEOwa/Ftd68oMRwDzHP3+QBmNhboBsw
q0e4O4I/AgB2MPevcw1KP+fnwzDOwaVOo9/PAA3DuuVAvk6F3EZFqpryvrmXAn4ptf15s24FTK3jvZsDiYtuFwLHFG5jZ0UALd/+bmZWZCMysH9APoGXLlhV8bNVbuxaeeCIkgOnToVGjMPD785/DoYdmPRwRkSpV3sI0p8T5wVHxuj8BfSpq6+5DgaEAeXl5HmdcxRUUhMHeUaPgq6/gyCNDueeLLgplIEREaoM4OzOWAMXrZDaP9hVpABwGvG7hdtr9gAlmdp67T4sxroy8+SacdFKY63/++eEKoHNn3fkrIrVPnIlgKtDWzFoTEkBvQhVTANz9S6BJ0baZvU6Yopp4EgB47bXwe8ECOOCAZGMREYlTbKUi3H0zcC0wCSgAxrn7TDO73czOi+tzq0pBARx4oJKAiNR+maxZbMBPgYPc/fZoPYL93P3dil7r7i9RohyFu99SRtuTM4o4S2bPhvbtk45CRCR+mVwR5AOdgQuj7a8J9wfUWlu2wJw5SgQikg6ZjBEc6+5Hm9kHAO6+2sxq9VpZCxeG1cE0NVRE0iCTK4JN0V3CDt+tR7A11qgSNnt2+K0rAhFJg0wSwYPAc8A+ZvY74J/AXbFGlbCCgvBbiUBE0iCTMtRPmNl7wGmE8hI/dveC2CNLUEEBNGkCjRsnHYmISPwymTXUElgHvFB8n7svijOwJGnGkIikSSaDxX8jjA8YkAu0BuYA34sxrsS4hyuCnj2TjkREJDsy6Ro6vPh2VCiuf2wRJezf/4ZVqzRjSETSY4fvLI7KTx9bYcMaSgPFIpI2mYwR3FBssw5wNLA0togSpkQgImmTyRhB8YUWNxPGDJ6NJ5zkzZ4Nu+0GLVpU3FZEpDYoNxFEN5I1cPf/ylI8iSsoCGsP14mtHJ+ISPVS5tedmdVz9y3A8VmMJ3EFBeoWEpF0Ke+K4F3CeMCHZjYBeBr4puhJd/9rzLFl3TffwKJFmjEkIumSyRhBLrCSsEZx0f0EDtS6RDBnTvitKwIRSZPyEsE+0YyhGfwnARTJ2rrB2VQ0Y0hXBCKSJuUlgrrAHmybAIrUykQwe3YYJG7bNulIRESyp7xEsMzdb89aJNVAQQEcfDDk5CQdiYhI9pQ3SbK0K4FaraBA3UIikj7lJYLTshZFNbB5M3zyiQaKRSR9ykwE7r4qm4Ekbf582LRJiUBE0kf3z0Y0Y0hE0kqJIKJ1ikUkrZQIIgUFsP/+0KhR0pGIiGSXEkFk9mx1C4lIOikR8J/lKdUtJCJppEQALFsGX32lRCAi6aREwH8GitU1JCJppESAlqcUkXRTIiAkggYN4IADko5ERCT7Yk0EZtbFzOaY2TwzG1jK8zeY2Swzm25m/zCzA+OMpyxFM4YsddWVRERiTATReseDgLOBDsCFZtahRLMPgDx3PwJ4Brg7rnjKoxlDIpJmcV4RHAPMc/f57v4tMBboVryBu09293XR5ttA8xjjKdWXX8LSpRooFpH0ijMRNAMWF9sujPaV5XLg5dKeMLN+ZjbNzKatWLGiCkPU8pQiItVisNjMLgbygHtKe97dh7p7nrvnNW3atEo/WzOGRCTtMlm8fmctAVoU224e7duGmZ0O3Ayc5O4bY4ynVAUFUK8eHHRQtj9ZRKR6iPOKYCrQ1sxam9kuQG9gQvEGZnYUMAQ4z92XxxhLmWbPDmsU16+fxKeLiCQvtkTg7puBa4FJQAEwzt1nmtntZnZe1OweYA/gaTP70MwmlPF2sdGMIRFJuzi7hnD3l4CXSuy7pdjj0+P8/Ip8+y18+in07JlkFCIiyaoWg8VJmTcPtmzRFYGIpFuqE4FmDImIpDwRFFUdPeSQZOMQEUlSqhNBQQG0aAF77JF0JCIiyUl9IlC3kIikXWoTwdatWqdYRARSnAgKC2HdOl0RiIikNhFoxpCISJDaRKB1ikVEgtQmgoIC2Gsv2GefpCMREUlWqhNB+/ZanlJEJLWJQDOGRESCVCaCVatg+XINFIuIQEo
TQdGMIV0RiIikNBEUzRjSFYGISEoTQUEB5ORAq1ZJRyIikrzUJoJ27aBu3aQjERFJXioTwezZ6hYSESmSukSwfj0sWKBEICJSJHWJYO5ccNeMIRGRIqlLBJoxJCKyrXpJB5BtBQWhrES7dklHIlL7bdq0icLCQjZs2JB0KKmRm5tL8+bNqV+/fsavSV0imD07TBvdddekIxGp/QoLC2nQoAGtWrXCVNgrdu7OypUrKSwspHXr1hm/LnVdQ1qeUiR7NmzYQOPGjZUEssTMaNy48Q5fgaUqEWzZAnPmKBGIZJOSQHbtzJ93qhLBwoWwcaNmDImIFJeqRKDlKUXS6fnnn8fMmF00bRB4/fXX6dq16zbt+vTpwzPPPAOEge6BAwfStm1bjj76aDp37szLL79c6Vh+//vf06ZNGw455BAmTZpUaht35+abb6Zdu3a0b9+eBx98EIB77rmHjh070rFjRw477DDq1q3LqlWrKh1TqgaLVXVUJJ3GjBnDCSecwJgxY/jtb3+b0Wv+93//l2XLljFjxgxycnL44osveOONNyoVx6xZsxg7diwzZ85k6dKlnH766cydO5e6JerdjBw5ksWLFzN79mzq1KnD8uXLARgwYAADBgwA4IUXXuDPf/4ze++9d6VigpQlgtmzoWlTaNw46UhE0udXv4IPP6za9+zYEe6/v/w2a9eu5Z///CeTJ0/m3HPPzSgRrFu3jmHDhrFgwQJycnIA2HfffenVq1el4h0/fjy9e/cmJyeH1q1b06ZNG9599106d+68TbuHH36YJ598kjp1QqfNPqWsqTtmzBguvPDCSsVTJHVdQ+oWEkmX8ePH06VLF9q1a0fjxo157733KnzNvHnzaNmyJQ0bNqyw7fXXX/9dd03xnz/84Q/btV2yZAktWrT4brt58+YsWbJku3affvopTz31FHl5eZx99tl88skn2zy/bt06Jk6cSI8ePSqMLxOpuSJwD4ng/POTjkQknSo6c4/LmDFjuO666wDo3bs3Y8aMoVOnTmXOrtnRWTd//vOfKx1jSRs3biQ3N5dp06bx17/+lcsuu4w333zzu+dfeOEFjj/++CrpFoKYE4GZdQEeAOoCw939DyWezwEeAzoBK4EL3P2zOGJZsQJWr9YVgUiarFq1itdee42PP/4YM2PLli2YGffccw+NGzdm9erV27Vv0qQJbdq0YdGiRXz11VcVXhVcf/31TJ48ebv9vXv3ZuDAgdvsa9asGYsXL/5uu7CwkGbNmm332ubNm/OTn/wEgO7du9O3b99tnh87dmyVdQsBYXQ6jh/Cl/+nwEHALsBHQIcSbfoDg6PHvYGnKnrfTp06+c54/XV3cJ84cadeLiI7YdasWYl+/pAhQ7xfv37b7DvxxBP9jTfe8A0bNnirVq2+i/Gzzz7zli1b+po1a9zdfcCAAd6nTx/fuHGju7svX77cx40bV6l4ZsyY4UcccYRv2LDB58+f761bt/bNmzdv1+6mm27yRx55xN3dJ0+e7Hl5ed89t2bNGt9rr7187dq1ZX5OaX/uwDQv43s1zjGCY4B57j7f3b8FxgLdSrTpBoyKHj8DnGYx3X2iGUMi6TNmzBi6d+++zb4ePXowZswYcnJyePzxx+nbty8dO3akZ8+eDB8+nEaNGgFw55130rRpUzp06MBhhx1G165dMxozKM/3vvc9evXqRYcOHejSpQuDBg36bsbQOeecw9KlSwEYOHAgzz77LIcffji//vWvGT58+Hfv8dxzz3HmmWey++67VyqW4iwkiqpnZj2BLu5+RbR9CXCsu19brM2MqE1htP1p1ObfJd6rH9APoGXLlp0WLly4w/GMHw+PPgp//SvUSdUQuUhyCgoKaK/+2Kwr7c/dzN5z97zS2teIr0R3H+ruee6e17Rp0516j27d4PnnlQREREqK82txCdCi2HbzaF+pbcysHtCIMGgsIiJZEmcimAq0NbPWZrYLYTB4Qok2E4CfRY97Aq95XH1
VIpII/ZfOrp35844tEbj7ZuBaYBJQAIxz95lmdruZnRc1ewRobGbzgBuAgaW/m4jURLm5uaxcuVLJIEs8Wo8gNzd3h14X22BxXPLy8nzatGlJhyEiGdAKZdlX1gpl5Q0Wp+bOYhHJvvr16+/QSlmSDM2hERFJOSUCEZGUUyIQEUm5GjdYbGYrgB2/tThoAvy7wla1i445HXTM6VCZYz7Q3Uu9I7fGJYLKMLNpZY2a11Y65nTQMadDXMesriERkZRTIhARSbm0JYKhSQeQAB1zOuiY0yGWY07VGIGIiGwvbVcEIiJSghKBiEjK1cpEYGZdzGyOmc0zs+0qmppZjpk9FT3/jpm1SiDMKpXBMd9gZrPMbLqZ/cPMDkwizqpU0TEXa9fDzNzMavxUw0yO2cx6RX/XM83syWzHWNUy+Lfd0swmm9kH0b/vc5KIs6qY2QgzWx6t4Fja82ZmD0Z/HtPN7OhKf2hZixnX1B+gLvApcBCwC/AR0KFEm/7A4Ohxb+CppOPOwjGfAuwWPb46DccctWsATAHeBvKSjjsLf89tgQ+AvaLtfZKOOwvHPBS4OnrcAfgs6bgrecwnAkcDM8p4/hzgZcCAHwDvVPYza+MVwTHAPHef7+7fAmOBbiXadANGRY+fAU4zM8tijFWtwmN298nuvi7afJuwYlxNlsnfM8AdwB+B2lAHOZNjvhIY5O6rAdx9eZZjrGqZHLMDRavKNwKWZjG+KufuU4BV5TTpBjzmwdvAnma2f2U+szYmgmbA4mLbhdG+Utt4WEDnS6BxVqKLRybHXNzlhDOKmqzCY44umVu4+9+yGViMMvl7bge0M7O3zOxtM+uStejikckx3wZcbGaFwEvAL7ITWmJ29P97hbQeQcqY2cVAHnBS0rHEyczqAH8C+iQcSrbVI3QPnUy46ptiZoe7+5okg4rZhcBId7/PzDoDo83sMHffmnRgNUVtvCJYArQott082ldqGzOrR7icXJmV6OKRyTFjZqcDNwPnufvGLMUWl4qOuQFwGPC6mX1G6EudUMMHjDP5ey4EJrj7JndfAMwlJIaaKpNjvhwYB+Du/wJyCcXZaquM/r/viNqYCKYCbc2stZntQhgMnlCizQTgZ9HjnsBrHo3C1FAVHrOZHQUMISSBmt5vDBUcs7t/6e5N3L2Vu7cijIuc5+41eZ3TTP5tP0+4GsDMmhC6iuZnMcaqlskxLwJOAzCz9oREsCKrUWbXBODSaPbQD4Av3X1ZZd6w1nUNuftmM7sWmESYcTDC3Wea2e3ANHefADxCuHycRxiU6Z1cxJWX4THfA+wBPB2Niy9y9/MSC7qSMjzmWiXDY54EnGlms4AtwAB3r7FXuxke843AMDO7njBw3Kcmn9iZ2RhCMm8SjXvcCtQHcPfBhHGQc4B5wDqgb6U/swb/eYmISBWojV1DIiKyA5QIRERSTolARCTllAhERFJOiUBEJOWUCKRaMrMtZvZhsZ9W5bRdWwWfN9LMFkSf9X50h+qOvsdwM+sQPf6fEs/9X2VjjN6n6M9lhpm9YGZ7VtC+Y02vxinx0/RRqZbMbK2771HVbct5j5HAi+7+jJmdCdzr7kdU4v0qHVNF72tmo4C57v67ctr3IVRdvbaqY5HaQ1cEUiOY2R7ROgrvm9nHZrZdpVEz29/MphQ7Y/5htP9MM/tX9NqnzayiL+gpQJvotTdE7zXDzH4V7dvdzP5mZh9F+y+I9r9uZnlm9gdg1yiOJ6Ln1ka/x5rZj4rFPNLMeppZXTO7x8ymRjXmr8rgj+VfRMXGzOyY6Bg/MLP/M7NDojtxbwcuiGK5IIp9hJm9G7UtrWKrpE3Stbf1o5/Sfgh3xX4Y/TxHuAu+YfRcE8JdlUVXtGuj3zcCN0eP6xLqDTUhfLHvHu2/CbillM8bCfSMHp8PvAN0Aj4GdifclT0TOAroAQwr9tpG0e/XidY8KIqpWJuiGLsDo6LHuxCqSO4K9AN+E+3PAaYBrUuJc22x43s
a6BJtNwTqRY9PB56NHvcBHir2+ruAi6PHexJqEe2e9N+3fpL9qXUlJqTWWO/uHYs2zKw+cJeZnQhsJZwJ7wt8Xuw1U4ERUdvn3f1DMzuJsFjJW1FpjV0IZ9KlucfMfkOoU3M5oX7Nc+7+TRTDX4EfAhOB+8zsj4TupDd34LheBh4wsxygCzDF3ddH3VFHmFnPqF0jQrG4BSVev6uZfRgdfwHwSrH2o8ysLaHMQv0yPv9M4Dwz+69oOxdoGb2XpJQSgdQUPwWaAp3cfZOFiqK5xRu4+5QoUfwIGGlmfwJWA6+4+4UZfMYAd3+maMPMTiutkbvPtbDWwTnAnWb2D3e/PZODcPcNZvY6cBZwAWGhFQirTf3C3SdV8Bbr3b2jme1GqL9zDfAgYQGeye7ePRpYf72M1xvQw93nZBKvpIPGCKSmaAQsj5LAKcB2ay5bWIf5C3cfBgwnLPf3NnC8mRX1+e9uZu0y/Mw3gR+b2W5mtjuhW+dNMzsAWOfujxOK+ZW2Zuym6MqkNE8RCoUVXV1A+FK/uug1ZtYu+sxSeVht7pfAjfafUupFpYj7FGv6NaGLrMgk4BcWXR5ZqEorKadEIDXFE0CemX0MXArMLqXNycBHZvYB4Wz7AXdfQfhiHGNm0wndQodm8oHu/j5h7OBdwpjBcHf/ADgceDfqorkVuLOUlw8FphcNFpfwd8LCQK96WH4RQuKaBbxvYdHyIVRwxR7FMp2wMMvdwO+jYy/+uslAh6LBYsKVQ/0otpnRtqScpo+KiKScrghERFJOiUBEJOWUCEREUk6JQEQk5ZQIRERSTolARCTllAhERFLu/wEt+DXz07Zq7gAAAABJRU5ErkJggg==\n",
  638. "text/plain": [
  639. "<Figure size 432x288 with 1 Axes>"
  640. ]
  641. },
  642. "metadata": {
  643. "needs_background": "light"
  644. },
  645. "output_type": "display_data"
  646. }
  647. ],
  648. "source": [
  649. "plt.title('ROC')\n",
  650. "plt.plot(fpr, tpr, 'b', label = 'AUC = %0.2f' % roc_auc)\n",
  651. "plt.legend(loc = 'lower right')\n",
  652. "plt.ylabel('True Positive Rate')\n",
  653. "plt.xlabel('False Positive Rate')\n",
  654. "plt.show()"
  655. ]
  656. },
  657. {
  658. "cell_type": "markdown",
  659. "metadata": {},
  660. "source": [
  661. "## SK Example 2: Telemanom"
  662. ]
  663. },
  664. {
  665. "cell_type": "code",
  666. "execution_count": 30,
  667. "metadata": {},
  668. "outputs": [
  669. {
  670. "name": "stdout",
  671. "output_type": "stream",
  672. "text": [
  673. "125/125 [==============================] - 1s 8ms/step - loss: 0.0112 - val_loss: 0.0046\n"
  674. ]
  675. },
  676. {
  677. "name": "stderr",
  678. "output_type": "stream",
  679. "text": [
  680. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  681. " \"argument is ignored.\", FutureWarning)\n",
  682. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  683. " \"argument is ignored.\", FutureWarning)\n"
  684. ]
  685. }
  686. ],
  687. "source": [
  688. "transformer = TelemanomSKI(l_s= 2, n_predictions= 1)\n",
  689. "transformer.fit(X_train)\n",
  690. "prediction_labels_train = transformer.predict(X_train)\n",
  691. "prediction_labels_test = transformer.predict(X_test)\n",
  692. "prediction_score = transformer.predict_score(X_test)"
  693. ]
  694. },
  695. {
  696. "cell_type": "code",
  697. "execution_count": 31,
  698. "metadata": {},
  699. "outputs": [
  700. {
  701. "name": "stdout",
  702. "output_type": "stream",
  703. "text": [
  704. "Primitive: d3m.primitives.tods.detection_algorithm.telemanom(hyperparams=Hyperparams({'contamination': 0.1, 'window_size': 1, 'step_size': 1, 'return_subseq_inds': False, 'use_columns': (), 'exclude_columns': (), 'return_result': 'new', 'use_semantic_types': False, 'add_index_columns': False, 'error_on_no_input': True, 'return_semantic_type': 'https://metadata.datadrivendiscovery.org/types/Attribute', 'smoothing_perc': 0.05, 'window_size_': 100, 'error_buffer': 50, 'batch_size': 70, 'dropout': 0.3, 'validation_split': 0.2, 'optimizer': 'Adam', 'lstm_batch_size': 64, 'loss_metric': 'mean_squared_error', 'layers': [10, 10], 'epochs': 1, 'patience': 10, 'min_delta': 0.0003, 'l_s': 2, 'n_predictions': 1, 'p': 0.05}), random_seed=0)\n",
  705. "Prediction Labels\n",
  706. " [[1]\n",
  707. " [1]\n",
  708. " [1]\n",
  709. " ...\n",
  710. " [1]\n",
  711. " [1]\n",
  712. " [1]]\n",
  713. "Prediction Score\n",
  714. " [[0.08822848]\n",
  715. " [0.07965706]\n",
  716. " [0.05999164]\n",
  717. " ...\n",
  718. " [0.05911084]\n",
  719. " [0.05963569]\n",
  720. " [0.06003137]]\n"
  721. ]
  722. }
  723. ],
  724. "source": [
  725. "print(\"Primitive: \", transformer.primitive)\n",
  726. "print(\"Prediction Labels\\n\", prediction_labels_test)\n",
  727. "print(\"Prediction Score\\n\", prediction_score)"
  728. ]
  729. },
  730. {
  731. "cell_type": "code",
  732. "execution_count": 32,
  733. "metadata": {},
  734. "outputs": [],
  735. "source": [
  736. "y_true = prediction_labels_train\n",
  737. "y_pred = prediction_labels_test\n",
  738. "precision, recall, thresholds = precision_recall_curve(y_true, y_pred)\n",
  739. "f1_scores = 2*recall*precision/(recall+precision)\n",
  740. "fpr, tpr, threshold = metrics.roc_curve(y_true, y_pred)\n",
  741. "roc_auc = metrics.auc(fpr, tpr)"
  742. ]
  743. },
  744. {
  745. "cell_type": "code",
  746. "execution_count": 33,
  747. "metadata": {},
  748. "outputs": [
  749. {
  750. "name": "stdout",
  751. "output_type": "stream",
  752. "text": [
  753. "Accuracy Score: 0.1839551865559668\n"
  754. ]
  755. }
  756. ],
  757. "source": [
  758. "print('Accuracy Score: ', accuracy_score(y_true, y_pred))"
  759. ]
  760. },
  761. {
  762. "cell_type": "code",
  763. "execution_count": 34,
  764. "metadata": {},
  765. "outputs": [
  766. {
  767. "data": {
  768. "text/plain": [
  769. "array([[ 990, 8007],\n",
  770. " [ 151, 849]])"
  771. ]
  772. },
  773. "execution_count": 34,
  774. "metadata": {},
  775. "output_type": "execute_result"
  776. }
  777. ],
  778. "source": [
  779. "confusion_matrix(y_true, y_pred)"
  780. ]
  781. },
  782. {
  783. "cell_type": "code",
  784. "execution_count": 35,
  785. "metadata": {},
  786. "outputs": [
  787. {
  788. "name": "stdout",
  789. "output_type": "stream",
  790. "text": [
  791. " precision recall f1-score support\n",
  792. "\n",
  793. " 0 0.87 0.11 0.20 8997\n",
  794. " 1 0.10 0.85 0.17 1000\n",
  795. "\n",
  796. " accuracy 0.18 9997\n",
  797. " macro avg 0.48 0.48 0.18 9997\n",
  798. "weighted avg 0.79 0.18 0.19 9997\n",
  799. "\n"
  800. ]
  801. }
  802. ],
  803. "source": [
  804. "print(classification_report(y_true, y_pred))"
  805. ]
  806. },
  807. {
  808. "cell_type": "code",
  809. "execution_count": 36,
  810. "metadata": {},
  811. "outputs": [
  812. {
  813. "name": "stdout",
  814. "output_type": "stream",
  815. "text": [
  816. "Best threshold: 0\n",
  817. "Best F1-Score: 0.18186778212239701\n"
  818. ]
  819. }
  820. ],
  821. "source": [
  822. "print('Best threshold: ', thresholds[np.argmax(f1_scores)])\n",
  823. "print('Best F1-Score: ', np.max(f1_scores))"
  824. ]
  825. },
  826. {
  827. "cell_type": "code",
  828. "execution_count": 37,
  829. "metadata": {
  830. "scrolled": true
  831. },
  832. "outputs": [
  833. {
  834. "data": {
  835. "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAsEElEQVR4nO3debyWc/7H8ddHSoRQGUuaMkXFJHV+yDYZJEn21DBkxoQ09maasU52DWMrKpJlnGRLTJu0GJJW7eGQ6hBRpCRaPr8/vlfmOJ3lPp1z3de5z/1+Ph7341zXdX/v+/5cJ+7P+e7m7oiISPbaLukAREQkWUoEIiJZTolARCTLKRGIiGQ5JQIRkSynRCAikuWUCEREspwSgUgJzOwTM/vezNaa2edmNsTMdi7w/JFmNt7M1pjZajN71cyaF3qPXc3sfjNbGr3PR9F53fTfkcjWlAhESnequ+8MtAQOBf4GYGZtgLHAK8A+QCNgNvC2me0flakBvAEcBLQHdgXaACuBw9J6FyLFMM0sFimemX0CXOzu46Lze4CD3P0UM/svMNfdexR6zSjgS3e/wMwuBm4HfuXua9McvkhKVCMQSZGZ1QdOBvLMbCfgSOD5IooOA06Mjk8ARisJSGWmRCBSuuFmtgZYBqwAbgb2IPz/s7yI8suBLe3/dYopI1JpKBGIlO50d98FaAs0JXzJfw1sBvYuovzewFfR8cpiyohUGkoEIily90nAEOCf7v4d8A5wThFFOxM6iAHGASeZWa20BCmyDZQIRMrmfuBEMzsE6A1caGZXmNkuZra7md1GGBX0j6j804QmpRfNrKmZbWdmdczs72bWIZE7EClEiUCkDNz9S+Ap4CZ3fws4CTiT0A+whDC89Gh3/zAq/wOhw3gR8DrwLTCV0Lz0btpvQKQIGj4qIpLlVCMQEclySgQiIllOiUBEJMspEYiIZLntkw6grOrWresNGzZMOgwRkYwyY8aMr9y9XlHPZVwiaNiwIdOnT086DBGRjGJmS4p7Tk1DIiJZTolARCTLKRGIiGQ5JQIRkSynRCAikuViSwRmNtjMVpjZvGKeNzN70MzyzGyOmbWKKxYRESlenDWCIYTNuotzMtAkenQHHokxFhERKUZsicDd3wRWlVDkNOApD6YAu5mZdnISESlkwwa47jpYtiye90+yj2BfwoYdW+RH17ZiZt3NbLqZTf/yyy/TEpyISGWwdi2ceircey+MHBnPZ2REZ7G7D3T3HHfPqVevyBnSIiJVzhdfQNu2MG4cDBoEl1wSz+ckucTEp8B+Bc7rR9dERLLehx/CSSeFZPDKK3DKKfF9VpI1ghHABdHooSOA1e6+PMF4REQqhalT4cgjYc0amDAh3iQAMdYIzCwXaAvUNbN84GagOoC7PwqMBDoAecA64KK4YhERyRT/+Q907gx77QWjR0OTJvF/ZmyJwN27lvK8A5fH9fkiIpnmscfg0kuhZcuQEH7xi/R8bkZ0FouIVGXu8I9/wJ/+BCeeCBMnpi8JQAbuRyAiUpVs3AiXXRZqA926wcCBUL16emNQjUBEJCHffQdnnBGSwPXXw+DB6U8CoBqBiEgivvwyTBSbNg369w+1gqQoEYiIpNnHH0P79mHJiBdfhNNPTzYeJQIRkTSaMQM6dAh9A2+8EeYLJE19BCIiaTJmDPzmN7DjjvD225UjCYASgYhIWjz5JHTsCI0bw+TJ0LRp0hH9jxKBiEiM3OGOO8LQ0N/8Bt58E/bZJ+mofk6JQEQkJps2Qc+eYWjoeeeFZaR33TXpqLamRCAiEoPvv4ezzw5DQ//yF3jqKahRI+moiqZRQyIiFWzVqjBH4J134IEH4Iorko6oZEoEIiIVaMmSMEdg8WIYNizUCio7JQIRkQry3nthjsD338PYsXDssUlHlBr1EYiIVIA33ghf/NWqwVtvZU4SACUCEZFye/ZZOPlk+OUvQ7/AQQclHVHZKBGIiGw
jd/jnP8PQ0COPhP/+F+rXTzqqslMiEBHZBps3w9VXQ69eYWvJMWNgt92SjmrbKBGIiJTR+vXQpUsYGnrVVZCbCzvskHRU206jhkREyuCbb8Ky0ZMmhWaha69NOqLyUyIQEUnRsmWhU/iDD0IHcdeuSUdUMZQIRERSMG9emCi2Zg2MHg2//W3SEVUc9RGIiJRi0iQ4+ujQQfzmm1UrCYASgYhIsTZvhkGDoF27sHT0O+/AIYckHVXFUyIQESnC/PnQti107x7mCLz1VpgwVhUpEYiIFLBuHfz979CyZUgGjz0Wlo/YY4+kI4uPOotFRCKjRsHll4eVQy+8EPr2hXr1ko4qfqoRiEjW++wzOOecsHJojRowYQIMGZIdSQCUCEQki23aBA89FDaSf/VVuPVWmD079A1kEzUNiUhWmjEDLrkk/GzXDvr1g8aNk44qGaoRiEhW+fZbuPJKOOwwyM8P6wSNHp29SQBiTgRm1t7M3jezPDPrXcTzDcxsgpnNMrM5ZtYhznhEJHu5wwsvQLNmoTno0kth0aKweJxZ0tElK7ZEYGbVgH7AyUBzoKuZNS9U7AZgmLsfCnQB+scVj4hkr8WLoWPH0CG8555hYli/fpm7bHRFi7NGcBiQ5+4fu/uPwFDgtEJlHNg1Oq4NfBZjPCKSZX78Ee66K+wYNmkS3HcfTJsGhx+edGSVS5ydxfsCywqc5wOFf/23AGPN7M9ALeCEot7IzLoD3QEaNGhQ4YGKSNXz1luh+Wf+fDjjjLB3wH77JR1V5ZR0Z3FXYIi71wc6AE+b2VYxuftAd89x95x62TKwV0S2ycqVcPHFcMwxYaXQESPgpZeUBEoSZyL4FCj4q68fXSvoj8AwAHd/B6gJ1I0xJhGpotzhySfDnIAhQ8IWkgsWwKmnJh1Z5RdnIpgGNDGzRmZWg9AZPKJQmaXA8QBm1oyQCL6MMSYRqYIWLoTjjoNu3aBJE5g5E+65B2rVSjqyzBBbInD3jUBPYAywkDA6aL6Z9TGzTlGxa4E/mdlsIBfo5u4eV0wiUrV8/z3ccENYGnr2bBgwIPQNtGiRdGSZJdaZxe4+EhhZ6NpNBY4XAEfFGYOIVE1jx0KPHvDRR3D++XDvvWFoqJRd0p3FIiJlsnx5mAR20kmw3XYwbhw8/bSSQHkoEYhIRti0Cfr3D53BL78Mt9wCc+bA8ccnHVnm06JzIlLpzZoV5gRMnRq++Pv3hwMOSDqqqkM1AhGptNasgauvhpwc+OQTeOYZeP11JYGKphqBiFQ67jB8OFxxRVgh9JJL4M47Yffdk46salKNQEQqlSVLoFMnOPPMsE/w5Mnw6KNKAnFSIhCRSmHDhrBHcPPmMH58OJ4+Hdq0STqyqk9NQyKSuMmTQ2fw3LmhNvDQQ6D1JdNHNQIRScyqVdC9Oxx1FHzzTegXeOUVJYF0UyIQkbRzD5PAmjaFwYPhmmvCAnGnFd6xRNIi5aYhM9vJ3dfFGYyIVH3vvx+Whhg/PmwQM3YstGyZdFTZrdQagZkdaWYLgEXR+SFmpi0lRaRM1q+Hm28OC8LNmBEmhb39tpJAZZBKjeBfwElES0i7+2wzOzbWqESkShk3LtQCPvwQunYNW0butVfSUckWKfURuPuyQpc2xRCLiFQxX3wB550HJ54ImzfDmDHw7LNKApVNKolgmZkdCbiZVTez6wj7C4iIFGnz5jAJ7MAD4fnn4cYbw9DQdu2SjkyKkkrT0KXAA4TN6D8FxgI94gxKRDLX7NlhTsCUKdC2LTzySBgdJJVXKjWCA939PHf/hbvv6e7nA83iDkxEMsvatXDdddC6NeTlwVNPhZFBSgKVXyqJ4KEUr4lIlhoxIiwNce+9cNFFYYjo738PZklHJqkotmnIzNoARwL1zOyaAk/tClSLOzARqfyWLg0rhL7yChx8MOTmhlnCkllKqhHUAHYmJItdCjy+Bc6OPzQRqaw2bgx//TdvHiaE3X03zJypJJCpiq0RuPskYJK
ZDXH3JWmMSUQqsSlTQmfw7Nlwyinw8MPQsGHSUUl5pDJqaJ2Z9QUOAmpuuejuv40tKhGpdL75Bv72NxgwAPbZB158Ec44Q/0AVUEqncX/Jiwv0Qj4B/AJMC3GmESkEnEPbf9Nm8LAgaFPYOHCsHGMkkDVkEoiqOPujwMb3H2Su/8BUG1AJAt8+CGcdBL87new334wbRrcfz/sskvSkUlFSiURbIh+LjezU8zsUGCPGGMSkYT98AP06QO//jW8+27oB5gyBVq1SjoyiUMqfQS3mVlt4FrC/IFdgaviDEpEkjNhQugM/uADOPfcsEDcPvskHZXEqdQagbu/5u6r3X2eux/n7q2BVWmITUTSaMUKuOAC+O1vw/DQUaNg6FAlgWxQbCIws2pm1tXMrjOzg6NrHc1sMvBw2iIUkVht3gyDBoXO4KFD4frrYd48aN8+6cgkXUpqGnoc2A+YCjxoZp8BOUBvdx+ehthEJGZz54ZmoMmT4dhjwwJxzZsnHZWkW0mJIAdo4e6bzawm8DnwK3dfmZ7QRCQu330XOoPvuw9q14YnnoALL9Rw0GxVUh/Bj+6+GcDd1wMflzUJmFl7M3vfzPLMrHcxZTqb2QIzm29mz5bl/UWk7F57DQ46CO65J/QJLFoE3bopCWSzkmoETc1sTnRswK+icwPc3VuU9MZmVg3oB5wI5APTzGyEuy8oUKYJ8DfgKHf/2sz2LMe9iEgJ8vPhyivhpZegWTOYNCk0B4mUlAjKu+fAYUCeu38MYGZDgdOABQXK/Ano5+5fA7j7inJ+pogUsnFjmAdw443h+I474NproUaNpCOTyqKkRefKu9DcvkDBvY7zgcMLlTkAwMzeJixtfYu7jy78RmbWHegO0KBBg3KGJZI9pk4NncGzZoVRQP36wf77Jx2VVDYpbV4fo+2BJkBboCswyMx2K1zI3Qe6e46759SrVy+9EYpkoNWroWdPOOII+PxzGDYMRo5UEpCixZkIPiUMP92ifnStoHxghLtvcPfFwAeExCAi28AdnnsuzAno3z8kg0WL4Jxz1BksxUspEZjZjmZ2YBnfexrQxMwamVkNoAswolCZ4YTaAGZWl9BU9HEZP0dEgI8+gpNPhi5dwmzgqVPhwQdh112Tjkwqu1ITgZmdCrwHjI7OW5pZ4S/0rbj7RqAnMAZYCAxz9/lm1sfMOkXFxgArzWwBMAHopXkKImXz449w++1hq8jJk+GBB0ISyMlJOjLJFObuJRcwm0FYdnqiux8aXZvr7r9OQ3xbycnJ8enTpyfx0SKVzqRJcNllYX+As84KSWDffZOOSiojM5vh7kX+eZDSMtTuvrrQtZKzh4jE6quv4KKLoG1b+P77MEnshReUBGTbpJII5pvZ74BqZtbEzB4CJsccl4gUYfNmGDwYDjwQnnkGeveG+fPD3sEi2yqVRPBnwn7FPwDPAqvRfgQiaTd/fqgB/PGPYWbwrFlw552w005JRyaZLpWNaZq6+/XA9XEHIyJbW7cObrsN+vYNI4Aeeyw0C22X9CwgqTJSSQT3mtlewAvAc+4+L+aYRCQyahRcfjksXhxWB+3bFzSnUipaKjuUHQccB3wJDDCzuWZ2Q+yRiWSxzz6Dzp2hQ4ewJtCECTBkiJKAxCOlyqW7f+7uDwKXEuYU3BRnUCLZatMmeOihMDN4xAi49VaYPTv0DYjEpdSmITNrBpwLnAWsBJ4jbGQvIhVoxgy45JLws127sEBc48ZJRyXZIJU+gsGEL/+T3P2zmOMRyTrffhuWiH744dD0k5sL556rtYEkfUpNBO7eJh2BiGQbd3jxxbBZzPLlYYbw7bfDbrslHZlkm2ITgZkNc/fOZjaXn88kTmmHMhEp3uLFYWXQkSOhZcuwa9jhhXfrEEmTkmoEV0Y/O6YjEJFs8OOPYcP4Pn3CPID77oM//xm2T6WRViQmxY4acvfl0WEPd19S8AH0SE94IlXHW29Bq1bwt7+F3cIWLoSrr1YSkOSlMnz
0xCKunVzRgYhUVStXwsUXwzHHwJo1YVjoSy/BfvuV/lqRdCipj+Aywl/++5vZnAJP7QK8HXdgIpnOHZ56Cq67Dr7+Gnr1gptvhlq1ko5M5OdKqpQ+C4wC7gR6F7i+xt1XxRqVSIZbuDCMApo0Cdq0gUcfhRYaXiGVVElNQ+7unwCXA2sKPDCzPeIPTSTzfP893HADHHJImBE8YEDoG1ASkMqstBpBR2AGYfhowektDuwfY1wiGWfsWOjRI+wdfP75cO+9sOeeSUclUrpiE4G7d4x+NkpfOCKZZ/nyMPrnueegSRMYNw6OPz7pqERSl8rm9UeZWa3o+Hwzu8/MGsQfmkjltmkT9O8fFoh7+WW45RaYM0dJQDJPKsNHHwHWmdkhhMXmPgKejjUqkUpu1iw48siwV8D//R/MnRtGBNWsmXRkImWXSiLY6O4OnAY87O79CENIRbLOmjVwzTWQkwOffBL2DX79dTjggKQjE9l2qcxpXGNmfwN+DxxjZtsB1eMNS6RycYfhw+GKKyA/PywXfeedsPvuSUcmUn6p1AjOJWxc/wd3/xyoD/SNNSqRSmTJEujUCc48E/bYAyZPDvMClASkqkhlq8rPgX8Dtc2sI7De3Z+KPTKRhG3YEPYIbt4cxo8Px9OnhwliIlVJKqOGOgNTgXOAzsC7ZnZ23IGJJGnyZGjdGv7yFzjhhDBT+LrroLoaRaUKSqWP4Hrg/9x9BYCZ1QPGAS/EGZhIElatCquDDhwYFoUbPhxOOy3pqETilUofwXZbkkBkZYqvE8kY7vD002FOwOOPh5FBCxYoCUh2SKVGMNrMxgC50fm5wMj4QhJJr/ffD0tDjB8fdgkbOzbsGiaSLVLZs7iXmZ0JHB1dGujuL8cblkj81q8PQ0Dvugt23DHMEu7eHapVSzoykfQqaT+CJsA/gV8Bc4Hr3P3TdAUmEqdx40It4MMPoWvXsGXkXnslHZVIMkpq6x8MvAacRViB9KGyvrmZtTez980sz8x6l1DuLDNzM8sp62eIlMUXX8B558GJJ4Z+gbFj4dlnlQQku5XUNLSLuw+Kjt83s5lleWMzqwb0I2x1mQ9MM7MR7r6gULldgCuBd8vy/iJlsXlzGAnUu3fYM+Cmm8LoIK0NJFJyIqhpZofyv30Idix47u6lJYbDgDx3/xjAzIYS1itaUKjcrcDdQK8yxi6Sktmz4dJLYcoUaNsWHnkkjA4SkaCkRLAcuK/A+ecFzh34bSnvvS+wrMB5PnB4wQJm1grYz93/Y2bFJgIz6w50B2jQQCtgS2rWrg1LQ99/f1gO4qmnwoYxZqW9UiS7lLQxzXFxfnC0eN19QLfSyrr7QGAgQE5OjscZl1QNI0ZAz56wbBlcfDHcfXdYJ0hEthbnxLBPgf0KnNePrm2xC3AwMNHMPgGOAEaow1jKY+lSOP30MBGsdu2wX/CgQUoCIiWJMxFMA5qYWSMzqwF0AUZsedLdV7t7XXdv6O4NgSlAJ3efHmNMUkVt3Bj2CG7ePIwEuvtumDkTjjoq6chEKr9UZhZvE3ffaGY9gTFANWCwu883sz7AdHcfUfI7iKRmypTQGTx7NpxyCjz8MDRsmHRUIpmj1ERgZgacB+zv7n2i/Yr3cveppb3W3UdSaDkKd7+pmLJtU4pYJPLNN2EI6IABsM8+8OKLcMYZ6gwWKatUmob6A22ArtH5GsL8AJFEuENubhgCOnBg2DVs4cKwcYySgEjZpdI0dLi7tzKzWQDu/nXU5i+Sdnl5YWmI118P+waPHAmtWiUdlUhmS6VGsCGaJezw034Em2ONSqSQH36APn3g4IPh3XdDP8CUKUoCIhUhlRrBg8DLwJ5mdjtwNnBDrFGJFDBhQugM/uADOPfcsEDcPvskHZVI1ZHKMtT/NrMZwPGE5SVOd/eFsUcmWW/FirA95NNPw/77w6hR0L590lGJVD2pjBpqAKwDXi14zd2XxhmYZK/Nm8MuYX/9a1gm4vrrw2PHHZOOTKR
qSqVp6D+E/gEDagKNgPeBg2KMS7LUvHmhGejtt+HYY8MCcc2bJx2VSNWWStPQrwueRwvF9YgtIslK330XOoPvuy8sDfHEE3DhhRoOKpIOZZ5Z7O4zzezw0kuKpOa118ICcUuWwB/+EJaHqFs36ahEskcqfQTXFDjdDmgFfBZbRJI18vPhyivhpZegWTOYNCk0B4lIeqUyj2CXAo8dCH0Gp8UZlFRtGzeGPQKaNQsTwu64A957T0lAJCkl1giiiWS7uPt1aYpHqrhp0+CSS2DWrDAUtF+/MDRURJJTbI3AzLZ3902AFvKVclu9OvQDHH44fP45DBsWagNKAiLJK6lGMJXQH/CemY0Ange+2/Kku78Uc2xSBbiHL/2rroIvvgjJ4LbbYNddk45MRLZIZdRQTWAlYY/iLfMJHFAikBJ99BFcfjmMGRPWBHr11bBQnIhULiUlgj2jEUPz+F8C2EL7BkuxfvwR+vYNf/lXrw4PPBASQrVqSUcmIkUpKRFUA3bm5wlgCyUCKdKkSXDZZWF/gLPOCklg332TjkpESlJSIlju7n3SFolktK++gl69YMiQsE3ka6+FbSNFpPIraR6BJvdLqTZvhsGD4cAD4ZlnoHdvmD9fSUAkk5RUIzg+bVFIRpo/PzQD/fe/cNRR8OijYeMYEcksxdYI3H1VOgORzLFuHfz979CyZUgGjz0Gb76pJCCSqcq86Jxkt1GjwgigxYvD6qB9+0K9eklHJSLlkcpaQyJ89hl07gwdOkCNGmH7yCFDlAREqgIlAinRpk3w0EPQtCmMGAG33gqzZ0PbtklHJiIVRU1DUqwZM8ICcTNmQLt2YYG4xo2TjkpEKppqBLKVb78N+wQcdljYMyA3F0aPVhIQqapUI5CfuMOLL4YksHx5GBp6++2w225JRyYicVKNQIAwCqhjRzjnHNhzT3jnndAUpCQgUvUpEWS5H3+Eu+6Cgw4K6wTdd1/YPOZw7UotkjXUNJTF3noLLr00TAo744ywQNx++yUdlYikW6w1AjNrb2bvm1memfUu4vlrzGyBmc0xszfM7JdxxiPBypVw8cVwzDGwZk0YFvrSS0oCItkqtkQQ7XfcDzgZaA50NbPmhYrNAnLcvQXwAnBPXPFI6Ax+8skwJ2DIkLBa6IIFcOqpSUcmIkmKs0ZwGJDn7h+7+4/AUOC0ggXcfYK7r4tOpwD1Y4wnqy1aBMcdB926QZMmMHMm3HMP1KqVdGQikrQ4E8G+wLIC5/nRteL8ERhV1BNm1t3MppvZ9C+//LICQ6z6vv8ebrwRWrQIM4IHDgx9Ay1aJB2ZiFQWlaKz2MzOB3KA3xT1vLsPBAYC5OTkaHe0FI0dCz16hL2Dzz8f7r03DA0VESkozhrBp0DB7sf60bWfMbMTgOuBTu7+Q4zxZI3ly6FLFzjpJNhuOxg3Dp5+WklARIoWZyKYBjQxs0ZmVgPoAowoWMDMDgUGEJLAihhjyQqbNkH//qEz+OWX4ZZbYM4cOF5bDIlICWJrGnL3jWbWExgDVAMGu/t8M+sDTHf3EUBfYGfgeTMDWOruneKKqSqbNSvMCZg6NXzx9+8PBxyQdFQikgli7SNw95HAyELXbipwfEKcn58N1qyBm28Ok8Hq1g37Bv/ud2DacVpEUlQpOoul7Nxh+HC44oqwQugll8Cdd8LuuycdmYhkGq01lIGWLIFOneDMM2GPPWDy5LBxvJKAiGwLJYIMsmFD2CO4eXMYPz4cT58ObdokHZmIZDI1DWWIyZNDZ/DcuaE28NBD0KBB0lGJSFWgGkElt2pVaP8/6ij45pvQL/DKK0oCIlJxlAgqKfcwCaxpU3j8cbjmmrBA3Gmnlf5aEZGyUNNQJfT++2FpiPHjwwYxY8dCy5ZJRyUiVZVqBJXI+vVhTkCLFjBjRpgU9vbbSgIiEi/VCCqJceNCLeDDD6Fr17Bl5F57JR2ViGQD1QgS9sUXYWXQE08M/QJjx8KzzyoJiEj6KBEkZPNmGDA
gdAY//zzcdFMYGnriiUlHJiLZRk1DCZg9O8wJmDIF2raFRx4JCUFEJAmqEaTR2rVw3XXQujXk5cFTT4WRQUoCIpIk1QjSZMQI6NkTli2Diy+Gu+8O6wSJiCRNNYKYLVsGp58eJoLVrh32Cx40SElARCoPJYKYbNwYhoA2axZGAt19N8ycGZaKEBGpTNQ0FIMpU0Jn8OzZcMop8PDD0LBh0lGJiBRNNYIK9M03cNllcOSR8NVX8OKL8OqrSgIiUrkpEVQAd8jNDaN/Bg4Mu4YtXBg2jtGWkSJS2alpqJzy8sLSEK+/Djk5MHIktGqVdFQiIqlTjWAb/fAD9OkDBx8M774b+gGmTFESEJHMoxrBNpgwIXQGf/ABnHtuGB20zz5JRyVS+WzYsIH8/HzWr1+fdChZo2bNmtSvX5/q1aun/BolgjJYsSLMDH76adh/fxg1Ctq3TzoqkcorPz+fXXbZhYYNG2LqMIudu7Ny5Ury8/Np1KhRyq9T01AKNm8Ok8CaNoWhQ+H662HePCUBkdKsX7+eOnXqKAmkiZlRp06dMtfAVCMoxbx5oRno7bfh2GPDAnHNmycdlUjmUBJIr235fatGUIzvvoO//hUOPRQWLYInnoCJE5UERKTqUSIowmuvwUEHwT33wAUXhETQrZvmBIhkquHDh2NmLFq06KdrEydOpGPHjj8r161bN1544QUgdHT37t2bJk2a0KpVK9q0acOoUaPKHcudd95J48aNOfDAAxkzZkyJZa+44gp23nnnn86XLl3Kcccdx6GHHkqLFi0YOXJkueMBJYKfyc+Hs86CU0+FnXaCSZPg8cehbt2kIxOR8sjNzeXoo48mNzc35dfceOONLF++nHnz5jFz5kyGDx/OmjVryhXHggULGDp0KPPnz2f06NH06NGDTZs2FVl2+vTpfP311z+7dtttt9G5c2dmzZrF0KFD6dGjR7ni2UJ9BIQF4h5+GG68MRzfcQdcey3UqJF0ZCJVx1VXwXvvVex7tmwJ999fcpm1a9fy1ltvMWHCBE499VT+8Y9/lPq+69atY9CgQSxevJgddtgBgF/84hd07ty5XPG+8sordOnShR122IFGjRrRuHFjpk6dSps2bX5WbtOmTfTq1Ytnn32Wl19++afrZsa3334LwOrVq9mngsatZ30imDYNLrkEZs0Ko4D69QtDQ0WkanjllVdo3749BxxwAHXq1GHGjBm0bt26xNfk5eXRoEEDdt1111Lf/+qrr2bChAlbXe/SpQu9e/f+2bVPP/2UI4444qfz+vXr8+mnn2712ocffphOnTqx9957/+z6LbfcQrt27XjooYf47rvvGDduXKnxpSJrE8Hq1WEYaP/+YaP4YcPg7LPVDyASl9L+co9Lbm4uV155JRC+nHNzc2ndunWxo2vKOurmX//6V7ljLOizzz7j+eefZ+LEiVs9l5ubS7du3bj22mt55513+P3vf8+8efPYbrvytfLHmgjMrD3wAFANeMzd7yr0/A7AU0BrYCVwrrt/EmdM7uFL/6qr4Isvwq5ht90GKSR+Eckwq1atYvz48cydOxczY9OmTZgZffv2pU6dOlu1wa9atYq6devSuHFjli5dyrfffltqraAsNYJ9992XZcuW/XSen5/Pvvvu+7Mys2bNIi8vj8aNGwOhmapx48bk5eXx+OOPM3r0aADatGnD+vXr+eqrr9hzzz1T/6UUxd1jeRC+/D8C9gdqALOB5oXK9AAejY67AM+V9r6tW7f2bZWX537SSe7g3qqV+7Rp2/xWIpKCBQsWJPr5AwYM8O7du//s2rHHHuuTJk3y9evXe8OGDX+K8ZNPPvEGDRr4N9984+7uvXr18m7duvkPP/zg7u4rVqzwYcOGlSueefPmeYsWLXz9+vX+8ccfe6NGjXzjxo0lvqZWrVo/Hbdv396feOIJdw+/27333ts3b9681WuK+r0D072Y79U4Rw0dBuS5+8fu/iMwFDitUJnTgCej4xeA4y2
m2SeDB4cF4iZPhgcegKlTw2qhIlJ15ebmcsYZZ/zs2llnnUVubi477LADzzzzDBdddBEtW7bk7LPP5rHHHqN27dpAGKFTr149mjdvzsEHH0zHjh1T6jMoyUEHHUTnzp1p3rw57du3p1+/flSrVg2ADh068Nlnn5X4+nvvvZdBgwZxyCGH0LVrV4YMGVIhE/YsJIqKZ2ZnA+3d/eLo/PfA4e7es0CZeVGZ/Oj8o6jMV4XeqzvQHaBBgwatlyxZUuZ43norJID774dCNTERicnChQtp1qxZ0mFknaJ+72Y2w92L/PM3IzqL3X0gMBAgJydnmzLX0UeHh4iI/FycTUOfAvsVOK8fXSuyjJltD9QmdBqLiEiaxJkIpgFNzKyRmdUgdAaPKFRmBHBhdHw2MN7jaqsSkUTof+n02pbfd2yJwN03Aj2BMcBCYJi7zzezPmbWKSr2OFDHzPKAa4DeRb+biGSimjVrsnLlSiWDNPFoP4KaNWuW6XWxdRbHJScnx6dPn550GCKSAu1Qln7F7VCW8Z3FIpKZqlevXqadsiQZWn1URCTLKRGIiGQ5JQIRkSyXcZ3FZvYlUPapxUFd4KtSS1UtuufsoHvODuW551+6e72insi4RFAeZja9uF7zqkr3nB10z9khrntW05CISJZTIhARyXLZlggGJh1AAnTP2UH3nB1iuees6iMQEZGtZVuNQEREClEiEBHJclUyEZhZezN738zyzGyrFU3NbAczey56/l0za5hAmBUqhXu+xswWmNkcM3vDzH6ZRJwVqbR7LlDuLDNzM8v4oYap3LOZdY7+reeb2bPpjrGipfDfdgMzm2Bms6L/vjskEWdFMbPBZrYi2sGxqOfNzB6Mfh9zzKxVuT+0uM2MM/UBVAM+AvYHagCzgeaFyvQAHo2OuwDPJR13Gu75OGCn6PiybLjnqNwuwJvAFCAn6bjT8O/cBJgF7B6d75l03Gm454HAZdFxc+CTpOMu5z0fC7QC5hXzfAdgFGDAEcC75f3MqlgjOAzIc/eP3f1HYChwWqEypwFPRscvAMdbRewAnZxS79ndJ7j7uuh0CmHHuEyWyr8zwK3A3UBVWAc5lXv+E9DP3b8GcPcVaY6xoqVyzw5s2VW+NlDyDvCVnLu/CawqochpwFMeTAF2M7O9y/OZVTER7AssK3CeH10rsoyHDXRWA3XSEl08Urnngv5I+Isik5V6z1GVeT93/086A4tRKv/OBwAHmNnbZjbFzNqnLbp4pHLPtwDnm1k+MBL4c3pCS0xZ/38vlfYjyDJmdj6QA/wm6VjiZGbbAfcB3RIOJd22JzQPtSXU+t40s1+7+zdJBhWzrsAQd7/XzNoAT5vZwe6+OenAMkVVrBF8CuxX4Lx+dK3IMma2PaE6uTIt0cUjlXvGzE4Argc6ufsPaYotLqXd8y7AwcBEM/uE0JY6IsM7jFP5d84HRrj7BndfDHxASAyZKpV7/iMwDMDd3wFqEhZnq6pS+v+9LKpiIpgGNDGzRmZWg9AZPKJQmRHAhdHx2cB4j3phMlSp92xmhwIDCEkg09uNoZR7dvfV7l7X3Ru6e0NCv0gnd8/kfU5T+W97OKE2gJnVJTQVfZzGGCtaKve8FDgewMyaERLBl2mNMr1GABdEo4eOAFa7+/LyvGGVaxpy941m1hMYQxhxMNjd55tZH2C6u48AHidUH/MInTJdkou4/FK8577AzsDzUb/4UnfvlFjQ5ZTiPVcpKd7zGKCdmS0ANgG93D1ja7sp3vO1wCAzu5rQcdwtk/+wM7NcQjKvG/V73AxUB3D3Rwn9IB2APGAdcFG5PzODf18iIlIBqmLTkIiIlIESgYhIllMiEBHJckoEIiJZTolARCTLKRFIpWRmm8zsvQKPhiWUXVsBnzfEzBZHnzUzmqFa1vd4zMyaR8d/L/Tc5PLGGL3Plt/LPDN71cx2K6V8y0xfjVPip+GjUimZ2Vp337miy5bwHkOA19z9BTNrB/zT3VuU4/3
KHVNp72tmTwIfuPvtJZTvRlh1tWdFxyJVh2oEkhHMbOdoH4WZZjbXzLZaadTM9jazNwv8xXxMdL2dmb0TvfZ5MyvtC/pNoHH02mui95pnZldF12qZ2X/MbHZ0/dzo+kQzyzGzu4Adozj+HT23Nvo51MxOKRDzEDM728yqmVlfM5sWrTF/SQq/lneIFhszs8Oie5xlZpPN7MBoJm4f4NwolnOj2Aeb2dSobFErtkq2SXrtbT30KOpBmBX7XvR4mTALftfoubqEWZVbarRro5/XAtdHx9UI6w3VJXyx14qu/xW4qYjPGwKcHR2fA7wLtAbmArUIs7LnA4cCZwGDCry2dvRzItGeB1tiKlBmS4xnAE9GxzUIq0juCHQHboiu7wBMBxoVEefaAvf3PNA+Ot8V2D46PgF4MTruBjxc4PV3AOdHx7sR1iKqlfS/tx7JPqrcEhNSZXzv7i23nJhZdeAOMzsW2Ez4S/gXwOcFXjMNGByVHe7u75nZbwiblbwdLa1Rg/CXdFH6mtkNhHVq/khYv+Zld/8uiuEl4BhgNHCvmd1NaE76bxnuaxTwgJntALQH3nT376PmqBZmdnZUrjZhsbjFhV6/o5m9F93/QuD1AuWfNLMmhGUWqhfz+e2ATmZ2XXReE2gQvZdkKSUCyRTnAfWA1u6+wcKKojULFnD3N6NEcQowxMzuA74GXnf3ril8Ri93f2HLiZkdX1Qhd//Awl4HHYDbzOwNd++Tyk24+3ozmwicBJxL2GgFwm5Tf3b3MaW8xffu3tLMdiKsv3M58CBhA54J7n5G1LE+sZjXG3CWu7+fSrySHdRHIJmiNrAiSgLHAVvtuWxhH+Yv3H0Q8Bhhu78pwFFmtqXNv5aZHZDiZ/4XON3MdjKzWoRmnf+a2T7AOnd/hrCYX1F7xm6IaiZFeY6wUNiW2gWEL/XLtrzGzA6IPrNIHnabuwK41v63lPqWpYi7FSi6htBEtsUY4M8WVY8srEorWU6JQDLFv4EcM5sLXAAsKqJMW2C2mc0i/LX9gLt/SfhizDWzOYRmoaapfKC7zyT0HUwl9Bk85u6zgF8DU6MmmpuB24p4+UBgzpbO4kLGEjYGGudh+0UIiWsBMNPCpuUDKKXGHsUyh7Axyz3AndG9F3zdBKD5ls5iQs2hehTb/OhcspyGj4qIZDnVCEREspwSgYhIllMiEBHJckoEIiJZTolARCTLKRGIiGQ5JQIRkSz3/5eisarLeuiQAAAAAElFTkSuQmCC\n",
  836. "text/plain": [
  837. "<Figure size 432x288 with 1 Axes>"
  838. ]
  839. },
  840. "metadata": {
  841. "needs_background": "light"
  842. },
  843. "output_type": "display_data"
  844. }
  845. ],
  846. "source": [
  847. "plt.title('ROC')\n",
  848. "plt.plot(fpr, tpr, 'b', label = 'AUC = %0.2f' % roc_auc)\n",
  849. "plt.legend(loc = 'lower right')\n",
  850. "plt.ylabel('True Positive Rate')\n",
  851. "plt.xlabel('False Positive Rate')\n",
  852. "plt.show()"
  853. ]
  854. },
  855. {
  856. "cell_type": "markdown",
  857. "metadata": {},
  858. "source": [
  859. "## Pipeline Example: AutoEncoder"
  860. ]
  861. },
  862. {
  863. "cell_type": "markdown",
  864. "metadata": {},
  865. "source": [
  866. "### Build Pipeline"
  867. ]
  868. },
  869. {
  870. "cell_type": "code",
  871. "execution_count": 38,
  872. "metadata": {},
  873. "outputs": [
  874. {
  875. "data": {
  876. "text/plain": [
  877. "'inputs.0'"
  878. ]
  879. },
  880. "execution_count": 38,
  881. "metadata": {},
  882. "output_type": "execute_result"
  883. }
  884. ],
  885. "source": [
  886. "# Creating pipeline\n",
  887. "pipeline_description = Pipeline()\n",
  888. "pipeline_description.add_input(name='inputs')"
  889. ]
  890. },
  891. {
  892. "cell_type": "code",
  893. "execution_count": 39,
  894. "metadata": {},
  895. "outputs": [
  896. {
  897. "name": "stderr",
  898. "output_type": "stream",
  899. "text": [
  900. "While loading primitive 'tods.data_processing.dataset_to_dataframe', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  901. "Attempting to load primitive 'tods.data_processing.dataset_to_dataframe' without checking requirements.\n"
  902. ]
  903. }
  904. ],
  905. "source": [
  906. "# Step 0: dataset_to_dataframe\n",
  907. "step_0 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.dataset_to_dataframe'))\n",
  908. "step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='inputs.0')\n",
  909. "step_0.add_output('produce')\n",
  910. "pipeline_description.add_step(step_0)"
  911. ]
  912. },
  913. {
  914. "cell_type": "code",
  915. "execution_count": 41,
  916. "metadata": {},
  917. "outputs": [
  918. {
  919. "name": "stderr",
  920. "output_type": "stream",
  921. "text": [
  922. "While loading primitive 'tods.data_processing.column_parser', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  923. "Attempting to load primitive 'tods.data_processing.column_parser' without checking requirements.\n"
  924. ]
  925. }
  926. ],
  927. "source": [
  928. "# Step 1: column_parser\n",
  929. "step_1 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.column_parser'))\n",
  930. "step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')\n",
  931. "step_1.add_output('produce')\n",
  932. "pipeline_description.add_step(step_1)"
  933. ]
  934. },
  935. {
  936. "cell_type": "code",
  937. "execution_count": 42,
  938. "metadata": {},
  939. "outputs": [
  940. {
  941. "name": "stderr",
  942. "output_type": "stream",
  943. "text": [
  944. "While loading primitive 'tods.data_processing.extract_columns_by_semantic_types', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  945. "Attempting to load primitive 'tods.data_processing.extract_columns_by_semantic_types' without checking requirements.\n"
  946. ]
  947. }
  948. ],
  949. "source": [
  950. "# Step 2: extract_columns_by_semantic_types(attributes)\n",
  951. "step_2 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))\n",
  952. "step_2.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')\n",
  953. "step_2.add_output('produce')\n",
  954. "step_2.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,\n",
  955. "\t\t\t\t\t\t\t data=['https://metadata.datadrivendiscovery.org/types/Attribute'])\n",
  956. "pipeline_description.add_step(step_2)"
  957. ]
  958. },
  959. {
  960. "cell_type": "code",
  961. "execution_count": 43,
  962. "metadata": {},
  963. "outputs": [],
  964. "source": [
  965. "# Step 3: extract_columns_by_semantic_types(targets)\n",
  966. "step_3 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))\n",
  967. "step_3.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')\n",
  968. "step_3.add_output('produce')\n",
  969. "step_3.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,\n",
  970. "\t\t\t\t\t\t\tdata=['https://metadata.datadrivendiscovery.org/types/TrueTarget'])\n",
  971. "pipeline_description.add_step(step_3)"
  972. ]
  973. },
  974. {
  975. "cell_type": "code",
  976. "execution_count": 44,
  977. "metadata": {},
  978. "outputs": [],
  979. "source": [
  980. "attributes = 'steps.2.produce'\n",
  981. "targets = 'steps.3.produce'"
  982. ]
  983. },
  984. {
  985. "cell_type": "code",
  986. "execution_count": 45,
  987. "metadata": {},
  988. "outputs": [
  989. {
  990. "name": "stderr",
  991. "output_type": "stream",
  992. "text": [
  993. "While loading primitive 'tods.feature_analysis.statistical_maximum', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  994. "Attempting to load primitive 'tods.feature_analysis.statistical_maximum' without checking requirements.\n"
  995. ]
  996. }
  997. ],
  998. "source": [
  999. "# Step 4: processing\n",
  1000. "step_4 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.feature_analysis.statistical_maximum'))\n",
  1001. "step_4.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference=attributes)\n",
  1002. "step_4.add_output('produce')\n",
  1003. "pipeline_description.add_step(step_4)"
  1004. ]
  1005. },
  1006. {
  1007. "cell_type": "code",
  1008. "execution_count": 46,
  1009. "metadata": {},
  1010. "outputs": [
  1011. {
  1012. "name": "stderr",
  1013. "output_type": "stream",
  1014. "text": [
  1015. "While loading primitive 'tods.detection_algorithm.pyod_ae', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  1016. "Attempting to load primitive 'tods.detection_algorithm.pyod_ae' without checking requirements.\n"
  1017. ]
  1018. }
  1019. ],
  1020. "source": [
  1021. "# Step 5: algorithm\n",
  1022. "step_5 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.detection_algorithm.pyod_ae'))\n",
  1023. "step_5.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.4.produce')\n",
  1024. "step_5.add_output('produce')\n",
  1025. "pipeline_description.add_step(step_5)"
  1026. ]
  1027. },
  1028. {
  1029. "cell_type": "code",
  1030. "execution_count": 47,
  1031. "metadata": {},
  1032. "outputs": [
  1033. {
  1034. "name": "stderr",
  1035. "output_type": "stream",
  1036. "text": [
  1037. "While loading primitive 'tods.data_processing.construct_predictions', an error has been detected: (networkx 2.5 (/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages), Requirement.parse('networkx==2.4'), {'tamu-axolotl'})\n",
  1038. "Attempting to load primitive 'tods.data_processing.construct_predictions' without checking requirements.\n"
  1039. ]
  1040. }
  1041. ],
  1042. "source": [
  1043. "# Step 6: Predictions\n",
  1044. "step_6 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.construct_predictions'))\n",
  1045. "step_6.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.5.produce')\n",
  1046. "step_6.add_argument(name='reference', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')\n",
  1047. "step_6.add_output('produce')\n",
  1048. "pipeline_description.add_step(step_6)"
  1049. ]
  1050. },
  1051. {
  1052. "cell_type": "code",
  1053. "execution_count": 48,
  1054. "metadata": {},
  1055. "outputs": [
  1056. {
  1057. "data": {
  1058. "text/plain": [
  1059. "'outputs.0'"
  1060. ]
  1061. },
  1062. "execution_count": 48,
  1063. "metadata": {},
  1064. "output_type": "execute_result"
  1065. }
  1066. ],
  1067. "source": [
  1068. "# Final Output\n",
  1069. "pipeline_description.add_output(name='output predictions', data_reference='steps.6.produce')"
  1070. ]
  1071. },
  1072. {
  1073. "cell_type": "code",
  1074. "execution_count": 49,
  1075. "metadata": {},
  1076. "outputs": [
  1077. {
  1078. "name": "stdout",
  1079. "output_type": "stream",
  1080. "text": [
  1081. "{\"id\": \"5ea6f8e5-e938-43e3-9dd4-4c9451bb8821\", \"schema\": \"https://metadata.datadrivendiscovery.org/schemas/v0/pipeline.json\", \"created\": \"2021-04-14T16:15:48.973138Z\", \"inputs\": [{\"name\": \"inputs\"}], \"outputs\": [{\"data\": \"steps.6.produce\", \"name\": \"output predictions\"}], \"steps\": [{\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4b42ce1e-9b98-4a25-b68e-fad13311eb65\", \"version\": \"0.3.0\", \"python_path\": \"d3m.primitives.tods.data_processing.dataset_to_dataframe\", \"name\": \"Extract a DataFrame from a Dataset\", \"digest\": \"fb5cd27ebf69b9587b23940618071ba9ffe9f47ebd7772797d61ae0521f92515\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"inputs.0\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"d510cb7a-1782-4f51-b44c-58f0236e47c7\", \"version\": \"0.6.0\", \"python_path\": \"d3m.primitives.tods.data_processing.column_parser\", \"name\": \"Parses strings into their types\", \"digest\": \"62af3e97e2535681a0b1320e4ac97edeba15895862a46244ab079c47ce56958d\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.0.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4503a4c6-42f7-45a1-a1d4-ed69699cf5e1\", \"version\": \"0.4.0\", \"python_path\": \"d3m.primitives.tods.data_processing.extract_columns_by_semantic_types\", \"name\": \"Extracts columns by semantic type\", \"digest\": \"d4c8204514d840de1b5acad9831f9d5581b41f425df3d14051336abdeacdf1b2\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.1.produce\"}}, \"outputs\": [{\"id\": \"produce\"}], \"hyperparams\": {\"semantic_types\": {\"type\": \"VALUE\", \"data\": [\"https://metadata.datadrivendiscovery.org/types/Attribute\"]}}}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4503a4c6-42f7-45a1-a1d4-ed69699cf5e1\", \"version\": \"0.4.0\", \"python_path\": 
\"d3m.primitives.tods.data_processing.extract_columns_by_semantic_types\", \"name\": \"Extracts columns by semantic type\", \"digest\": \"d4c8204514d840de1b5acad9831f9d5581b41f425df3d14051336abdeacdf1b2\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.0.produce\"}}, \"outputs\": [{\"id\": \"produce\"}], \"hyperparams\": {\"semantic_types\": {\"type\": \"VALUE\", \"data\": [\"https://metadata.datadrivendiscovery.org/types/TrueTarget\"]}}}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"3b448057-ac26-4f1b-96b6-141782f16a54\", \"version\": \"0.1.0\", \"python_path\": \"d3m.primitives.tods.feature_analysis.statistical_maximum\", \"name\": \"Time Series Decompostional\", \"digest\": \"922b594bd6c0894d57f6ebf5a54ccae6d69dab67326bd591c8c25e3a3dea6781\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.2.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"67e7fcdf-d645-3417-9aa4-85cd369487d9\", \"version\": \"0.0.1\", \"python_path\": \"d3m.primitives.tods.detection_algorithm.pyod_ae\", \"name\": \"TODS.anomaly_detection_primitives.AutoEncoder\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.4.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"8d38b340-f83f-4877-baaa-162f8e551736\", \"version\": \"0.3.0\", \"python_path\": \"d3m.primitives.tods.data_processing.construct_predictions\", \"name\": \"Construct pipeline predictions output\", \"digest\": \"33d90bfb7f97f47a6de5372c5f912c26fca8da2d2777661651c69687ad6f9950\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.5.produce\"}, \"reference\": {\"type\": \"CONTAINER\", \"data\": \"steps.1.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}], \"digest\": \"fc87321fbbe0b4faa956958d39d41d2cafd02700a3ed7ba80b01e80cace8d07e\"}\n"
  1082. ]
  1083. }
  1084. ],
  1085. "source": [
  1086. "# Output to json\n",
  1087. "data = pipeline_description.to_json()\n",
  1088. "with open('autoencoder_pipeline.json', 'w') as f:\n",
  1089. " f.write(data)\n",
  1090. " print(data)"
  1091. ]
  1092. },
  1093. {
  1094. "cell_type": "markdown",
  1095. "metadata": {},
  1096. "source": [
  1097. "### Run Pipeline"
  1098. ]
  1099. },
  1100. {
  1101. "cell_type": "code",
  1102. "execution_count": 54,
  1103. "metadata": {},
  1104. "outputs": [],
  1105. "source": [
  1106. "this_path = os.path.dirname(os.path.abspath(\"__file__\"))\n",
  1107. "default_data_path = os.path.join(this_path, '../../datasets/anomaly/raw_data/yahoo_sub_5.csv')"
  1108. ]
  1109. },
  1110. {
  1111. "cell_type": "code",
  1112. "execution_count": 64,
  1113. "metadata": {},
  1114. "outputs": [
  1115. {
  1116. "data": {
  1117. "text/plain": [
  1118. "_StoreAction(option_strings=['--pipeline_path'], dest='pipeline_path', nargs=None, const=None, default='/Users/wangyanghe/Desktop/Research/Tods Notebook/src/tods/examples/sk_examples/autoencoder_pipeline.json', type=None, choices=None, help='Input the path of the pre-built pipeline description', metavar=None)"
  1119. ]
  1120. },
  1121. "execution_count": 64,
  1122. "metadata": {},
  1123. "output_type": "execute_result"
  1124. }
  1125. ],
  1126. "source": [
  1127. "parser = argparse.ArgumentParser(description='Arguments for running predefined pipelin.')\n",
  1128. "parser.add_argument('--table_path', type=str, default=default_data_path,\n",
  1129. " help='Input the path of the input data table')\n",
  1130. "parser.add_argument('--target_index', type=int, default=6,\n",
  1131. " help='Index of the ground truth (for evaluation)')\n",
  1132. "parser.add_argument('--metric',type=str, default='F1_MACRO',\n",
  1133. " help='Evaluation Metric (F1, F1_MACRO)')\n",
  1134. "parser.add_argument('--pipeline_path', \n",
  1135. " default=os.path.join(this_path, 'autoencoder_pipeline.json'),\n",
  1136. " help='Input the path of the pre-built pipeline description')\n"
  1137. ]
  1138. },
  1139. {
  1140. "cell_type": "code",
  1141. "execution_count": 65,
  1142. "metadata": {},
  1143. "outputs": [],
  1144. "source": [
  1145. "args, unknown = parser.parse_known_args()\n",
  1146. "table_path = args.table_path \n",
  1147. "target_index = args.target_index # what column is the target\n",
  1148. "pipeline_path = args.pipeline_path\n",
  1149. "metric = args.metric # F1 on both label 0 and 1"
  1150. ]
  1151. },
  1152. {
  1153. "cell_type": "code",
  1154. "execution_count": 66,
  1155. "metadata": {},
  1156. "outputs": [],
  1157. "source": [
  1158. "# Read data and generate dataset\n",
  1159. "df = pd.read_csv(table_path)\n",
  1160. "dataset = generate_dataset(df, target_index)"
  1161. ]
  1162. },
  1163. {
  1164. "cell_type": "code",
  1165. "execution_count": 67,
  1166. "metadata": {},
  1167. "outputs": [],
  1168. "source": [
  1169. "# Load the default pipeline\n",
  1170. "pipeline = load_pipeline(pipeline_path)"
  1171. ]
  1172. },
  1173. {
  1174. "cell_type": "code",
  1175. "execution_count": 68,
  1176. "metadata": {},
  1177. "outputs": [
  1178. {
  1179. "name": "stderr",
  1180. "output_type": "stream",
  1181. "text": [
  1182. "Not all provided hyper-parameters for the data preparation pipeline 79ce71bd-db96-494b-a455-14f2e2ac5040 were used: ['method', 'number_of_folds', 'randomSeed', 'shuffle', 'stratified']\n"
  1183. ]
  1184. },
  1185. {
  1186. "name": "stdout",
  1187. "output_type": "stream",
  1188. "text": [
  1189. "Model: \"sequential_2\"\n",
  1190. "_________________________________________________________________\n",
  1191. "Layer (type) Output Shape Param # \n",
  1192. "=================================================================\n",
  1193. "dense_2 (Dense) (None, 12) 156 \n",
  1194. "_________________________________________________________________\n",
  1195. "dropout_2 (Dropout) (None, 12) 0 \n",
  1196. "_________________________________________________________________\n",
  1197. "dense_3 (Dense) (None, 12) 156 \n",
  1198. "_________________________________________________________________\n",
  1199. "dropout_3 (Dropout) (None, 12) 0 \n",
  1200. "_________________________________________________________________\n",
  1201. "dense_4 (Dense) (None, 1) 13 \n",
  1202. "_________________________________________________________________\n",
  1203. "dropout_4 (Dropout) (None, 1) 0 \n",
  1204. "_________________________________________________________________\n",
  1205. "dense_5 (Dense) (None, 4) 8 \n",
  1206. "_________________________________________________________________\n",
  1207. "dropout_5 (Dropout) (None, 4) 0 \n",
  1208. "_________________________________________________________________\n",
  1209. "dense_6 (Dense) (None, 1) 5 \n",
  1210. "_________________________________________________________________\n",
  1211. "dropout_6 (Dropout) (None, 1) 0 \n",
  1212. "_________________________________________________________________\n",
  1213. "dense_7 (Dense) (None, 12) 24 \n",
  1214. "=================================================================\n",
  1215. "Total params: 362\n",
  1216. "Trainable params: 362\n",
  1217. "Non-trainable params: 0\n",
  1218. "_________________________________________________________________\n",
  1219. "None\n",
  1220. "Epoch 1/100\n",
  1221. "40/40 [==============================] - 0s 6ms/step - loss: 2.1020 - val_loss: 1.3966\n",
  1222. "Epoch 2/100\n",
  1223. "40/40 [==============================] - 0s 1ms/step - loss: 1.8250 - val_loss: 1.2834\n",
  1224. "Epoch 3/100\n",
  1225. "40/40 [==============================] - 0s 1ms/step - loss: 1.7095 - val_loss: 1.2056\n",
  1226. "Epoch 4/100\n",
  1227. "40/40 [==============================] - 0s 1ms/step - loss: 1.6036 - val_loss: 1.1504\n",
  1228. "Epoch 5/100\n",
  1229. "40/40 [==============================] - 0s 1ms/step - loss: 1.5416 - val_loss: 1.1075\n",
  1230. "Epoch 6/100\n",
  1231. "40/40 [==============================] - 0s 1ms/step - loss: 1.4905 - val_loss: 1.0713\n",
  1232. "Epoch 7/100\n",
  1233. "40/40 [==============================] - 0s 1ms/step - loss: 1.4248 - val_loss: 1.0404\n",
  1234. "Epoch 8/100\n",
  1235. "40/40 [==============================] - 0s 1ms/step - loss: 1.4080 - val_loss: 1.0133\n",
  1236. "Epoch 9/100\n",
  1237. "40/40 [==============================] - 0s 1ms/step - loss: 1.3664 - val_loss: 0.9888\n",
  1238. "Epoch 10/100\n",
  1239. "40/40 [==============================] - 0s 2ms/step - loss: 1.3319 - val_loss: 0.9664\n",
  1240. "Epoch 11/100\n",
  1241. "40/40 [==============================] - 0s 2ms/step - loss: 1.2825 - val_loss: 0.9456\n",
  1242. "Epoch 12/100\n",
  1243. "40/40 [==============================] - 0s 1ms/step - loss: 1.2695 - val_loss: 0.9260\n",
  1244. "Epoch 13/100\n",
  1245. "40/40 [==============================] - 0s 1ms/step - loss: 1.2545 - val_loss: 0.9075\n",
  1246. "Epoch 14/100\n",
  1247. "40/40 [==============================] - 0s 1ms/step - loss: 1.2153 - val_loss: 0.8899\n",
  1248. "Epoch 15/100\n",
  1249. "40/40 [==============================] - 0s 1ms/step - loss: 1.2071 - val_loss: 0.8733\n",
  1250. "Epoch 16/100\n",
  1251. "40/40 [==============================] - 0s 1ms/step - loss: 1.1693 - val_loss: 0.8575\n",
  1252. "Epoch 17/100\n",
  1253. "40/40 [==============================] - 0s 1ms/step - loss: 1.1569 - val_loss: 0.8424\n",
  1254. "Epoch 18/100\n",
  1255. "40/40 [==============================] - 0s 1ms/step - loss: 1.1470 - val_loss: 0.8280\n",
  1256. "Epoch 19/100\n",
  1257. "40/40 [==============================] - 0s 1ms/step - loss: 1.1229 - val_loss: 0.8143\n",
  1258. "Epoch 20/100\n",
  1259. "40/40 [==============================] - 0s 2ms/step - loss: 1.1088 - val_loss: 0.8011\n",
  1260. "Epoch 21/100\n",
  1261. "40/40 [==============================] - 0s 2ms/step - loss: 1.0923 - val_loss: 0.7885\n",
  1262. "Epoch 22/100\n",
  1263. "40/40 [==============================] - 0s 2ms/step - loss: 1.0745 - val_loss: 0.7764\n",
  1264. "Epoch 23/100\n",
  1265. "40/40 [==============================] - 0s 2ms/step - loss: 1.0592 - val_loss: 0.7648\n",
  1266. "Epoch 24/100\n",
  1267. "40/40 [==============================] - 0s 2ms/step - loss: 1.0476 - val_loss: 0.7537\n",
  1268. "Epoch 25/100\n",
  1269. "40/40 [==============================] - 0s 1ms/step - loss: 1.0341 - val_loss: 0.7430\n",
  1270. "Epoch 26/100\n",
  1271. "40/40 [==============================] - 0s 1ms/step - loss: 1.0216 - val_loss: 0.7328\n",
  1272. "Epoch 27/100\n",
  1273. "40/40 [==============================] - 0s 1ms/step - loss: 1.0110 - val_loss: 0.7230\n",
  1274. "Epoch 28/100\n",
  1275. "40/40 [==============================] - 0s 1ms/step - loss: 0.9972 - val_loss: 0.7136\n",
  1276. "Epoch 29/100\n",
  1277. "40/40 [==============================] - 0s 2ms/step - loss: 0.9867 - val_loss: 0.7046\n",
  1278. "Epoch 30/100\n",
  1279. "40/40 [==============================] - 0s 2ms/step - loss: 0.9769 - val_loss: 0.6959\n",
  1280. "Epoch 31/100\n",
  1281. "40/40 [==============================] - 0s 2ms/step - loss: 0.9668 - val_loss: 0.6876\n",
  1282. "Epoch 32/100\n",
  1283. "40/40 [==============================] - 0s 1ms/step - loss: 0.9543 - val_loss: 0.6797\n",
  1284. "Epoch 33/100\n",
  1285. "40/40 [==============================] - 0s 1ms/step - loss: 0.9452 - val_loss: 0.6720\n",
  1286. "Epoch 34/100\n",
  1287. "40/40 [==============================] - 0s 2ms/step - loss: 0.9357 - val_loss: 0.6647\n",
  1288. "Epoch 35/100\n",
  1289. "40/40 [==============================] - 0s 2ms/step - loss: 0.9327 - val_loss: 0.6576\n",
  1290. "Epoch 36/100\n",
  1291. "40/40 [==============================] - 0s 2ms/step - loss: 0.9219 - val_loss: 0.6509\n",
  1292. "Epoch 37/100\n",
  1293. "40/40 [==============================] - 0s 1ms/step - loss: 0.9144 - val_loss: 0.6443\n",
  1294. "Epoch 38/100\n",
  1295. "40/40 [==============================] - 0s 2ms/step - loss: 0.9061 - val_loss: 0.6381\n",
  1296. "Epoch 39/100\n",
  1297. "40/40 [==============================] - 0s 1ms/step - loss: 0.8996 - val_loss: 0.6321\n",
  1298. "Epoch 40/100\n",
  1299. "40/40 [==============================] - 0s 1ms/step - loss: 0.8924 - val_loss: 0.6263\n",
  1300. "Epoch 41/100\n",
  1301. "40/40 [==============================] - 0s 1ms/step - loss: 0.8858 - val_loss: 0.6207\n",
  1302. "Epoch 42/100\n",
  1303. "40/40 [==============================] - 0s 2ms/step - loss: 0.8793 - val_loss: 0.6153\n",
  1304. "Epoch 43/100\n",
  1305. "40/40 [==============================] - 0s 2ms/step - loss: 0.8744 - val_loss: 0.6102\n",
  1306. "Epoch 44/100\n",
  1307. "40/40 [==============================] - 0s 2ms/step - loss: 0.8671 - val_loss: 0.6052\n",
  1308. "Epoch 45/100\n",
  1309. "40/40 [==============================] - 0s 2ms/step - loss: 0.8610 - val_loss: 0.6004\n",
  1310. "Epoch 46/100\n",
  1311. "40/40 [==============================] - 0s 2ms/step - loss: 0.8574 - val_loss: 0.5958\n",
  1312. "Epoch 47/100\n",
  1313. "40/40 [==============================] - 0s 2ms/step - loss: 0.8517 - val_loss: 0.5913\n",
  1314. "Epoch 48/100\n",
  1315. "40/40 [==============================] - 0s 2ms/step - loss: 0.8469 - val_loss: 0.5870\n",
  1316. "Epoch 49/100\n",
  1317. "40/40 [==============================] - 0s 2ms/step - loss: 0.8431 - val_loss: 0.5829\n",
  1318. "Epoch 50/100\n",
  1319. "40/40 [==============================] - 0s 2ms/step - loss: 0.8371 - val_loss: 0.5789\n",
  1320. "Epoch 51/100\n",
  1321. "40/40 [==============================] - 0s 2ms/step - loss: 0.8338 - val_loss: 0.5750\n",
  1322. "Epoch 52/100\n",
  1323. "40/40 [==============================] - 0s 2ms/step - loss: 0.8288 - val_loss: 0.5713\n",
  1324. "Epoch 53/100\n",
  1325. "40/40 [==============================] - 0s 2ms/step - loss: 0.8245 - val_loss: 0.5677\n",
  1326. "Epoch 54/100\n",
  1327. "40/40 [==============================] - 0s 2ms/step - loss: 0.8207 - val_loss: 0.5642\n",
  1328. "Epoch 55/100\n",
  1329. "40/40 [==============================] - 0s 2ms/step - loss: 0.8179 - val_loss: 0.5608\n",
  1330. "Epoch 56/100\n",
  1331. "40/40 [==============================] - 0s 1ms/step - loss: 0.8163 - val_loss: 0.5576\n",
  1332. "Epoch 57/100\n",
  1333. "40/40 [==============================] - 0s 2ms/step - loss: 0.8105 - val_loss: 0.5545\n",
  1334. "Epoch 58/100\n",
  1335. "40/40 [==============================] - 0s 2ms/step - loss: 0.8069 - val_loss: 0.5514\n",
  1336. "Epoch 59/100\n",
  1337. "40/40 [==============================] - 0s 2ms/step - loss: 0.8037 - val_loss: 0.5485\n",
  1338. "Epoch 60/100\n",
  1339. "40/40 [==============================] - 0s 2ms/step - loss: 0.8004 - val_loss: 0.5457\n",
  1340. "Epoch 61/100\n",
  1341. "40/40 [==============================] - 0s 2ms/step - loss: 0.7974 - val_loss: 0.5429\n",
  1342. "Epoch 62/100\n",
  1343. "40/40 [==============================] - 0s 2ms/step - loss: 0.7941 - val_loss: 0.5403\n",
  1344. "Epoch 63/100\n",
  1345. "40/40 [==============================] - 0s 2ms/step - loss: 0.7915 - val_loss: 0.5377\n",
  1346. "Epoch 64/100\n",
  1347. "40/40 [==============================] - 0s 2ms/step - loss: 0.7891 - val_loss: 0.5352\n"
  1348. ]
  1349. },
  1350. {
  1351. "name": "stdout",
  1352. "output_type": "stream",
  1353. "text": [
  1354. "Epoch 65/100\n",
  1355. "40/40 [==============================] - 0s 2ms/step - loss: 0.7860 - val_loss: 0.5328\n",
  1356. "Epoch 66/100\n",
  1357. "40/40 [==============================] - 0s 2ms/step - loss: 0.7836 - val_loss: 0.5305\n",
  1358. "Epoch 67/100\n",
  1359. "40/40 [==============================] - 0s 2ms/step - loss: 0.7811 - val_loss: 0.5282\n",
  1360. "Epoch 68/100\n",
  1361. "40/40 [==============================] - 0s 2ms/step - loss: 0.7792 - val_loss: 0.5260\n",
  1362. "Epoch 69/100\n",
  1363. "40/40 [==============================] - 0s 2ms/step - loss: 0.7770 - val_loss: 0.5238\n",
  1364. "Epoch 70/100\n",
  1365. "40/40 [==============================] - 0s 2ms/step - loss: 0.7745 - val_loss: 0.5218\n",
  1366. "Epoch 71/100\n",
  1367. "40/40 [==============================] - 0s 2ms/step - loss: 0.7725 - val_loss: 0.5197\n",
  1368. "Epoch 72/100\n",
  1369. "40/40 [==============================] - 0s 2ms/step - loss: 0.7704 - val_loss: 0.5178\n",
  1370. "Epoch 73/100\n",
  1371. "40/40 [==============================] - 0s 2ms/step - loss: 0.7683 - val_loss: 0.5159\n",
  1372. "Epoch 74/100\n",
  1373. "40/40 [==============================] - 0s 1ms/step - loss: 0.7663 - val_loss: 0.5141\n",
  1374. "Epoch 75/100\n",
  1375. "40/40 [==============================] - 0s 1ms/step - loss: 0.7644 - val_loss: 0.5123\n",
  1376. "Epoch 76/100\n",
  1377. "40/40 [==============================] - 0s 2ms/step - loss: 0.7627 - val_loss: 0.5106\n",
  1378. "Epoch 77/100\n",
  1379. "40/40 [==============================] - 0s 1ms/step - loss: 0.7608 - val_loss: 0.5089\n",
  1380. "Epoch 78/100\n",
  1381. "40/40 [==============================] - 0s 2ms/step - loss: 0.7591 - val_loss: 0.5073\n",
  1382. "Epoch 79/100\n",
  1383. "40/40 [==============================] - 0s 2ms/step - loss: 0.7576 - val_loss: 0.5057\n",
  1384. "Epoch 80/100\n",
  1385. "40/40 [==============================] - 0s 2ms/step - loss: 0.7559 - val_loss: 0.5041\n",
  1386. "Epoch 81/100\n",
  1387. "40/40 [==============================] - 0s 2ms/step - loss: 0.7544 - val_loss: 0.5026\n",
  1388. "Epoch 82/100\n",
  1389. "40/40 [==============================] - 0s 2ms/step - loss: 0.7529 - val_loss: 0.5011\n",
  1390. "Epoch 83/100\n",
  1391. "40/40 [==============================] - 0s 2ms/step - loss: 0.7514 - val_loss: 0.4997\n",
  1392. "Epoch 84/100\n",
  1393. "40/40 [==============================] - 0s 2ms/step - loss: 0.7500 - val_loss: 0.4983\n",
  1394. "Epoch 85/100\n",
  1395. "40/40 [==============================] - 0s 2ms/step - loss: 0.7486 - val_loss: 0.4970\n",
  1396. "Epoch 86/100\n",
  1397. "40/40 [==============================] - 0s 2ms/step - loss: 0.7473 - val_loss: 0.4957\n",
  1398. "Epoch 87/100\n",
  1399. "40/40 [==============================] - 0s 2ms/step - loss: 0.7460 - val_loss: 0.4944\n",
  1400. "Epoch 88/100\n",
  1401. "40/40 [==============================] - 0s 2ms/step - loss: 0.7448 - val_loss: 0.4931\n",
  1402. "Epoch 89/100\n",
  1403. "40/40 [==============================] - 0s 2ms/step - loss: 0.7436 - val_loss: 0.4919\n",
  1404. "Epoch 90/100\n",
  1405. "40/40 [==============================] - 0s 2ms/step - loss: 0.7424 - val_loss: 0.4907\n",
  1406. "Epoch 91/100\n",
  1407. "40/40 [==============================] - 0s 2ms/step - loss: 0.7412 - val_loss: 0.4896\n",
  1408. "Epoch 92/100\n",
  1409. "40/40 [==============================] - 0s 2ms/step - loss: 0.7402 - val_loss: 0.4885\n",
  1410. "Epoch 93/100\n",
  1411. "40/40 [==============================] - 0s 2ms/step - loss: 0.7390 - val_loss: 0.4874\n",
  1412. "Epoch 94/100\n",
  1413. "40/40 [==============================] - 0s 2ms/step - loss: 0.7380 - val_loss: 0.4863\n",
  1414. "Epoch 95/100\n",
  1415. "40/40 [==============================] - 0s 2ms/step - loss: 0.7370 - val_loss: 0.4853\n",
  1416. "Epoch 96/100\n",
  1417. "40/40 [==============================] - 0s 2ms/step - loss: 0.7360 - val_loss: 0.4842\n",
  1418. "Epoch 97/100\n",
  1419. "40/40 [==============================] - 0s 2ms/step - loss: 0.7350 - val_loss: 0.4833\n",
  1420. "Epoch 98/100\n",
  1421. "40/40 [==============================] - 0s 2ms/step - loss: 0.7341 - val_loss: 0.4823\n",
  1422. "Epoch 99/100\n",
  1423. "40/40 [==============================] - 0s 2ms/step - loss: 0.7332 - val_loss: 0.4814\n",
  1424. "Epoch 100/100\n",
  1425. "40/40 [==============================] - 0s 2ms/step - loss: 0.7323 - val_loss: 0.4804\n"
  1426. ]
  1427. },
  1428. {
  1429. "name": "stderr",
  1430. "output_type": "stream",
  1431. "text": [
  1432. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  1433. " \"argument is ignored.\", FutureWarning)\n",
  1434. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  1435. " \"argument is ignored.\", FutureWarning)\n"
  1436. ]
  1437. },
  1438. {
  1439. "name": "stdout",
  1440. "output_type": "stream",
  1441. "text": [
  1442. "{'method_called': 'evaluate',\n",
  1443. " 'outputs': \"[{'outputs.0': d3mIndex anomaly\"\n",
  1444. " '0 0 1'\n",
  1445. " '1 1 0'\n",
  1446. " '2 2 1'\n",
  1447. " '3 3 1'\n",
  1448. " '4 4 1'\n",
  1449. " '... ... ...'\n",
  1450. " '1395 1395 1'\n",
  1451. " '1396 1396 0'\n",
  1452. " '1397 1397 1'\n",
  1453. " '1398 1398 1'\n",
  1454. " '1399 1399 1'\n",
  1455. " ''\n",
  1456. " \"[1400 rows x 2 columns]}, {'outputs.0': d3mIndex anomaly\"\n",
  1457. " '0 0 1'\n",
  1458. " '1 1 0'\n",
  1459. " '2 2 1'\n",
  1460. " '3 3 1'\n",
  1461. " '4 4 1'\n",
  1462. " '... ... ...'\n",
  1463. " '1395 1395 1'\n",
  1464. " '1396 1396 0'\n",
  1465. " '1397 1397 1'\n",
  1466. " '1398 1398 1'\n",
  1467. " '1399 1399 1'\n",
  1468. " ''\n",
  1469. " '[1400 rows x 2 columns]}]',\n",
  1470. " 'pipeline': '<d3m.metadata.pipeline.Pipeline object at 0x156c41358>',\n",
  1471. " 'scores': ' metric value normalized randomSeed fold'\n",
  1472. " '0 F1_MACRO 0.509059 0.509059 0 0',\n",
  1473. " 'status': 'COMPLETED'}\n"
  1474. ]
  1475. }
  1476. ],
  1477. "source": [
  1478. "# Run the pipeline\n",
  1479. "pipeline_result = evaluate_pipeline(dataset, pipeline, metric)\n",
  1480. "print(pipeline_result)\n",
  1481. "#raise pipeline_result.error[0]"
  1482. ]
  1483. },
  1484. {
  1485. "cell_type": "markdown",
  1486. "metadata": {},
  1487. "source": [
  1488. "## Searcher Example:"
  1489. ]
  1490. },
  1491. {
  1492. "cell_type": "code",
  1493. "execution_count": 70,
  1494. "metadata": {},
  1495. "outputs": [],
  1496. "source": [
  1497. "table_path = '../../datasets/anomaly/raw_data/yahoo_sub_5.csv'\n",
  1498. "target_index = 6 # which column is the target\n",
  1499. "time_limit = 30 # How many seconds you want to spend searching"
  1500. ]
  1501. },
  1502. {
  1503. "cell_type": "code",
  1504. "execution_count": 71,
  1505. "metadata": {},
  1506. "outputs": [],
  1507. "source": [
  1508. "metric = 'F1_MACRO' # macro-averaged F1 over labels 0 and 1"
  1509. ]
  1510. },
  1511. {
  1512. "cell_type": "code",
  1513. "execution_count": 72,
  1514. "metadata": {},
  1515. "outputs": [],
  1516. "source": [
  1517. "# Read data and generate dataset and problem\n",
  1518. "df = pd.read_csv(table_path)\n",
  1519. "dataset = generate_dataset(df, target_index=target_index)\n",
  1520. "problem_description = generate_problem(dataset, metric)"
  1521. ]
  1522. },
  1523. {
  1524. "cell_type": "code",
  1525. "execution_count": 73,
  1526. "metadata": {},
  1527. "outputs": [],
  1528. "source": [
  1529. "# Start backend\n",
  1530. "backend = SimpleRunner(random_seed=0)"
  1531. ]
  1532. },
  1533. {
  1534. "cell_type": "code",
  1535. "execution_count": 74,
  1536. "metadata": {},
  1537. "outputs": [],
  1538. "source": [
  1539. "# Start search algorithm\n",
  1540. "search = BruteForceSearch(problem_description=problem_description,\n",
  1541. " backend=backend)"
  1542. ]
  1543. },
  1544. {
  1545. "cell_type": "code",
  1546. "execution_count": 75,
  1547. "metadata": {},
  1548. "outputs": [
  1549. {
  1550. "name": "stderr",
  1551. "output_type": "stream",
  1552. "text": [
  1553. "Not all provided hyper-parameters for the data preparation pipeline 79ce71bd-db96-494b-a455-14f2e2ac5040 were used: ['method', 'number_of_folds', 'randomSeed', 'shuffle', 'stratified']\n"
  1554. ]
  1555. },
  1556. {
  1557. "name": "stdout",
  1558. "output_type": "stream",
  1559. "text": [
  1560. "Model: \"sequential_3\"\n",
  1561. "_________________________________________________________________\n",
  1562. "Layer (type) Output Shape Param # \n",
  1563. "=================================================================\n",
  1564. "dense_8 (Dense) (None, 12) 156 \n",
  1565. "_________________________________________________________________\n",
  1566. "dropout_7 (Dropout) (None, 12) 0 \n",
  1567. "_________________________________________________________________\n",
  1568. "dense_9 (Dense) (None, 12) 156 \n",
  1569. "_________________________________________________________________\n",
  1570. "dropout_8 (Dropout) (None, 12) 0 \n",
  1571. "_________________________________________________________________\n",
  1572. "dense_10 (Dense) (None, 1) 13 \n",
  1573. "_________________________________________________________________\n",
  1574. "dropout_9 (Dropout) (None, 1) 0 \n",
  1575. "_________________________________________________________________\n",
  1576. "dense_11 (Dense) (None, 4) 8 \n",
  1577. "_________________________________________________________________\n",
  1578. "dropout_10 (Dropout) (None, 4) 0 \n",
  1579. "_________________________________________________________________\n",
  1580. "dense_12 (Dense) (None, 1) 5 \n",
  1581. "_________________________________________________________________\n",
  1582. "dropout_11 (Dropout) (None, 1) 0 \n",
  1583. "_________________________________________________________________\n",
  1584. "dense_13 (Dense) (None, 12) 24 \n",
  1585. "=================================================================\n",
  1586. "Total params: 362\n",
  1587. "Trainable params: 362\n",
  1588. "Non-trainable params: 0\n",
  1589. "_________________________________________________________________\n",
  1590. "None\n",
  1591. "Epoch 1/100\n",
  1592. "40/40 [==============================] - 0s 4ms/step - loss: 1.5944 - val_loss: 1.2184\n",
  1593. "Epoch 2/100\n",
  1594. "40/40 [==============================] - 0s 2ms/step - loss: 1.3842 - val_loss: 1.1148\n",
  1595. "Epoch 3/100\n",
  1596. "40/40 [==============================] - 0s 1ms/step - loss: 1.2952 - val_loss: 1.0463\n",
  1597. "Epoch 4/100\n",
  1598. "40/40 [==============================] - 0s 1ms/step - loss: 1.2214 - val_loss: 0.9919\n",
  1599. "Epoch 5/100\n",
  1600. "40/40 [==============================] - 0s 2ms/step - loss: 1.1586 - val_loss: 0.9538\n",
  1601. "Epoch 6/100\n",
  1602. "40/40 [==============================] - 0s 2ms/step - loss: 1.1199 - val_loss: 0.9192\n",
  1603. "Epoch 7/100\n",
  1604. "40/40 [==============================] - 0s 2ms/step - loss: 1.0858 - val_loss: 0.8896\n",
  1605. "Epoch 8/100\n",
  1606. "40/40 [==============================] - 0s 2ms/step - loss: 1.0449 - val_loss: 0.8645\n",
  1607. "Epoch 9/100\n",
  1608. "40/40 [==============================] - 0s 2ms/step - loss: 1.0290 - val_loss: 0.8419\n",
  1609. "Epoch 10/100\n",
  1610. "40/40 [==============================] - 0s 1ms/step - loss: 1.0031 - val_loss: 0.8217\n",
  1611. "Epoch 11/100\n",
  1612. "40/40 [==============================] - 0s 1ms/step - loss: 0.9821 - val_loss: 0.8030\n",
  1613. "Epoch 12/100\n",
  1614. "40/40 [==============================] - 0s 1ms/step - loss: 0.9619 - val_loss: 0.7847\n",
  1615. "Epoch 13/100\n",
  1616. "40/40 [==============================] - 0s 1ms/step - loss: 0.9446 - val_loss: 0.7676\n",
  1617. "Epoch 14/100\n",
  1618. "40/40 [==============================] - 0s 1ms/step - loss: 0.9184 - val_loss: 0.7520\n",
  1619. "Epoch 15/100\n",
  1620. "40/40 [==============================] - 0s 1ms/step - loss: 0.9076 - val_loss: 0.7376\n",
  1621. "Epoch 16/100\n",
  1622. "40/40 [==============================] - 0s 1ms/step - loss: 0.8856 - val_loss: 0.7240\n",
  1623. "Epoch 17/100\n",
  1624. "40/40 [==============================] - 0s 1ms/step - loss: 0.8732 - val_loss: 0.7110\n",
  1625. "Epoch 18/100\n",
  1626. "40/40 [==============================] - 0s 2ms/step - loss: 0.8554 - val_loss: 0.6987\n",
  1627. "Epoch 19/100\n",
  1628. "40/40 [==============================] - 0s 2ms/step - loss: 0.8449 - val_loss: 0.6868\n",
  1629. "Epoch 20/100\n",
  1630. "40/40 [==============================] - 0s 2ms/step - loss: 0.8318 - val_loss: 0.6762\n",
  1631. "Epoch 21/100\n",
  1632. "40/40 [==============================] - 0s 2ms/step - loss: 0.8222 - val_loss: 0.6654\n",
  1633. "Epoch 22/100\n",
  1634. "40/40 [==============================] - 0s 2ms/step - loss: 0.8087 - val_loss: 0.6556\n",
  1635. "Epoch 23/100\n",
  1636. "40/40 [==============================] - 0s 2ms/step - loss: 0.7972 - val_loss: 0.6465\n",
  1637. "Epoch 24/100\n",
  1638. "40/40 [==============================] - 0s 2ms/step - loss: 0.7934 - val_loss: 0.6375\n",
  1639. "Epoch 25/100\n",
  1640. "40/40 [==============================] - 0s 2ms/step - loss: 0.7774 - val_loss: 0.6290\n",
  1641. "Epoch 26/100\n",
  1642. "40/40 [==============================] - 0s 1ms/step - loss: 0.7675 - val_loss: 0.6209\n",
  1643. "Epoch 27/100\n",
  1644. "40/40 [==============================] - 0s 2ms/step - loss: 0.7598 - val_loss: 0.6133\n",
  1645. "Epoch 28/100\n",
  1646. "40/40 [==============================] - 0s 1ms/step - loss: 0.7520 - val_loss: 0.6057\n",
  1647. "Epoch 29/100\n",
  1648. "40/40 [==============================] - 0s 1ms/step - loss: 0.7446 - val_loss: 0.5991\n",
  1649. "Epoch 30/100\n",
  1650. "40/40 [==============================] - 0s 1ms/step - loss: 0.7358 - val_loss: 0.5924\n",
  1651. "Epoch 31/100\n",
  1652. "40/40 [==============================] - 0s 1ms/step - loss: 0.7289 - val_loss: 0.5861\n",
  1653. "Epoch 32/100\n",
  1654. "40/40 [==============================] - 0s 1ms/step - loss: 0.7225 - val_loss: 0.5800\n",
  1655. "Epoch 33/100\n",
  1656. "40/40 [==============================] - 0s 2ms/step - loss: 0.7172 - val_loss: 0.5745\n",
  1657. "Epoch 34/100\n",
  1658. "40/40 [==============================] - 0s 2ms/step - loss: 0.7099 - val_loss: 0.5689\n",
  1659. "Epoch 35/100\n",
  1660. "40/40 [==============================] - 0s 2ms/step - loss: 0.7043 - val_loss: 0.5637\n",
  1661. "Epoch 36/100\n",
  1662. "40/40 [==============================] - 0s 2ms/step - loss: 0.6980 - val_loss: 0.5589\n",
  1663. "Epoch 37/100\n",
  1664. "40/40 [==============================] - 0s 2ms/step - loss: 0.6935 - val_loss: 0.5542\n",
  1665. "Epoch 38/100\n",
  1666. "40/40 [==============================] - 0s 2ms/step - loss: 0.6877 - val_loss: 0.5498\n",
  1667. "Epoch 39/100\n",
  1668. "40/40 [==============================] - 0s 1ms/step - loss: 0.6828 - val_loss: 0.5454\n",
  1669. "Epoch 40/100\n",
  1670. "40/40 [==============================] - 0s 1ms/step - loss: 0.6788 - val_loss: 0.5413\n",
  1671. "Epoch 41/100\n",
  1672. "40/40 [==============================] - 0s 1ms/step - loss: 0.6742 - val_loss: 0.5376\n",
  1673. "Epoch 42/100\n",
  1674. "40/40 [==============================] - 0s 1ms/step - loss: 0.6695 - val_loss: 0.5338\n",
  1675. "Epoch 43/100\n",
  1676. "40/40 [==============================] - 0s 2ms/step - loss: 0.6655 - val_loss: 0.5303\n",
  1677. "Epoch 44/100\n",
  1678. "40/40 [==============================] - 0s 2ms/step - loss: 0.6616 - val_loss: 0.5269\n",
  1679. "Epoch 45/100\n",
  1680. "40/40 [==============================] - 0s 2ms/step - loss: 0.6578 - val_loss: 0.5238\n",
  1681. "Epoch 46/100\n",
  1682. "40/40 [==============================] - 0s 2ms/step - loss: 0.6542 - val_loss: 0.5207\n",
  1683. "Epoch 47/100\n",
  1684. "40/40 [==============================] - 0s 2ms/step - loss: 0.6505 - val_loss: 0.5178\n",
  1685. "Epoch 48/100\n",
  1686. "40/40 [==============================] - 0s 2ms/step - loss: 0.6471 - val_loss: 0.5150\n",
  1687. "Epoch 49/100\n",
  1688. "40/40 [==============================] - 0s 2ms/step - loss: 0.6442 - val_loss: 0.5124\n",
  1689. "Epoch 50/100\n",
  1690. "40/40 [==============================] - 0s 2ms/step - loss: 0.6410 - val_loss: 0.5098\n",
  1691. "Epoch 51/100\n",
  1692. "40/40 [==============================] - 0s 2ms/step - loss: 0.6386 - val_loss: 0.5073\n",
  1693. "Epoch 52/100\n",
  1694. "40/40 [==============================] - 0s 2ms/step - loss: 0.6354 - val_loss: 0.5050\n",
  1695. "Epoch 53/100\n",
  1696. "40/40 [==============================] - 0s 2ms/step - loss: 0.6326 - val_loss: 0.5028\n",
  1697. "Epoch 54/100\n",
  1698. "40/40 [==============================] - 0s 2ms/step - loss: 0.6302 - val_loss: 0.5006\n",
  1699. "Epoch 55/100\n",
  1700. "40/40 [==============================] - 0s 2ms/step - loss: 0.6287 - val_loss: 0.4986\n",
  1701. "Epoch 56/100\n",
  1702. "40/40 [==============================] - 0s 2ms/step - loss: 0.6254 - val_loss: 0.4966\n",
  1703. "Epoch 57/100\n",
  1704. "40/40 [==============================] - 0s 1ms/step - loss: 0.6231 - val_loss: 0.4947\n",
  1705. "Epoch 58/100\n",
  1706. "40/40 [==============================] - 0s 1ms/step - loss: 0.6210 - val_loss: 0.4929\n",
  1707. "Epoch 59/100\n",
  1708. "40/40 [==============================] - 0s 1ms/step - loss: 0.6195 - val_loss: 0.4911\n",
  1709. "Epoch 60/100\n",
  1710. "40/40 [==============================] - 0s 1ms/step - loss: 0.6169 - val_loss: 0.4894\n",
  1711. "Epoch 61/100\n",
  1712. "40/40 [==============================] - 0s 1ms/step - loss: 0.6153 - val_loss: 0.4878\n",
  1713. "Epoch 62/100\n",
  1714. "40/40 [==============================] - 0s 1ms/step - loss: 0.6132 - val_loss: 0.4863\n",
  1715. "Epoch 63/100\n",
  1716. "40/40 [==============================] - 0s 1ms/step - loss: 0.6114 - val_loss: 0.4848\n",
  1717. "Epoch 64/100\n",
  1718. "40/40 [==============================] - 0s 2ms/step - loss: 0.6097 - val_loss: 0.4834\n"
  1719. ]
  1720. },
  1721. {
  1722. "name": "stdout",
  1723. "output_type": "stream",
  1724. "text": [
  1725. "Epoch 65/100\n",
  1726. "40/40 [==============================] - 0s 2ms/step - loss: 0.6080 - val_loss: 0.4820\n",
  1727. "Epoch 66/100\n",
  1728. "40/40 [==============================] - 0s 2ms/step - loss: 0.6065 - val_loss: 0.4806\n",
  1729. "Epoch 67/100\n",
  1730. "40/40 [==============================] - 0s 2ms/step - loss: 0.6050 - val_loss: 0.4794\n",
  1731. "Epoch 68/100\n",
  1732. "40/40 [==============================] - 0s 1ms/step - loss: 0.6035 - val_loss: 0.4781\n",
  1733. "Epoch 69/100\n",
  1734. "40/40 [==============================] - 0s 1ms/step - loss: 0.6020 - val_loss: 0.4770\n",
  1735. "Epoch 70/100\n",
  1736. "40/40 [==============================] - 0s 1ms/step - loss: 0.6007 - val_loss: 0.4758\n",
  1737. "Epoch 71/100\n",
  1738. "40/40 [==============================] - 0s 1ms/step - loss: 0.5994 - val_loss: 0.4747\n",
  1739. "Epoch 72/100\n",
  1740. "40/40 [==============================] - 0s 1ms/step - loss: 0.5981 - val_loss: 0.4736\n",
  1741. "Epoch 73/100\n",
  1742. "40/40 [==============================] - 0s 1ms/step - loss: 0.5969 - val_loss: 0.4726\n",
  1743. "Epoch 74/100\n",
  1744. "40/40 [==============================] - 0s 1ms/step - loss: 0.5958 - val_loss: 0.4716\n",
  1745. "Epoch 75/100\n",
  1746. "40/40 [==============================] - 0s 1ms/step - loss: 0.5947 - val_loss: 0.4706\n",
  1747. "Epoch 76/100\n",
  1748. "40/40 [==============================] - 0s 1ms/step - loss: 0.5937 - val_loss: 0.4697\n",
  1749. "Epoch 77/100\n",
  1750. "40/40 [==============================] - 0s 1ms/step - loss: 0.5926 - val_loss: 0.4688\n",
  1751. "Epoch 78/100\n",
  1752. "40/40 [==============================] - 0s 1ms/step - loss: 0.5918 - val_loss: 0.4679\n",
  1753. "Epoch 79/100\n",
  1754. "40/40 [==============================] - 0s 1ms/step - loss: 0.5907 - val_loss: 0.4671\n",
  1755. "Epoch 80/100\n",
  1756. "40/40 [==============================] - 0s 1ms/step - loss: 0.5897 - val_loss: 0.4663\n",
  1757. "Epoch 81/100\n",
  1758. "40/40 [==============================] - 0s 1ms/step - loss: 0.5890 - val_loss: 0.4655\n",
  1759. "Epoch 82/100\n",
  1760. "40/40 [==============================] - 0s 1ms/step - loss: 0.5880 - val_loss: 0.4647\n",
  1761. "Epoch 83/100\n",
  1762. "40/40 [==============================] - 0s 1ms/step - loss: 0.5872 - val_loss: 0.4640\n",
  1763. "Epoch 84/100\n",
  1764. "40/40 [==============================] - 0s 2ms/step - loss: 0.5864 - val_loss: 0.4632\n",
  1765. "Epoch 85/100\n",
  1766. "40/40 [==============================] - 0s 2ms/step - loss: 0.5856 - val_loss: 0.4626\n",
  1767. "Epoch 86/100\n",
  1768. "40/40 [==============================] - 0s 2ms/step - loss: 0.5849 - val_loss: 0.4619\n",
  1769. "Epoch 87/100\n",
  1770. "40/40 [==============================] - 0s 2ms/step - loss: 0.5841 - val_loss: 0.4612\n",
  1771. "Epoch 88/100\n",
  1772. "40/40 [==============================] - 0s 2ms/step - loss: 0.5834 - val_loss: 0.4606\n",
  1773. "Epoch 89/100\n",
  1774. "40/40 [==============================] - 0s 2ms/step - loss: 0.5828 - val_loss: 0.4600\n",
  1775. "Epoch 90/100\n",
  1776. "40/40 [==============================] - 0s 2ms/step - loss: 0.5821 - val_loss: 0.4594\n",
  1777. "Epoch 91/100\n",
  1778. "40/40 [==============================] - 0s 2ms/step - loss: 0.5815 - val_loss: 0.4588\n",
  1779. "Epoch 92/100\n",
  1780. "40/40 [==============================] - 0s 2ms/step - loss: 0.5809 - val_loss: 0.4582\n",
  1781. "Epoch 93/100\n",
  1782. "40/40 [==============================] - 0s 2ms/step - loss: 0.5803 - val_loss: 0.4577\n",
  1783. "Epoch 94/100\n",
  1784. "40/40 [==============================] - 0s 2ms/step - loss: 0.5797 - val_loss: 0.4572\n",
  1785. "Epoch 95/100\n",
  1786. "40/40 [==============================] - 0s 1ms/step - loss: 0.5792 - val_loss: 0.4567\n",
  1787. "Epoch 96/100\n",
  1788. "40/40 [==============================] - 0s 1ms/step - loss: 0.5786 - val_loss: 0.4562\n",
  1789. "Epoch 97/100\n",
  1790. "40/40 [==============================] - 0s 1ms/step - loss: 0.5781 - val_loss: 0.4557\n",
  1791. "Epoch 98/100\n",
  1792. "40/40 [==============================] - 0s 1ms/step - loss: 0.5776 - val_loss: 0.4552\n",
  1793. "Epoch 99/100\n",
  1794. "40/40 [==============================] - 0s 1ms/step - loss: 0.5771 - val_loss: 0.4548\n",
  1795. "Epoch 100/100\n",
  1796. "40/40 [==============================] - 0s 1ms/step - loss: 0.5766 - val_loss: 0.4543\n"
  1797. ]
  1798. },
  1799. {
  1800. "name": "stderr",
  1801. "output_type": "stream",
  1802. "text": [
  1803. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  1804. " \"argument is ignored.\", FutureWarning)\n",
  1805. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  1806. " \"argument is ignored.\", FutureWarning)\n",
  1807. "Traceback (most recent call last):\n",
  1808. " File \"/Users/wangyanghe/Desktop/Research/tods/tods/searcher/brute_force_search.py\", line 62, in _search\n",
  1809. " for error in pipeline_result.error:\n",
  1810. "TypeError: 'NoneType' object is not iterable\n",
  1811. "Not all provided hyper-parameters for the data preparation pipeline 79ce71bd-db96-494b-a455-14f2e2ac5040 were used: ['method', 'number_of_folds', 'randomSeed', 'shuffle', 'stratified']\n"
  1812. ]
  1813. },
  1814. {
  1815. "name": "stdout",
  1816. "output_type": "stream",
  1817. "text": [
  1818. "Model: \"sequential_4\"\n",
  1819. "_________________________________________________________________\n",
  1820. "Layer (type) Output Shape Param # \n",
  1821. "=================================================================\n",
  1822. "dense_14 (Dense) (None, 12) 156 \n",
  1823. "_________________________________________________________________\n",
  1824. "dropout_12 (Dropout) (None, 12) 0 \n",
  1825. "_________________________________________________________________\n",
  1826. "dense_15 (Dense) (None, 12) 156 \n",
  1827. "_________________________________________________________________\n",
  1828. "dropout_13 (Dropout) (None, 12) 0 \n",
  1829. "_________________________________________________________________\n",
  1830. "dense_16 (Dense) (None, 1) 13 \n",
  1831. "_________________________________________________________________\n",
  1832. "dropout_14 (Dropout) (None, 1) 0 \n",
  1833. "_________________________________________________________________\n",
  1834. "dense_17 (Dense) (None, 4) 8 \n",
  1835. "_________________________________________________________________\n",
  1836. "dropout_15 (Dropout) (None, 4) 0 \n",
  1837. "_________________________________________________________________\n",
  1838. "dense_18 (Dense) (None, 1) 5 \n",
  1839. "_________________________________________________________________\n",
  1840. "dropout_16 (Dropout) (None, 1) 0 \n",
  1841. "_________________________________________________________________\n",
  1842. "dense_19 (Dense) (None, 12) 24 \n",
  1843. "=================================================================\n",
  1844. "Total params: 362\n",
  1845. "Trainable params: 362\n",
  1846. "Non-trainable params: 0\n",
  1847. "_________________________________________________________________\n",
  1848. "None\n",
  1849. "Epoch 1/100\n",
  1850. "40/40 [==============================] - 0s 5ms/step - loss: 1.6224 - val_loss: 1.0535\n",
  1851. "Epoch 2/100\n",
  1852. "40/40 [==============================] - 0s 1ms/step - loss: 1.4768 - val_loss: 0.9671\n",
  1853. "Epoch 3/100\n",
  1854. "40/40 [==============================] - 0s 2ms/step - loss: 1.3657 - val_loss: 0.9039\n",
  1855. "Epoch 4/100\n",
  1856. "40/40 [==============================] - 0s 1ms/step - loss: 1.2853 - val_loss: 0.8548\n",
  1857. "Epoch 5/100\n",
  1858. "40/40 [==============================] - 0s 2ms/step - loss: 1.2478 - val_loss: 0.8155\n",
  1859. "Epoch 6/100\n",
  1860. "40/40 [==============================] - 0s 1ms/step - loss: 1.1850 - val_loss: 0.7841\n",
  1861. "Epoch 7/100\n",
  1862. "40/40 [==============================] - 0s 1ms/step - loss: 1.1566 - val_loss: 0.7577\n",
  1863. "Epoch 8/100\n",
  1864. "40/40 [==============================] - 0s 2ms/step - loss: 1.1255 - val_loss: 0.7338\n",
  1865. "Epoch 9/100\n",
  1866. "40/40 [==============================] - 0s 2ms/step - loss: 1.0796 - val_loss: 0.7136\n",
  1867. "Epoch 10/100\n",
  1868. "40/40 [==============================] - 0s 2ms/step - loss: 1.0545 - val_loss: 0.6954\n",
  1869. "Epoch 11/100\n",
  1870. "40/40 [==============================] - 0s 2ms/step - loss: 1.0291 - val_loss: 0.6783\n",
  1871. "Epoch 12/100\n",
  1872. "40/40 [==============================] - 0s 2ms/step - loss: 1.0136 - val_loss: 0.6627\n",
  1873. "Epoch 13/100\n",
  1874. "40/40 [==============================] - 0s 1ms/step - loss: 0.9984 - val_loss: 0.6483\n",
  1875. "Epoch 14/100\n",
  1876. "40/40 [==============================] - 0s 2ms/step - loss: 0.9704 - val_loss: 0.6347\n",
  1877. "Epoch 15/100\n",
  1878. "40/40 [==============================] - 0s 1ms/step - loss: 0.9495 - val_loss: 0.6222\n",
  1879. "Epoch 16/100\n",
  1880. "40/40 [==============================] - 0s 1ms/step - loss: 0.9450 - val_loss: 0.6098\n",
  1881. "Epoch 17/100\n",
  1882. "40/40 [==============================] - 0s 2ms/step - loss: 0.9221 - val_loss: 0.5983\n",
  1883. "Epoch 18/100\n",
  1884. "40/40 [==============================] - 0s 2ms/step - loss: 0.9060 - val_loss: 0.5875\n",
  1885. "Epoch 19/100\n",
  1886. "40/40 [==============================] - 0s 2ms/step - loss: 0.8961 - val_loss: 0.5772\n",
  1887. "Epoch 20/100\n",
  1888. "40/40 [==============================] - 0s 2ms/step - loss: 0.8758 - val_loss: 0.5674\n",
  1889. "Epoch 21/100\n",
  1890. "40/40 [==============================] - 0s 2ms/step - loss: 0.8635 - val_loss: 0.5580\n",
  1891. "Epoch 22/100\n",
  1892. "40/40 [==============================] - 0s 1ms/step - loss: 0.8520 - val_loss: 0.5492\n",
  1893. "Epoch 23/100\n",
  1894. "40/40 [==============================] - 0s 2ms/step - loss: 0.8367 - val_loss: 0.5407\n",
  1895. "Epoch 24/100\n",
  1896. "40/40 [==============================] - 0s 2ms/step - loss: 0.8265 - val_loss: 0.5328\n",
  1897. "Epoch 25/100\n",
  1898. "40/40 [==============================] - 0s 1ms/step - loss: 0.8198 - val_loss: 0.5251\n",
  1899. "Epoch 26/100\n",
  1900. "40/40 [==============================] - 0s 1ms/step - loss: 0.8084 - val_loss: 0.5180\n",
  1901. "Epoch 27/100\n",
  1902. "40/40 [==============================] - 0s 1ms/step - loss: 0.7999 - val_loss: 0.5108\n",
  1903. "Epoch 28/100\n",
  1904. "40/40 [==============================] - 0s 1ms/step - loss: 0.7929 - val_loss: 0.5042\n",
  1905. "Epoch 29/100\n",
  1906. "40/40 [==============================] - 0s 2ms/step - loss: 0.7812 - val_loss: 0.4979\n",
  1907. "Epoch 30/100\n",
  1908. "40/40 [==============================] - 0s 2ms/step - loss: 0.7739 - val_loss: 0.4918\n",
  1909. "Epoch 31/100\n",
  1910. "40/40 [==============================] - 0s 2ms/step - loss: 0.7629 - val_loss: 0.4861\n",
  1911. "Epoch 32/100\n",
  1912. "40/40 [==============================] - 0s 2ms/step - loss: 0.7554 - val_loss: 0.4807\n",
  1913. "Epoch 33/100\n",
  1914. "40/40 [==============================] - 0s 2ms/step - loss: 0.7480 - val_loss: 0.4754\n",
  1915. "Epoch 34/100\n",
  1916. "40/40 [==============================] - 0s 2ms/step - loss: 0.7435 - val_loss: 0.4704\n",
  1917. "Epoch 35/100\n",
  1918. "40/40 [==============================] - 0s 2ms/step - loss: 0.7358 - val_loss: 0.4656\n",
  1919. "Epoch 36/100\n",
  1920. "40/40 [==============================] - 0s 2ms/step - loss: 0.7309 - val_loss: 0.4610\n",
  1921. "Epoch 37/100\n",
  1922. "40/40 [==============================] - 0s 2ms/step - loss: 0.7260 - val_loss: 0.4567\n",
  1923. "Epoch 38/100\n",
  1924. "40/40 [==============================] - 0s 2ms/step - loss: 0.7171 - val_loss: 0.4525\n",
  1925. "Epoch 39/100\n",
  1926. "40/40 [==============================] - 0s 2ms/step - loss: 0.7124 - val_loss: 0.4485\n",
  1927. "Epoch 40/100\n",
  1928. "40/40 [==============================] - 0s 2ms/step - loss: 0.7065 - val_loss: 0.4447\n",
  1929. "Epoch 41/100\n",
  1930. "40/40 [==============================] - 0s 2ms/step - loss: 0.7011 - val_loss: 0.4412\n",
  1931. "Epoch 42/100\n",
  1932. "40/40 [==============================] - 0s 2ms/step - loss: 0.6965 - val_loss: 0.4377\n",
  1933. "Epoch 43/100\n",
  1934. "40/40 [==============================] - 0s 2ms/step - loss: 0.6914 - val_loss: 0.4343\n",
  1935. "Epoch 44/100\n",
  1936. "40/40 [==============================] - 0s 2ms/step - loss: 0.6886 - val_loss: 0.4311\n",
  1937. "Epoch 45/100\n",
  1938. "40/40 [==============================] - 0s 2ms/step - loss: 0.6872 - val_loss: 0.4280\n",
  1939. "Epoch 46/100\n",
  1940. "40/40 [==============================] - 0s 2ms/step - loss: 0.6799 - val_loss: 0.4251\n",
  1941. "Epoch 47/100\n",
  1942. "40/40 [==============================] - 0s 2ms/step - loss: 0.6760 - val_loss: 0.4223\n",
  1943. "Epoch 48/100\n",
  1944. "40/40 [==============================] - 0s 2ms/step - loss: 0.6714 - val_loss: 0.4196\n",
  1945. "Epoch 49/100\n",
  1946. "40/40 [==============================] - 0s 2ms/step - loss: 0.6685 - val_loss: 0.4171\n",
  1947. "Epoch 50/100\n",
  1948. "40/40 [==============================] - 0s 2ms/step - loss: 0.6642 - val_loss: 0.4146\n",
  1949. "Epoch 51/100\n",
  1950. "40/40 [==============================] - 0s 2ms/step - loss: 0.6605 - val_loss: 0.4123\n",
  1951. "Epoch 52/100\n",
  1952. "40/40 [==============================] - 0s 2ms/step - loss: 0.6584 - val_loss: 0.4100\n",
  1953. "Epoch 53/100\n",
  1954. "40/40 [==============================] - 0s 2ms/step - loss: 0.6540 - val_loss: 0.4078\n",
  1955. "Epoch 54/100\n",
  1956. "40/40 [==============================] - 0s 2ms/step - loss: 0.6517 - val_loss: 0.4057\n",
  1957. "Epoch 55/100\n",
  1958. "40/40 [==============================] - 0s 2ms/step - loss: 0.6498 - val_loss: 0.4037\n",
  1959. "Epoch 56/100\n",
  1960. "40/40 [==============================] - 0s 2ms/step - loss: 0.6457 - val_loss: 0.4018\n",
  1961. "Epoch 57/100\n",
  1962. "40/40 [==============================] - 0s 1ms/step - loss: 0.6431 - val_loss: 0.3999\n",
  1963. "Epoch 58/100\n",
  1964. "40/40 [==============================] - 0s 1ms/step - loss: 0.6410 - val_loss: 0.3982\n",
  1965. "Epoch 59/100\n",
  1966. "40/40 [==============================] - 0s 2ms/step - loss: 0.6376 - val_loss: 0.3964\n",
  1967. "Epoch 60/100\n",
  1968. "40/40 [==============================] - 0s 1ms/step - loss: 0.6357 - val_loss: 0.3948\n",
  1969. "Epoch 61/100\n",
  1970. "40/40 [==============================] - 0s 1ms/step - loss: 0.6334 - val_loss: 0.3932\n",
  1971. "Epoch 62/100\n",
  1972. "40/40 [==============================] - 0s 2ms/step - loss: 0.6313 - val_loss: 0.3917\n",
  1973. "Epoch 63/100\n",
  1974. "40/40 [==============================] - 0s 2ms/step - loss: 0.6290 - val_loss: 0.3902\n",
  1975. "Epoch 64/100\n",
  1976. "40/40 [==============================] - 0s 2ms/step - loss: 0.6271 - val_loss: 0.3888\n"
  1977. ]
  1978. },
  1979. {
  1980. "name": "stdout",
  1981. "output_type": "stream",
  1982. "text": [
  1983. "Epoch 65/100\n",
  1984. "40/40 [==============================] - 0s 2ms/step - loss: 0.6252 - val_loss: 0.3874\n",
  1985. "Epoch 66/100\n",
  1986. "40/40 [==============================] - 0s 2ms/step - loss: 0.6233 - val_loss: 0.3861\n",
  1987. "Epoch 67/100\n",
  1988. "40/40 [==============================] - 0s 2ms/step - loss: 0.6213 - val_loss: 0.3848\n",
  1989. "Epoch 68/100\n",
  1990. "40/40 [==============================] - 0s 2ms/step - loss: 0.6195 - val_loss: 0.3836\n",
  1991. "Epoch 69/100\n",
  1992. "40/40 [==============================] - 0s 2ms/step - loss: 0.6180 - val_loss: 0.3824\n",
  1993. "Epoch 70/100\n",
  1994. "40/40 [==============================] - 0s 2ms/step - loss: 0.6162 - val_loss: 0.3813\n",
  1995. "Epoch 71/100\n",
  1996. "40/40 [==============================] - 0s 2ms/step - loss: 0.6150 - val_loss: 0.3802\n",
  1997. "Epoch 72/100\n",
  1998. "40/40 [==============================] - 0s 2ms/step - loss: 0.6135 - val_loss: 0.3791\n",
  1999. "Epoch 73/100\n",
  2000. "40/40 [==============================] - 0s 1ms/step - loss: 0.6118 - val_loss: 0.3781\n",
  2001. "Epoch 74/100\n",
  2002. "40/40 [==============================] - 0s 1ms/step - loss: 0.6104 - val_loss: 0.3771\n",
  2003. "Epoch 75/100\n",
  2004. "40/40 [==============================] - 0s 2ms/step - loss: 0.6094 - val_loss: 0.3761\n",
  2005. "Epoch 76/100\n",
  2006. "40/40 [==============================] - 0s 2ms/step - loss: 0.6080 - val_loss: 0.3752\n",
  2007. "Epoch 77/100\n",
  2008. "40/40 [==============================] - 0s 2ms/step - loss: 0.6068 - val_loss: 0.3743\n",
  2009. "Epoch 78/100\n",
  2010. "40/40 [==============================] - 0s 2ms/step - loss: 0.6055 - val_loss: 0.3734\n",
  2011. "Epoch 79/100\n",
  2012. "40/40 [==============================] - 0s 2ms/step - loss: 0.6045 - val_loss: 0.3726\n",
  2013. "Epoch 80/100\n",
  2014. "40/40 [==============================] - 0s 2ms/step - loss: 0.6033 - val_loss: 0.3717\n",
  2015. "Epoch 81/100\n",
  2016. "40/40 [==============================] - 0s 2ms/step - loss: 0.6023 - val_loss: 0.3710\n",
  2017. "Epoch 82/100\n",
  2018. "40/40 [==============================] - 0s 2ms/step - loss: 0.6012 - val_loss: 0.3702\n",
  2019. "Epoch 83/100\n",
  2020. "40/40 [==============================] - 0s 2ms/step - loss: 0.6002 - val_loss: 0.3694\n",
  2021. "Epoch 84/100\n",
  2022. "40/40 [==============================] - 0s 2ms/step - loss: 0.5992 - val_loss: 0.3687\n",
  2023. "Epoch 85/100\n",
  2024. "40/40 [==============================] - 0s 2ms/step - loss: 0.5983 - val_loss: 0.3680\n",
  2025. "Epoch 86/100\n",
  2026. "40/40 [==============================] - 0s 2ms/step - loss: 0.5973 - val_loss: 0.3674\n",
  2027. "Epoch 87/100\n",
  2028. "40/40 [==============================] - 0s 2ms/step - loss: 0.5964 - val_loss: 0.3667\n",
  2029. "Epoch 88/100\n",
  2030. "40/40 [==============================] - 0s 2ms/step - loss: 0.5956 - val_loss: 0.3661\n",
  2031. "Epoch 89/100\n",
  2032. "40/40 [==============================] - 0s 2ms/step - loss: 0.5948 - val_loss: 0.3655\n",
  2033. "Epoch 90/100\n",
  2034. "40/40 [==============================] - 0s 2ms/step - loss: 0.5941 - val_loss: 0.3649\n",
  2035. "Epoch 91/100\n",
  2036. "40/40 [==============================] - 0s 2ms/step - loss: 0.5933 - val_loss: 0.3643\n",
  2037. "Epoch 92/100\n",
  2038. "40/40 [==============================] - 0s 2ms/step - loss: 0.5926 - val_loss: 0.3637\n",
  2039. "Epoch 93/100\n",
  2040. "40/40 [==============================] - 0s 2ms/step - loss: 0.5920 - val_loss: 0.3632\n",
  2041. "Epoch 94/100\n",
  2042. "40/40 [==============================] - 0s 2ms/step - loss: 0.5913 - val_loss: 0.3626\n",
  2043. "Epoch 95/100\n",
  2044. "40/40 [==============================] - 0s 2ms/step - loss: 0.5906 - val_loss: 0.3621\n",
  2045. "Epoch 96/100\n",
  2046. "40/40 [==============================] - 0s 2ms/step - loss: 0.5900 - val_loss: 0.3616\n",
  2047. "Epoch 97/100\n",
  2048. "40/40 [==============================] - 0s 1ms/step - loss: 0.5894 - val_loss: 0.3611\n",
  2049. "Epoch 98/100\n",
  2050. "40/40 [==============================] - 0s 1ms/step - loss: 0.5888 - val_loss: 0.3607\n",
  2051. "Epoch 99/100\n",
  2052. "40/40 [==============================] - 0s 1ms/step - loss: 0.5883 - val_loss: 0.3602\n",
  2053. "Epoch 100/100\n",
  2054. "40/40 [==============================] - 0s 2ms/step - loss: 0.5878 - val_loss: 0.3598\n"
  2055. ]
  2056. },
  2057. {
  2058. "name": "stderr",
  2059. "output_type": "stream",
  2060. "text": [
  2061. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  2062. " \"argument is ignored.\", FutureWarning)\n",
  2063. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  2064. " \"argument is ignored.\", FutureWarning)\n",
  2065. "Traceback (most recent call last):\n",
  2066. " File \"/Users/wangyanghe/Desktop/Research/tods/tods/searcher/brute_force_search.py\", line 62, in _search\n",
  2067. " for error in pipeline_result.error:\n",
  2068. "TypeError: 'NoneType' object is not iterable\n"
  2069. ]
  2070. },
  2071. {
  2072. "name": "stdout",
  2073. "output_type": "stream",
  2074. "text": [
  2075. "Model: \"sequential_5\"\n",
  2076. "_________________________________________________________________\n",
  2077. "Layer (type) Output Shape Param # \n",
  2078. "=================================================================\n",
  2079. "dense_20 (Dense) (None, 12) 156 \n",
  2080. "_________________________________________________________________\n",
  2081. "dropout_17 (Dropout) (None, 12) 0 \n",
  2082. "_________________________________________________________________\n",
  2083. "dense_21 (Dense) (None, 12) 156 \n",
  2084. "_________________________________________________________________\n",
  2085. "dropout_18 (Dropout) (None, 12) 0 \n",
  2086. "_________________________________________________________________\n",
  2087. "dense_22 (Dense) (None, 1) 13 \n",
  2088. "_________________________________________________________________\n",
  2089. "dropout_19 (Dropout) (None, 1) 0 \n",
  2090. "_________________________________________________________________\n",
  2091. "dense_23 (Dense) (None, 4) 8 \n",
  2092. "_________________________________________________________________\n",
  2093. "dropout_20 (Dropout) (None, 4) 0 \n",
  2094. "_________________________________________________________________\n",
  2095. "dense_24 (Dense) (None, 1) 5 \n",
  2096. "_________________________________________________________________\n",
  2097. "dropout_21 (Dropout) (None, 1) 0 \n",
  2098. "_________________________________________________________________\n",
  2099. "dense_25 (Dense) (None, 12) 24 \n",
  2100. "=================================================================\n",
  2101. "Total params: 362\n",
  2102. "Trainable params: 362\n",
  2103. "Non-trainable params: 0\n",
  2104. "_________________________________________________________________\n",
  2105. "None\n",
  2106. "Epoch 1/100\n",
  2107. "40/40 [==============================] - 0s 4ms/step - loss: 1.4693 - val_loss: 1.5144\n",
  2108. "Epoch 2/100\n",
  2109. "40/40 [==============================] - 0s 1ms/step - loss: 1.3518 - val_loss: 1.4134\n",
  2110. "Epoch 3/100\n",
  2111. "40/40 [==============================] - 0s 1ms/step - loss: 1.2985 - val_loss: 1.3370\n",
  2112. "Epoch 4/100\n",
  2113. "40/40 [==============================] - 0s 1ms/step - loss: 1.2306 - val_loss: 1.2773\n",
  2114. "Epoch 5/100\n",
  2115. "40/40 [==============================] - 0s 1ms/step - loss: 1.1788 - val_loss: 1.2243\n",
  2116. "Epoch 6/100\n",
  2117. "40/40 [==============================] - 0s 1ms/step - loss: 1.1182 - val_loss: 1.1844\n",
  2118. "Epoch 7/100\n",
  2119. "40/40 [==============================] - 0s 1ms/step - loss: 1.0755 - val_loss: 1.1497\n",
  2120. "Epoch 8/100\n",
  2121. "40/40 [==============================] - 0s 1ms/step - loss: 1.0492 - val_loss: 1.1189\n",
  2122. "Epoch 9/100\n",
  2123. "40/40 [==============================] - 0s 1ms/step - loss: 1.0257 - val_loss: 1.0919\n",
  2124. "Epoch 10/100\n",
  2125. "40/40 [==============================] - 0s 1ms/step - loss: 1.0068 - val_loss: 1.0675\n",
  2126. "Epoch 11/100\n",
  2127. "40/40 [==============================] - 0s 1ms/step - loss: 0.9761 - val_loss: 1.0451\n",
  2128. "Epoch 12/100\n",
  2129. "40/40 [==============================] - 0s 1ms/step - loss: 0.9635 - val_loss: 1.0221\n",
  2130. "Epoch 13/100\n",
  2131. "40/40 [==============================] - 0s 1ms/step - loss: 0.9525 - val_loss: 1.0028\n",
  2132. "Epoch 14/100\n",
  2133. "40/40 [==============================] - 0s 1ms/step - loss: 0.9207 - val_loss: 0.9840\n",
  2134. "Epoch 15/100\n",
  2135. "40/40 [==============================] - 0s 1ms/step - loss: 0.9108 - val_loss: 0.9668\n",
  2136. "Epoch 16/100\n",
  2137. "40/40 [==============================] - 0s 1ms/step - loss: 0.8925 - val_loss: 0.9508\n",
  2138. "Epoch 17/100\n",
  2139. "40/40 [==============================] - 0s 1ms/step - loss: 0.8695 - val_loss: 0.9353\n",
  2140. "Epoch 18/100\n",
  2141. "40/40 [==============================] - 0s 2ms/step - loss: 0.8591 - val_loss: 0.9214\n",
  2142. "Epoch 19/100\n",
  2143. "40/40 [==============================] - 0s 1ms/step - loss: 0.8465 - val_loss: 0.9071\n",
  2144. "Epoch 20/100\n",
  2145. "40/40 [==============================] - 0s 1ms/step - loss: 0.8336 - val_loss: 0.8959\n",
  2146. "Epoch 21/100\n",
  2147. "40/40 [==============================] - 0s 1ms/step - loss: 0.8194 - val_loss: 0.8821\n",
  2148. "Epoch 22/100\n",
  2149. "40/40 [==============================] - 0s 1ms/step - loss: 0.8122 - val_loss: 0.8705\n",
  2150. "Epoch 23/100\n",
  2151. "40/40 [==============================] - 0s 1ms/step - loss: 0.7998 - val_loss: 0.8596\n",
  2152. "Epoch 24/100\n",
  2153. "40/40 [==============================] - 0s 1ms/step - loss: 0.7871 - val_loss: 0.8494\n",
  2154. "Epoch 25/100\n",
  2155. "40/40 [==============================] - 0s 1ms/step - loss: 0.7770 - val_loss: 0.8404\n",
  2156. "Epoch 26/100\n",
  2157. "40/40 [==============================] - 0s 1ms/step - loss: 0.7678 - val_loss: 0.8301\n",
  2158. "Epoch 27/100\n",
  2159. "40/40 [==============================] - 0s 1ms/step - loss: 0.7575 - val_loss: 0.8213\n",
  2160. "Epoch 28/100\n",
  2161. "40/40 [==============================] - 0s 1ms/step - loss: 0.7487 - val_loss: 0.8130\n",
  2162. "Epoch 29/100\n",
  2163. "40/40 [==============================] - 0s 1ms/step - loss: 0.7393 - val_loss: 0.8051\n",
  2164. "Epoch 30/100\n",
  2165. "40/40 [==============================] - 0s 2ms/step - loss: 0.7329 - val_loss: 0.7975\n",
  2166. "Epoch 31/100\n",
  2167. "40/40 [==============================] - 0s 2ms/step - loss: 0.7236 - val_loss: 0.7904\n",
  2168. "Epoch 32/100\n",
  2169. "40/40 [==============================] - 0s 1ms/step - loss: 0.7174 - val_loss: 0.7836\n",
  2170. "Epoch 33/100\n",
  2171. "40/40 [==============================] - 0s 1ms/step - loss: 0.7104 - val_loss: 0.7772\n",
  2172. "Epoch 34/100\n",
  2173. "40/40 [==============================] - 0s 1ms/step - loss: 0.7031 - val_loss: 0.7711\n",
  2174. "Epoch 35/100\n",
  2175. "40/40 [==============================] - 0s 1ms/step - loss: 0.6954 - val_loss: 0.7651\n",
  2176. "Epoch 36/100\n",
  2177. "40/40 [==============================] - 0s 1ms/step - loss: 0.6895 - val_loss: 0.7599\n",
  2178. "Epoch 37/100\n",
  2179. "40/40 [==============================] - 0s 1ms/step - loss: 0.6836 - val_loss: 0.7544\n",
  2180. "Epoch 38/100\n",
  2181. "40/40 [==============================] - 0s 1ms/step - loss: 0.6809 - val_loss: 0.7494\n",
  2182. "Epoch 39/100\n",
  2183. "40/40 [==============================] - 0s 1ms/step - loss: 0.6726 - val_loss: 0.7447\n",
  2184. "Epoch 40/100\n",
  2185. "40/40 [==============================] - 0s 1ms/step - loss: 0.6698 - val_loss: 0.7402\n",
  2186. "Epoch 41/100\n",
  2187. "40/40 [==============================] - 0s 1ms/step - loss: 0.6631 - val_loss: 0.7359\n",
  2188. "Epoch 42/100\n",
  2189. "40/40 [==============================] - 0s 2ms/step - loss: 0.6581 - val_loss: 0.7320\n",
  2190. "Epoch 43/100\n",
  2191. "40/40 [==============================] - 0s 2ms/step - loss: 0.6547 - val_loss: 0.7279\n",
  2192. "Epoch 44/100\n",
  2193. "40/40 [==============================] - 0s 1ms/step - loss: 0.6490 - val_loss: 0.7241\n",
  2194. "Epoch 45/100\n",
  2195. "40/40 [==============================] - 0s 1ms/step - loss: 0.6449 - val_loss: 0.7206\n",
  2196. "Epoch 46/100\n",
  2197. "40/40 [==============================] - 0s 1ms/step - loss: 0.6427 - val_loss: 0.7173\n",
  2198. "Epoch 47/100\n",
  2199. "40/40 [==============================] - 0s 1ms/step - loss: 0.6378 - val_loss: 0.7140\n",
  2200. "Epoch 48/100\n",
  2201. "40/40 [==============================] - 0s 1ms/step - loss: 0.6354 - val_loss: 0.7109\n",
  2202. "Epoch 49/100\n",
  2203. "40/40 [==============================] - 0s 1ms/step - loss: 0.6302 - val_loss: 0.7080\n",
  2204. "Epoch 50/100\n",
  2205. "40/40 [==============================] - 0s 1ms/step - loss: 0.6279 - val_loss: 0.7052\n",
  2206. "Epoch 51/100\n",
  2207. "40/40 [==============================] - 0s 1ms/step - loss: 0.6244 - val_loss: 0.7025\n",
  2208. "Epoch 52/100\n",
  2209. "40/40 [==============================] - 0s 1ms/step - loss: 0.6204 - val_loss: 0.6999\n",
  2210. "Epoch 53/100\n",
  2211. "40/40 [==============================] - 0s 1ms/step - loss: 0.6177 - val_loss: 0.6976\n",
  2212. "Epoch 54/100\n",
  2213. "40/40 [==============================] - 0s 1ms/step - loss: 0.6146 - val_loss: 0.6953\n",
  2214. "Epoch 55/100\n",
  2215. "40/40 [==============================] - 0s 2ms/step - loss: 0.6117 - val_loss: 0.6929\n",
  2216. "Epoch 56/100\n",
  2217. "40/40 [==============================] - 0s 1ms/step - loss: 0.6094 - val_loss: 0.6909\n",
  2218. "Epoch 57/100\n",
  2219. "40/40 [==============================] - 0s 1ms/step - loss: 0.6072 - val_loss: 0.6888\n",
  2220. "Epoch 58/100\n",
  2221. "40/40 [==============================] - 0s 1ms/step - loss: 0.6045 - val_loss: 0.6868\n",
  2222. "Epoch 59/100\n",
  2223. "40/40 [==============================] - 0s 1ms/step - loss: 0.6026 - val_loss: 0.6850\n",
  2224. "Epoch 60/100\n",
  2225. "40/40 [==============================] - 0s 3ms/step - loss: 0.5997 - val_loss: 0.6833\n",
  2226. "Epoch 61/100\n",
  2227. "40/40 [==============================] - 0s 2ms/step - loss: 0.5977 - val_loss: 0.6815\n",
  2228. "Epoch 62/100\n",
  2229. "40/40 [==============================] - 0s 1ms/step - loss: 0.5954 - val_loss: 0.6798\n",
  2230. "Epoch 63/100\n",
  2231. "40/40 [==============================] - 0s 2ms/step - loss: 0.5939 - val_loss: 0.6782\n",
  2232. "Epoch 64/100\n",
  2233. "40/40 [==============================] - 0s 2ms/step - loss: 0.5917 - val_loss: 0.6767\n"
  2234. ]
  2235. },
  2236. {
  2237. "name": "stdout",
  2238. "output_type": "stream",
  2239. "text": [
  2240. "Epoch 65/100\n",
  2241. "40/40 [==============================] - 0s 2ms/step - loss: 0.5898 - val_loss: 0.6753\n",
  2242. "Epoch 66/100\n",
  2243. "40/40 [==============================] - 0s 2ms/step - loss: 0.5880 - val_loss: 0.6739\n",
  2244. "Epoch 67/100\n",
  2245. "40/40 [==============================] - 0s 2ms/step - loss: 0.5865 - val_loss: 0.6726\n",
  2246. "Epoch 68/100\n",
  2247. "40/40 [==============================] - 0s 2ms/step - loss: 0.5848 - val_loss: 0.6713\n",
  2248. "Epoch 69/100\n",
  2249. "40/40 [==============================] - 0s 1ms/step - loss: 0.5832 - val_loss: 0.6700\n",
  2250. "Epoch 70/100\n",
  2251. "40/40 [==============================] - 0s 1ms/step - loss: 0.5817 - val_loss: 0.6689\n",
  2252. "Epoch 71/100\n",
  2253. "40/40 [==============================] - 0s 1ms/step - loss: 0.5804 - val_loss: 0.6677\n",
  2254. "Epoch 72/100\n",
  2255. "40/40 [==============================] - 0s 1ms/step - loss: 0.5790 - val_loss: 0.6666\n",
  2256. "Epoch 73/100\n",
  2257. "40/40 [==============================] - 0s 1ms/step - loss: 0.5777 - val_loss: 0.6655\n",
  2258. "Epoch 74/100\n",
  2259. "40/40 [==============================] - 0s 1ms/step - loss: 0.5764 - val_loss: 0.6645\n",
  2260. "Epoch 75/100\n",
  2261. "40/40 [==============================] - 0s 2ms/step - loss: 0.5750 - val_loss: 0.6635\n",
  2262. "Epoch 76/100\n",
  2263. "40/40 [==============================] - 0s 2ms/step - loss: 0.5739 - val_loss: 0.6626\n",
  2264. "Epoch 77/100\n",
  2265. "40/40 [==============================] - 0s 1ms/step - loss: 0.5728 - val_loss: 0.6617\n",
  2266. "Epoch 78/100\n",
  2267. "40/40 [==============================] - 0s 1ms/step - loss: 0.5716 - val_loss: 0.6608\n",
  2268. "Epoch 79/100\n",
  2269. "40/40 [==============================] - 0s 1ms/step - loss: 0.5706 - val_loss: 0.6599\n",
  2270. "Epoch 80/100\n",
  2271. "40/40 [==============================] - 0s 1ms/step - loss: 0.5696 - val_loss: 0.6591\n",
  2272. "Epoch 81/100\n",
  2273. "40/40 [==============================] - 0s 1ms/step - loss: 0.5686 - val_loss: 0.6584\n",
  2274. "Epoch 82/100\n",
  2275. "40/40 [==============================] - 0s 1ms/step - loss: 0.5676 - val_loss: 0.6576\n",
  2276. "Epoch 83/100\n",
  2277. "40/40 [==============================] - 0s 2ms/step - loss: 0.5667 - val_loss: 0.6569\n",
  2278. "Epoch 84/100\n",
  2279. "40/40 [==============================] - 0s 2ms/step - loss: 0.5659 - val_loss: 0.6561\n",
  2280. "Epoch 85/100\n",
  2281. "40/40 [==============================] - 0s 1ms/step - loss: 0.5651 - val_loss: 0.6554\n",
  2282. "Epoch 86/100\n",
  2283. "40/40 [==============================] - 0s 1ms/step - loss: 0.5642 - val_loss: 0.6548\n",
  2284. "Epoch 87/100\n",
  2285. "40/40 [==============================] - 0s 1ms/step - loss: 0.5635 - val_loss: 0.6541\n",
  2286. "Epoch 88/100\n",
  2287. "40/40 [==============================] - 0s 1ms/step - loss: 0.5627 - val_loss: 0.6535\n",
  2288. "Epoch 89/100\n",
  2289. "40/40 [==============================] - 0s 1ms/step - loss: 0.5620 - val_loss: 0.6529\n",
  2290. "Epoch 90/100\n",
  2291. "40/40 [==============================] - 0s 1ms/step - loss: 0.5613 - val_loss: 0.6523\n",
  2292. "Epoch 91/100\n",
  2293. "40/40 [==============================] - 0s 1ms/step - loss: 0.5606 - val_loss: 0.6518\n",
  2294. "Epoch 92/100\n",
  2295. "40/40 [==============================] - 0s 1ms/step - loss: 0.5599 - val_loss: 0.6512\n",
  2296. "Epoch 93/100\n",
  2297. "40/40 [==============================] - 0s 1ms/step - loss: 0.5593 - val_loss: 0.6507\n",
  2298. "Epoch 94/100\n",
  2299. "40/40 [==============================] - 0s 1ms/step - loss: 0.5587 - val_loss: 0.6502\n",
  2300. "Epoch 95/100\n",
  2301. "40/40 [==============================] - 0s 1ms/step - loss: 0.5581 - val_loss: 0.6497\n",
  2302. "Epoch 96/100\n",
  2303. "40/40 [==============================] - 0s 1ms/step - loss: 0.5575 - val_loss: 0.6492\n",
  2304. "Epoch 97/100\n",
  2305. "40/40 [==============================] - 0s 1ms/step - loss: 0.5570 - val_loss: 0.6487\n",
  2306. "Epoch 98/100\n",
  2307. "40/40 [==============================] - 0s 1ms/step - loss: 0.5564 - val_loss: 0.6483\n",
  2308. "Epoch 99/100\n",
  2309. "40/40 [==============================] - 0s 1ms/step - loss: 0.5559 - val_loss: 0.6478\n",
  2310. "Epoch 100/100\n",
  2311. "40/40 [==============================] - 0s 1ms/step - loss: 0.5554 - val_loss: 0.6474\n"
  2312. ]
  2313. },
  2314. {
  2315. "name": "stderr",
  2316. "output_type": "stream",
  2317. "text": [
  2318. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  2319. " \"argument is ignored.\", FutureWarning)\n"
  2320. ]
  2321. }
  2322. ],
  2323. "source": [
  2324. "# Find the best pipeline\n",
  2325. "best_runtime, best_pipeline_result = search.search_fit(input_data=[dataset], time_limit=time_limit)\n",
  2326. "best_pipeline = best_runtime.pipeline\n",
  2327. "best_output = best_pipeline_result.output"
  2328. ]
  2329. },
  2330. {
  2331. "cell_type": "code",
  2332. "execution_count": 76,
  2333. "metadata": {},
  2334. "outputs": [
  2335. {
  2336. "name": "stderr",
  2337. "output_type": "stream",
  2338. "text": [
  2339. "Not all provided hyper-parameters for the data preparation pipeline 79ce71bd-db96-494b-a455-14f2e2ac5040 were used: ['method', 'number_of_folds', 'randomSeed', 'shuffle', 'stratified']\n"
  2340. ]
  2341. },
  2342. {
  2343. "name": "stdout",
  2344. "output_type": "stream",
  2345. "text": [
  2346. "Model: \"sequential_6\"\n",
  2347. "_________________________________________________________________\n",
  2348. "Layer (type) Output Shape Param # \n",
  2349. "=================================================================\n",
  2350. "dense_26 (Dense) (None, 12) 156 \n",
  2351. "_________________________________________________________________\n",
  2352. "dropout_22 (Dropout) (None, 12) 0 \n",
  2353. "_________________________________________________________________\n",
  2354. "dense_27 (Dense) (None, 12) 156 \n",
  2355. "_________________________________________________________________\n",
  2356. "dropout_23 (Dropout) (None, 12) 0 \n",
  2357. "_________________________________________________________________\n",
  2358. "dense_28 (Dense) (None, 1) 13 \n",
  2359. "_________________________________________________________________\n",
  2360. "dropout_24 (Dropout) (None, 1) 0 \n",
  2361. "_________________________________________________________________\n",
  2362. "dense_29 (Dense) (None, 4) 8 \n",
  2363. "_________________________________________________________________\n",
  2364. "dropout_25 (Dropout) (None, 4) 0 \n",
  2365. "_________________________________________________________________\n",
  2366. "dense_30 (Dense) (None, 1) 5 \n",
  2367. "_________________________________________________________________\n",
  2368. "dropout_26 (Dropout) (None, 1) 0 \n",
  2369. "_________________________________________________________________\n",
  2370. "dense_31 (Dense) (None, 12) 24 \n",
  2371. "=================================================================\n",
  2372. "Total params: 362\n",
  2373. "Trainable params: 362\n",
  2374. "Non-trainable params: 0\n",
  2375. "_________________________________________________________________\n",
  2376. "None\n",
  2377. "Epoch 1/100\n",
  2378. "40/40 [==============================] - 0s 4ms/step - loss: 1.5860 - val_loss: 1.0422\n",
  2379. "Epoch 2/100\n",
  2380. "40/40 [==============================] - 0s 1ms/step - loss: 1.4206 - val_loss: 0.9430\n",
  2381. "Epoch 3/100\n",
  2382. "40/40 [==============================] - 0s 1ms/step - loss: 1.3349 - val_loss: 0.8805\n",
  2383. "Epoch 4/100\n",
  2384. "40/40 [==============================] - 0s 1ms/step - loss: 1.2650 - val_loss: 0.8352\n",
  2385. "Epoch 5/100\n",
  2386. "40/40 [==============================] - 0s 1ms/step - loss: 1.1954 - val_loss: 0.7995\n",
  2387. "Epoch 6/100\n",
  2388. "40/40 [==============================] - 0s 1ms/step - loss: 1.1571 - val_loss: 0.7708\n",
  2389. "Epoch 7/100\n",
  2390. "40/40 [==============================] - 0s 1ms/step - loss: 1.1171 - val_loss: 0.7457\n",
  2391. "Epoch 8/100\n",
  2392. "40/40 [==============================] - 0s 2ms/step - loss: 1.0848 - val_loss: 0.7238\n",
  2393. "Epoch 9/100\n",
  2394. "40/40 [==============================] - 0s 2ms/step - loss: 1.0527 - val_loss: 0.7043\n",
  2395. "Epoch 10/100\n",
  2396. "40/40 [==============================] - 0s 1ms/step - loss: 1.0312 - val_loss: 0.6868\n",
  2397. "Epoch 11/100\n",
  2398. "40/40 [==============================] - 0s 1ms/step - loss: 1.0008 - val_loss: 0.6706\n",
  2399. "Epoch 12/100\n",
  2400. "40/40 [==============================] - 0s 1ms/step - loss: 0.9809 - val_loss: 0.6556\n",
  2401. "Epoch 13/100\n",
  2402. "40/40 [==============================] - 0s 1ms/step - loss: 0.9630 - val_loss: 0.6415\n",
  2403. "Epoch 14/100\n",
  2404. "40/40 [==============================] - 0s 1ms/step - loss: 0.9388 - val_loss: 0.6283\n",
  2405. "Epoch 15/100\n",
  2406. "40/40 [==============================] - 0s 1ms/step - loss: 0.9264 - val_loss: 0.6162\n",
  2407. "Epoch 16/100\n",
  2408. "40/40 [==============================] - 0s 2ms/step - loss: 0.9051 - val_loss: 0.6044\n",
  2409. "Epoch 17/100\n",
  2410. "40/40 [==============================] - 0s 1ms/step - loss: 0.8931 - val_loss: 0.5934\n",
  2411. "Epoch 18/100\n",
  2412. "40/40 [==============================] - 0s 1ms/step - loss: 0.8782 - val_loss: 0.5829\n",
  2413. "Epoch 19/100\n",
  2414. "40/40 [==============================] - 0s 1ms/step - loss: 0.8635 - val_loss: 0.5730\n",
  2415. "Epoch 20/100\n",
  2416. "40/40 [==============================] - 0s 1ms/step - loss: 0.8542 - val_loss: 0.5636\n",
  2417. "Epoch 21/100\n",
  2418. "40/40 [==============================] - 0s 1ms/step - loss: 0.8429 - val_loss: 0.5546\n",
  2419. "Epoch 22/100\n",
  2420. "40/40 [==============================] - 0s 1ms/step - loss: 0.8253 - val_loss: 0.5461\n",
  2421. "Epoch 23/100\n",
  2422. "40/40 [==============================] - 0s 1ms/step - loss: 0.8158 - val_loss: 0.5379\n",
  2423. "Epoch 24/100\n",
  2424. "40/40 [==============================] - 0s 1ms/step - loss: 0.8028 - val_loss: 0.5302\n",
  2425. "Epoch 25/100\n",
  2426. "40/40 [==============================] - 0s 1ms/step - loss: 0.7972 - val_loss: 0.5228\n",
  2427. "Epoch 26/100\n",
  2428. "40/40 [==============================] - 0s 1ms/step - loss: 0.7844 - val_loss: 0.5158\n",
  2429. "Epoch 27/100\n",
  2430. "40/40 [==============================] - 0s 1ms/step - loss: 0.7754 - val_loss: 0.5091\n",
  2431. "Epoch 28/100\n",
  2432. "40/40 [==============================] - 0s 1ms/step - loss: 0.7669 - val_loss: 0.5026\n",
  2433. "Epoch 29/100\n",
  2434. "40/40 [==============================] - 0s 1ms/step - loss: 0.7594 - val_loss: 0.4966\n",
  2435. "Epoch 30/100\n",
  2436. "40/40 [==============================] - 0s 1ms/step - loss: 0.7521 - val_loss: 0.4907\n",
  2437. "Epoch 31/100\n",
  2438. "40/40 [==============================] - 0s 1ms/step - loss: 0.7435 - val_loss: 0.4852\n",
  2439. "Epoch 32/100\n",
  2440. "40/40 [==============================] - 0s 2ms/step - loss: 0.7363 - val_loss: 0.4799\n",
  2441. "Epoch 33/100\n",
  2442. "40/40 [==============================] - 0s 1ms/step - loss: 0.7316 - val_loss: 0.4748\n",
  2443. "Epoch 34/100\n",
  2444. "40/40 [==============================] - 0s 1ms/step - loss: 0.7238 - val_loss: 0.4699\n",
  2445. "Epoch 35/100\n",
  2446. "40/40 [==============================] - 0s 1ms/step - loss: 0.7178 - val_loss: 0.4653\n",
  2447. "Epoch 36/100\n",
  2448. "40/40 [==============================] - 0s 1ms/step - loss: 0.7112 - val_loss: 0.4609\n",
  2449. "Epoch 37/100\n",
  2450. "40/40 [==============================] - 0s 1ms/step - loss: 0.7055 - val_loss: 0.4567\n",
  2451. "Epoch 38/100\n",
  2452. "40/40 [==============================] - 0s 1ms/step - loss: 0.7002 - val_loss: 0.4526\n",
  2453. "Epoch 39/100\n",
  2454. "40/40 [==============================] - 0s 1ms/step - loss: 0.6961 - val_loss: 0.4487\n",
  2455. "Epoch 40/100\n",
  2456. "40/40 [==============================] - 0s 1ms/step - loss: 0.6912 - val_loss: 0.4450\n",
  2457. "Epoch 41/100\n",
  2458. "40/40 [==============================] - 0s 2ms/step - loss: 0.6860 - val_loss: 0.4415\n",
  2459. "Epoch 42/100\n",
  2460. "40/40 [==============================] - 0s 1ms/step - loss: 0.6834 - val_loss: 0.4381\n",
  2461. "Epoch 43/100\n",
  2462. "40/40 [==============================] - 0s 1ms/step - loss: 0.6771 - val_loss: 0.4348\n",
  2463. "Epoch 44/100\n",
  2464. "40/40 [==============================] - 0s 1ms/step - loss: 0.6734 - val_loss: 0.4317\n",
  2465. "Epoch 45/100\n",
  2466. "40/40 [==============================] - 0s 1ms/step - loss: 0.6708 - val_loss: 0.4287\n",
  2467. "Epoch 46/100\n",
  2468. "40/40 [==============================] - 0s 1ms/step - loss: 0.6658 - val_loss: 0.4258\n",
  2469. "Epoch 47/100\n",
  2470. "40/40 [==============================] - 0s 1ms/step - loss: 0.6623 - val_loss: 0.4230\n",
  2471. "Epoch 48/100\n",
  2472. "40/40 [==============================] - 0s 1ms/step - loss: 0.6587 - val_loss: 0.4204\n",
  2473. "Epoch 49/100\n",
  2474. "40/40 [==============================] - 0s 1ms/step - loss: 0.6559 - val_loss: 0.4179\n",
  2475. "Epoch 50/100\n",
  2476. "40/40 [==============================] - 0s 1ms/step - loss: 0.6526 - val_loss: 0.4154\n",
  2477. "Epoch 51/100\n",
  2478. "40/40 [==============================] - 0s 2ms/step - loss: 0.6512 - val_loss: 0.4131\n",
  2479. "Epoch 52/100\n",
  2480. "40/40 [==============================] - 0s 1ms/step - loss: 0.6469 - val_loss: 0.4109\n",
  2481. "Epoch 53/100\n",
  2482. "40/40 [==============================] - 0s 1ms/step - loss: 0.6446 - val_loss: 0.4087\n",
  2483. "Epoch 54/100\n",
  2484. "40/40 [==============================] - 0s 1ms/step - loss: 0.6413 - val_loss: 0.4067\n",
  2485. "Epoch 55/100\n",
  2486. "40/40 [==============================] - 0s 1ms/step - loss: 0.6389 - val_loss: 0.4047\n",
  2487. "Epoch 56/100\n",
  2488. "40/40 [==============================] - 0s 2ms/step - loss: 0.6367 - val_loss: 0.4027\n",
  2489. "Epoch 57/100\n",
  2490. "40/40 [==============================] - 0s 1ms/step - loss: 0.6341 - val_loss: 0.4009\n",
  2491. "Epoch 58/100\n",
  2492. "40/40 [==============================] - 0s 1ms/step - loss: 0.6321 - val_loss: 0.3991\n",
  2493. "Epoch 59/100\n",
  2494. "40/40 [==============================] - 0s 2ms/step - loss: 0.6300 - val_loss: 0.3974\n",
  2495. "Epoch 60/100\n",
  2496. "40/40 [==============================] - 0s 2ms/step - loss: 0.6279 - val_loss: 0.3957\n",
  2497. "Epoch 61/100\n",
  2498. "40/40 [==============================] - 0s 1ms/step - loss: 0.6264 - val_loss: 0.3941\n",
  2499. "Epoch 62/100\n",
  2500. "40/40 [==============================] - 0s 1ms/step - loss: 0.6243 - val_loss: 0.3926\n",
  2501. "Epoch 63/100\n",
  2502. "40/40 [==============================] - 0s 2ms/step - loss: 0.6226 - val_loss: 0.3911\n",
  2503. "Epoch 64/100\n",
  2504. "40/40 [==============================] - 0s 2ms/step - loss: 0.6214 - val_loss: 0.3897\n"
  2505. ]
  2506. },
  2507. {
  2508. "name": "stdout",
  2509. "output_type": "stream",
  2510. "text": [
  2511. "Epoch 65/100\n",
  2512. "40/40 [==============================] - 0s 2ms/step - loss: 0.6192 - val_loss: 0.3883\n",
  2513. "Epoch 66/100\n",
  2514. "40/40 [==============================] - 0s 1ms/step - loss: 0.6176 - val_loss: 0.3870\n",
  2515. "Epoch 67/100\n",
  2516. "40/40 [==============================] - 0s 2ms/step - loss: 0.6158 - val_loss: 0.3857\n",
  2517. "Epoch 68/100\n",
  2518. "40/40 [==============================] - 0s 1ms/step - loss: 0.6143 - val_loss: 0.3845\n",
  2519. "Epoch 69/100\n",
  2520. "40/40 [==============================] - 0s 1ms/step - loss: 0.6130 - val_loss: 0.3833\n",
  2521. "Epoch 70/100\n",
  2522. "40/40 [==============================] - 0s 2ms/step - loss: 0.6116 - val_loss: 0.3821\n",
  2523. "Epoch 71/100\n",
  2524. "40/40 [==============================] - 0s 1ms/step - loss: 0.6103 - val_loss: 0.3810\n",
  2525. "Epoch 72/100\n",
  2526. "40/40 [==============================] - 0s 1ms/step - loss: 0.6090 - val_loss: 0.3799\n",
  2527. "Epoch 73/100\n",
  2528. "40/40 [==============================] - 0s 1ms/step - loss: 0.6079 - val_loss: 0.3789\n",
  2529. "Epoch 74/100\n",
  2530. "40/40 [==============================] - 0s 1ms/step - loss: 0.6068 - val_loss: 0.3779\n",
  2531. "Epoch 75/100\n",
  2532. "40/40 [==============================] - 0s 1ms/step - loss: 0.6058 - val_loss: 0.3769\n",
  2533. "Epoch 76/100\n",
  2534. "40/40 [==============================] - 0s 1ms/step - loss: 0.6044 - val_loss: 0.3760\n",
  2535. "Epoch 77/100\n",
  2536. "40/40 [==============================] - 0s 1ms/step - loss: 0.6034 - val_loss: 0.3751\n",
  2537. "Epoch 78/100\n",
  2538. "40/40 [==============================] - 0s 1ms/step - loss: 0.6024 - val_loss: 0.3742\n",
  2539. "Epoch 79/100\n",
  2540. "40/40 [==============================] - 0s 2ms/step - loss: 0.6016 - val_loss: 0.3733\n",
  2541. "Epoch 80/100\n",
  2542. "40/40 [==============================] - 0s 2ms/step - loss: 0.6005 - val_loss: 0.3725\n",
  2543. "Epoch 81/100\n",
  2544. "40/40 [==============================] - 0s 1ms/step - loss: 0.5996 - val_loss: 0.3717\n",
  2545. "Epoch 82/100\n",
  2546. "40/40 [==============================] - 0s 1ms/step - loss: 0.5987 - val_loss: 0.3709\n",
  2547. "Epoch 83/100\n",
  2548. "40/40 [==============================] - 0s 1ms/step - loss: 0.5979 - val_loss: 0.3702\n",
  2549. "Epoch 84/100\n",
  2550. "40/40 [==============================] - 0s 1ms/step - loss: 0.5971 - val_loss: 0.3694\n",
  2551. "Epoch 85/100\n",
  2552. "40/40 [==============================] - 0s 1ms/step - loss: 0.5963 - val_loss: 0.3687\n",
  2553. "Epoch 86/100\n",
  2554. "40/40 [==============================] - 0s 1ms/step - loss: 0.5956 - val_loss: 0.3680\n",
  2555. "Epoch 87/100\n",
  2556. "40/40 [==============================] - 0s 1ms/step - loss: 0.5948 - val_loss: 0.3674\n",
  2557. "Epoch 88/100\n",
  2558. "40/40 [==============================] - 0s 1ms/step - loss: 0.5941 - val_loss: 0.3667\n",
  2559. "Epoch 89/100\n",
  2560. "40/40 [==============================] - 0s 1ms/step - loss: 0.5934 - val_loss: 0.3661\n",
  2561. "Epoch 90/100\n",
  2562. "40/40 [==============================] - 0s 1ms/step - loss: 0.5928 - val_loss: 0.3655\n",
  2563. "Epoch 91/100\n",
  2564. "40/40 [==============================] - 0s 1ms/step - loss: 0.5922 - val_loss: 0.3649\n",
  2565. "Epoch 92/100\n",
  2566. "40/40 [==============================] - 0s 1ms/step - loss: 0.5915 - val_loss: 0.3644\n",
  2567. "Epoch 93/100\n",
  2568. "40/40 [==============================] - 0s 1ms/step - loss: 0.5910 - val_loss: 0.3638\n",
  2569. "Epoch 94/100\n",
  2570. "40/40 [==============================] - 0s 1ms/step - loss: 0.5903 - val_loss: 0.3633\n",
  2571. "Epoch 95/100\n",
  2572. "40/40 [==============================] - 0s 1ms/step - loss: 0.5898 - val_loss: 0.3627\n",
  2573. "Epoch 96/100\n",
  2574. "40/40 [==============================] - 0s 1ms/step - loss: 0.5892 - val_loss: 0.3622\n",
  2575. "Epoch 97/100\n",
  2576. "40/40 [==============================] - 0s 1ms/step - loss: 0.5887 - val_loss: 0.3617\n",
  2577. "Epoch 98/100\n",
  2578. "40/40 [==============================] - 0s 2ms/step - loss: 0.5882 - val_loss: 0.3613\n",
  2579. "Epoch 99/100\n",
  2580. "40/40 [==============================] - 0s 1ms/step - loss: 0.5877 - val_loss: 0.3608\n",
  2581. "Epoch 100/100\n",
  2582. "40/40 [==============================] - 0s 2ms/step - loss: 0.5872 - val_loss: 0.3603\n"
  2583. ]
  2584. },
  2585. {
  2586. "name": "stderr",
  2587. "output_type": "stream",
  2588. "text": [
  2589. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  2590. " \"argument is ignored.\", FutureWarning)\n",
  2591. "/Users/wangyanghe/anaconda3/envs/tods2/lib/python3.6/site-packages/sklearn/utils/validation.py:933: FutureWarning: Passing attributes to check_is_fitted is deprecated and will be removed in 0.23. The attributes argument is ignored.\n",
  2592. " \"argument is ignored.\", FutureWarning)\n"
  2593. ]
  2594. }
  2595. ],
  2596. "source": [
  2597. "# Evaluate the best pipeline\n",
  2598. "best_scores = search.evaluate(best_pipeline).scores"
  2599. ]
  2600. },
  2601. {
  2602. "cell_type": "code",
  2603. "execution_count": 79,
  2604. "metadata": {},
  2605. "outputs": [
  2606. {
  2607. "name": "stdout",
  2608. "output_type": "stream",
  2609. "text": [
  2610. "Search History:\n",
  2611. "----------------------------------------------------\n",
  2612. "Pipeline id: 108e1dce-67e7-45f2-962c-1965d988710a\n",
  2613. " metric value normalized randomSeed fold\n",
  2614. "0 F1_MACRO 0.708549 0.708549 0 0\n",
  2615. "----------------------------------------------------\n",
  2616. "Pipeline id: 2a42a07c-0263-427c-b6c8-d9ce45ac0b21\n",
  2617. " metric value normalized randomSeed fold\n",
  2618. "0 F1_MACRO 0.616695 0.616695 0 0\n"
  2619. ]
  2620. }
  2621. ],
  2622. "source": [
  2623. "print('Search History:')\n",
  2624. "for pipeline_result in search.history:\n",
  2625. " print('-' * 52)\n",
  2626. " print('Pipeline id:', pipeline_result.pipeline.id)\n",
  2627. " print(pipeline_result.scores)"
  2628. ]
  2629. },
  2630. {
  2631. "cell_type": "code",
  2632. "execution_count": 80,
  2633. "metadata": {
  2634. "scrolled": true
  2635. },
  2636. "outputs": [
  2637. {
  2638. "name": "stdout",
  2639. "output_type": "stream",
  2640. "text": [
  2641. "Best pipeline:\n",
  2642. "----------------------------------------------------\n",
  2643. "Pipeline id: 108e1dce-67e7-45f2-962c-1965d988710a\n",
  2644. "Pipeline json: {\"id\": \"108e1dce-67e7-45f2-962c-1965d988710a\", \"schema\": \"https://metadata.datadrivendiscovery.org/schemas/v0/pipeline.json\", \"created\": \"2021-04-14T16:38:58.226503Z\", \"inputs\": [{\"name\": \"inputs\"}], \"outputs\": [{\"data\": \"steps.7.produce\", \"name\": \"output predictions\"}], \"steps\": [{\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4b42ce1e-9b98-4a25-b68e-fad13311eb65\", \"version\": \"0.3.0\", \"python_path\": \"d3m.primitives.tods.data_processing.dataset_to_dataframe\", \"name\": \"Extract a DataFrame from a Dataset\", \"digest\": \"fb5cd27ebf69b9587b23940618071ba9ffe9f47ebd7772797d61ae0521f92515\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"inputs.0\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"d510cb7a-1782-4f51-b44c-58f0236e47c7\", \"version\": \"0.6.0\", \"python_path\": \"d3m.primitives.tods.data_processing.column_parser\", \"name\": \"Parses strings into their types\", \"digest\": \"62af3e97e2535681a0b1320e4ac97edeba15895862a46244ab079c47ce56958d\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.0.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4503a4c6-42f7-45a1-a1d4-ed69699cf5e1\", \"version\": \"0.4.0\", \"python_path\": \"d3m.primitives.tods.data_processing.extract_columns_by_semantic_types\", \"name\": \"Extracts columns by semantic type\", \"digest\": \"d4c8204514d840de1b5acad9831f9d5581b41f425df3d14051336abdeacdf1b2\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.1.produce\"}}, \"outputs\": [{\"id\": \"produce\"}], \"hyperparams\": {\"semantic_types\": {\"type\": \"VALUE\", \"data\": [\"https://metadata.datadrivendiscovery.org/types/Attribute\"]}}}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"4503a4c6-42f7-45a1-a1d4-ed69699cf5e1\", \"version\": \"0.4.0\", \"python_path\": 
\"d3m.primitives.tods.data_processing.extract_columns_by_semantic_types\", \"name\": \"Extracts columns by semantic type\", \"digest\": \"d4c8204514d840de1b5acad9831f9d5581b41f425df3d14051336abdeacdf1b2\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.0.produce\"}}, \"outputs\": [{\"id\": \"produce\"}], \"hyperparams\": {\"semantic_types\": {\"type\": \"VALUE\", \"data\": [\"https://metadata.datadrivendiscovery.org/types/TrueTarget\"]}}}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"642de2e7-5590-3cab-9266-2a53c326c461\", \"version\": \"0.0.1\", \"python_path\": \"d3m.primitives.tods.timeseries_processing.transformation.axiswise_scaler\", \"name\": \"Axis_wise_scale\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.2.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"eaff2f35-978c-4530-a12e-061a5f0beacd\", \"version\": \"0.1.0\", \"python_path\": \"d3m.primitives.tods.feature_analysis.statistical_mean\", \"name\": \"Time Series Decompostional\", \"digest\": \"86f8a7a74cc872b09ec7dbec5910f9613c918255ba618731aa7f1ff9b42e37ba\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.4.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"67e7fcdf-d645-3417-9aa4-85cd369487d9\", \"version\": \"0.0.1\", \"python_path\": \"d3m.primitives.tods.detection_algorithm.pyod_ae\", \"name\": \"TODS.anomaly_detection_primitives.AutoEncoder\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.5.produce\"}}, \"outputs\": [{\"id\": \"produce\"}], \"hyperparams\": {\"contamination\": {\"type\": \"VALUE\", \"data\": 0.01}}}, {\"type\": \"PRIMITIVE\", \"primitive\": {\"id\": \"8d38b340-f83f-4877-baaa-162f8e551736\", \"version\": \"0.3.0\", \"python_path\": \"d3m.primitives.tods.data_processing.construct_predictions\", \"name\": \"Construct pipeline predictions output\", \"digest\": 
\"33d90bfb7f97f47a6de5372c5f912c26fca8da2d2777661651c69687ad6f9950\"}, \"arguments\": {\"inputs\": {\"type\": \"CONTAINER\", \"data\": \"steps.6.produce\"}, \"reference\": {\"type\": \"CONTAINER\", \"data\": \"steps.1.produce\"}}, \"outputs\": [{\"id\": \"produce\"}]}], \"digest\": \"a4ba790aa8c5ad34057cd97135f67edc8ccdc79d0bec0c4660fea0d2dfc82eb3\"}\n",
  2645. "Output:\n",
  2646. " d3mIndex anomaly\n",
  2647. "0 0 0\n",
  2648. "1 1 0\n",
  2649. "2 2 0\n",
  2650. "3 3 0\n",
  2651. "4 4 0\n",
  2652. "... ... ...\n",
  2653. "1395 1395 0\n",
  2654. "1396 1396 0\n",
  2655. "1397 1397 1\n",
  2656. "1398 1398 1\n",
  2657. "1399 1399 0\n",
  2658. "\n",
  2659. "[1400 rows x 2 columns]\n",
  2660. "Scores:\n",
  2661. " metric value normalized randomSeed fold\n",
  2662. "0 F1_MACRO 0.708549 0.708549 0 0\n"
  2663. ]
  2664. }
  2665. ],
  2666. "source": [
  2667. "print('Best pipeline:')\n",
  2668. "print('-' * 52)\n",
  2669. "print('Pipeline id:', best_pipeline.id)\n",
  2670. "print('Pipeline json:', best_pipeline.to_json())\n",
  2671. "print('Output:')\n",
  2672. "print(best_output)\n",
  2673. "print('Scores:')\n",
  2674. "print(best_scores)"
  2675. ]
  2676. },
  2677. {
  2678. "cell_type": "code",
  2679. "execution_count": null,
  2680. "metadata": {},
  2681. "outputs": [],
  2682. "source": []
  2683. }
  2684. ],
  2685. "metadata": {
  2686. "kernelspec": {
  2687. "display_name": "Python 3",
  2688. "language": "python",
  2689. "name": "python3"
  2690. },
  2691. "language_info": {
  2692. "codemirror_mode": {
  2693. "name": "ipython",
  2694. "version": 3
  2695. },
  2696. "file_extension": ".py",
  2697. "mimetype": "text/x-python",
  2698. "name": "python",
  2699. "nbconvert_exporter": "python",
  2700. "pygments_lexer": "ipython3",
  2701. "version": "3.6.10"
  2702. }
  2703. },
  2704. "nbformat": 4,
  2705. "nbformat_minor": 4
  2706. }

全栈的自动化机器学习系统,主要针对多变量时间序列数据的异常检测。TODS提供了详尽的用于构建基于机器学习的异常检测系统的模块,它们包括:数据处理(data processing),时间序列处理( time series processing),特征分析(feature analysis),检测算法(detection algorithms),和强化模块( reinforcement module)。这些模块所提供的功能包括常见的数据预处理、时间序列数据的平滑或变换,从时域或频域中抽取特征、多种多样的检测算