
Untitled-checkpoint

In [1]:

from sklearn.neural_network import MLPRegressor

import pandas as pd

from sklearn.preprocessing import StandardScaler
import numpy as np
from sklearn.model_selection import GridSearchCV

In [2]:

data = pd.read_csv("data169765.csv", header=None)

In [3]:

X = data.iloc[:, :5]
y = data.iloc[:, 5]
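
A note on methodology (not in the original notebook): the final predict() calls below are made on the same rows the models were fitted on, so those predictions are in-sample. A minimal sketch of a held-out split, assuming sklearn's train_test_split, would look like:

from sklearn.model_selection import train_test_split

# Hypothetical hold-out split so later models can also be checked on unseen rows.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1)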

In [33]:

data.head()

Out[33]:

0 1 2 3 4 5
0 3.0497 3.13170 -24.831 179.990 10.843000 1258.8
1 6.0873 3.43550 -26.994 59.996 -2.799100 1311.9
2 9.1010 0.57715 79.699 59.996 -1.261500 1198.0
3 12.0790 0.45136 -36.173 239.980 -9.370000 1342.1
4 15.0090 10.26000 -21.143 239.980 0.051995 1350.2

In [86]:

X.head()

Out[86]:

0 1 2 3 4
0 3.0497 3.13170 -24.831 179.990 10.843000
1 6.0873 3.43550 -26.994 59.996 -2.799100
2 9.1010 0.57715 79.699 59.996 -1.261500
3 12.0790 0.45136 -36.173 239.980 -9.370000
4 15.0090 10.26000 -21.143 239.980 0.051995

In [35]:

y.head()

Out[35]:

0 1258.8
1 1311.9
2 1198.0
3 1342.1
4 1350.2
Name: 5, dtype: float64

In [40]:

reg = MLPRegressor(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(5, 2), random_state=1)

In [41]:

reg.fit(X, y)

Out[41]:

MLPRegressor(activation='relu', alpha=1e-05, batch_size='auto', beta_1=0.9,
beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [42]:

reg.predict(X)

Out[42]:

array([ 1266.02155648, 1245.01389353, 1238.62640672, 1268.02044991,
1257.08211267, 1243.38281224, 1246.42477883, 1238.01551804,
1236.47509099, 1235.93832116, 1248.09374674, 1232.16485829,
1267.21313042, 1237.61755919, 1240.36845561, 1227.34541373,
1239.40468438, 1262.74820882, 1243.26409727, 1246.41472225,
1230.3041069 , 1262.50686537, 1233.97988114, 1271.18518758,
1270.95204289, 1258.04243063, 1236.51733553, 1229.42331121,
1221.63803328, 1233.39790118, 1225.58228603, 1260.1546772 ,
1271.05885115, 1234.98048422, 1246.22209206, 1242.96719324,
1240.2142514 , 1252.46126931, 1222.83834835, 1246.90462941,
1249.81136844, 1239.86371144, 1267.95366845, 1252.73117099,
1220.06747197, 1249.49416109, 1249.46057681, 1242.43571653,
1254.13547568, 1229.54925775, 1250.39585208, 1273.3536956 ,
1247.49508277, 1267.14290162, 1260.16175918, 1255.62562011,
1272.22028726, 1278.91399759, 1248.80187771, 1248.22278399,
1257.95650874, 1252.21404345, 1289.08666181, 1291.94713173,
1268.32933635, 1259.57620603, 1209.66906519, 1265.32126686,
1265.02062097, 1261.83728159, 1260.31916109, 1210.71187457,
1239.95355562, 1248.38506131, 1251.8668645 , 1285.59095126,
1262.28656983, 1268.06636733, 1223.08440051, 1230.95371875,
1233.90934235, 1246.88506031, 1292.3273911 , 1230.04032242,
1227.5345179 , 1295.98177468, 1271.54428068, 1264.8463089 ,
1255.54207743, 1244.00006846, 1234.56226319, 1272.2061417 ,
1248.10929453, 1228.73233072, 1250.05332011, 1256.92944695,
1249.42709192, 1234.10522671, 1262.6849538 , 1231.43222976,
1276.51426847, 1236.90965986, 1230.83081126, 1258.23994274,
1243.41456547, 1228.88362321, 1252.36921644, 1237.47630205,
1241.74630085, 1271.54144724, 1225.91900756, 1251.57788594,
1243.01370599, 1266.31422782, 1258.04460877, 1233.19117738,
1239.88601692, 1274.29411949, 1245.78544274, 1270.59294333,
1251.66543474, 1237.34896419, 1259.63088202, 1258.11468402,
1253.68103972, 1238.51358835, 1266.99983764, 1245.76059402,
1255.83476361, 1221.7364963 , 1266.1672025 , 1276.52851451,
1245.91566636, 1263.54463671, 1262.18746412, 1240.17103179,
1239.83513391, 1277.13623341, 1240.03265866, 1252.12078448,
1242.32992731, 1256.11039766, 1209.21047238, 1261.62936462,
1265.32602055, 1239.06089924, 1246.17328372, 1260.35423878,
1263.0831453 , 1248.02969522, 1225.15236274, 1225.65045345,
1233.3702758 , 1267.64275726, 1257.47976977, 1243.51545174,
1259.32072266, 1220.70173575, 1238.23420389, 1248.00307957,
1250.86735437, 1256.21902335, 1274.34985728, 1249.52876089,
1270.16931671, 1271.81803244, 1256.47118715, 1241.42116752,
1267.74087589, 1268.9134506 , 1266.23610312, 1250.66639482,
1292.0340843 , 1273.5124165 , 1252.08241409, 1263.56276893,
1279.55144858, 1237.72422149, 1283.96732185, 1301.93881093,
1256.80897812, 1262.85846725, 1241.54953436, 1233.50425823,
1301.13418372, 1254.34107069, 1236.45457754, 1256.94435887,
1289.17589942, 1250.62879902, 1262.02572152, 1252.79067859,
1242.61976561, 1246.67164419, 1245.93563334, 1232.8631613 ,
1243.56376279, 1230.65714241, 1253.70539179, 1256.14459293,
1261.98995822, 1240.01948329, 1238.14942616, 1248.02029334,
1237.83859447, 1250.88610227, 1266.18873182, 1270.67505812,
1226.82605533, 1255.22461548, 1245.98393102, 1234.33203011,
1254.9036339 , 1250.04992133, 1240.57563718, 1254.22168753,
1244.52724829, 1242.0553965 , 1255.77865603, 1228.71045685,
1261.60109573, 1270.22873794, 1239.27842097, 1250.87220925,
1216.58420208, 1229.98968112, 1229.25866349, 1249.81961337,
1244.14454968, 1248.25389783, 1248.37945495, 1221.72388353,
1229.33692446, 1252.6369524 , 1252.76589795, 1230.60060548,
1221.37506261, 1234.3539324 , 1246.64643083, 1247.55363101,
1245.48777584, 1238.57242761, 1266.88645521, 1256.28391341,
1245.78720032, 1259.85830032, 1272.79780844, 1246.40006231,
1233.34540412, 1241.59457973, 1231.93768616, 1241.39205794,
1259.22328868, 1241.76545199, 1216.96296555, 1228.01399006,
1264.74012276, 1250.16144959, 1282.16286712, 1303.13931764,
1248.34783678, 1250.70730516, 1224.9483868 , 1242.44151174,
1243.59631144, 1255.72021696, 1258.6562913 , 1279.50955003,
1265.21819037, 1279.90679157, 1265.58630686, 1251.4898765 ,
1277.18703913, 1262.96076063, 1249.06763041, 1271.79944547,
1274.22410835, 1251.54711351, 1259.64964776, 1252.39019732,
1246.7848189 , 1264.44140248, 1245.58351226, 1238.78188997,
1264.70926428, 1257.59891656, 1245.70807915, 1286.25443341,
1245.73886393, 1261.31507625, 1266.14753055, 1244.42107483,
1233.15446513, 1289.83102399, 1257.02442986, 1268.00746267,
1255.96142956, 1249.22119213, 1256.67611631, 1214.76676948,
1250.42847768, 1275.11228935, 1253.50895749, 1240.67944975,
1229.45745344, 1249.38699047, 1252.28585729, 1256.97695418,
1233.95573448, 1256.71693969, 1232.15155473, 1262.48628692,
1235.01843608, 1261.78154892, 1266.29616101, 1240.08300268,
1241.69283363, 1255.47480153, 1270.93174017, 1226.13695212,
1243.24939063, 1260.81880836, 1274.25117483, 1262.53269508,
1254.31056007, 1232.64484278, 1268.77244202, 1215.69282292,
1238.55300842, 1279.50138485, 1252.39733075, 1246.03764283,
1258.6067219 , 1256.85876375, 1227.19797022, 1258.67051931,
1271.91859052, 1246.13838112, 1246.24165379, 1240.11434223,
1253.38824571, 1238.28428817, 1227.02962911, 1243.76913081,
1268.24941092, 1236.49425424, 1258.57444064, 1258.67233532,
1238.5222067 , 1239.14281649, 1249.42865901, 1247.70822367,
1242.30968043, 1263.16641577, 1259.72844164, 1237.16778064,
1268.47659769, 1264.56252816, 1229.41204247, 1273.09662147,
1206.32737311, 1273.46373011, 1244.33560463, 1257.18286138,
1261.1955591 , 1261.29847912, 1274.0806904 , 1294.46393758,
1249.24071417, 1283.72367688, 1283.38229799, 1271.87732411,
1271.78158396, 1257.23734453, 1260.80813937, 1290.50341634,
1227.77355116, 1286.4112234 , 1294.02233567, 1240.6511789 ,
1261.14780034, 1240.02348136, 1264.31675393, 1266.30402453,
1241.04379359, 1236.68537896, 1274.70054935, 1268.49064656,
1264.8212113 , 1279.88534337, 1240.82828117, 1248.96476716,
1274.64735913, 1232.33662145, 1220.06359528, 1235.28115168,
1236.60522881, 1254.08198922, 1244.6471569 , 1253.3864558 ,
1256.59455938, 1224.54950641, 1239.24355619, 1225.66959995,
1275.22367026, 1238.1011631 , 1260.0270334 , 1250.66754638,
1246.99570868, 1240.29304771, 1260.25869587, 1262.59414528,
1252.06059872, 1240.45233591, 1233.9377692 , 1282.45732958,
1244.61470091, 1209.78169474, 1243.76621048, 1243.55605206,
1260.13688966, 1245.29776378, 1256.23767984, 1252.42200994,
1230.72532629, 1232.0626278 , 1243.15669951, 1248.11203504,
1246.57215774, 1235.52807581, 1245.94470419, 1230.47687524,
1236.01220396, 1258.37124012, 1247.32509951, 1239.2244884 ,
1248.29070923, 1244.98084598, 1226.82456461, 1235.36073718,
1220.05519367, 1273.94145274, 1238.82834944, 1256.29778483,
1240.94180574, 1236.97640124, 1254.7912236 , 1220.44528998,
1235.82560404, 1255.09201392, 1244.38672093, 1224.48785577,
1250.82006402, 1268.76512804, 1270.54329492, 1264.9421637 ,
1254.10238843, 1274.63801236, 1260.99908411, 1251.90772056,
1255.99124279, 1246.19312125, 1248.68345134, 1262.94965592,
1238.85115979, 1269.17758788, 1291.63628134, 1282.37589628,
1276.20519866, 1213.2220152 , 1249.65268517, 1266.02221452,
1301.97602427, 1208.43090766, 1263.51838105, 1255.8612824 ,
1254.80920491, 1275.94951548, 1247.42865662, 1251.29468612,
1251.99899028, 1262.78902729, 1246.11203786, 1240.58839013,
1288.14304284, 1246.94192806, 1280.20131808, 1307.39168141,
1260.36673957, 1268.28644699, 1233.4479279 , 1255.53067932,
1264.75343308, 1244.33989266, 1258.811593 , 1295.33308508,
1268.90663384, 1255.10467445, 1231.75795374, 1242.53638652,
1240.38153852, 1244.78705172, 1254.15129951, 1236.3527657 ,
1231.9556515 , 1252.71199609, 1253.4104574 , 1268.18363861,
1257.35062653, 1280.4115058 , 1236.94166426, 1249.87732216,
1238.84845361, 1254.55669653, 1239.15923989, 1266.44103327,
1258.82131456, 1251.58245946, 1267.98114225, 1240.05526608,
1234.41534848, 1252.26121853, 1256.38978231, 1244.89815914,
1255.29530224, 1250.93269524, 1244.14909373, 1242.22412673,
1241.80473255, 1250.13359118, 1253.22253538, 1261.34757835,
1225.06329776, 1253.59899276, 1248.63320389, 1226.81883451,
1227.95689859, 1219.38808654, 1255.12946257, 1270.22602894,
1240.45201378, 1233.80124764, 1223.75958784, 1247.29160758,
1246.26068832, 1233.34296465, 1240.17391391, 1229.53123854,
1246.66700758, 1269.4429122 , 1283.66408459, 1259.18597155,
1268.42365462, 1261.62124799, 1252.27190819, 1222.31708314,
1257.34280925, 1272.86990266, 1243.65128055, 1236.34654716,
1243.93173007, 1239.58696252, 1262.52801923, 1286.71921233,
1231.83611112, 1275.29593141, 1249.66288954, 1272.8801918 ,
1225.57519142, 1285.58332864, 1231.65509264, 1254.00375478,
1229.11636307, 1269.07583832, 1237.22614702, 1299.21980327,
1243.72277633, 1259.65738265, 1277.53843378, 1279.33053617,
1270.17445651, 1273.17799175, 1276.69467452, 1265.81055702,
1253.99211814, 1224.76002936, 1238.69158464, 1284.18196215,
1244.03574252, 1242.20666062, 1265.63039581, 1209.27218953,
1256.50952965, 1220.97705026, 1261.30904036, 1242.11276033,
1246.32261688, 1256.10199677, 1227.97739475, 1217.07432323,
1247.61085281, 1267.79121345, 1254.18286345, 1278.04689916,
1241.86946857, 1243.22421059, 1226.57830371, 1249.71365303,
1264.74181973, 1234.09020564, 1214.32200599, 1241.79608233,
1214.80961019, 1222.47929324, 1240.19670591, 1226.61799443,
1262.23124228, 1263.89399384, 1239.77671608, 1248.14265906,
1246.44769815, 1250.80754567, 1230.48711356, 1237.37160663,
1241.31533398, 1237.36009826, 1245.10264677, 1254.6779196 ,
1220.79231838, 1253.82819283, 1259.9110868 , 1248.33468299,
1255.1381292 , 1259.18159961, 1261.97319725, 1244.53197469,
1239.06508649, 1251.47608431, 1238.1733699 , 1285.91336323,
1249.69430895, 1284.22190544, 1233.26185356, 1258.22390574,
1201.45807305, 1293.45309916, 1232.45106393, 1237.1651955 ,
1225.23418695, 1258.34469744, 1270.91295817, 1236.58850134,
1223.06607651, 1219.48535894, 1257.16415069, 1227.76568528,
1271.49834843, 1266.02140344, 1250.6599818 , 1247.18054142,
1270.25414018, 1276.55510372, 1273.80282176, 1244.51255602,
1245.01013145, 1264.8930945 , 1258.36929276, 1283.52158355,
1227.89296153, 1271.12488477, 1262.31016446, 1282.21848518,
1245.15366634, 1279.76726795, 1243.44961721, 1249.26616814,
1266.56862593, 1259.54473198, 1217.47540841, 1255.99191979,
1271.37203298, 1286.78303964, 1265.26435141, 1257.29617655,
1263.09921 , 1215.14052066, 1259.61704452, 1220.87806675,
1285.90399939, 1263.65169482, 1236.33199845, 1276.4380464 ,
1262.84824798, 1269.10633995, 1256.04797452, 1276.61425995,
1261.85000795, 1247.72565258, 1249.28313657, 1234.87731641,
1249.59446894, 1244.0264508 , 1240.43976889, 1250.57066625,
1242.42785273, 1249.09920692, 1257.14220589, 1234.50691318,
1261.1704824 , 1230.05022048, 1228.2956625 , 1276.82505614,
1236.59455127, 1278.52521122, 1234.64534525, 1257.07474801,
1236.22797134, 1254.09867811, 1277.11811329, 1256.80539988,
1247.17308694, 1255.35664215, 1252.51263996, 1256.71758947,
1256.52932628, 1259.14093326, 1278.58430025, 1216.44234674,
1228.96714938, 1262.15652567, 1242.90047753, 1245.59640197,
1255.65676717, 1226.67760669, 1246.78064486, 1251.61569435,
1241.83921761, 1253.13903095, 1277.19477315, 1233.18513051,
1268.39779477, 1261.16943245, 1245.27874871, 1273.46238766,
1264.09338573, 1251.65828335, 1216.69468226, 1258.81814932,
1264.99546897, 1254.17072814, 1219.27335775, 1235.08122133,
1243.00473263, 1230.72320963, 1240.89660529, 1261.16166911,
1259.21791084, 1259.35224967, 1231.21616928, 1249.48069396,
1290.0742499 , 1232.97864096, 1289.10232337, 1293.63205907,
1253.81635684, 1269.25039497, 1237.81402668, 1253.22299481,
1245.65912157, 1285.97801399, 1296.05549432, 1245.49586717,
1249.20272492, 1291.6900966 , 1228.79929174, 1283.43722372,
1243.53910895, 1246.64538624, 1287.51306374, 1279.75458975,
1264.09623648, 1234.51460788, 1249.30244295, 1260.60737788,
1303.05505355, 1252.76475758, 1279.64796492, 1273.19105566,
1237.77568115, 1242.27812073, 1247.19503101, 1272.5969614 ,
1279.61317823, 1236.70336924, 1215.42849291, 1247.22314031,
1263.28429241, 1229.48058516, 1285.50329874, 1237.59438484,
1242.15980323, 1248.15109662, 1260.51431438, 1273.5146405 ,
1236.55612696, 1245.97887681, 1256.75403389, 1221.19537197,
1267.40359355, 1229.2567464 , 1260.31443785, 1276.53174026,
1259.5920821 , 1250.98335458, 1255.60353787, 1236.90238861,
1246.53696793, 1237.19686406, 1251.12571058, 1247.42478191,
1257.74283896, 1267.31670132, 1224.29302777, 1247.62977944,
1246.26463673, 1249.75600881, 1244.31423027, 1229.31926703,
1237.35222853, 1272.70604878, 1289.30150995, 1284.77316112,
1228.00981078, 1220.70339176, 1263.69359438, 1233.43238988,
1244.99979203, 1243.57296567, 1245.76029222, 1240.42913815,
1264.86105259, 1238.99791939, 1251.35758194, 1213.75758825,
1253.52586577, 1250.05837879, 1236.91678644, 1266.70488101,
1255.96716754, 1263.43039146, 1239.3242106 , 1241.95683721,
1233.5118765 , 1217.46029332, 1233.91879892, 1258.16483718,
1254.10568549, 1265.39339425, 1232.95619368, 1271.3407613 ,
1235.0290157 , 1238.02127056, 1245.68832397, 1277.12256406,
1262.0613281 , 1254.84596969, 1242.06725807, 1243.25303266,
1240.33744073, 1268.54538759, 1255.62081534, 1233.34602888,
1285.70952953, 1249.42372873, 1270.30198519, 1300.96572509,
1279.66633575, 1261.17983108, 1294.46621208, 1256.76109053,
1257.33640289, 1224.09056883, 1275.12289486, 1259.68194416,
1244.25645842, 1280.51768594, 1269.27098994, 1271.24625798,
1256.35954512, 1256.50521831, 1240.62169121, 1235.19671976,
1272.86806352, 1259.85500009, 1280.26602678, 1270.30704291,
1238.912574 , 1240.06340867, 1259.66659716, 1257.11987939,
1237.17557926, 1293.29085153, 1253.10773694, 1239.04719498,
1216.72473859, 1250.09119044, 1241.84420516, 1224.85106834,
1243.96422079, 1250.04975349, 1265.2792805 , 1255.55340778,
1244.0227486 , 1252.76421415, 1251.52900907, 1237.95011898,
1250.22150128, 1256.75920857, 1267.70704193, 1239.43461457,
1253.07853888, 1271.59170127, 1271.00779091, 1242.88030739,
1251.27467167, 1231.81155921, 1244.76705547, 1224.71718516,
1254.38802519, 1259.9242957 , 1244.12507147, 1248.83064774,
1244.73254921, 1252.53663059, 1235.86192394, 1236.8502461 ,
1259.68317298, 1243.77454416, 1251.23871765, 1256.1539036 ,
1250.48214927, 1230.87347481, 1249.98635512, 1264.20033367,
1252.62068086, 1274.77595423, 1259.48015721, 1248.67202679,
1253.06929173, 1264.97587891, 1252.29933044, 1251.23941198,
1230.66603116, 1261.596461 , 1248.32521203, 1248.99553756,
1277.74720975, 1233.13862669, 1245.19399172, 1244.36798862,
1263.1787127 , 1245.55807057, 1280.28392737, 1258.01779537,
1253.19672411, 1223.53682135, 1261.25592953, 1263.07397201,
1241.5641673 , 1241.91069878, 1231.69937584, 1257.28007821,
1268.89687065, 1259.90530229, 1291.60998334, 1260.67918373,
1281.55426969, 1280.03637984, 1244.39320286, 1251.49041987,
1228.41478153, 1274.54624169, 1250.57299471, 1297.35023056,
1240.45546633, 1245.24454182, 1269.04979353, 1244.19227775,
1258.10964259, 1253.11254398, 1239.35689102, 1261.32795935,
1271.7254979 , 1233.56394731, 1237.94833633, 1258.04895587,
1236.64099147, 1249.09386874, 1241.13644422, 1241.15497251])
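
The array above is the model's prediction on its own training rows. A quick, hedged way to quantify that in-sample fit with sklearn.metrics (not run in the original notebook, so no numbers are quoted):

from sklearn.metrics import mean_squared_error, r2_score

pred = reg.predict(X)            # same in-sample predictions as above
print('train MSE:', mean_squared_error(y, pred))
print('train R^2:', r2_score(y, pred))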

In [90]:

scaler = StandardScaler()
scaler.fit(X)
X = scaler.transform(X)
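
Standardizing the inputs matters a lot for MLPs, but fitting the scaler on all of X before GridSearchCV lets each validation fold see statistics computed from its own rows. A minimal sketch that avoids this, assuming a Pipeline (not used in the original notebook):

from sklearn.pipeline import Pipeline

# Scaling happens inside each CV split, using only that split's training portion.
pipe = Pipeline([('scale', StandardScaler()),
                 ('mlp', MLPRegressor(hidden_layer_sizes=(5, 2), random_state=1))])
pipe_params = {'mlp__solver': ('lbfgs', 'sgd', 'adam'),
               'mlp__alpha': 10.0 ** -np.arange(1, 7)}
pipe_gcv = GridSearchCV(pipe, pipe_params, scoring='neg_mean_squared_error')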

In [53]:

parameters = {'solver': ('lbfgs', 'sgd', 'adam'), 'alpha': 10.0 ** -np.arange(1, 7)}
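
For reference, 10.0 ** -np.arange(1, 7) expands to the six values 0.1, 0.01, 0.001, 1e-04, 1e-05 and 1e-06, so the search below evaluates 3 solvers x 6 alphas = 18 candidates (matching the length of cv_results_ checked later):

print(10.0 ** -np.arange(1, 7))   # prints the six alpha values listed above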

In [62]:

mlp = MLPRegressor(hidden_layer_sizes=(5, 2), random_state=1)
gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')

In [63]:

gcv.fit(X, y)

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
[the warning above is repeated 18 times during the grid search]

Out[63]:

GridSearchCV(cv=None, error_score='raise',
estimator=MLPRegressor(activation='relu', alpha=0.0001, batch_size='auto', beta_1=0.9,
beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='adam', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False),
fit_params=None, iid=True, n_jobs=1,
param_grid={'solver': ('lbfgs', 'sgd', 'adam'), 'alpha': array([ 1.00000e-01, 1.00000e-02, 1.00000e-03, 1.00000e-04,
1.00000e-05, 1.00000e-06])},
pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',
scoring='neg_mean_squared_error', verbose=0)
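
The ConvergenceWarning above comes from the 'sgd' and 'adam' candidates stopping at max_iter=200 before their loss has converged. A sketch of one way to address it, assuming the iteration budget is simply raised (not what the original run did):

# Give the stochastic solvers a larger iteration budget before re-running the search.
mlp_long = MLPRegressor(hidden_layer_sizes=(5, 2), random_state=1, max_iter=2000)
gcv_long = GridSearchCV(mlp_long, parameters, scoring='neg_mean_squared_error')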

In [64]:

gcv.best_score_

Out[64]:

-68648.332680830616
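
Because scoring is 'neg_mean_squared_error', this is the negative of the best cross-validated MSE, i.e. an MSE of about 68648 and hence an RMSE of roughly 262 in the units of y:

print(np.sqrt(-gcv.best_score_))   # approx 262 for the value above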

In [57]:

gcv.best_estimator_

Out[57]:

MLPRegressor(activation='relu', alpha=1.0000000000000001e-05,
batch_size='auto', beta_1=0.9, beta_2=0.999, early_stopping=False,
epsilon=1e-08, hidden_layer_sizes=(5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [61]:

len(gcv.cv_results_[“params”])

Out[61]:

18
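
The 18 entries are exactly the 3 solvers x 6 alphas of the grid. One convenient, hedged way to inspect them is to load cv_results_ into a DataFrame (output not reproduced here):

results = pd.DataFrame(gcv.cv_results_)
print(results[['params', 'mean_test_score', 'rank_test_score']]
      .sort_values('rank_test_score').head())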

In [65]:

gcv.best_estimator_

Out[65]:

MLPRegressor(activation='relu', alpha=1.0000000000000001e-05,
batch_size='auto', beta_1=0.9, beta_2=0.999, early_stopping=False,
epsilon=1e-08, hidden_layer_sizes=(5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [67]:

gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')
gcv.fit(X, y)
gcv.best_score_

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
[the warning above is repeated 18 times during the grid search]

Out[67]:

-33483.452903206744

In [68]:

gcv.best_estimator_

Out[68]:

MLPRegressor(activation='relu', alpha=0.10000000000000001, batch_size='auto',
beta_1=0.9, beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [96]:

mlp = MLPRegressor(solver='lbfgs', random_state=1)

parameters = {'hidden_layer_sizes': [(5, 2), (5, 5, 2), (10, 5)], 'alpha': 10.0 ** -np.arange(1, 7)}
gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')
gcv.fit(X, y)
gcv.best_score_

Out[96]:

-29889.254430003857

In [97]:

gcv.predict(X)

Out[97]:

array([ 1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798,
1252.37896798, 1252.37896798, 1252.37896798, 1252.37896798])
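
Every prediction above is the same value (about 1252.38), so this refit network appears to have collapsed to a constant: if all ReLU hidden units output zero, the model reduces to its output bias, and minimizing squared error pushes that bias towards roughly the mean of y. A quick, hedged check of that reading:

pred = gcv.predict(X)
print(pred.std())              # ~0 if the output really is constant
print(pred.mean(), y.mean())   # the constant should sit near the target mean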

In [71]:

gcv.best_estimator_

Out[71]:

MLPRegressor(activation='relu', alpha=0.01, batch_size='auto', beta_1=0.9,
beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [98]:

mlp = MLPRegressor(solver='lbfgs', random_state=1)

parameters = {'hidden_layer_sizes': [(5, 5, 2), (5, 5, 2, 2)], 'alpha': 10.0 ** -np.arange(1, 7)}
gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')
gcv.fit(X, y/1000)
gcv.best_score_

Out[98]:

-0.030887437092134363
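
Dividing the target by 1000 rescales the squared error by 1000**2, so this score corresponds to roughly -30887 on the original scale of y, i.e. in the same ballpark as the previous search:

print(gcv.best_score_ * 1000 ** 2)   # approx -30887 for the value above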

In [73]:

gcv.best_estimator_

Out[73]:

MLPRegressor(activation='relu', alpha=0.01, batch_size='auto', beta_1=0.9,
beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 5, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=1, shuffle=True,
solver='lbfgs', tol=0.0001, validation_fraction=0.1, verbose=False,
warm_start=False)

In [99]:

gcv.predict(X)

Out[99]:

array([ 1.26778139, 1.26778139, 1.26723262, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.24236362, 1.26778139, 1.23501631,
1.26778139, 1.23269729, 1.26778139, 1.23990204, 1.26778139,
1.18890803, 1.26778139, 1.26471808, 1.26778139, 1.26778139,
1.22889657, 1.24518298, 1.26778139, 1.27248545, 1.26778139,
1.30160799, 1.26778139, 0.87390762, 1.20542844, 1.19635591,
1.14076809, 1.26778139, 1.26778139, 1.18556045, 1.26778139,
1.26778139, 1.22181942, 1.26778139, 1.23524005, 1.25476975,
1.30160799, 1.26778139, 1.30160799, 1.26778139, 1.17450507,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.24694687,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 0.85494032, 1.22941361, 1.26778139,
1.26778139, 1.26778139, 1.30160799, 1.30160799, 1.26778139,
1.26778139, 1.15346112, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.14894477, 1.2086389 , 1.26778139, 1.26778139,
1.29515457, 1.26778139, 1.26778139, 1.22575374, 1.23057665,
1.2172015 , 1.26778139, 1.26778139, 1.25981618, 1.17441109,
1.30160799, 1.26778139, 1.26778139, 1.1869739 , 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.22288836, 1.30160799, 1.25330953,
1.27020483, 1.26778139, 1.2601627 , 1.26778139, 1.26778139,
1.26738102, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.17959006, 1.28128193, 1.26778139, 1.26778139, 1.19904648,
1.26778139, 1.26778139, 1.12920746, 1.26778139, 1.26778139,
1.26778139, 1.19675497, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.26388905, 1.26778139, 1.14808753,
1.26778139, 1.03736376, 1.27147812, 1.26778139, 1.2640911 ,
1.26778139, 1.26778139, 1.26778139, 1.20114565, 1.26778139,
1.26778139, 1.26778139, 1.1823425 , 1.30160799, 1.30160799,
1.26778139, 1.26778139, 1.19026706, 1.26778139, 1.26778139,
1.19926349, 1.24421091, 1.20716558, 1.298749 , 1.26778139,
1.14590714, 1.26778139, 1.16030766, 1.22849248, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.20069126, 1.257724 , 1.26778139,
1.26778139, 1.21046113, 1.30160799, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.30160799,
1.26778139, 1.26778139, 1.24060243, 1.2284125 , 1.30160799,
1.26778139, 1.21333401, 1.26778139, 0.97671074, 1.26778139,
1.23082946, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.21321493, 1.23474386, 1.23200603, 1.26778139, 1.26778139,
1.30160799, 1.26778139, 1.26778139, 1.25023271, 1.26778139,
1.26778139, 1.30160799, 1.2983031 , 1.2288348 , 1.26778139,
1.30160799, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26032068, 1.26760543, 0.73378469,
1.26778139, 1.26778139, 1.22532048, 1.26778139, 1.23110976,
1.26778139, 1.26778139, 1.28257315, 1.26778139, 1.26778139,
1.26778139, 1.16820693, 1.26466245, 1.26778139, 1.26770077,
1.26778139, 1.17263271, 1.23798213, 1.26778139, 1.30160799,
1.29601175, 1.18963432, 1.30160799, 1.24475764, 1.26778139,
1.23468849, 1.30160799, 1.26778139, 1.24920522, 1.26778139,
1.19039881, 1.26778139, 1.26778139, 1.26778139, 1.15782668,
1.26778139, 1.05691568, 1.26778139, 1.30160799, 1.30160799,
1.07551322, 1.26778139, 1.1285813 , 1.22815464, 1.21952173,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.22061374, 1.26778139, 1.24097069,
1.26778139, 1.26778139, 1.19025582, 1.26778139, 1.25390628,
1.26778139, 1.26778139, 1.24231048, 1.15328567, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.22956686, 1.26778139,
1.26778139, 1.26778139, 1.23431065, 1.14063438, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.29738679, 1.23396638,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.20684878,
1.26778139, 1.26778139, 1.26808367, 1.26778139, 1.25070328,
1.26750773, 1.26778139, 1.26778139, 1.26575783, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.30160799, 1.26418133, 1.26778139, 1.26778139, 1.30160799,
1.26778139, 1.26778139, 1.23494348, 1.26778139, 1.26778139,
1.2672153 , 1.30160799, 1.26778139, 1.30160799, 1.23505683,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.21189901, 1.26778139, 1.21651345, 1.28642131, 1.30160799,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.00849887, 1.23241513,
1.26778139, 1.30160799, 1.26778139, 1.23905166, 1.30160799,
1.14743619, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.23293401, 1.26778139,
1.26778139, 1.17446636, 1.30160799, 1.30160799, 1.26778139,
1.23041808, 1.2008431 , 1.2628891 , 1.26003411, 1.21927669,
1.26778139, 1.25486866, 1.26778139, 1.26778139, 1.29792381,
1.26778139, 1.26778139, 1.26778139, 1.24874671, 1.21025795,
1.2324288 , 1.23706034, 1.26778139, 1.26778139, 1.2657293 ,
1.0754393 , 1.19851143, 1.26778139, 1.19453157, 1.26778139,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.26778139,
1.30160799, 1.30160799, 1.26778139, 1.26778139, 1.25071718,
1.30160799, 1.26778139, 1.17749412, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.16010574,
1.26005775, 1.26778139, 1.26778139, 1.30160799, 1.26778139,
1.26778139, 1.2590658 , 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26571689, 1.30160799, 1.19482515, 1.2565737 ,
1.20822032, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.21999049, 1.26778139, 1.26778139,
1.07551322, 1.16773045, 1.26778139, 1.28678238, 1.30160799,
1.26778139, 1.26778139, 1.26778139, 1.19503339, 1.26778139,
1.26778139, 1.25241361, 1.1988034 , 1.25557235, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.20366904,
1.26778139, 1.26778139, 0.936131 , 1.10450894, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.2258479 , 1.22929065,
1.26778139, 1.26778139, 1.26778139, 1.26768192, 1.26778139,
1.26538625, 1.26778139, 1.30160799, 1.26778139, 1.26778139,
1.24843517, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.30160799, 1.08806254, 1.26778139, 1.21092885, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26360016,
1.26778139, 1.26778139, 1.26778139, 1.24180857, 1.30160799,
1.26778139, 1.26322861, 1.26778139, 1.30160799, 1.25649886,
1.30160799, 1.26778139, 1.26778139, 1.26778139, 1.24742431,
1.26778139, 1.26778139, 1.26778139, 1.30160799, 1.26778139,
1.26778139, 1.25634791, 1.26778139, 1.25599647, 1.26778139,
1.26778139, 1.26778139, 1.20762554, 1.26778139, 1.26778139,
1.25891098, 1.1859228 , 1.22834324, 1.25975238, 1.26778139,
1.26778139, 1.26778139, 1.18167794, 1.26778139, 1.26778139,
1.26778139, 1.25529554, 1.23914589, 1.26778139, 1.26778139,
1.30160799, 1.29493859, 1.26778139, 1.26778139, 1.26778139,
1.17803435, 1.26778139, 1.29617926, 1.26778139, 1.26778139,
1.26778139, 1.23559477, 1.26778139, 1.30160799, 1.22455514,
1.26778139, 1.26778139, 1.26778139, 1.16817863, 1.26778139,
1.19715898, 1.26778139, 1.26778139, 1.26778139, 1.20152742,
1.26778139, 1.23817003, 1.26778139, 1.26778139, 1.2927593 ,
1.26778139, 1.26778139, 1.29940124, 1.26778139, 1.26778139,
1.26778139, 1.21021346, 1.30160799, 1.24854375, 1.26778139,
1.21673487, 1.20454325, 1.26778139, 1.2328711 , 1.26778139,
1.26620407, 1.23545519, 1.26778139, 1.23456599, 1.17087435,
1.26778139, 1.26778139, 1.25375835, 1.29633088, 1.23150301,
1.26778139, 1.15921957, 1.26778139, 1.26778139, 1.26778139,
1.1661495 , 1.26778139, 1.23353549, 1.23036249, 1.25249155,
1.26778139, 1.26778139, 1.30160799, 1.19292261, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26226598, 1.26778139,
1.25685276, 1.30160799, 1.26778139, 1.19946722, 1.26778139,
1.30160799, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.264866 , 1.26342784, 1.26778139, 1.26778139, 1.30160799,
1.23782176, 1.26778139, 1.25143762, 1.26778139, 1.15287472,
1.30160799, 1.24408683, 1.26778139, 1.19956382, 1.26778139,
1.30160799, 1.26778139, 1.16478806, 1.18996442, 1.26778139,
1.14626167, 1.26778139, 1.25153979, 1.26778139, 1.26778139,
1.26778139, 1.27392252, 1.26778139, 1.25878918, 1.23670615,
1.2675091 , 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.18555623, 1.26778139, 1.26778139,
1.26778139, 1.23970085, 1.26778139, 1.18787564, 1.26778139,
1.26778139, 1.00301238, 1.26778139, 1.2378458 , 1.26778139,
1.14636673, 1.26778139, 1.17053854, 1.30160799, 1.26778139,
1.23862419, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.23674492, 1.25199271, 1.26778139, 1.26778139,
1.25508922, 1.26778139, 1.16949197, 1.26778139, 1.26778139,
1.26778139, 1.17565113, 1.22802458, 1.28324519, 1.26778139,
1.24982379, 1.28976227, 1.26778139, 1.26778139, 1.25693241,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.09749917, 1.26778139, 1.30160799,
1.26778139, 1.26778139, 1.26778139, 1.03757355, 1.26318058,
1.30160799, 1.26694285, 1.30160799, 1.26778139, 1.22833201,
1.26778139, 1.26778139, 1.17401082, 1.26778139, 1.26778139,
1.26778139, 1.18165463, 1.14879515, 1.26778139, 1.26778139,
1.20323789, 1.26778139, 1.22636323, 1.23663419, 1.26778139,
1.30160799, 1.26778139, 1.22207908, 1.18750322, 1.26778139,
1.11251264, 1.26778139, 1.30160799, 1.30160799, 1.26778139,
1.26778139, 1.25822372, 1.26778139, 1.26267393, 1.26778139,
1.2212061 , 1.26778139, 1.26778139, 1.30160799, 1.17885044,
1.30160799, 1.17775972, 1.26356636, 1.3010351 , 1.26778139,
1.26778139, 1.23961001, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.26778139, 1.2562606 , 1.26778139,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.18064019,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.1985176 ,
1.26778139, 1.26778139, 1.30160799, 1.26778139, 1.23698538,
1.2843024 , 1.26778139, 1.2259646 , 1.30160799, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26527722,
1.26778139, 1.30160799, 1.26130022, 1.26778139, 1.26778139,
1.30160799, 1.26778139, 1.21120076, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.20188735, 1.25677913, 1.30160799,
0.55140002, 1.26778139, 1.25449551, 1.15614975, 1.26778139,
1.25915352, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.24897124, 1.20380727, 1.26778139,
1.26778139, 1.26778139, 1.28487027, 1.21494141, 1.30160799,
1.25902103, 1.26778139, 1.21928342, 1.14441094, 1.24701194,
1.26778139, 1.25115403, 1.26778139, 1.22940283, 1.30160799,
1.21564632, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.16796149, 1.24536864, 1.25163869, 1.26778139,
1.26778139, 1.18908457, 1.26778139, 1.22633779, 1.26778139,
1.30160799, 1.27703217, 1.26778139, 1.30160799, 1.15831808,
1.26778139, 1.1736282 , 1.26778139, 1.26778139, 1.26778139,
1.16645483, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26473784, 1.26504969, 1.26778139, 1.26778139, 1.27603623,
1.20415915, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.26778139, 1.26778139, 1.16553023,
1.27985531, 1.26778139, 1.10634932, 1.26778139, 1.2595606 ,
1.30160799, 1.23634993, 1.26778139, 1.26778139, 1.30160799,
1.23849721, 1.26778139, 1.30160799, 1.30160799, 1.12539678,
1.26778139, 1.26778139, 1.26778139, 1.27310196, 1.26407555,
1.26778139, 1.26778139, 1.2132354 , 1.26778139, 1.26778139,
1.26704388, 1.26778139, 1.26778139, 1.26778139, 1.09141435,
1.26778139, 1.26312739, 1.28745992, 1.25609328, 1.26778139,
1.26778139, 1.26243859, 1.26778139, 1.30160799, 1.26778139,
1.26778139, 1.30160799, 1.26778139, 1.26778139, 1.29236286,
1.26778139, 1.26778139, 1.2216565 , 1.26778139, 1.26778139,
1.26778139, 1.30160799, 1.16149526, 1.14707126, 1.26778139,
1.26778139, 1.26416792, 1.26778139, 1.26778139, 1.26778139,
1.21461993, 1.26778139, 1.26778139, 1.26778139, 1.2253618 ,
1.19075932, 1.24974786, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.07551322,
1.18957441, 1.26778139, 1.26778139, 1.30160799, 1.24746333,
1.26778139, 1.26778139, 1.26778139, 1.26778139, 1.26778139,
1.26778139, 1.26778139, 1.26778139, 1.23297496, 1.23620582,
1.26778139, 1.23007592, 1.26778139, 1.17823278, 1.26778139])

In [93]:

mlp = MLPRegressor(solver='lbfgs', random_state=100)

parameters = {'hidden_layer_sizes': [(5, 5, 2), (5, 5, 2, 2)], 'alpha': 10.0 ** -np.arange(1, 7)}
gcv = GridSearchCV(mlp, parameters)
gcv.fit(X, y)
gcv.best_score_

Out[93]:

-0.0066567132002208988
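
A best_score_ of roughly -0.0067 is the default R² that GridSearchCV reports for a regressor, and a value this close to zero (slightly negative under cross-validation) means the tuned network does no better than always predicting the mean of y. A minimal sketch of that baseline comparison, assuming the same X and y as above (DummyRegressor is an extra import not used elsewhere in this notebook):

from sklearn.dummy import DummyRegressor
from sklearn.model_selection import cross_val_score

# R² of a model that always predicts the training-fold mean of y;
# under cross-validation this lands at roughly 0 (usually slightly negative)
baseline = DummyRegressor(strategy='mean')
print(cross_val_score(baseline, X, y, scoring='r2').mean())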

In [95]:

gcv.predict(X)

Out[95]:

array([ 1252.37896701, 1252.37896701, 1252.37896701, ...,
        1252.37896701, 1252.37896701, 1252.37896701])

(all 1000 predictions are identical: 1252.37896701)
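
Every prediction is the same constant, which is the behaviour of a fitted network that has effectively collapsed to its output bias (for example when all relu hidden units are inactive); that is an interpretation, not something the notebook verifies. A quick check of the flatness, assuming gcv is the fitted grid search from above:

preds = gcv.predict(X)                 # the array shown above
print(np.ptp(preds))                   # peak-to-peak spread; 0.0 means every prediction is identical
print(np.unique(preds.round(8)))       # the single value returned for all 1000 rows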

In [80]:

data

Out[80]:

0 1 2 3 4 5
0 3.049700e+00 3.13170 -24.83100 179.990 10.843000 1258.80
1 6.087300e+00 3.43550 -26.99400 59.996 -2.799100 1311.90
2 9.101000e+00 0.57715 79.69900 59.996 -1.261500 1198.00
3 1.207900e+01 0.45136 -36.17300 239.980 -9.370000 1342.10
4 1.500900e+01 10.26000 -21.14300 239.980 0.051995 1350.20
5 1.788000e+01 4.41410 7.05120 -59.996 16.853000 1262.90
6 2.068000e+01 -20.04800 -31.42200 -239.980 -27.761000 1484.50
7 2.339800e+01 10.08200 22.52200 119.990 3.611400 1319.20
8 2.602500e+01 11.48000 6.77220 119.990 -6.084900 1365.00
9 2.854800e+01 -1.25230 43.76000 -179.990 23.560000 1386.30
10 3.095900e+01 12.81300 -30.48800 59.996 7.094000 1440.10
11 3.324800e+01 10.75000 37.00700 0.000 8.922800 1277.10
12 3.540500e+01 9.44840 -64.70000 179.990 15.278000 1163.90
13 3.742300e+01 16.30800 15.77700 -179.990 29.965000 1178.20
14 3.929300e+01 8.54800 36.02500 -0.000 18.834000 1268.10
15 4.100800e+01 20.72000 56.39100 -59.996 24.444000 1294.40
16 4.256200e+01 -11.63500 -4.59750 -119.990 -18.805000 1071.40
17 4.394700e+01 24.50100 -85.32500 0.000 38.278000 1220.90
18 4.515900e+01 -6.55990 -38.91500 -179.990 -27.289000 1299.60
19 4.619200e+01 -3.05640 43.18100 119.990 7.306400 1243.70
20 4.704300e+01 14.93400 89.80800 -119.990 43.318000 1040.00
21 4.770900e+01 -26.55000 -83.43500 0.000 -69.256000 1386.90
22 4.818600e+01 3.97050 37.23700 119.990 -13.642000 1262.70
23 4.847300e+01 -6.88080 -104.52000 59.996 -21.388000 1375.10
24 4.856900e+01 -21.27900 -103.62000 -179.990 -15.199000 1638.50
25 4.847300e+01 -23.99700 -48.90900 239.980 -56.282000 901.22
26 4.818600e+01 -2.48110 44.91500 0.000 1.207200 1255.10
27 4.770900e+01 19.07600 97.29600 0.000 39.323000 675.87
28 4.704300e+01 21.27500 89.56800 -179.990 40.226000 740.13
29 4.619200e+01 23.99000 89.79800 179.990 35.990000 881.35
… … … … … … …
970 -4.704300e+01 14.66100 62.89100 -59.996 41.154000 1272.80
971 -4.770900e+01 13.50800 87.50000 -59.996 -8.155300 1221.80
972 -4.818600e+01 -17.37100 24.38600 -239.980 -49.959000 1143.70
973 -4.847300e+01 14.60000 -59.18500 119.990 -9.279900 1221.50
974 -4.856900e+01 -17.34000 39.83000 179.990 11.128000 1294.80
975 -4.847300e+01 -22.92400 -7.85380 -179.990 -4.062200 1310.10
976 -4.818600e+01 -20.56400 -17.46400 0.000 12.406000 1381.50
977 -4.770900e+01 -3.79530 67.03900 119.990 -4.808700 1322.90
978 -4.704300e+01 12.42800 -3.22990 -59.996 5.101600 1406.00
979 -4.619200e+01 26.99300 116.61000 239.980 35.386000 1270.30
980 -4.515900e+01 17.26600 63.54800 -59.996 39.570000 1070.40
981 -4.394700e+01 -10.41600 -91.06800 59.996 -16.552000 1346.60
982 -4.256200e+01 11.34500 64.33900 59.996 19.630000 1566.50
983 -4.100800e+01 -10.79400 -41.02000 179.990 -45.177000 1171.30
984 -3.929300e+01 14.85300 44.87900 -59.996 10.127000 1266.90
985 -3.742300e+01 2.80770 -35.73600 -239.980 -16.462000 1326.70
986 -3.540500e+01 2.03060 -0.12697 59.996 -22.811000 1154.80
987 -3.324800e+01 13.62500 -29.21500 59.996 6.927800 1418.60
988 -3.095900e+01 -3.14260 -50.90000 -239.980 -44.663000 1175.80
989 -2.854800e+01 14.88800 -8.82180 119.990 23.442000 1394.60
990 -2.602500e+01 7.37600 -0.85256 -179.990 -7.350900 1210.30
991 -2.339800e+01 0.62822 32.86900 59.996 -11.776000 1254.60
992 -2.068000e+01 -12.43100 -21.52800 59.996 4.988700 1177.60
993 -1.788000e+01 23.45600 36.32800 59.996 39.656000 977.15
994 -1.500900e+01 18.45500 -52.39000 -119.990 35.711000 794.32
995 -1.207900e+01 -24.15700 -6.02410 -59.996 -2.408900 1240.60
996 -9.101000e+00 9.64880 63.31000 59.996 18.516000 1480.50
997 -6.087300e+00 7.82180 -3.27640 59.996 22.724000 1346.60
998 -3.049700e+00 14.69500 105.94000 119.990 44.387000 1108.30
999 -4.640700e-13 4.88970 -40.75700 -119.990 -9.860000 1283.70

1000 rows × 6 columns

In [81]:

Out[81]:

array([[ 8.87997132e-02, 2.32091274e-01, -4.28111179e-01,
1.35828922e+00, 4.14280209e-01],
[ 1.77247104e-01, 2.53241287e-01, -4.63121294e-01,
4.73693554e-01, -9.63090057e-02],
[ 2.64998587e-01, 5.42480630e-02, 1.26380157e+00,
4.73693554e-01, -3.87605362e-02],
…,
[ -1.77247104e-01, 5.58607663e-01, -7.92304507e-02,
4.73693554e-01, 8.58955915e-01],
[ -8.87997132e-02, 1.03710758e+00, 1.68853593e+00,
9.15969270e-01, 1.66974708e+00],
[ -1.34993283e-14, 3.54480095e-01, -6.85887917e-01,
-8.53163081e-01, -3.60580590e-01]])
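
The input for In [81] is missing from this dump, but the output looks like a standardized copy of the five feature columns (each roughly zero mean, unit variance). A sketch of how such an array is typically produced with StandardScaler; whether the notebook actually reassigned X this way is an assumption:

scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)     # column-wise (value - mean) / std
print(X_scaled[:3])                    # should match the first rows printed above if the same transform was applied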

In [82]:

np.max(X)

Out[82]:

3.6923510838800659

In [83]:

np.max(y)

Out[83]:

2119.0999999999999

In [84]:

np.min(y)

Out[84]:

378.07999999999998
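
So y spans roughly 378 to 2119, and the constant prediction of about 1252.4 seen earlier sits near the middle of that range; it is plausibly close to the mean of y, which would be consistent with an R² near zero. A one-line check, assuming y is still the raw target Series loaded above:

print(y.mean(), y.std())   # compare the mean with the constant prediction of ~1252.4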

In [89]:

gcv.best_estimator_

Out[89]:

MLPRegressor(activation=’relu’, alpha=9.9999999999999995e-07,
batch_size=’auto’, beta_1=0.9, beta_2=0.999, early_stopping=False,
epsilon=1e-08, hidden_layer_sizes=(5, 5, 2, 2),
learning_rate=’constant’, learning_rate_init=0.001, max_iter=200,
momentum=0.9, nesterovs_momentum=True, power_t=0.5, random_state=1,
shuffle=True, solver=’lbfgs’, tol=0.0001, validation_fraction=0.1,
verbose=False, warm_start=False)

In [5]:

mlp = MLPRegressor(random_state=100)

parameters = {'hidden_layer_sizes': [(5, 5, 2), (5, 5, 2, 2)], 'alpha': 10.0 ** -np.arange(1, 7),
              'solver': ('lbfgs', 'sgd', 'adam'), 'activation': ('relu', 'logistic', 'tanh')}
gcv = GridSearchCV(mlp, parameters)
gcv.fit(X, y)
print(gcv.best_score_)
print(gcv.best_estimator_)

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
  % self.max_iter, ConvergenceWarning)

(this warning is emitted once for every 'sgd' and 'adam' candidate fit in the grid search that reached max_iter=200)

-0.00606707666593
MLPRegressor(activation=’logistic’, alpha=0.001, batch_size=’auto’,
beta_1=0.9, beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 5, 2, 2), learning_rate=’constant’,
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=100,
shuffle=True, solver=’lbfgs’, tol=0.0001, validation_fraction=0.1,
verbose=False, warm_start=False)
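
The long run of ConvergenceWarnings above comes from the 'sgd' and 'adam' candidates stopping at the default max_iter=200 before converging. If those solvers are to stay in the grid, one option (a sketch, not something this notebook does) is to give the base estimator more iterations and, if desired, silence the remaining warnings while fitting; this reuses the parameters grid, X and y defined above:

import warnings
from sklearn.exceptions import ConvergenceWarning

mlp = MLPRegressor(random_state=100, max_iter=2000)    # more iterations for the sgd/adam candidates
gcv = GridSearchCV(mlp, parameters)                    # same parameter grid as above
with warnings.catch_warnings():
    warnings.simplefilter('ignore', category=ConvergenceWarning)  # suppress any remaining max_iter warnings
    gcv.fit(X, y)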

In [9]:

mlp = MLPRegressor(random_state=100)

parameters = {'hidden_layer_sizes': [(5, 5, 2), (5, 5, 2, 2), (16, 8, 4, 4, 2)], 'alpha': 10.0 ** -np.arange(1, 7),
              'solver': ('lbfgs', 'sgd', 'adam'), 'activation': ('relu', 'logistic', 'tanh')}
gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')
gcv.fit(X, y)
print(gcv.best_score_)
print(gcv.best_estimator_)
print(gcv.predict(X))

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
  % self.max_iter, ConvergenceWarning)

(the same warning is emitted for every 'sgd' and 'adam' candidate fit that reached max_iter=200)
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)
//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn’t converged yet.
% self.max_iter, ConvergenceWarning)

-29870.6573206
MLPRegressor(activation='logistic', alpha=0.001, batch_size='auto',
beta_1=0.9, beta_2=0.999, early_stopping=False, epsilon=1e-08,
hidden_layer_sizes=(5, 5, 2, 2), learning_rate='constant',
learning_rate_init=0.001, max_iter=200, momentum=0.9,
nesterovs_momentum=True, power_t=0.5, random_state=100,
shuffle=True, solver='lbfgs', tol=0.0001, validation_fraction=0.1,
verbose=False, warm_start=False)
[ 1267.0810749 1267.61252754 1210.7292607 1267.0810749 1267.0810749
1210.46744826 1247.16699799 1267.0810749 1267.0810749 1210.46744827
1210.73367487 1210.46591681 1267.0810749 1210.46744827 1210.46591681
1210.4659161 1210.46744827 1210.46744827 1247.16359422 1210.73284824
1210.45505024 1270.8856201 1210.73284824 1267.73052971 1247.16699799
1267.0810749 1210.46591681 1210.46591681 1210.45505058 1211.50026988
1210.45537588 1267.0810749 1211.60110014 1210.73285066 1235.78976159
1247.16699799 1210.45664857 1210.46744827 1210.46591597 1210.46744827
1264.50168789 1210.46744827 1271.12067389 1267.0810749 1210.46744827
1267.0810749 1247.16699799 1247.16699799 1267.0810749 1210.5308918
1247.16699799 1267.0810749 1247.16699799 1267.0810749 1247.16699799
1267.08107536 1271.12067412 1271.12795072 1240.45450454 1247.16699799
1271.12067412 1270.9350754 1271.12067412 1267.3889533 1270.93502934
1270.93512254 1247.16699799 1271.12067412 1271.12795072 1271.12794925
1271.12794843 1247.16699799 1210.9215296 1270.93512254 1270.93512251
1271.12067412 1247.16699802 1271.12067412 1270.93512255 1270.23398824
1247.16699799 1270.93512254 1271.13769436 1270.93512254 1247.16699799
1271.11980765 1271.12795072 1270.89533865 1267.0810749 1271.12795022
1247.16699799 1271.12067412 1267.0900479 1268.96369125 1247.16699799
1271.12067411 1247.16699799 1210.46744514 1271.14457448 1213.63486755
1271.12036975 1247.16698755 1210.46591681 1247.16699799 1247.16398286
1210.46744827 1210.46591681 1246.04524472 1267.0810749 1267.0810749
1210.73284622 1247.16699799 1267.0810749 1210.73284838 1270.88562008
1210.46582591 1210.46751171 1271.14457448 1210.46744827 1267.28280476
1210.73284867 1210.88391919 1267.02300174 1247.16699935 1267.0810749
1210.46744814 1267.0810749 1210.46744827 1267.0810749 1210.73000037
1267.0810749 1271.14457448 1210.46744827 1267.0810749 1210.46184158
1210.46744827 1210.45506519 1211.82805954 1267.0810749 1247.16699799
1210.46591788 1267.0810749 1210.45505023 1271.12039145 1210.73286899
1253.35809932 1210.46591696 1271.12795072 1267.08107513 1247.16699799
1210.46565452 1210.45505171 1247.16699799 1267.08224735 1271.12701768
1267.0810749 1267.0810749 1210.45505023 1270.93514086 1247.16699799
1247.16699799 1247.16699799 1267.0810749 1270.93512254 1271.12795072
1271.12067412 1271.12067412 1247.16699799 1271.12067387 1267.10284826
1267.0810749 1247.16699724 1271.12067412 1271.12032178 1270.93512254
1271.12067412 1267.0810749 1262.2055116 1271.12067412 1271.12067412
1270.77714806 1271.11276436 1247.16699799 1270.93512254 1271.12067412
1247.16699799 1210.92152717 1271.12795021 1271.12795073 1270.93512074
1267.0810749 1267.08107512 1271.1279507 1247.16699799 1247.16699799
1247.16699655 1210.92152714 1210.65039395 1247.16699838 1247.16699799
1267.0810749 1247.16699799 1232.82721578 1210.45803162 1210.46606023
1268.77547679 1267.0810749 1271.14457448 1210.46591681 1267.0810749
1210.67771882 1210.46591684 1210.46744827 1210.48921399 1210.46744827
1210.73284824 1210.46744827 1210.46744827 1210.46591681 1210.46591681
1210.5794626 1212.18216972 1210.73284804 1210.46591681 1210.46591681
1210.45505023 1210.46744827 1211.19965688 1210.46591681 1267.0810749
1247.16699799 1210.46591681 1210.46744827 1210.46744827 1247.16698495
1210.45995245 1210.46744827 1210.45505318 1266.18315859 1210.73284825
1210.46747301 1267.0810749 1267.0810749 1267.0810749 1247.16699799
1270.88562015 1267.0810749 1247.16699799 1247.16699799 1247.13047323
1247.16699799 1267.08180938 1270.88218619 1267.57838215 1247.16699234
1270.93512254 1271.12795072 1271.12067289 1271.14457444 1271.12067412
1267.0810749 1270.72467219 1210.45505023 1271.12067412 1270.93512254
1269.34384991 1270.93512255 1271.12067412 1267.08162657 1271.12874629
1271.14457448 1247.16699799 1270.97967945 1271.12795072 1271.12067193
1271.12795072 1270.93512286 1210.49472255 1270.93512254 1270.70540074
1247.16699799 1247.1821467 1270.7089417 1210.4631112 1247.16699799
1271.12795071 1270.93512254 1271.13433939 1210.45607155 1271.13181385
1271.12067412 1247.16699799 1210.92152714 1271.14453324 1270.93512235
1271.12588755 1247.16699799 1271.12795078 1271.14457448 1210.46591683
1267.0810749 1267.0810749 1267.08138764 1247.16699799 1267.08106033
1247.1672415 1247.16699799 1210.57065732 1210.46743295 1210.46744827
1210.46591681 1247.16714478 1210.46461769 1210.73491758 1267.0810749
1210.46744827 1210.46586856 1266.60167366 1210.46745102 1210.46591681
1210.73284824 1210.7328536 1267.0810749 1210.73880098 1210.73294494
1210.73284825 1259.1563028 1210.46591681 1210.46744827 1210.46890478
1210.46744827 1210.73284824 1267.0810749 1267.0810749 1210.45505023
1267.0810749 1210.73284824 1210.73284311 1247.16699799 1210.46744828
1247.16699799 1210.73284824 1210.46744832 1210.46591681 1271.1206763
1210.45505032 1267.0810749 1271.12067568 1247.16699799 1271.12794599
1247.16699799 1247.16699799 1247.16699799 1247.16699799 1267.0810749
1247.16684483 1271.14457448 1271.12067412 1247.16699799 1271.12795072
1247.16696668 1271.12067412 1271.10821404 1248.39444714 1270.93512252
1271.12067412 1271.14457448 1271.12067412 1270.7574221 1271.12795072
1270.98379553 1247.16699799 1271.12067412 1212.65001137 1271.12067412
1271.14457443 1247.16699799 1271.12067412 1271.12067412 1270.93511773
1271.12067354 1247.16699799 1267.08107508 1267.0810749 1247.16699799
1270.90017751 1267.09632254 1271.14428737 1210.4917952 1271.14407544
1247.16699799 1247.16699799 1271.12795072 1247.16699778 1247.16699799
1247.16699799 1247.16699799 1247.16699799 1210.45505023 1247.16699799
1247.16699799 1210.46744827 1247.11984601 1210.46744827 1267.0810749
1210.69274821 1267.0810749 1271.14444101 1247.16699799 1210.46591683
1210.73913285 1210.73284864 1247.16699799 1210.46591681 1267.0810749
1267.0810749 1210.73282923 1210.45505023 1247.16625583 1210.73284824
1247.16699799 1247.15590892 1267.41010867 1210.46591684 1210.46591681
1210.73285146 1210.46744827 1210.46591681 1210.73168967 1210.46744827
1210.73284824 1210.73284824 1210.46744827 1267.0810749 1210.46591682
1210.46745203 1210.46744827 1210.46744827 1210.46591694 1267.0810749
1210.46591681 1267.0810749 1247.16699799 1267.0810749 1270.88562613
1210.45521523 1247.16699799 1210.45505043 1210.46591606 1267.0810749
1267.0810749 1234.93443847 1271.12794682 1267.0810749 1271.12067411
1267.0810749 1271.12795072 1271.14147677 1247.16699799 1247.90125881
1269.94597937 1210.94808544 1267.08103536 1267.0810749 1271.12794662
1247.18569665 1271.14457448 1271.12067412 1271.12795072 1270.93512254
1247.1670645 1270.93512254 1271.14457448 1247.16699799 1271.12795072
1247.1670309 1270.93512267 1271.12795072 1213.71009237 1271.12063564
1271.12795071 1271.12795079 1247.16699799 1270.93543148 1271.14457448
1247.16699799 1271.12795074 1271.12067412 1271.12067412 1267.08498549
1247.16699799 1271.12067412 1271.12067412 1261.30462726 1267.0810749
1271.12067412 1271.14457448 1270.88587872 1210.46591681 1247.16699799
1271.14451705 1270.90784348 1263.79040934 1210.46744831 1210.46710245
1210.53327193 1211.11617122 1267.0810749 1247.16699799 1267.08107491
1210.46744827 1210.46744828 1210.45505649 1267.39811825 1210.46591681
1267.0810749 1247.16699799 1210.73284824 1270.88562006 1210.46744827
1210.46591682 1267.0810749 1210.73284819 1210.73284824 1210.73284862
1210.73285317 1210.46591681 1210.46591702 1210.46744827 1210.73284819
1267.0810749 1211.10908675 1210.46591681 1210.79655102 1267.0810749
1210.46744814 1210.46591681 1210.46744826 1267.07995579 1267.0810749
1210.73696211 1247.16457593 1210.46591681 1267.0810749 1267.08107489
1267.0811505 1267.0810749 1210.46744827 1247.16699799 1267.0810749
1271.12067412 1271.14457448 1269.59975823 1271.12067416 1269.55796331
1247.16699799 1271.12795072 1271.12795072 1247.16699799 1247.16699799
1247.16699799 1247.16699799 1247.16699799 1271.12067412 1247.16699799
1271.12795072 1270.93512254 1271.12795072 1210.46336987 1271.12812571
1270.93512222 1247.16699801 1247.67564606 1271.12795072 1247.16699799
1271.12795072 1270.92887393 1247.16699799 1271.12795072 1271.12795072
1271.0492834 1267.0810749 1271.12067412 1270.70742754 1270.93512254
1247.16706315 1247.16699799 1271.12067412 1267.08107624 1247.16699799
1271.12795071 1247.16699799 1267.0810749 1270.92370337 1271.1420616
1247.16699799 1271.14341577 1267.08107546 1247.16699799 1210.46953858
1247.16699799 1247.16699799 1210.93570566 1267.0810749 1210.46744827
1267.0810749 1210.46591698 1267.0810749 1211.43431035 1210.45506311
1210.45505023 1267.0810749 1210.46591681 1210.46591681 1210.46744827
1210.46744804 1247.16699799 1214.89160626 1267.0810749 1267.08107479
1210.46745043 1210.46744827 1210.45505023 1210.46744827 1210.46744819
1210.46595909 1267.08086019 1210.46592974 1210.46591681 1210.46277838
1210.46745403 1210.46744827 1265.09820051 1247.16699799 1267.0810749
1210.7328392 1210.46591682 1216.31141064 1210.46744827 1267.0810749
1267.08107382 1271.12795072 1210.46748315 1267.08108267 1210.45505023
1271.12067453 1210.56159786 1247.16699799 1210.46591681 1247.16699799
1271.12067412 1247.16700018 1210.46776682 1210.92152723 1271.1279507
1210.46745382 1271.10876924 1270.88549163 1267.08113555 1247.16699799
1267.08107495 1271.12067255 1267.0810749 1266.47700302 1240.66924422
1267.0810749 1271.12067412 1271.12067412 1247.16699799 1240.65737815
1247.16674211 1271.12067423 1210.92152714 1271.12795072 1270.93512254
1271.12067412 1267.0810749 1270.92581802 1270.93500596 1271.12795072
1247.16699799 1271.12795072 1270.93478451 1264.02111839 1271.12795128
1247.16689916 1271.12067412 1247.16699799 1271.12070044 1267.08107476
1247.16699799 1271.12067412 1271.12795072 1271.12846803 1271.12795072
1267.0810749 1270.8842571 1270.9351105 1267.0810749 1247.16699799
1267.08107488 1245.59190433 1267.08107488 1210.73284824 1247.16699799
1247.28587053 1270.88562008 1210.73287417 1267.08107554 1210.46591681
1267.0810749 1271.14457448 1267.08107484 1267.0810749 1210.46591673
1267.41031917 1210.46341268 1267.0810749 1271.1261842 1257.45367481
1210.46744827 1210.73284824 1210.46744827 1267.0810749 1210.46744827
1210.74518773 1267.0810749 1210.46591681 1210.46744827 1267.0810749
1210.46744827 1210.73284827 1210.46934249 1210.46591681 1210.46744827
1210.73284824 1210.47496536 1267.0810749 1271.05538056 1210.46744827
1271.12809806 1247.16699799 1210.95449297 1267.0810749 1267.0810749
1267.0810749 1210.46744827 1247.16699799 1250.06550152 1267.0810749
1210.45508073 1271.14417678 1267.0810749 1210.45505023 1271.1239332
1247.16045615 1267.0810749 1210.46593495 1247.16699799 1247.16699799
1271.12796432 1247.16699799 1271.12067412 1271.12067412 1247.16699799
1271.12710404 1247.16699799 1271.11938484 1271.12047227 1267.0810749
1271.14044123 1247.16699802 1271.12795072 1271.12067412 1270.93511995
1271.12067412 1210.76070894 1270.93512254 1271.12067412 1267.0810749
1271.12795072 1270.93154102 1271.12795061 1267.08107823 1271.12795072
1271.12795072 1271.12794994 1271.12067412 1247.16699799 1247.16699799
1247.16699799 1271.12041575 1271.12067412 1212.83246256 1247.16699768
1247.16699799 1267.0810749 1210.57415538 1271.14276256 1211.78831186
1247.16699799 1247.16677173 1267.0810749 1267.0810749 1267.0810749
1210.46609852 1271.14457446 1210.46591681 1267.08107495 1210.46591681
1267.0810749 1247.23308825 1267.08107433 1211.10979202 1210.46744827
1210.46591869 1264.69860405 1267.0810749 1210.46744827 1210.73283243
1267.41035285 1267.08107456 1210.45553928 1210.46744827 1210.45834549
1210.46744827 1210.45540338 1210.45505023 1210.4550538 1267.0810749
1270.88562008 1267.08107493 1210.46591681 1210.46591681 1210.46744827
1210.46570598 1210.45505312 1210.46744827 1267.0810749 1210.46591681
1267.0810749 1210.46744827 1267.0810749 1210.46744827 1247.16699799
1247.16699799 1247.16699798 1271.14457375 1267.0810749 1271.12067934
1247.16699799 1247.16699799 1247.16699799 1210.46744827 1210.46799753
1271.00986669 1267.08067803 1271.12553282 1247.16699799 1271.12784758
1247.16699799 1247.16699801 1247.16699799 1267.08107818 1270.93512261
1271.12062883 1210.46001105 1270.92962954 1247.16699799 1271.12067412
1270.93512258 1270.93511874 1271.12067412 1241.49291651 1271.12791054
1271.12067412 1271.12067412 1247.16699799 1271.12067412 1267.0810749
1271.12795072 1247.16699799 1271.12795071 1247.16699806 1271.12794285
1271.12795072 1270.93513572 1267.08206681 1271.12067411 1267.0810749
1267.0810749 1247.16699799 1271.12795072 1271.12795072 1271.09302909
1271.12795072 1247.16653013 1247.16699799 1267.80099537 1267.0810749
1247.16699799 1271.12067412 1269.49668511 1247.16699799 1210.45505023
1247.16699799 1247.16699799 1210.46591681 1210.73284825 1210.45649962
1267.0810749 1267.0810749 1210.46744827 1210.46592493 1267.0810749
1210.46744827 1210.46744827 1210.73284846 1271.14456931 1210.46591681
1210.46744827 1247.16699799 1212.05100667 1210.73284824 1210.46591743
1210.46591681 1210.46744827 1210.46724453 1267.0810749 1267.0810749
1210.73284824 1210.73284868 1210.46744827 1210.46604608 1210.46592456
1267.0810749 1210.46744827 1210.46744827 1210.46744827 1267.0810749
1210.46762581 1247.16643795 1247.16699799 1267.0810749 1267.0810749
1267.0810749 1267.0810749 1264.96337655 1270.88561997 1267.08107492
1271.04261144 1270.93508562 1247.16699799 1210.47923807 1271.12667483
1247.16699799 1271.12067412 1210.45524885 1210.86596225 1271.12794774
1271.11855902 1267.0810749 1271.14457442 1271.12712749 1247.16699799
1247.16699799 1268.60692294 1270.93512713 1247.16699799 1219.8509847
1240.76632914 1240.66982589 1247.16699799 1271.14457448 1271.12067411
1247.16699799 1271.12795072 1271.12058777 1270.93512254 1267.0810749
1240.71537128 1271.12795072 1267.62043456 1271.12067412 1270.93512222
1247.16699799 1271.12067663 1271.14456522 1247.16699799 1271.12067386
1247.16699799 1271.12053598 1271.12068669 1210.92385085 1247.166998
1247.167001 1251.854297 1267.08108345 1218.34100688 1247.16699799]

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
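
This warning means a network trained with the default stochastic ('adam') solver hit its max_iter=200 cap before the loss reached the tolerance, so the affected fits may be under-trained. A minimal sketch of the usual remedy, assuming the same X and y used in the cells of this notebook; the layer sizes and iteration cap are illustrative choices, not values taken from the assignment:

from sklearn.preprocessing import StandardScaler
from sklearn.neural_network import MLPRegressor

# standardise the inputs and give the optimizer room to actually converge
X_scaled = StandardScaler().fit_transform(X)
mlp_long = MLPRegressor(hidden_layer_sizes=(16, 8, 4), max_iter=2000,
                        random_state=100)
mlp_long.fit(X_scaled, y)
print(mlp_long.n_iter_)   # iterations the optimizer actually used
print(mlp_long.loss_)     # final training loss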

In [7]:

Out[7]:

array([ 1267.0810749 , 1267.61252754, 1210.7292607 , 1267.0810749 ,
1267.0810749 , 1210.46744826, 1247.16699799, 1267.0810749 ,
1267.0810749 , 1210.46744827, 1210.73367487, 1210.46591681,
1267.0810749 , 1210.46744827, 1210.46591681, 1210.4659161 ,
1210.46744827, 1210.46744827, 1247.16359422, 1210.73284824,
1210.45505024, 1270.8856201 , 1210.73284824, 1267.73052971,
1247.16699799, 1267.0810749 , 1210.46591681, 1210.46591681,
1210.45505058, 1211.50026988, 1210.45537588, 1267.0810749 ,
1211.60110014, 1210.73285066, 1235.78976159, 1247.16699799,
1210.45664857, 1210.46744827, 1210.46591597, 1210.46744827,
1264.50168789, 1210.46744827, 1271.12067389, 1267.0810749 ,
1210.46744827, 1267.0810749 , 1247.16699799, 1247.16699799,
1267.0810749 , 1210.5308918 , 1247.16699799, 1267.0810749 ,
1247.16699799, 1267.0810749 , 1247.16699799, 1267.08107536,
1271.12067412, 1271.12795072, 1240.45450454, 1247.16699799,
1271.12067412, 1270.9350754 , 1271.12067412, 1267.3889533 ,
1270.93502934, 1270.93512254, 1247.16699799, 1271.12067412,
1271.12795072, 1271.12794925, 1271.12794843, 1247.16699799,
1210.9215296 , 1270.93512254, 1270.93512251, 1271.12067412,
1247.16699802, 1271.12067412, 1270.93512255, 1270.23398824,
1247.16699799, 1270.93512254, 1271.13769436, 1270.93512254,
1247.16699799, 1271.11980765, 1271.12795072, 1270.89533865,
1267.0810749 , 1271.12795022, 1247.16699799, 1271.12067412,
1267.0900479 , 1268.96369125, 1247.16699799, 1271.12067411,
1247.16699799, 1210.46744514, 1271.14457448, 1213.63486755,
1271.12036975, 1247.16698755, 1210.46591681, 1247.16699799,
1247.16398286, 1210.46744827, 1210.46591681, 1246.04524472,
1267.0810749 , 1267.0810749 , 1210.73284622, 1247.16699799,
1267.0810749 , 1210.73284838, 1270.88562008, 1210.46582591,
1210.46751171, 1271.14457448, 1210.46744827, 1267.28280476,
1210.73284867, 1210.88391919, 1267.02300174, 1247.16699935,
1267.0810749 , 1210.46744814, 1267.0810749 , 1210.46744827,
1267.0810749 , 1210.73000037, 1267.0810749 , 1271.14457448,
1210.46744827, 1267.0810749 , 1210.46184158, 1210.46744827,
1210.45506519, 1211.82805954, 1267.0810749 , 1247.16699799,
1210.46591788, 1267.0810749 , 1210.45505023, 1271.12039145,
1210.73286899, 1253.35809932, 1210.46591696, 1271.12795072,
1267.08107513, 1247.16699799, 1210.46565452, 1210.45505171,
1247.16699799, 1267.08224735, 1271.12701768, 1267.0810749 ,
1267.0810749 , 1210.45505023, 1270.93514086, 1247.16699799,
1247.16699799, 1247.16699799, 1267.0810749 , 1270.93512254,
1271.12795072, 1271.12067412, 1271.12067412, 1247.16699799,
1271.12067387, 1267.10284826, 1267.0810749 , 1247.16699724,
1271.12067412, 1271.12032178, 1270.93512254, 1271.12067412,
1267.0810749 , 1262.2055116 , 1271.12067412, 1271.12067412,
1270.77714806, 1271.11276436, 1247.16699799, 1270.93512254,
1271.12067412, 1247.16699799, 1210.92152717, 1271.12795021,
1271.12795073, 1270.93512074, 1267.0810749 , 1267.08107512,
1271.1279507 , 1247.16699799, 1247.16699799, 1247.16699655,
1210.92152714, 1210.65039395, 1247.16699838, 1247.16699799,
1267.0810749 , 1247.16699799, 1232.82721578, 1210.45803162,
1210.46606023, 1268.77547679, 1267.0810749 , 1271.14457448,
1210.46591681, 1267.0810749 , 1210.67771882, 1210.46591684,
1210.46744827, 1210.48921399, 1210.46744827, 1210.73284824,
1210.46744827, 1210.46744827, 1210.46591681, 1210.46591681,
1210.5794626 , 1212.18216972, 1210.73284804, 1210.46591681,
1210.46591681, 1210.45505023, 1210.46744827, 1211.19965688,
1210.46591681, 1267.0810749 , 1247.16699799, 1210.46591681,
1210.46744827, 1210.46744827, 1247.16698495, 1210.45995245,
1210.46744827, 1210.45505318, 1266.18315859, 1210.73284825,
1210.46747301, 1267.0810749 , 1267.0810749 , 1267.0810749 ,
1247.16699799, 1270.88562015, 1267.0810749 , 1247.16699799,
1247.16699799, 1247.13047323, 1247.16699799, 1267.08180938,
1270.88218619, 1267.57838215, 1247.16699234, 1270.93512254,
1271.12795072, 1271.12067289, 1271.14457444, 1271.12067412,
1267.0810749 , 1270.72467219, 1210.45505023, 1271.12067412,
1270.93512254, 1269.34384991, 1270.93512255, 1271.12067412,
1267.08162657, 1271.12874629, 1271.14457448, 1247.16699799,
1270.97967945, 1271.12795072, 1271.12067193, 1271.12795072,
1270.93512286, 1210.49472255, 1270.93512254, 1270.70540074,
1247.16699799, 1247.1821467 , 1270.7089417 , 1210.4631112 ,
1247.16699799, 1271.12795071, 1270.93512254, 1271.13433939,
1210.45607155, 1271.13181385, 1271.12067412, 1247.16699799,
1210.92152714, 1271.14453324, 1270.93512235, 1271.12588755,
1247.16699799, 1271.12795078, 1271.14457448, 1210.46591683,
1267.0810749 , 1267.0810749 , 1267.08138764, 1247.16699799,
1267.08106033, 1247.1672415 , 1247.16699799, 1210.57065732,
1210.46743295, 1210.46744827, 1210.46591681, 1247.16714478,
1210.46461769, 1210.73491758, 1267.0810749 , 1210.46744827,
1210.46586856, 1266.60167366, 1210.46745102, 1210.46591681,
1210.73284824, 1210.7328536 , 1267.0810749 , 1210.73880098,
1210.73294494, 1210.73284825, 1259.1563028 , 1210.46591681,
1210.46744827, 1210.46890478, 1210.46744827, 1210.73284824,
1267.0810749 , 1267.0810749 , 1210.45505023, 1267.0810749 ,
1210.73284824, 1210.73284311, 1247.16699799, 1210.46744828,
1247.16699799, 1210.73284824, 1210.46744832, 1210.46591681,
1271.1206763 , 1210.45505032, 1267.0810749 , 1271.12067568,
1247.16699799, 1271.12794599, 1247.16699799, 1247.16699799,
1247.16699799, 1247.16699799, 1267.0810749 , 1247.16684483,
1271.14457448, 1271.12067412, 1247.16699799, 1271.12795072,
1247.16696668, 1271.12067412, 1271.10821404, 1248.39444714,
1270.93512252, 1271.12067412, 1271.14457448, 1271.12067412,
1270.7574221 , 1271.12795072, 1270.98379553, 1247.16699799,
1271.12067412, 1212.65001137, 1271.12067412, 1271.14457443,
1247.16699799, 1271.12067412, 1271.12067412, 1270.93511773,
1271.12067354, 1247.16699799, 1267.08107508, 1267.0810749 ,
1247.16699799, 1270.90017751, 1267.09632254, 1271.14428737,
1210.4917952 , 1271.14407544, 1247.16699799, 1247.16699799,
1271.12795072, 1247.16699778, 1247.16699799, 1247.16699799,
1247.16699799, 1247.16699799, 1210.45505023, 1247.16699799,
1247.16699799, 1210.46744827, 1247.11984601, 1210.46744827,
1267.0810749 , 1210.69274821, 1267.0810749 , 1271.14444101,
1247.16699799, 1210.46591683, 1210.73913285, 1210.73284864,
1247.16699799, 1210.46591681, 1267.0810749 , 1267.0810749 ,
1210.73282923, 1210.45505023, 1247.16625583, 1210.73284824,
1247.16699799, 1247.15590892, 1267.41010867, 1210.46591684,
1210.46591681, 1210.73285146, 1210.46744827, 1210.46591681,
1210.73168967, 1210.46744827, 1210.73284824, 1210.73284824,
1210.46744827, 1267.0810749 , 1210.46591682, 1210.46745203,
1210.46744827, 1210.46744827, 1210.46591694, 1267.0810749 ,
1210.46591681, 1267.0810749 , 1247.16699799, 1267.0810749 ,
1270.88562613, 1210.45521523, 1247.16699799, 1210.45505043,
1210.46591606, 1267.0810749 , 1267.0810749 , 1234.93443847,
1271.12794682, 1267.0810749 , 1271.12067411, 1267.0810749 ,
1271.12795072, 1271.14147677, 1247.16699799, 1247.90125881,
1269.94597937, 1210.94808544, 1267.08103536, 1267.0810749 ,
1271.12794662, 1247.18569665, 1271.14457448, 1271.12067412,
1271.12795072, 1270.93512254, 1247.1670645 , 1270.93512254,
1271.14457448, 1247.16699799, 1271.12795072, 1247.1670309 ,
1270.93512267, 1271.12795072, 1213.71009237, 1271.12063564,
1271.12795071, 1271.12795079, 1247.16699799, 1270.93543148,
1271.14457448, 1247.16699799, 1271.12795074, 1271.12067412,
1271.12067412, 1267.08498549, 1247.16699799, 1271.12067412,
1271.12067412, 1261.30462726, 1267.0810749 , 1271.12067412,
1271.14457448, 1270.88587872, 1210.46591681, 1247.16699799,
1271.14451705, 1270.90784348, 1263.79040934, 1210.46744831,
1210.46710245, 1210.53327193, 1211.11617122, 1267.0810749 ,
1247.16699799, 1267.08107491, 1210.46744827, 1210.46744828,
1210.45505649, 1267.39811825, 1210.46591681, 1267.0810749 ,
1247.16699799, 1210.73284824, 1270.88562006, 1210.46744827,
1210.46591682, 1267.0810749 , 1210.73284819, 1210.73284824,
1210.73284862, 1210.73285317, 1210.46591681, 1210.46591702,
1210.46744827, 1210.73284819, 1267.0810749 , 1211.10908675,
1210.46591681, 1210.79655102, 1267.0810749 , 1210.46744814,
1210.46591681, 1210.46744826, 1267.07995579, 1267.0810749 ,
1210.73696211, 1247.16457593, 1210.46591681, 1267.0810749 ,
1267.08107489, 1267.0811505 , 1267.0810749 , 1210.46744827,
1247.16699799, 1267.0810749 , 1271.12067412, 1271.14457448,
1269.59975823, 1271.12067416, 1269.55796331, 1247.16699799,
1271.12795072, 1271.12795072, 1247.16699799, 1247.16699799,
1247.16699799, 1247.16699799, 1247.16699799, 1271.12067412,
1247.16699799, 1271.12795072, 1270.93512254, 1271.12795072,
1210.46336987, 1271.12812571, 1270.93512222, 1247.16699801,
1247.67564606, 1271.12795072, 1247.16699799, 1271.12795072,
1270.92887393, 1247.16699799, 1271.12795072, 1271.12795072,
1271.0492834 , 1267.0810749 , 1271.12067412, 1270.70742754,
1270.93512254, 1247.16706315, 1247.16699799, 1271.12067412,
1267.08107624, 1247.16699799, 1271.12795071, 1247.16699799,
1267.0810749 , 1270.92370337, 1271.1420616 , 1247.16699799,
1271.14341577, 1267.08107546, 1247.16699799, 1210.46953858,
1247.16699799, 1247.16699799, 1210.93570566, 1267.0810749 ,
1210.46744827, 1267.0810749 , 1210.46591698, 1267.0810749 ,
1211.43431035, 1210.45506311, 1210.45505023, 1267.0810749 ,
1210.46591681, 1210.46591681, 1210.46744827, 1210.46744804,
1247.16699799, 1214.89160626, 1267.0810749 , 1267.08107479,
1210.46745043, 1210.46744827, 1210.45505023, 1210.46744827,
1210.46744819, 1210.46595909, 1267.08086019, 1210.46592974,
1210.46591681, 1210.46277838, 1210.46745403, 1210.46744827,
1265.09820051, 1247.16699799, 1267.0810749 , 1210.7328392 ,
1210.46591682, 1216.31141064, 1210.46744827, 1267.0810749 ,
1267.08107382, 1271.12795072, 1210.46748315, 1267.08108267,
1210.45505023, 1271.12067453, 1210.56159786, 1247.16699799,
1210.46591681, 1247.16699799, 1271.12067412, 1247.16700018,
1210.46776682, 1210.92152723, 1271.1279507 , 1210.46745382,
1271.10876924, 1270.88549163, 1267.08113555, 1247.16699799,
1267.08107495, 1271.12067255, 1267.0810749 , 1266.47700302,
1240.66924422, 1267.0810749 , 1271.12067412, 1271.12067412,
1247.16699799, 1240.65737815, 1247.16674211, 1271.12067423,
1210.92152714, 1271.12795072, 1270.93512254, 1271.12067412,
1267.0810749 , 1270.92581802, 1270.93500596, 1271.12795072,
1247.16699799, 1271.12795072, 1270.93478451, 1264.02111839,
1271.12795128, 1247.16689916, 1271.12067412, 1247.16699799,
1271.12070044, 1267.08107476, 1247.16699799, 1271.12067412,
1271.12795072, 1271.12846803, 1271.12795072, 1267.0810749 ,
1270.8842571 , 1270.9351105 , 1267.0810749 , 1247.16699799,
1267.08107488, 1245.59190433, 1267.08107488, 1210.73284824,
1247.16699799, 1247.28587053, 1270.88562008, 1210.73287417,
1267.08107554, 1210.46591681, 1267.0810749 , 1271.14457448,
1267.08107484, 1267.0810749 , 1210.46591673, 1267.41031917,
1210.46341268, 1267.0810749 , 1271.1261842 , 1257.45367481,
1210.46744827, 1210.73284824, 1210.46744827, 1267.0810749 ,
1210.46744827, 1210.74518773, 1267.0810749 , 1210.46591681,
1210.46744827, 1267.0810749 , 1210.46744827, 1210.73284827,
1210.46934249, 1210.46591681, 1210.46744827, 1210.73284824,
1210.47496536, 1267.0810749 , 1271.05538056, 1210.46744827,
1271.12809806, 1247.16699799, 1210.95449297, 1267.0810749 ,
1267.0810749 , 1267.0810749 , 1210.46744827, 1247.16699799,
1250.06550152, 1267.0810749 , 1210.45508073, 1271.14417678,
1267.0810749 , 1210.45505023, 1271.1239332 , 1247.16045615,
1267.0810749 , 1210.46593495, 1247.16699799, 1247.16699799,
1271.12796432, 1247.16699799, 1271.12067412, 1271.12067412,
1247.16699799, 1271.12710404, 1247.16699799, 1271.11938484,
1271.12047227, 1267.0810749 , 1271.14044123, 1247.16699802,
1271.12795072, 1271.12067412, 1270.93511995, 1271.12067412,
1210.76070894, 1270.93512254, 1271.12067412, 1267.0810749 ,
1271.12795072, 1270.93154102, 1271.12795061, 1267.08107823,
1271.12795072, 1271.12795072, 1271.12794994, 1271.12067412,
1247.16699799, 1247.16699799, 1247.16699799, 1271.12041575,
1271.12067412, 1212.83246256, 1247.16699768, 1247.16699799,
1267.0810749 , 1210.57415538, 1271.14276256, 1211.78831186,
1247.16699799, 1247.16677173, 1267.0810749 , 1267.0810749 ,
1267.0810749 , 1210.46609852, 1271.14457446, 1210.46591681,
1267.08107495, 1210.46591681, 1267.0810749 , 1247.23308825,
1267.08107433, 1211.10979202, 1210.46744827, 1210.46591869,
1264.69860405, 1267.0810749 , 1210.46744827, 1210.73283243,
1267.41035285, 1267.08107456, 1210.45553928, 1210.46744827,
1210.45834549, 1210.46744827, 1210.45540338, 1210.45505023,
1210.4550538 , 1267.0810749 , 1270.88562008, 1267.08107493,
1210.46591681, 1210.46591681, 1210.46744827, 1210.46570598,
1210.45505312, 1210.46744827, 1267.0810749 , 1210.46591681,
1267.0810749 , 1210.46744827, 1267.0810749 , 1210.46744827,
1247.16699799, 1247.16699799, 1247.16699798, 1271.14457375,
1267.0810749 , 1271.12067934, 1247.16699799, 1247.16699799,
1247.16699799, 1210.46744827, 1210.46799753, 1271.00986669,
1267.08067803, 1271.12553282, 1247.16699799, 1271.12784758,
1247.16699799, 1247.16699801, 1247.16699799, 1267.08107818,
1270.93512261, 1271.12062883, 1210.46001105, 1270.92962954,
1247.16699799, 1271.12067412, 1270.93512258, 1270.93511874,
1271.12067412, 1241.49291651, 1271.12791054, 1271.12067412,
1271.12067412, 1247.16699799, 1271.12067412, 1267.0810749 ,
1271.12795072, 1247.16699799, 1271.12795071, 1247.16699806,
1271.12794285, 1271.12795072, 1270.93513572, 1267.08206681,
1271.12067411, 1267.0810749 , 1267.0810749 , 1247.16699799,
1271.12795072, 1271.12795072, 1271.09302909, 1271.12795072,
1247.16653013, 1247.16699799, 1267.80099537, 1267.0810749 ,
1247.16699799, 1271.12067412, 1269.49668511, 1247.16699799,
1210.45505023, 1247.16699799, 1247.16699799, 1210.46591681,
1210.73284825, 1210.45649962, 1267.0810749 , 1267.0810749 ,
1210.46744827, 1210.46592493, 1267.0810749 , 1210.46744827,
1210.46744827, 1210.73284846, 1271.14456931, 1210.46591681,
1210.46744827, 1247.16699799, 1212.05100667, 1210.73284824,
1210.46591743, 1210.46591681, 1210.46744827, 1210.46724453,
1267.0810749 , 1267.0810749 , 1210.73284824, 1210.73284868,
1210.46744827, 1210.46604608, 1210.46592456, 1267.0810749 ,
1210.46744827, 1210.46744827, 1210.46744827, 1267.0810749 ,
1210.46762581, 1247.16643795, 1247.16699799, 1267.0810749 ,
1267.0810749 , 1267.0810749 , 1267.0810749 , 1264.96337655,
1270.88561997, 1267.08107492, 1271.04261144, 1270.93508562,
1247.16699799, 1210.47923807, 1271.12667483, 1247.16699799,
1271.12067412, 1210.45524885, 1210.86596225, 1271.12794774,
1271.11855902, 1267.0810749 , 1271.14457442, 1271.12712749,
1247.16699799, 1247.16699799, 1268.60692294, 1270.93512713,
1247.16699799, 1219.8509847 , 1240.76632914, 1240.66982589,
1247.16699799, 1271.14457448, 1271.12067411, 1247.16699799,
1271.12795072, 1271.12058777, 1270.93512254, 1267.0810749 ,
1240.71537128, 1271.12795072, 1267.62043456, 1271.12067412,
1270.93512222, 1247.16699799, 1271.12067663, 1271.14456522,
1247.16699799, 1271.12067386, 1247.16699799, 1271.12053598,
1271.12068669, 1210.92385085, 1247.166998 , 1247.167001 ,
1251.854297 , 1267.08108345, 1218.34100688, 1247.16699799])
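
The tail of the prediction array above collapses onto only a handful of distinct values. One frequent cause with MLPRegressor is unscaled inputs (scikit-learn's MLP guide recommends standardising features), so a quick, hedged check is to refit on standardised data. Everything below, including the _scaled names and the plain MLPRegressor(random_state=1) settings, is our own sketch rather than the notebook's code:

# Hedged sketch (not in the original notebook): standardise X before fitting
# and check whether the predictions still collapse onto a few values.
from sklearn.preprocessing import StandardScaler
from sklearn.neural_network import MLPRegressor

scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)          # X and y come from the cells above
reg_scaled = MLPRegressor(random_state=1)   # illustrative settings only
reg_scaled.fit(X_scaled, y)
print(reg_scaled.predict(X_scaled)[:10])    # inspect the first few predictions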

In [8]:

mlp = MLPRegressor(random_state=100)

# Search over the regularisation strength for a single (16, 8, 4) architecture,
# scoring each candidate by (negative) mean squared error.
parameters = {'hidden_layer_sizes': [(16, 8, 4)],
              'alpha': 10.0 ** -np.arange(1, 7)}
gcv = GridSearchCV(mlp, parameters, scoring='neg_mean_squared_error')
gcv.fit(X, y)
print(gcv.best_score_)
print(gcv.best_estimator_)

//anaconda/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
(The same ConvergenceWarning was emitted once for every fit in the grid search.)

-139250.603805
MLPRegressor(activation='relu', alpha=1.0000000000000001e-05,
       batch_size='auto', beta_1=0.9, beta_2=0.999, early_stopping=False,
       epsilon=1e-08, hidden_layer_sizes=(16, 8, 4),
       learning_rate='constant', learning_rate_init=0.001, max_iter=200,
       momentum=0.9, nesterovs_momentum=True, power_t=0.5,
       random_state=100, shuffle=True, solver='adam', tol=0.0001,
       validation_fraction=0.1, verbose=False, warm_start=False)

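The ConvergenceWarning above means the default adam optimiser hit the max_iter=200 cap before converging on every fold, so the reported best score (about -139250.6 negative MSE, i.e. an RMSE of roughly 373 in the units of the target) may understate what this architecture can do. Below is a minimal sketch of the same search with a larger iteration budget; the names mlp2/gcv2 and the max_iter=2000 and cv=5 values are our own choices, not from the notebook:

# Hedged sketch: rerun the grid search with a higher iteration cap so adam
# has a chance to converge; max_iter=2000 and cv=5 are illustrative values.
import numpy as np
from sklearn.neural_network import MLPRegressor
from sklearn.model_selection import GridSearchCV

mlp2 = MLPRegressor(random_state=100, max_iter=2000)
params2 = {'hidden_layer_sizes': [(16, 8, 4)],
           'alpha': 10.0 ** -np.arange(1, 7)}
gcv2 = GridSearchCV(mlp2, params2, scoring='neg_mean_squared_error', cv=5)
gcv2.fit(X, y)
print(gcv2.best_score_)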

In [ ]:

In [ ]:

# An alternative configuration: logistic activations, four small hidden layers,
# and the lbfgs solver. This cell only constructs the model; it is not fitted here.
train = MLPRegressor(activation='logistic', alpha=0.001, batch_size='auto',
                     beta_1=0.9, beta_2=0.999, early_stopping=False, epsilon=1e-08,
                     hidden_layer_sizes=(5, 5, 2, 2), learning_rate='constant',
                     learning_rate_init=0.001, max_iter=200, momentum=0.9,
                     nesterovs_momentum=True, power_t=0.5, random_state=100,
                     shuffle=True, solver='lbfgs', tol=0.0001, validation_fraction=0.1,
                     verbose=False, warm_start=False)
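
Since the cell above never fits the estimator stored in train, here is a minimal sketch of fitting and scoring this configuration on the same X and y; the use of cross_val_score and the cv=5 choice are our additions, not part of the original notebook:

from sklearn.model_selection import cross_val_score

train.fit(X, y)                             # fit the configured network on all data
cv_scores = cross_val_score(train, X, y,
                            scoring='neg_mean_squared_error', cv=5)
print(cv_scores.mean())                     # mean negative MSE across the folds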