-1.39239004999399 -0.597463161125779 0
-0.450657544657588 -0.213353960216045 0
-1.31065315008163 0.570012198388577 0
0.283034225925803 0.100616206973791 0
-1.20690154470503 1.04567545056343 0
0.363225104287267 -0.251742009446025 0
-1.40581917017698 0.820990815386176 0
-0.827153645455837 0.00797210596501829 0
-0.702793410047889 -0.417177463322878 0
0.392324045300484 -0.305373175442219 0
-0.431528193876147 1.37701694816351 0
-0.11295180208981 -0.361840535327792 0
-0.981689188629389 -0.582154671102762 0
-0.448794264346361 0.463328318297863 0
-0.702622003853321 0.603528268262744 0
-0.328902885317802 -0.267501036077738 0
-0.131961349397898 0.301571784168482 0
-0.628973986953497 -0.485888203978538 0
-1.48559930734336 0.96663834117353 0
-0.296888053417206 0.439752315729857 0
-0.346670994535089 1.1519364412874 0
-1.05533551052213 1.31179966367781 0
-1.49142957106233 0.478683003783226 0
-1.33546997606754 0.324147663637996 0
0.210148001089692 1.12443906217813 0
0.122110720723867 0.959316627681255 0
-0.117016438394785 1.39359128810465 0
-0.439716454595327 0.622985377535224 0
-1.21443545632064 -0.0675721880048513 0
-0.044203020632267 1.08023826554418 0
-1.43781319260597 0.151714637503028 0
-0.210955411195755 0.754373548179865 0
-1.32906787283719 -0.582365798205137 0
0.395755540579557 -0.0482255317270756 0
-0.950064381584525 0.57582343518734 0
0.463538816198707 1.07521523870528 0
-0.915471041575074 0.369861166551709 0
-0.764911223202944 0.887455349415541 0
-1.06273462437093 0.315951484814286 0
-1.01888907328248 0.888837175071239 0
0.304510889574885 0.598082594200969 0
0.30526377633214 0.870007870346308 0
-1.33973498083651 0.544797444343567 0
-1.15637363493443 -0.296885675191879 0
0.0517498999834061 0.250329764187336 0
-1.07870748266578 0.434240109845996 0
-1.49244686216116 0.903072848543525 0
-1.27188692055643 -0.262008222192526 0
-0.877685999497771 0.383768526464701 0
-1.42283617705107 0.799505907669663 0
-1.12144484370947 -0.304959133639932 0
-1.1929553039372 -0.316802667826414 0
0.270748233422637 0.785760683566332 0
-1.10865602828562 0.253092674165964 0
-0.208883255720139 1.33320820108056 0
0.318265102803707 -0.293488938733935 0
0.342569833621383 1.04334607124329 0
-1.19844749011099 -0.217298712953925 0
-0.360936155542731 1.03436738587916 0
0.386019321158528 -0.288873052597046 0
-0.342862766236067 0.864014012366533 0
-0.989416690543294 -0.0408222213387489 0
-0.799497276544571 0.764487050101161 0
0.0904660951346159 0.843815629929304 0
-1.35941226780415 -0.353944836556911 0
-0.318538151681423 1.06931366547942 0
-1.10466577857733 0.434037046134472 0
-0.211337016895413 0.252423381060362 0
-0.29140030965209 1.29867684058845 0
-0.617389865219593 0.499081175401807 0
-0.706919208168983 0.343443296477199 0
-0.315600611269474 1.09392803125083 0
0.398918211460114 0.312187004461885 0
-0.716553501784801 1.36589952334762 0
-1.12933986261487 0.878367142379284 0
-0.351108273491263 -0.207985761016607 0
-0.245318084955215 1.07885727807879 0
-1.40073940716684 0.401824349164963 0
-0.979589378461242 -0.545007653906941 0
0.290631724521518 0.545144684240222 0
-0.0430971663445234 0.462633914873004 0
-0.712866369634867 1.08607340939343 0
0.307697057723999 0.715197622403502 0
-0.72378109395504 1.0842726636678 0
-0.792862934991717 -0.380106011778116 0
-0.00929343514144421 0.0281702294945717 0
-0.0211706962436438 -0.0278359189629555 0
-1.09965377300978 -0.31943767927587 0
-0.13910454697907 1.06923266462982 0
-0.764311667531729 0.600485058128834 0
-1.28119461610913 -0.0945590533316135 0
-1.09384045749903 -0.596761413291097 0
-0.190341887995601 1.01247879639268 0
-0.124750340357423 -0.178850366175175 0
-1.34087672270834 0.506385147199035 0
-1.44437530077994 -0.372433197125792 0
-0.88551652431488 0.90443640910089 0
-1.21463654562831 0.486857536807656 0
-1.36427760496736 0.273425411060452 0
-0.0501587707549334 0.7924127407372 0
0.888038067147136 -0.526633763313293 1
0.809786619618535 -0.244266811385751 1
-0.303372476249933 -0.142666004225612 1
-0.400697354227304 -0.391701476275921 1
0.428098751232028 -0.00846406407654288 1
0.955720882862806 -1.0200966835022 1
0.859844530001283 -1.04324980787933 1
0.24422537907958 -0.48508313074708 1
-0.40003602579236 -1.20495451949537 1
-0.160884959623218 -1.21119154579937 1
0.121141629293561 0.462979030236602 1
-0.0750535950064659 0.389202271029353 1
0.469299875199795 -0.945363217592239 1
0.379730703309178 -0.578576540201902 1
0.711899323388934 -0.143859611451626 1
0.3021453730762 -0.496773294731975 1
1.18529921211302 -0.204343584924936 1
0.746642898768187 0.309584275260568 1
-0.393032053485513 -0.150444281846285 1
1.22707438655198 -0.268507377803326 1
0.112810315564275 -1.03142468966544 1
-0.446556851267815 -0.289715267345309 1
0.282349769026041 -0.914270694926381 1
-0.437899433076382 -0.190545746684074 1
1.21587748639286 -0.230789105966687 1
0.303816361352801 -0.411103492230177 1
0.723193868994713 0.0814826678484678 1
1.25082901492715 -0.159235162287951 1
0.690007263794541 0.209017889946699 1
1.08633192069829 -0.247908465191722 1
0.681249354034662 0.423096599429846 1
0.63841650635004 0.0553249031305313 1
0.84547995403409 -0.0644301451742649 1
-0.479536907747388 -0.769960376992822 1
0.18311963416636 -0.788356084004045 1
1.04201487638056 -1.18284490853548 1
1.23969219066203 0.302438006177545 1
0.951149225234985 -1.09022662937641 1
1.28785296715796 -1.24130391851068 1
0.571161009371281 -0.118028565868735 1
1.06017853878438 -0.309823715686798 1
-0.105230620130897 -0.582049637660384 1
1.09484578669071 -0.468878101184964 1
1.19762514159083 -1.0946824323386 1
0.757396573200822 0.0759713057428598 1
0.733841856941581 0.25337884798646 1
0.00132642686367035 0.34668731726706 1
1.07618230767548 -0.799844728410244 1
0.927763728424907 -1.1455404214561 1
-0.336986906826496 0.16991727091372 1
1.21063993126154 -0.18070336394012 1
0.114964190870523 -1.25541530698538 1
0.445969361811876 -0.0923021171241999 1
-0.116227554157376 -1.19037444852293 1
1.48999117128551 -0.945819684863091 1
0.795269263908267 0.439938424527645 1
-0.151728997007012 -0.074431486055255 1
0.0499680545181036 -0.414930473640561 1
1.17782938480377 -0.406360270082951 1
0.242479216307402 -0.381483010575175 1
-0.0349341910332441 -0.0236857827752829 1
-0.0366696417331696 -0.187451466545463 1
1.44203379750252 -1.38817701078951 1
-0.454176684841514 -1.19872906208038 1
-0.196347231045365 0.326470427587628 1
-0.481485072523355 0.0946592338383198 1
0.787740675732493 -0.63975919559598 1
0.329126983880997 -0.294387384876609 1
-0.318631276488304 0.51141545586288 1
0.898519163951278 -1.04682774432003 1
0.815055921673775 -1.13671000637114 1
0.570957642048597 0.503418699279428 1
-0.392000380903482 -1.34431580603123 1
0.019984545186162 -1.28814458064735 1
1.22665323317051 -1.13670367151499 1
-0.325099743902683 0.328613089770079 1
0.918809719383717 0.0432894252240658 1
1.22844546660781 -1.37063437104225 1
0.0860512908548117 0.0158091507852077 1
0.789840504527092 -0.964876345917583 1
1.15727329067886 -1.06213231906295 1
1.24382985197008 -0.718034704774618 1
1.32504406385124 -0.664852235093713 1
0.380316628143191 0.204785574600101 1
0.20805804617703 -0.346923151612282 1
0.355418080464005 0.196368443220854 1
-0.49825088866055 -1.11034972183406 1
1.04706199467182 -0.595174945518374 1
-0.320349996909499 -1.12687651589513 1
0.867786107584834 -1.2676496706903 1
-0.191824777051806 -0.253314070031047 1
-0.403055045753717 -0.303916897252202 1
-0.462830908596516 -0.777312583848834 1
0.795230325311422 0.216036053746939 1
1.32828597724438 -0.831470453739166 1
0.363404605537653 0.390453467518091 1
-0.0148076210170984 0.0875863771885633 1
0.808031119406223 -0.677718711644411 1
-0.454295286908746 -0.942977077886462 1
-0.223105819895864 -0.941122464463115 1
import numpy as np

# Fix the RNG so the random weight initialisation below is reproducible.
np.random.seed(1)

# Learning rate used by the training loop.
speed_train = 0.5

# Read the training set: one sample per line, tab-separated "x\ty\tlabel".
# Use a context manager so the file is always closed, and strip() instead of
# d[:-1] so a missing trailing newline cannot corrupt the last field; skip
# blank lines so they cannot produce a malformed row.
arr = []
with open("data.txt", 'r') as f:
    for d in f:
        d = d.strip()
        if d:
            arr.append(d.split('\t'))

# Select the feature columns (columns 1 and 2: x and y).
train_data = np.array(np.array(arr)[:, :2], dtype=float)
# Bias neuron: a constant-1 input appended to every sample.
train_bias = np.array([[1] * len(arr)], dtype=float).T
# Final training matrix: one row [x, y, 1] per sample.
train = np.hstack((train_data, train_bias))
# Desired outputs (third column), as an N x 1 column vector of ints.
res = np.array([np.array(arr)[:, 2]], dtype=int).T
def sigmoid(z):
    """Logistic activation: map z (scalar or ndarray) into (0, 1)."""
    denom = 1 + np.exp(-z)
    return 1 / denom
def diffsigmoid(z):
    """Sigmoid derivative expressed in terms of the sigmoid's OUTPUT z."""
    return z * (1 - z)
def normal(z, nbias):
    """Shift z down by nbias and round to the nearest integer value."""
    shifted = z - nbias
    return np.round(shifted)
# Random starting weights in [0, 1), one per input (x, y, bias).
weights = np.random.random((3, 1))
print("начальные веса: ")
print(weights)

# One forward pass before training, shown step by step for illustration.
prediction = sigmoid(np.dot(train, weights))
print("результат: ")
print(prediction)

error = res - prediction
print("ошибка: ")
print(error)

# Correction: error scaled by the sigmoid slope, projected onto the inputs.
delta = np.dot(train.T, error * diffsigmoid(prediction))
print("Как нужно изменить веса: ")
print(delta)

weights += delta
print("Измененные веса: ")
print(weights)

# Main training loop: repeat the same update, now scaled by the learning
# rate, printing the summed error each iteration to watch convergence.
for _ in range(10000):
    prediction = sigmoid(np.dot(train, weights))
    error = res - prediction
    delta = np.dot(train.T, speed_train * error * diffsigmoid(prediction))
    weights += delta
    print(error.sum())

print("weights: ", weights)
# Final hard classification of the training set (threshold at 0.5 via round).
print(normal(sigmoid(np.dot(train, weights)), 0))