diff --git a/Chapter_1 Logistic Regression/test_data b/Chapter_1 Logistic Regression/test_data
new file mode 100644
index 0000000..a90c2bd
--- /dev/null
+++ b/Chapter_1 Logistic Regression/test_data
@@ -0,0 +1,200 @@
+7.33251317753861 9.84290929650398
+1.14288134664155 9.31938382343869
+5.69123321602869 7.01397166818584
+2.50648396980344 7.05766788137925
+8.61756151890868 9.98657202473272
+1.41851773509793 9.77704969645800
+8.61450253418651 9.80161361673675
+7.20252421999920 8.45756147719461
+3.79585154363648 9.56147516348640
+7.12986596603599 9.92424540796407
+5.90166629240928 7.01231298989034
+7.64216375281899 9.91037363924651
+6.10861639371996 9.29953378509465
+6.68819221312425 8.59494569110640
+5.89930101159801 7.43009940132321
+6.35441479217648 7.43863129967550
+2.49230686464801 3.79277610650803
+0.874186031122628 8.56545115532597
+6.25345760678235 8.12438477163678
+8.55199843954519 9.56743033736205
+3.94869923690758 6.87606576238586
+6.88965109334102 9.56780033528095
+1.68185344098941 6.26602106875295
+4.01027580639810 8.23519946958995
+6.38428347772265 9.35832990092658
+2.48422569298721 7.91301493123820
+5.89588203576457 7.40064804417771
+1.07097913402539 6.02251810104346
+8.63769562664473 9.76101886404358
+5.26740975881800 7.10280802002263
+6.76140353375088 8.33245855777541
+4.55361346498628 8.66197879154849
+8.01812927282219 9.96002944206410
+4.92493976967423 6.48984272359645
+1.34364605003152 4.31522038864128
+7.56645530385296 8.93098017284231
+7.32856343461935 8.73559997192947
+8.36337260868505 9.58618186062654
+1.76935725388087 4.58485493022287
+5.54440208531975 8.17989804462944
+3.16493556356697 9.01287414297507
+5.26737682037452 8.31928790306938
+8.25474297446829 9.46776651141527
+6.81480206199649 9.46184932462711
+3.42401262277821 7.59017892397541
+0.682688606067573 2.13140854275735
+4.77717797708075 9.06746251589252
+8.40609615806265 9.48324795436671
+5.11941294784973 7.94092419194072
+0.107118625511173 4.10511031080441
+1.45964077373918 8.44883153836142
+2.80093537840324 7.07734644006141
+1.49083856549803 7.01121814413782
+2.36674156086130 7.70541726069665
+6.20293052826007 9.29556250878087
+4.05487438652248 5.46938162981209
+2.06079271845137 9.39862998789417
+1.37140217072301 8.67122777257986
+4.84508191734051 9.98394006421844
+0.703579758778653 5.37622471649251
+0.959874931625260 9.69365580472953
+0.0417080172066070 7.98358222061436
+7.35572898588090 9.78409851002885
+0.759922609598193 5.05416257751295
+2.33883362565589 8.66822288329864
+3.88272444717190 9.54275911938782
+1.63662325472567 4.57910351557924
+1.30985082346245 3.35623833816854
+7.82362986876080 9.50557703028000
+4.94874181652699 6.53599112906454
+7.67728005949704 9.50008478600453
+3.15857142803044 7.15668195476007
+3.61627230376748 5.02525628581462
+2.15924538198292 4.00283995494553
+1.65517009454175 4.41758058093557
+3.75540362175933 5.01582106720932
+8.12444498923753 9.95165814730251
+4.41777683221272 7.65964160679034
+3.03947468839239 9.40426842177465
+3.32322103008194 4.95449449273065
+7.02226861489024 8.79306734474469
+2.17522157322449 5.93183247074687
+0.868090726515497 2.94128556851324
+8.47845531697937 9.97712220268860
+5.17687735570619 6.40542188001300
+2.11301922035166 5.54521551252613
+7.39074636178163 8.41553439986347
+0.387214214920271 2.84268913849686
+5.84203927460807 9.15278983040824
+5.82971366822676 8.25927093139731
+4.92308003057711 7.13115624031492
+6.70223526366741 8.13640943396238
+6.18097890028784 7.69830072034376
+3.31636136841303 7.87215118881414
+7.02204691636239 8.18250988105294
+8.36447373871857 9.85745951718317
+4.38112469162855 7.39430116436659
+4.02105374486826 6.54635130132668
+4.57657789843014 7.83593612424246
+7.35864937490036 9.66324641785085
+6.79886317174323 2.19550400558507
+8.30422412454229 3.89187751988243
+4.15654393219195 2.96399968284951
+8.88348530343686 4.33714944383228
+6.60227577401105 3.28878632645783
+2.86968063459726 0.563234429967053
+5.23831013665832 0.976880305804374
+8.59877913425850 1.47997081966077
+3.03329602875159 0.347099994341680
+3.04897868034898 0.892737314784995
+3.79992057985372 2.58538966294283
+4.87186652196626 0.715584122601641
+9.14392871811904 7.97900095502468
+4.94982975813493 0.438902015446521
+3.32258226320860 0.949285465202363
+6.35406466607753 1.40389865382386
+6.42558780443875 3.85876366464538
+2.99572060615516 0.234332825339264
+3.67008285896494 0.851164479782249
+4.81750083742427 1.93874942698667
+1.76964217381040 0.202017397699835
+8.20913160492765 0.210652826478027
+9.35968725530240 6.10533760636674
+5.39748076423221 2.54405282747684
+3.13555221794369 0.979895632720517
+9.66779685358222 4.73960088840639
+5.69022247723601 1.08622919814201
+5.40007969528150 2.74591412260864
+7.11221986779173 2.41747595922350
+4.30692983690029 3.26718716457572
+1.33964979615597 0.300647133549756
+9.21958144875315 6.54429819711743
+1.88841050790017 0.232649111467001
+4.01821155966517 2.05156276027461
+2.22897823619833 0.886372899104719
+1.96085675446517 0.628167164249429
+5.44756542975343 3.46488351223326
+7.43533370560625 5.81574338379743
+9.01830253897710 2.67942045419741
+7.28871249101315 1.24396912792495
+1.27486851674173 0.204522588292904
+5.50020192031181 2.15974654118566
+9.14250014260627 4.96583927175149
+6.55899750629609 4.77763763389266
+8.24940482076717 4.18088773553725
+2.64630222473423 0.395000602784235
+8.97860739768491 0.228779804972462
+5.40911249661002 0.740409676547689
+9.80812584677043 6.27750259684544
+5.50424461739359 2.12189727534722
+1.53656980821675 0.365925533818477
+1.38188023750668 0.0272836109904681
+5.69484858217855 0.454132824391398
+8.36333698473662 6.01987473987129
+7.50195633130158 0.974418562562929
+6.93644727617476 3.07861153390469
+9.75677099287476 5.68306987800605
+8.20297517817161 3.26869363187115
+4.89152353405116 3.21172805778414
+1.75122833373023 0.100041834145903
+2.56049751807105 0.610057470246340
+8.48241768555163 6.01110793166852
+1.54424061252904 0.217292293635713
+5.74188247457466 1.97641409239304
+6.91173901876337 3.71241461026804
+3.62785671965543 1.13431742828523
+1.13938413072417 0.137162866799778
+2.50451568923190 0.159804157398042
+4.35168766049983 0.664031005121227
+5.40718874214422 1.49621154952786
+9.56467418299954 7.88234406137552
+1.47409297912713 0.349813342676726
+3.42207483758700 1.02413950354846
+5.93083811093360 4.64848345065932
+4.75969693884996 3.69597934891562
+3.71309453840859 1.90214720551986
+6.99704966425983 3.23316818617885
+7.28294968162278 4.18776134130548
+2.60319208960304 0.205231672986662
+9.99172355285225 1.53867332274632
+1.29340738477475 0.164660163515074
+8.93679850406629 5.31110955598668
+2.71389940461959 0.632285848653224
+5.14653343534370 4.07039458510250
+2.40764457003907 1.20427203219359
+6.80288083183079 2.18346279657764
+2.71831325712673 0.735872795240678
+5.33819854928671 0.523237125832879
+6.30556736225553 1.20005397144010
+4.46157211932470 2.01804940846530
+3.26625510225082 0.658212637318821
+6.55381795953901 1.47332188122579
+8.41938640019952 7.29075946387086
+7.57223913040838 2.26004190249210
+6.25662399950607 0.566501191933395
+9.15677335584760 7.17513606222610
+8.35984503433578 1.91891766916066
+6.34920625597898 0.120424501924355
+4.82733388192721 1.19687959104710
+2.45336269880575 0.259812107633635
diff --git a/Chapter_10 KMeans/KMeans.py b/Chapter_10 KMeans/KMeans.py
index d9d01f4..d0ab004 100644
--- a/Chapter_10 KMeans/KMeans.py
+++ b/Chapter_10 KMeans/KMeans.py
@@ -60,7 +60,7 @@ def kmeans(data, k, centroids):
     while change == True:
        change = False  # reset the change flag
        for i in xrange(m):
-            minDist = np.inf  # minimum distance between sample i and the centroids, initial value "争取穷" [a garbling of positive infinity]
+            minDist = np.inf  # minimum distance between sample i and the centroids, initial value positive infinity
            minIndex = 0  # the cluster the sample is assigned to
            for j in xrange(k):
                # compute the distance between sample i and each centroid
diff --git a/Chapter_2 Softmax Regression/softmax_regression_train.py b/Chapter_2 Softmax Regression/softmax_regression_train.py
index fe4ccea..583d7bf 100644
--- a/Chapter_2 Softmax Regression/softmax_regression_train.py
+++ b/Chapter_2 Softmax Regression/softmax_regression_train.py
@@ -89,7 +89,7 @@ def save_model(file_name, weights):
     feature, label, k = load_data(inputfile)
     # 2. Train the Softmax model
     print "---------- 2.training ------------"
-    weights = gradientAscent(feature, label, k, 5000, 0.2)
+    weights = gradientAscent(feature, label, k, 10000, 0.4)
     # 3. Save the final model
     print "---------- 3.save model ------------"
     save_model("weights", weights)
diff --git a/Chapter_3 Factorization Machine/FM_test.py b/Chapter_3 Factorization Machine/FM_test.py
index 269e3c7..0b2c72c 100644
--- a/Chapter_3 Factorization Machine/FM_test.py
+++ b/Chapter_3 Factorization Machine/FM_test.py
@@ -52,15 +52,13 @@ def save_result(file_name, result):
     input:  file_name(string): the name of the file to write
             result(mat): the predictions for the test data
     '''
-    f_result = open(file_name, "w")
-    m = np.shape(result)[0]
-    for i in xrange(m):
-        f_result.write(str(result[i, 0]) + "\n")
-    f_result.close()
+    f = open(file_name, "w")
+    f.write("\n".join(str(x) for x in result))
+    f.close()

 if __name__ == "__main__":
     # 1. Load the test data
-    dataTest = loadDataSet("data_test.txt")
+    dataTest = loadDataSet("test_data.txt")
     # 2. Load the FM model
     w0, w , v = loadModel("weights")
     # 3. Predict
diff --git a/Chapter_3 Factorization Machine/FM_train.py b/Chapter_3 Factorization Machine/FM_train.py
index 37d9435..204ce8a 100644
--- a/Chapter_3 Factorization Machine/FM_train.py
+++ b/Chapter_3 Factorization Machine/FM_train.py
@@ -23,8 +23,7 @@ def loadDataSet(data):
             lineArr.append(float(lines[i]))
         dataMat.append(lineArr)

-        #labelMat.append(float(lines[-1]) * 2 - 1)  # map the labels into {-1,1}
-        labelMat.append(float(lines[-1]))
+        labelMat.append(float(lines[-1]) * 2 - 1)  # map the labels into {-1,1}
     fr.close()
     return dataMat, labelMat
@@ -34,7 +33,7 @@ def sigmoid(inx):
 def initialize_v(n, k):
     '''Initialise the cross-term vectors
     input:  n(int): the number of features
-            k(int): the dimension of the FM cross-term vectors
+            k(int): the hyper-parameter of the FM model
     output: v(mat): the cross-term weight matrix
     '''
     v = np.mat(np.zeros((n, k)))
@@ -84,7 +83,7 @@ def stocGradAscent(dataMatrix, classLabels, k, max_iter, alpha):
         # compute the value of the loss function
         if it % 1000 == 0:
             print "\t------- iter: ", it, " , cost: ", \
-            getCost(getPrediction(np.mat(dataTrain), w0, w, v), classLabels)
+            getCost(getPrediction(np.mat(dataMatrix), w0, w, v), classLabels)
     # 3. Return the final FM model parameters
     return w0, w, v
@@ -174,7 +173,7 @@ def save_model(file_name, w0, w, v):
     dataTrain, labelTrain = loadDataSet("data_1.txt")
     print "---------- 2.learning ---------"
     # 2. Train the FM model by stochastic gradient descent
-    w0, w, v = stocGradAscent(np.mat(dataTrain), labelTrain, 2, 20000, 0.01)
+    w0, w, v = stocGradAscent(np.mat(dataTrain), labelTrain, 3, 10000, 0.01)
     predict_result = getPrediction(np.mat(dataTrain), w0, w, v)  # get the training accuracy
     print "----------training accuracy: %f" % (1 - getAccuracy(predict_result, labelTrain))
     print "---------- 3.save result ---------"
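Two of the FM_train.py changes above are substantive rather than cosmetic: restoring the `* 2 - 1` mapping matters because the logit loss computed in getCost assumes labels in {-1, 1}, and replacing dataTrain with dataMatrix removes an accidental dependence on a global variable inside stocGradAscent. (One caution on the FM_test.py rewrite: if result is a NumPy matrix, iterating it yields 1x1 row matrices, so the new one-liner would write strings like "[[0.5]]" unless result is flattened first.) Below is a minimal sketch of the label mapping and the loss it feeds, with hypothetical names, not the book's code:

import numpy as np

raw = [0.0, 1.0, 1.0, 0.0]           # labels as stored in the data file
labels = [y * 2 - 1 for y in raw]    # {0,1} -> {-1,1}, as loadDataSet now does

def logit_loss(score, y):
    # with y in {-1,1}, sigmoid(y * score) is the probability of the observed label
    return np.log(1.0 + np.exp(-y * score))

print(labels)                        # [-1.0, 1.0, 1.0, -1.0]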
diff --git a/Chapter_3 Factorization Machine/test_data.txt b/Chapter_3 Factorization Machine/test_data.txt
new file mode 100644
index 0000000..f8650e9
--- /dev/null
+++ b/Chapter_3 Factorization Machine/test_data.txt
@@ -0,0 +1,200 @@
+4.22885689100085 -0.383600664906212
+5.98523668756741 -0.401814062733461
+6.95949313301608 -0.406891865829261
+6.38530758271838 -0.397227032160838
+0.688060991180512 -0.324475388783223
+5.30864280694127 -0.403125099918000
+4.07619197041153 -0.404465070647941
+7.18358943205884 -0.409707070836633
+5.31333906565675 -0.396596366661128
+1.05629203329022 -0.369992373291273
+7.78802241824093 -0.406046543133335
+0.908232857874395 -0.328459209606044
+1.53656717591307 -0.351364694061123
+4.40085139001721 -0.397023473641097
+4.57424365687674 -0.406781494159556
+5.18052108361104 -0.408825386295581
+6.37709098072174 -0.409438975016263
+2.40707035480160 -0.391681735236429
+2.89064571674477 -0.394565774329386
+6.95140499551737 -0.400312578984568
+2.54790156597005 -0.368451837817781
+6.67832727013717 -0.408178347498748
+3.44462411301042 -0.401564489789818
+6.75332065747000 -0.398738626936024
+6.02170487581795 -0.400644238962528
+9.15991244131425 -0.407735759555317
+4.62449159242329 -0.395393857312668
+4.60916366028964 -0.404136726205582
+3.22471807186779 -0.401045032515435
+4.71357153710612 -0.386288539403391
+1.75874415683531 -0.389444742567982
+4.73485992965320 -0.389320232054202
+3.41124607049109 -0.394728964552589
+1.91745255461798 -0.391947209268189
+2.42849598318169 -0.405368635885985
+2.69061586686018 -0.398111035816425
+1.88661976791491 -0.360179143779989
+0.911134636865350 -0.362963861105539
+6.83363243294653 -0.405035385989699
+4.25728841871188 -0.399739646814442
+6.47617630172684 -0.405911721457004
+6.35786710514084 -0.409267285897986
+2.08934922426023 -0.391321994675435
+2.36230576993797 -0.359308512101375
+6.07303940685635 -0.401775908847004
+4.58725493648868 -0.401309556947776
+7.70285514803660 -0.405329650590112
+6.62009598359135 -0.402995014575865
+8.41929152691309 -0.409233742963925
+2.56440992229147 -0.389434289051228
+5.82249164527227 -0.402440260191123
+8.69941032358007 -0.407302464085700
+3.18074075481059 -0.372769131302750
+9.39829470344921 -0.409437184944561
+4.79463224948888 -0.401380241908074
+5.44716110526763 -0.403327530651797
+5.43885933999639 -0.404706501938553
+5.22495305777102 -0.409870913891884
+2.18676632399634 -0.354785976361645
+1.09697464523194 -0.315685237138469
+4.04579995857626 -0.392857794216805
+3.65816176838171 -0.401575759068070
+6.27896379614169 -0.406854769054641
+9.32853570278820 -0.409951384849673
+1.92028349427775 -0.350639391440659
+6.96266337082995 -0.400628405912004
+5.25404403859336 -0.400470088641706
+8.61139811393332 -0.407964326116437
+3.93456361215266 -0.399383003431485
+7.41257943454207 -0.405986863392910
+3.47712671277525 -0.377708337486631
+5.86092067231462 -0.398029583170793
+0.444540922782385 -0.374280609001722
+2.42785357820962 -0.378718538743031
+6.87796085120107 -0.403118962470252
+7.36340074301202 -0.404813730429160
+6.83415866967978 -0.406760189201662
+4.42305413383371 -0.383302839595060
+3.30857880214071 -0.386766093846753
+2.70270423432065 -0.369477504927622
+8.21721184961310 -0.406989567278616
+8.87770954256354 -0.408105816619614
+7.69114387388296 -0.405636700880363
+8.08514095887345 -0.408591528245170
+3.77395544835103 -0.383193789027371
+7.90407217966913 -0.409674704507269
+3.27565434075205 -0.396574186626550
+4.38644982586956 -0.405407692694353
+7.68854252429615 -0.403967834158350
+8.61980478702072 -0.409960253976007
+5.14423456505704 -0.407557450203104
+5.88026055308498 -0.396387506042030
+1.99862822857452 -0.370441048919565
+7.48705718215691 -0.408595410641654
+7.89963029944531 -0.405615902741996
+5.34064127370726 -0.392102808985665
+1.11705744193203 -0.323888651993048
+6.78652304800188 -0.404357991973989
+1.89710406017580 -0.374845678813075
+1.47608221976689 -0.330764364979088
+8.50712674289007 -0.276001595625538
+9.29608866756663 -0.250097725623909
+5.82790965175840 -0.223888078369432
+8.79013904597178 -0.192180167430789
+0.00522375356944771 -0.190112056752678
+6.12566469483999 -0.191862695134956
+5.27680069338442 -0.283620503061425
+8.01347605521952 -0.339789839392611
+4.98094291196390 -0.207611250783842
+5.74661219130188 -0.218342161423755
+7.38640291995402 -0.269295369034182
+2.46734525985975 -0.238288880357043
+0.834828136026227 -0.221533784108590
+6.60944557947342 -0.240792638559536
+8.90752116325322 -0.193485913413969
+7.69029085335896 -0.270681700261449
+9.28313062314188 -0.273179012277787
+0.169829383372613 -0.211350464907188
+8.62710718699670 -0.291129025290239
+8.44855674576263 -0.344570921371766
+5.52291341538775 -0.257212674743648
+0.319910157625669 -0.206112865902040
+3.62411462273053 -0.345825593131402
+4.89569989177322 -0.332877280834451
+1.23083747545945 -0.274049401025970
+1.46514910614890 -0.283848942570996
+0.426524109111434 -0.209171520400684
+2.81866855880430 -0.259990624655050
+6.95163039444332 -0.284971060010288
+5.35801055751113 -0.290120737029962
+1.23932277598070 -0.244113245697875
+8.52998155340816 -0.214682803486139
+2.70294332292698 -0.308364561952262
+5.64979570738201 -0.255621743131591
+4.17028951642886 -0.325183087759844
+9.47933121293169 -0.372333882795863
+1.05709426581721 -0.273398725501725
+1.66460440876421 -0.236624584151135
+5.73709764841198 -0.363472281253222
+9.31201384608250 -0.243771077248671
+7.37841653797590 -0.369353466594125
+8.60440563038232 -0.202858282052828
+9.84398312240972 -0.218156455352533
+7.85558989265031 -0.284113153415768
+1.77602460505865 -0.266184853812900
+1.33931250987971 -0.297183829519125
+9.39141706069548 -0.328615962521731
+2.95533834475356 -0.292786212653185
+4.67068187028852 -0.251577685729938
+0.252281814930363 -0.195413359789637
+5.59032544988695 -0.216561073003438
+3.47879194327261 -0.279761697153916
+0.542394844411296 -0.241760848739405
+6.62808061960974 -0.315832328127600
+8.98486137834300 -0.363912806574386
+9.88417928784981 -0.281869017489731
+7.06917419322763 -0.190096666339403
+2.87849344815137 -0.279436738890204
+4.64839941625137 -0.231269500181348
+8.18204038907671 -0.365092731458073
+1.78116953886766 -0.271224091462708
+0.567046890682912 -0.221052401385065
+3.35848974676925 -0.322190001834344
+2.08946673993135 -0.202875908957950
+6.75391177336247 -0.290281646438828
+9.12132474239623 -0.367065550694628
+7.45546073701717 -0.240588293840949
+5.61861425281637 -0.338669639313080
+5.97211350337855 -0.319126449955653
+1.34122932828682 -0.277150727988936
+8.94941675440814 -0.373023763824194
+2.42486558936719 -0.326102224896383
+4.41722057064424 -0.360419855410762
+8.97191350973572 -0.348399498123565
+0.933705167550930 -0.252575481502707
+4.56057666843742 -0.346355386685340
+9.95389727655092 -0.323504140663397
+2.97346815887922 -0.334814880905946
+2.98243971887764 -0.337381814430199
+5.05428142457703 -0.232514641064066
+6.31069999213594 -0.359626030006551
+0.808624231303137 -0.208403519638889
+9.05134744223571 -0.282039797611739
+1.09154212042459 -0.207245206497595
+3.38097718802172 -0.303443898123144
+7.46313427703679 -0.379865308531902
+0.484473392532221 -0.209222705848565
+6.03467983830770 -0.277585570350284
+7.29709448223228 -0.245958371186387
+7.81377051799277 -0.327592448772205
+6.92531986386519 -0.274004053493805
+3.96520792581593 -0.347683790060566
+7.80175531491174 -0.317975871659649
+6.07865907262946 -0.237887651602709
+1.04813241973500 -0.274359906785175
+5.49540107015198 -0.283385560614568
+8.90475679184438 -0.229599130878489
+7.34341083695970 -0.371524950619845
+0.728852990989761 -0.260383341617659
+7.98350864113952 -0.201049508579053
diff --git a/Chapter_5 Random Forest/random_forests_train.py b/Chapter_5 Random Forest/random_forests_train.py
index 58796ab..1b987fb 100644
--- a/Chapter_5 Random Forest/random_forests_train.py
+++ b/Chapter_5 Random Forest/random_forests_train.py
@@ -141,10 +141,10 @@ def save_model(trees_result, trees_feature, result_file, feature_file):
 if __name__ == "__main__":
     # 1. Load the data
     print "----------- 1、load data -----------"
-    data_train = load_data("data_2.txt")
+    data_train = load_data("data.txt")
     # 2. Train the random forest model
     print "----------- 2、random forest training ------------"
-    trees_result, trees_feature = random_forest_training(data_train, 1)
+    trees_result, trees_feature = random_forest_training(data_train, 50)
     # 3. Get the training accuracy
     print "------------ 3、get prediction correct rate ------------"
     result = get_predict(trees_result, trees_feature, data_train)
diff --git a/Chapter_6 BP/bp_test.py b/Chapter_6 BP/bp_test.py
index 9f43bb1..8544ce4 100644
--- a/Chapter_6 BP/bp_test.py
+++ b/Chapter_6 BP/bp_test.py
@@ -34,11 +34,11 @@ def generate_data():
         data[i, 1] = x[i, 1] * 9 - 4.5
     # 2. Save the data points to the file "test_data"
     f = open("test_data", "w")
-    m,n = np.shape(dataTest)
+    m,n = np.shape(data)
     for i in xrange(m):
         tmp =[]
         for j in xrange(n):
-            tmp.append(str(dataTest[i,j]))
+            tmp.append(str(data[i,j]))
         f.write("\t".join(tmp) + "\n")
     f.close()
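The random_forests_train.py change grows the ensemble from a single tree to 50, which is what actually gives bagging its variance reduction; the bp_test.py change fixes a NameError, since generate_data built the points in data but then wrote out the undefined dataTest. Below is a compact equivalent of the corrected write-out, sketched with numpy.savetxt instead of the book's explicit loop (the sample count m=2000 is a hypothetical default):

import numpy as np

def generate_data(m=2000):
    # sample m points uniformly from the square [-4.5, 4.5] x [-4.5, 4.5]
    data = np.random.rand(m, 2) * 9 - 4.5
    # one tab-separated point per line, matching the format bp_test.py writes
    np.savetxt("test_data", data, delimiter="\t")
    return data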
diff --git a/Chapter_7 LinearRegression/data_test.txt b/Chapter_7 LinearRegression/data_test.txt
new file mode 100644
index 0000000..4c63a19
--- /dev/null
+++ b/Chapter_7 LinearRegression/data_test.txt
@@ -0,0 +1,200 @@
+0.58661363181
+0.630835525943
+0.0839286060672
+0.633561188397
+0.432478968841
+0.0562872470932
+0.203962737522
+0.39845049219
+0.653903819601
+0.658444435312
+0.115603749641
+0.662221701909
+0.653704003451
+0.352516059291
+0.574977804896
+0.0992063218709
+0.287801264252
+0.634434143069
+0.567872415911
+0.655087243135
+0.444126435236
+0.0166682147266
+0.6083327884
+0.641835525048
+0.45914451797
+0.533974406109
+0.518747231354
+0.260779044478
+0.443976506343
+0.129536101896
+0.481834664763
+0.014992249584
+0.203359527648
+0.02157852971
+0.0559379549363
+0.592930035923
+0.471916854074
+0.217769426953
+0.649793713876
+0.0161134614081
+0.305063825636
+0.252360942598
+0.541981989919
+0.570553163784
+0.144885715824
+0.356619563558
+0.312183166099
+0.439034218899
+0.484910590719
+0.530801867305
+0.203012063448
+0.459860524071
+0.443760617488
+0.120780473865
+0.0760118157457
+0.364343646966
+0.655239132297
+0.227450835731
+0.415052981515
+0.175485842688
+0.527237756839
+0.193938912507
+0.370778378988
+0.475581342491
+0.625631411225
+0.654966187714
+0.398628048375
+0.0958221675297
+0.106929887783
+0.19509731377
+0.603787045867
+0.193541075202
+0.586281647176
+0.187891370353
+0.639785950792
+0.232235316133
+0.153794092822
+0.191937468143
+0.425963000208
+0.34073935572
+0.233129077011
+0.597827682699
+0.415051659479
+0.399938593397
+0.634981021589
+0.206681941896
+0.53341433254
+0.529804763314
+0.2515296498
+0.408321662205
+0.039340111695
+0.0256509791888
+0.389006630114
+0.555614513494
+0.641843283091
+0.0868783508355
+0.408737730496
+0.336818274193
+0.0072908179208
+0.225945062397
+0.120337420177
+0.215619735976
+0.1239027825
+0.197601277834
+0.467256526035
+0.317356745639
+0.179006226696
+0.110149750354
+0.393659915952
+0.0409936131261
+0.0643484359158
+0.00472060099963
+0.588536857489
+0.0456483151175
+0.196199578888
+0.297516671125
+0.140084194501
+0.103010498515
+0.617546754646
+0.40000882986
+0.610289221943
+0.232749715829
+0.268996275857
+0.185825883762
+0.142067914126
+0.283395365099
+0.629754278545
+0.35763140594
+0.226216179272
+0.243664212723
+0.556666644567
+0.186852989177
+0.0553614353825
+0.645593594255
+0.411294042127
+0.182730218203
+0.591346284063
+0.0200364326324
+0.440487755762
+0.439771269763
+0.398518286997
+0.520374558344
+0.465299014647
+0.243162160616
+0.556642813311
+0.639837644803
+0.353851139709
+0.313433040678
+0.372853916578
+0.588775371973
+0.438029194706
+0.58420539676
+0.232629539089
+0.620174583338
+0.428414181731
+0.163296423808
+0.338365980706
+0.605784509171
+0.176947405139
+0.17812998875
+0.21557925727
+0.29628649494
+0.630514257244
+0.305194035693
+0.195359740892
+0.418460971084
+0.421230479417
+0.174023695655
+0.21051434964
+0.290192821746
+0.0464798429586
+0.575603513579
+0.639613426847
+0.355549231872
+0.184262405338
+0.657301426025
+0.382447479968
+0.355817420512
+0.459439996759
+0.242478760417
+0.01757351944
+0.633528523603
+0.0572949613411
+0.225153649721
+0.0936821206487
+0.064447961939
+0.360635090971
+0.490300761247
+0.625638178235
+0.475291780136
+0.0144495179545
+0.365781183044
+0.630458375928
+0.426569377553
+0.579339955202
+0.141121923165
+0.624075805465
+0.356745980066
+0.667984244246
+0.366167522659
diff --git a/Chapter_7 LinearRegression/linear_regression_test.py b/Chapter_7 LinearRegression/linear_regression_test.py
index df4b7ed..8bf7a79 100644
--- a/Chapter_7 LinearRegression/linear_regression_test.py
+++ b/Chapter_7 LinearRegression/linear_regression_test.py
@@ -54,7 +54,7 @@ def save_predict(file_name, predict):
 if __name__ == "__main__":
     # 1. Load the test data
-    testData = load_data("data.txt")
+    testData = load_data("data_test.txt")
     # 2. Load the linear regression model
     w = load_model("weights")
     # 3. Compute the predictions
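With the hunk above, linear_regression_test.py evaluates on the held-out data_test.txt rather than the training file. The test-time flow is just load, multiply, save; here is a rough sketch of it under the assumptions of a single input feature, a one-coefficient weights file, and no intercept term (the book's load_data and load_model may differ):

import numpy as np

X = np.loadtxt("data_test.txt", ndmin=2)   # one sample per line
w = np.loadtxt("weights", ndmin=2)         # learned weights, one per line
y_hat = X.dot(w)                           # predictions: (m x n) times (n x 1)
np.savetxt("predict_result", y_hat)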
diff --git a/Chapter_8 RidgeRegression/data_test.txt b/Chapter_8 RidgeRegression/data_test.txt
new file mode 100644
index 0000000..29a2886
--- /dev/null
+++ b/Chapter_8 RidgeRegression/data_test.txt
@@ -0,0 +1,201 @@
+0 9.97916846285178
+0.0500000000000000 9.74224633243579
+0.100000000000000 10.7570774043879
+0.150000000000000 9.42246670875888
+0.200000000000000 9.32882538970119
+0.250000000000000 10.3526523416461
+0.300000000000000 10.8964651992629
+0.350000000000000 9.94858845084703
+0.400000000000000 9.76718178888102
+0.450000000000000 10.3753343234082
+0.500000000000000 9.68177666087305
+0.550000000000000 10.5586721730218
+0.600000000000000 11.0141936329955
+0.650000000000000 10.7588691741907
+0.700000000000000 9.74537754936864
+0.750000000000000 10.4815890893563
+0.800000000000000 10.6489838507768
+0.850000000000000 11.7567910654522
+0.900000000000000 11.3574258764205
+0.950000000000000 10.9214596420428
+1 11.6546810389147
+1.05000000000000 10.5276320757952
+1.10000000000000 10.9258665948324
+1.15000000000000 11.8562805803646
+1.20000000000000 11.9511914642914
+1.25000000000000 11.6151879972686
+1.30000000000000 11.5453761359517
+1.35000000000000 11.0569369302118
+1.40000000000000 11.7724498252121
+1.45000000000000 11.0359225150972
+1.50000000000000 11.7872603658247
+1.55000000000000 11.6909206913226
+1.60000000000000 12.1696531345153
+1.65000000000000 11.4370660772141
+1.70000000000000 12.0180699457015
+1.75000000000000 12.1465890406948
+1.80000000000000 11.3508114429496
+1.85000000000000 11.9281224221388
+1.90000000000000 12.6986269556388
+1.95000000000000 12.0062198552969
+2 11.8456875327265
+2.05000000000000 12.2783298045576
+2.10000000000000 11.9624495847162
+2.15000000000000 12.3715718043919
+2.20000000000000 12.1326174366386
+2.25000000000000 12.2408358851307
+2.30000000000000 12.5303947125036
+2.35000000000000 13.0311577321712
+2.40000000000000 12.6259372836904
+2.45000000000000 13.2741918285231
+2.50000000000000 11.4858190195736
+2.55000000000000 12.3253715942775
+2.60000000000000 12.7179966008560
+2.65000000000000 12.2324135181084
+2.70000000000000 12.0620224182959
+2.75000000000000 13.0585175406403
+2.80000000000000 13.1063507518536
+2.85000000000000 12.9946905776474
+2.90000000000000 13.0976580611842
+2.95000000000000 12.5147185789715
+3 12.7511558108979
+3.05000000000000 12.9966640914719
+3.10000000000000 12.7560854322861
+3.15000000000000 13.3159404384907
+3.20000000000000 14.3826123712995
+3.25000000000000 13.0088846407651
+3.30000000000000 13.6237241033504
+3.35000000000000 12.8327876413829
+3.40000000000000 14.0697772989521
+3.45000000000000 12.9654298220856
+3.50000000000000 13.6043578006361
+3.55000000000000 13.2407033223197
+3.60000000000000 13.8560078218682
+3.65000000000000 13.6556770855504
+3.70000000000000 13.6780056868667
+3.75000000000000 15.2245462706578
+3.80000000000000 13.4849769032363
+3.85000000000000 13.8265603002309
+3.90000000000000 15.2415127553859
+3.95000000000000 13.3766546569105
+4 14.2764993727331
+4.05000000000000 13.5117707954538
+4.10000000000000 14.6153197920434
+4.15000000000000 14.3137649062381
+4.20000000000000 14.5260624056350
+4.25000000000000 14.1105694418061
+4.30000000000000 14.4225957968620
+4.35000000000000 15.0862567394108
+4.40000000000000 13.2624492288263
+4.45000000000000 13.6333546375394
+4.50000000000000 14.7077344684894
+4.55000000000000 14.2226155740710
+4.60000000000000 14.4518258422967
+4.65000000000000 13.9015405617762
+4.70000000000000 14.2475827763361
+4.75000000000000 14.5479092274844
+4.80000000000000 14.4371009169755
+4.85000000000000 14.4167574873202
+4.90000000000000 14.6890766097790
+4.95000000000000 14.4786668378597
+5 15.6709418586290
+5.05000000000000 14.5557826438526
+5.10000000000000 16.0089713438217
+5.15000000000000 14.9627816967460
+5.20000000000000 14.4741296400413
+5.25000000000000 14.9406590962163
+5.30000000000000 15.7672504811478
+5.35000000000000 15.8779646282464
+5.40000000000000 15.4801136383743
+5.45000000000000 15.5937001601216
+5.50000000000000 15.8164527885206
+5.55000000000000 14.8204790654582
+5.60000000000000 15.3091451372664
+5.65000000000000 14.7349253389483
+5.70000000000000 15.4754484600753
+5.75000000000000 16.2246373518007
+5.80000000000000 16.1587207047224
+5.85000000000000 16.9939143028354
+5.90000000000000 15.9833638316749
+5.95000000000000 14.8717546241501
+6 16.8446996537133
+6.05000000000000 16.6911403735463
+6.10000000000000 15.8086845178264
+6.15000000000000 16.2613072403168
+6.20000000000000 16.5897257364312
+6.25000000000000 16.4423535787929
+6.30000000000000 16.6481834614417
+6.35000000000000 16.2936422204131
+6.40000000000000 16.3805881495360
+6.45000000000000 16.4940443145565
+6.50000000000000 16.1051720133918
+6.55000000000000 17.2614803093996
+6.60000000000000 16.6031658420648
+6.65000000000000 16.9932404239170
+6.70000000000000 16.2725331831758
+6.75000000000000 16.2123823412610
+6.80000000000000 16.7545163744032
+6.85000000000000 16.7236140835305
+6.90000000000000 17.4974119093171
+6.95000000000000 17.2530318546821
+7 17.2702572145810
+7.05000000000000 16.3275302903926
+7.10000000000000 16.6161530961464
+7.15000000000000 17.2510256691088
+7.20000000000000 17.0260610134574
+7.25000000000000 17.8950439950777
+7.30000000000000 17.9705769094400
+7.35000000000000 17.0596011552998
+7.40000000000000 17.8375678009958
+7.45000000000000 18.1477249434664
+7.50000000000000 17.6604926109765
+7.55000000000000 18.3616912292099
+7.60000000000000 18.1312166545544
+7.65000000000000 17.7570526190173
+7.70000000000000 18.1384016009830
+7.75000000000000 17.8472035056455
+7.80000000000000 17.5925539201162
+7.85000000000000 18.0292293671976
+7.90000000000000 17.9181113340221
+7.95000000000000 17.7676844159554
+8 18.8855097835091
+8.05000000000000 18.1606365338578
+8.10000000000000 19.4651887924340
+8.15000000000000 18.0019173083264
+8.20000000000000 18.4821477759675
+8.25000000000000 19.0413103315236
+8.30000000000000 19.6646150616052
+8.35000000000000 18.5017821404205
+8.40000000000000 18.0048707607011
+8.45000000000000 18.8516901162533
+8.50000000000000 17.8400486488207
+8.55000000000000 18.4130771201867
+8.60000000000000 18.7359334608013
+8.65000000000000 19.3947767538930
+8.70000000000000 19.4185707623949
+8.75000000000000 18.7362193017198
+8.80000000000000 19.2619656575633
+8.85000000000000 18.6893597900171
+8.90000000000000 19.2305624430752
+8.95000000000000 19.9076469692696
+9 19.0783798049086
+9.05000000000000 18.8997317710280
+9.10000000000000 18.8499826112651
+9.15000000000000 19.5082356902857
+9.20000000000000 19.8686443635211
+9.25000000000000 20.3128399734285
+9.30000000000000 19.3270228922188
+9.35000000000000 19.4315179590161
+9.40000000000000 19.0836466297650
+9.45000000000000 20.2559957463577
+9.50000000000000 19.4622756379065
+9.55000000000000 19.3133814283343
+9.60000000000000 20.6921203934286
+9.65000000000000 20.0549405531365
+9.70000000000000 20.0581714021386
+9.75000000000000 19.2472144327682
+9.80000000000000 20.0169933066161
+9.85000000000000 20.1100717963125
+9.90000000000000 19.3538777413292
+9.95000000000000 19.8371027740987
+10 19.7975391117254
diff --git a/Chapter_8 RidgeRegression/ridge_regression_test.py b/Chapter_8 RidgeRegression/ridge_regression_test.py
index 3b86eea..e31c1e6 100644
--- a/Chapter_8 RidgeRegression/ridge_regression_test.py
+++ b/Chapter_8 RidgeRegression/ridge_regression_test.py
@@ -55,7 +55,7 @@ def save_result(file_name, predict):
 if __name__ == "__main__":
     # 1. Load the test data
     print "----------1.load data ------------"
-    testData = load_data("data.txt")
+    testData = load_data("data_test.txt")
     # 2. Load the ridge regression model
     print "----------2.load model ------------"
     w = load_model("weights")
diff --git a/Chapter_8 RidgeRegression/ridge_regression_train.py b/Chapter_8 RidgeRegression/ridge_regression_train.py
index 3483718..f9a7810 100644
--- a/Chapter_8 RidgeRegression/ridge_regression_train.py
+++ b/Chapter_8 RidgeRegression/ridge_regression_train.py
@@ -208,7 +208,7 @@ def save_weights(file_name, w0):
     if method == "bfgs":  # train with BFGS
         w0 = bfgs(feature, label, 0.5, 1000)
     elif method == "lbfgs":  # train with L-BFGS
-        w0 = lbfgs(feature, label, 0.5, 10, m=10)
+        w0 = lbfgs(feature, label, 0.5, 1000, m=10)
     else:  # fall back to the least-squares solution
         w0 = ridge_regression(feature, label, 0.5)
     # 3. Save the final model
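The ridge_regression_train.py change raises the L-BFGS iteration budget from 10 to 1000; ten quasi-Newton steps stop well short of the optimum that the least-squares branch reaches in closed form, so the two methods previously disagreed badly. For reference, a minimal sketch of that closed-form target, w = (X'X + lambda*I)^{-1} X'y with lambda = 0.5, not the book's implementation:

import numpy as np

def ridge_regression(X, y, lam=0.5):
    # closed-form ridge solution: solve (X'X + lam*I) w = X'y
    n = X.shape[1]
    return np.linalg.solve(np.dot(X.T, X) + lam * np.eye(n), np.dot(X.T, y))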
diff --git a/README.md b/README.md
index a521af4..5520332 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,12 @@
 # Python机器学习算法
+Writing a book is hard, and writing a good one is harder still; since it went on sale this book has received a great deal of praise from readers. Although the manuscript was revised many times during publication, errors remain owing to limits of time, energy, and other factors, and we thank every reader who generously points them out. The errata for the book are collected below:
+
+[Errata](http://blog.csdn.net/google19890102/article/details/77996085)
+
 ![Cover image](http://img10.360buyimg.com/n1/jfs/t6391/74/1115083732/144183/2a82437f/594b5bb8Na3c6dfd4.jpg)
 
-This repository contains the sample code for 《Python机器学习算法》 (Python Machine Learning Algorithms); the book can now be pre-ordered at the major online bookstores:
+This repository contains the sample code for 《Python机器学习算法》 (Python Machine Learning Algorithms); the book can now be purchased at the major online bookstores:
 - [京东](https://item.jd.com/12109305.html)
 - [当当](http://product.dangdang.com/25100931.html)