import numpy as np
import matplotlib.pyplot as plt

def logsig(x):
    """Log-sigmoid activation function."""
    return 1 / (1 + np.exp(-x))

# x holds the input units, y the output units
# (one sample per column after the transpose).
x = np.loadtxt('data.csv', delimiter=',', dtype='int').T
y = np.loadtxt('result.csv', delimiter=',', dtype='int').T
y = y.reshape(1, -1)    # reshape y into a 1 x samnum row vector
np.random.seed(1)
maxepochs = 2000        # number of training epochs
learnrate = 0.035       # learning rate
errorfinal = 0.05       # target (stopping) error
samnum = 1619           # number of samples
indim = 7               # number of input neurons
outdim = 1              # number of output neurons
hiddenunitnum = 7       # number of hidden neurons

w1 = 0.5 * np.random.rand(hiddenunitnum, indim) - 0.1   # 7x7 input-to-hidden weights
b1 = 0.5 * np.random.rand(hiddenunitnum, 1) - 0.1       # 7x1 hidden-layer biases
w2 = 0.5 * np.random.rand(outdim, hiddenunitnum) - 0.1  # 1x7 hidden-to-output weights
b2 = 0.5 * np.random.rand(outdim, 1) - 0.1              # 1x1 output-layer bias
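Since samnum and indim are hard-coded rather than read from the files, a quick shape check (my addition, not part of the original listing) catches a mismatched data.csv or result.csv before NumPy's broadcasting hides it:

# Sanity check (assumes data.csv has samnum rows x indim columns before the
# transpose above; adjust if your files are laid out differently).
assert x.shape == (indim, samnum), x.shape
assert y.shape == (1, samnum), y.shape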
errhistory = []
for i in range(maxepochs):
    # Forward pass: sigmoid hidden layer, linear output layer
    # (b1 and b2 broadcast across the samnum columns).
    hiddenout = logsig(np.dot(w1, x) + b1)
    networkout = np.dot(w2, hiddenout) + b2
    err = y - networkout
    sse = np.sum(err ** 2)
    errhistory.append(sse)
    if sse < errorfinal:
        break
    # Backward pass: the output layer is linear, so its delta is the raw
    # error; the hidden delta is scaled by the sigmoid derivative s*(1-s).
    delta2 = err
    delta1 = np.dot(w2.T, delta2) * hiddenout * (1 - hiddenout)
    dw2 = np.dot(delta2, hiddenout.T)
    db2 = np.dot(delta2, np.ones((samnum, 1)))
    dw1 = np.dot(delta1, x.T)
    db1 = np.dot(delta1, np.ones((samnum, 1)))
    # Since err = y - networkout, adding the deltas moves downhill on the error.
    w2 += learnrate * dw2
    b2 += learnrate * db2
    w1 += learnrate * dw1
    b1 += learnrate * db1
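For reference, the updates inside the loop are one step of batch gradient descent on the squared error $E = \tfrac{1}{2}\sum (y - \hat{y})^2$ (the code monitors $2E$ as sse). Written out, with $\odot$ the elementwise product, $h$ the hidden activations, and $\eta$ the learning rate, this is just the code in math form:

$$
\delta_2 = y - \hat{y}, \qquad
\delta_1 = (W_2^{\top}\delta_2) \odot h \odot (1 - h),
$$
$$
W_2 \mathrel{+}= \eta\,\delta_2 h^{\top}, \quad
b_2 \mathrel{+}= \eta\,\delta_2 \mathbf{1}, \quad
W_1 \mathrel{+}= \eta\,\delta_1 x^{\top}, \quad
b_1 \mathrel{+}= \eta\,\delta_1 \mathbf{1}.
$$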
# Plot the error history on a log10 scale.
errhistory10 = np.log10(errhistory)
minerr = min(errhistory10)
print(errhistory10)
print(minerr)
plt.plot(errhistory10)
plt.plot(range(0, i + 1000, 1000), [minerr] * len(range(0, i + 1000, 1000)))
ax = plt.gca()
ax.set_yticks([-2, -1, 0, 1, 2, minerr])
ax.set_yticklabels(['$10^{-2}$', '$10^{-1}$', '$1$', '$10^{1}$', '$10^{2}$',
                    '%.4f' % np.power(10, minerr)])
ax.set_xlabel('iteration')
ax.set_ylabel('error')
ax.set_title('Error History')
plt.savefig('errorhistory.png', dpi=700)
plt.close()
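Once training stops, the learned weights can be reused for prediction with the same forward pass. A minimal sketch (the file name newdata.csv is hypothetical; it assumes new samples use the same 7-feature row layout as data.csv):

def predict(xnew, w1, b1, w2, b2):
    """Forward pass of the trained 7-7-1 network; xnew is indim x nsamples."""
    hidden = logsig(np.dot(w1, xnew) + b1)
    return np.dot(w2, hidden) + b2

# Hypothetical usage: load new samples the same way as data.csv.
xnew = np.loadtxt('newdata.csv', delimiter=',', dtype='int').T
print(predict(xnew, w1, b1, w2, b2))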