{
using System;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
[Serializable]
public class YangBen {
public double[] input;   // input feature vector (length in_dimension)
public double[] output;  // expected or predicted output vector (length out_dimension)
}
// BP (back-propagation) neural network
[Serializable]
class BpNet
{
public const int out_dimension = 11;     // number of output layer nodes
public const int in_dimension = 24 * 16; // number of input layer nodes
public const int hidden_dimension = 100; // number of hidden layer nodes
double[,] v = new double[in_dimension, hidden_dimension];  // input-to-hidden weight matrix
double[,] w = new double[hidden_dimension, out_dimension]; // hidden-to-output weight matrix
public double a = 0.05;      // learning rate
public double b = 0.01;      // error tolerance: training stops once the error falls below this
public int loopcount = 2000; // maximum number of training iterations
public double mom = 0.9;     // momentum factor
// sigmoid activation function
public double fnet(double net){
return 1.0 / (1 + Math.Exp(-net));
}
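// Note: the sigmoid derivative is fnet(net) * (1 - fnet(net)), which is why the delta
// terms in trainBPNet below contain factors of the form O * (1 - O).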
// initialize both weight matrices
public void initNet()
{
Random rand = new Random(17);
// initialize the input-to-hidden weight matrix
for (int i = 0; i < in_dimension; ++i)
{
for (int j = 0; j < hidden_dimension; ++j)
{
v[i, j] = rand.NextDouble();
}
}
// initialize the hidden-to-output weight matrix
for (int i = 0; i < hidden_dimension; ++i)
{
for (int j = 0; j < out_dimension; ++j)
{
w[i, j] = rand.NextDouble();
}
}
}//end initNet
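// Note: Random.NextDouble() returns values in [0, 1); the fixed seed (17) makes the
// initialization reproducible across runs.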
// train the network on an array of samples
public void trainBPNet(YangBen[] yangbens)
{
int yangbenCount = yangbens.Length;
double e = b + 1;
// iterate at most loopcount times; break out once the error e drops below the tolerance b
double[] O1 = new double[hidden_dimension];   // hidden layer outputs
double[] O2 = new double[out_dimension];      // output layer outputs
double[] ChgH = new double[hidden_dimension]; // hidden layer deltas (smoothed by momentum)
double[] ChgO = new double[out_dimension];    // output layer deltas (smoothed by momentum)
for (int i = 0; i < hidden_dimension; ++i)
{
ChgH[i] = 0;
O1[i] = 0;
}
for (int i = 0; i < out_dimension; ++i)
{
ChgO[i] = 0;
O2[i] = 0;
}
for (int n = 0; n < loopcount && e > b; ++n)
{
// process each training sample in turn
for (int i = 0; i < yangbenCount; ++i)
{
// compute hidden layer outputs
for (int k = 0; k < hidden_dimension; ++k)
{
double temp = 0;
for (int j = 0; j < in_dimension; ++j)
{
temp += yangbens[i].input[j] * v[j, k];
}
O1[k] = fnet(temp);
}
// compute output layer outputs
for (int k = 0; k < out_dimension; ++k)
{
double temp = 0;
for (int j = 0; j < hidden_dimension; ++j)
{
temp += O1[j] * w[j, k];
}
O2[k] = fnet(temp);
}
// compute output layer deltas
for (int j = 0; j < out_dimension; ++j)
{
// blend with the previous delta through the momentum factor mom (0.9) to help avoid local optima
double t = O2[j] * (1 - O2[j]) * (yangbens[i].output[j] - O2[j]);
ChgO[j] = mom * ChgO[j] + (1 - mom) * t;
}
e = 0;
// accumulate the squared output error for this sample
for (int j = 0; j < out_dimension; ++j)
{
e += (yangbens[i].output[j] - O2[j]) * (yangbens[i].output[j] - O2[j]);
}
// compute hidden layer deltas
for (int j = 0; j < hidden_dimension; ++j)
{
double temp = 0;
for (int k = 0; k < out_dimension; ++k)
{
temp += ChgO[k] * w[j, k];
}
// apply the momentum factor
double t = temp * O1[j] * (1 - O1[j]);
ChgH[j] = mom * ChgH[j] + (1 - mom) * t;
}
// update the hidden-to-output weight matrix
for (int j = 0; j < hidden_dimension; ++j)
{
for (int k = 0; k < out_dimension; ++k)
{
w[j, k] = w[j, k] + a * O1[j] * ChgO[k];
}
}
// update the input-to-hidden weight matrix
for (int j = 0; j < in_dimension; ++j)
{
for (int k = 0; k < hidden_dimension; ++k)
{
v[j, k] = v[j, k] + a * yangbens[i].input[j] * ChgH[k];
}
}
}
}
}//end trainBPNet function
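// Note: weights are updated after every individual sample (online training), and the
// stopping error e reflects only the most recently processed sample.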
// run the trained network on a single sample; the prediction is written into yangBen.output
public void useBpNet(YangBen yangBen)
{
double[] O1 = new double[hidden_dimension];
double[] O2 = new double[out_dimension];
// compute hidden layer outputs
for (int i = 0; i < hidden_dimension; ++i)
{
double temp = 0;
for (int j = 0; j < in_dimension; ++j)
{
temp += yangBen.input[j] * v[j, i];
}
O1[i] = fnet(temp);
}
// compute output layer outputs
for (int i = 0; i < out_dimension; ++i)
{
double temp = 0;
for (int j = 0; j < hidden_dimension; ++j)
{
temp += O1[j] * w[j, i];
}
O2[i] = fnet(temp);
}
// copy the result into the sample's output vector
for (int i = 0; i < out_dimension; ++i)
{
yangBen.output[i] = O2[i];
}
}//end useBpNet function
// save the trained BP network to disk
public void storeNet()
{
IFormatter formatter = new BinaryFormatter();
Stream stream = new FileStream(modleFileName, FileMode.Create, FileAccess.Write, FileShare.None);
formatter.Serialize(stream, this);
stream.Close();
}
// load a previously trained BP network from disk
public static BpNet loadNet()
{
IFormatter formatter = new BinaryFormatter();
Stream stream = new FileStream(modleFileName, FileMode.Open, FileAccess.Read, FileShare.Read);
BpNet bpNet = (BpNet)formatter.Deserialize(stream);
stream.Close();
return bpNet;
}
public static String modleFileName = "bpNetModel.model"; // path of the serialized model file
}
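// Minimal usage sketch, assuming the caller supplies a YangBen[] named samples (and a single
// YangBen named sample) whose input/output arrays match in_dimension/out_dimension:
//   BpNet net = new BpNet();
//   net.initNet();
//   net.trainBPNet(samples);
//   net.storeNet();                  // writes bpNetModel.model
//   BpNet loaded = BpNet.loadNet();
//   loaded.useBpNet(sample);         // sample.output now holds the network's prediction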
}