• 增减网络20220101


    现代C++实现的全连接神经网络，支持任意层数和每层任意结点数。代码包括基本的梯度下降：输入数据有两组{1,0}、{1,1}，对应的训练目标target也有两个{1}、{0}……程序运行时首先读取文本文件"s1.txt"，如果文件中有字符串"{2,4,3,1}"，则网络结构就是{2,4,3,1}（即输入层有2个nodes，输出层有1个node，第1个隐藏层有4个nodes，第2个隐藏层有3个nodes）。之后程序再读取文本文件"s2.txt"中的字符串，比如"{2,4,1}"，并据此重新构建神经网络——这次输入层有2个nodes，输出层有1个node，隐藏层有4个nodes——然后再次进行反向传播训练。

    #include <iostream>
    #include <vector>
    #include <cmath>
    #include <random>
    #include <fstream>
    #include <sstream>
    #include <string>
    using namespace std;
    8. // Sigmoid 函数和它的导数
    9. double sigmoid(double x) {
    10. return 1 / (1 + exp(-x));
    11. }
    12. double sigmoid_derivative(double x) {
    13. return x * (1 - x);
    14. }
    15. // 全连接神经网络
    16. class NeuralNetwork {
    17. public:
    18. vector<int> layers;
    19. vectordouble>> weights;
    20. vectordouble>> outputs;
    21. NeuralNetwork(vector<int> layers) : layers(layers) {
    22. random_device rd;
    23. mt19937 gen(rd());
    24. uniform_real_distribution<> dis(0, 1);
    25. for (int i = 1; i < layers.size(); ++i) {
    26. vector<double> weightLayer;
    27. for (int j = 0; j < layers[i - 1] * layers[i]; ++j) {
    28. weightLayer.push_back(dis(gen));
    29. }
    30. weights.push_back(weightLayer);
    31. }
    32. }
    33. vector<double> forward(vector<double> input) {
    34. outputs.clear();
    35. outputs.push_back(input);
    36. for (int i = 1; i < layers.size(); ++i) {
    37. vector<double> output(layers[i]);
    38. int k = 0;
    39. for (int j = 0; j < layers[i]; ++j) {
    40. double sum = 0;
    41. for (int l = 0; l < layers[i - 1]; ++l) {
    42. sum += outputs[i - 1][l] * weights[i - 1][k++];
    43. }
    44. output[j] = sigmoid(sum);
    45. }
    46. outputs.push_back(output);
    47. }
    48. return outputs.back();
    49. }
    50. vector<double> forwarOut(vector<double> input) {
    51. outputs.clear();
    52. outputs.push_back(input);
    53. cout << endl<<" [";
    54. for (int jj = 0; jj < input.size(); ++jj) { cout << input[jj]; }
    55. cout << "]; " << endl;
    56. for (int i = 1; i < layers.size(); ++i) {
    57. vector<double> output(layers[i]);
    58. int k = 0;
    59. for (int j = 0; j < layers[i]; ++j) {
    60. double sum = 0;
    61. for (int l = 0; l < layers[i - 1]; ++l) {
    62. sum += outputs[i - 1][l] * weights[i - 1][k++];
    63. }
    64. output[j] = sigmoid(sum);
    65. cout << output[j] << "], ";
    66. }
    67. outputs.push_back(output);
    68. cout << output[0] << "}; " << endl;
    69. cout << "}}; " << endl;
    70. }//for110i
    71. return outputs.back();
    72. }//forwarOut
    73. //---------------------------------------------------------------------
    74. void train(vector<double> input, vector<double> target, double lr) {
    75. forward(input);
    76. vectordouble>> deltas(layers.size());
    77. for (int i = layers.size() - 1; i >= 0; --i) {
    78. deltas[i].resize(layers[i]);
    79. if (i == layers.size() - 1) {
    80. for (int j = 0; j < layers[i]; ++j) {
    81. double error = target[j] - outputs[i][j];
    82. deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
    83. }
    84. }
    85. else {
    86. int k = 0;
    87. for (int j = 0; j < layers[i]; ++j) {
    88. double error = 0;
    89. for (int l = 0; l < layers[i + 1]; ++l) {
    90. error += weights[i][k++] * deltas[i + 1][l];
    91. }
    92. deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
    93. }
    94. }
    95. }
    96. for (int i = layers.size() - 1; i > 0; --i) {
    97. int k = 0;
    98. for (int j = 0; j < layers[i]; ++j) {
    99. for (int l = 0; l < layers[i - 1]; ++l) {
    100. weights[i - 1][k++] += lr * deltas[i][j] * outputs[i - 1][l];
    101. }
    102. }
    103. }
    104. }
    105. };
    // Read a network topology from the first line of `filename`, expected in
    // the form "{2,4,3,1}". Returns one int per layer, or an empty vector when
    // the file is missing/unreadable or its first line is too short to hold a
    // braced list. (The original passed an empty string to substr(1, ...),
    // which throws std::out_of_range when the file does not exist.)
    std::vector<int> readLayersFromFile(const std::string& filename) {
        std::ifstream file(filename);
        std::string str;
        if (file) {
            std::getline(file, str);
        }
        std::vector<int> layers;
        if (str.size() < 2) {  // need at least the surrounding "{}"
            return layers;
        }
        // Strip the leading '{' and trailing '}' and split on commas.
        std::stringstream ss(str.substr(1, str.length() - 2));
        std::string token;
        while (std::getline(ss, token, ',')) {
            if (!token.empty()) {
                layers.push_back(std::stoi(token));  // stoi throws on non-numeric junk
            }
        }
        return layers;
    }
    121. int main() {
    122. // 第一次从文件s1.txt读取网络结构并构建网络
    123. vector<int> layers1 = readLayersFromFile("\/s1.txt");
    124. NeuralNetwork nn1(layers1);
    125. // 使用{1, 0} 和 {1, 1}训练
    126. vectordouble>> inputs1 = { {1, 0}, {1, 1},{0,1},{0,0} }; //{ {1, 0}, {1, 1} };
    127. vectordouble>> targets1 = { {1}, {0},{1},{0} };
    128. for (int epoch = 0; epoch < 1000; ++epoch) {
    129. for (int i = 0; i < inputs1.size(); ++i) {
    130. nn1.train(inputs1[i], targets1[i], 0.5);
    131. }
    132. }
    133. // 第二次从文件s2.txt读取网络结构并构建网络
    134. vector<int> layers2 = readLayersFromFile("\/s2.txt");
    135. NeuralNetwork nn2(layers2);
    136. // 再次使用{1, 0} 和 {1, 1}训练
    137. for (int epoch = 0; epoch < 5000; ++epoch) {
    138. for (int i = 0; i < inputs1.size(); ++i) {
    139. nn2.train(inputs1[i], targets1[i], 0.5);
    140. }
    141. }
    142. nn2.forwarOut( {0,1} );
    143. cout << endl;
    144. nn2.forwarOut({ 1,1 });
    145. cout << endl;
    146. nn2.forwarOut({ 1,0 });
    147. cout << endl;
    148. nn2.forwarOut({ 0,0 });
    149. return 0;
    150. }

  • 相关阅读:
    c语言入门——三子棋(N子棋)
    图解 LeetCode 算法汇总——二分查找
    线程池学习心得
    有来团队后台项目-解析5
    C++20开发工程师 系列 笔记 环境搭建篇(2022/11/30)
    嵌入式学习笔记(59)内存管理之栈
    怎么给PDF添加页面?推荐三个PDF如何插入页面小妙招
    kindle自定义屏保之自定义字帖
    C++学习 --pair
    体验提升-一个“小技巧”彻底解决锦礼商品可见不可售
  • 原文地址:https://blog.csdn.net/aw344/article/details/132619932