提交 f60eaf7b 编写于 作者: sahduashufa's avatar sahduashufa

zero to one

上级 c2c38ade
无法预览此类型文件
......@@ -64,6 +64,33 @@ make install
## 新的demo程序实现5层全连接层,可自定义神经元和激活函数,损失函数
全连接层使用方法:
第一层的权重自定义,而后调用forward函数前向传播一层,自动求出激活以后的值,激活函数可自定义.
首先定义一个权重矩阵和偏置矩阵,第一个矩阵的维度大小使用数据列去定义:
```c
Matrix bias1 = CreateRandMat(2,1);
Matrix weight1 = CreateRandMat(2,data.col);
```
之后可以输出第一层前向传播的值,同时可以定义下一层的bias的维度, row使用第一层的权重矩阵的行,第二层的权重矩阵的行使用了第一层的输出的行, 而列自行定义即可, 这一点体现了前向传播算法的维度相容. 也就是:
```c
Matrix output1 = sequaltial.forward(get_T(get_row(data_mine,index)),weight1,bias1);
```
```c
Matrix weight2 = CreateRandMat(output1.row,2);
Matrix bias2 = CreateRandMat(weight2.row,1);
Matrix output2 = sequaltial.forward(output1,weight2,bias2);
```
同时第二层的输出也可以求出来,以此类推 .
最终输出代码见nerual_test.cpp ![nerual_test1](./picture/nerual_test1.png)
代码:
```c
......
......@@ -18,9 +18,6 @@ Node sigmoid(Node z){
struct edge_network
{
edge_network(int input, int num_neuron){
}
Matrix forward(Matrix data,Matrix weights,Matrix bais)
{
......
无法预览此类型文件
......@@ -5,7 +5,7 @@
#include <math.h>
#include <fstream>
#include "./autodiff/node.h"
#include"./matrix/matrix_def.h"a
#include"./matrix/matrix_def.h"
#include"./matrix/matrix_pro.h"
#include"./welcome/score_wel.cpp"
#include"./logistic/logistic_def.h"
......@@ -29,38 +29,7 @@ return sigmoid_act;
Node (*loss)(Node,Node) = loss_act;
Node (*act)(Node) = sigmoid_act;
int main()
{ /*
welcome();
string path = "./data/new_data2.csv";
Matrix data = read_csv(path);
Matrix bais = CreateMatrix(data.row,1);
data = appply(data,bais,1);
Matrix y = iloc(data,0,0,3,4);
Matrix x_1 = iloc(data,0,0,0,3);
Matrix x_2 = get_T(x_1);
double alpha = 0.002;
int max_epoch = 1;
Matrix weight = CreateMatrix(3,1);
change_va(weight,0,0,1);
change_va(weight,1,0,1);
change_va(weight,2,0,1);
int epoch = 0;
for(epoch = 0;epoch<=max_epoch;epoch++)
{
cout<<"-----------split-line-----------"<<endl;
Matrix temp_mul = mul(x_1,weight);
Matrix h =e_sigmoid(temp_mul);
Matrix error = subtract(y,h);
Matrix temp_update = mul(x_2,error);
Matrix updata = add(weight,times_mat(alpha,temp_update),0);
cout_mat(weight);
cout<<"epoch: "<<epoch<<" error: "<<matrix_sum(error)<<endl;
cout<<"-----------split-line-----------"<<endl;
}
stop = clock();
printf("%f\n", (double)(stop - start) / CLOCKS_PER_SEC);
*/
{
cout<<"------------autodiff for neraul network-----------"<<endl;
Matrix data_mine = CreateMatrix(2,1);
change_va(data_mine,0,0,0.55);
......@@ -107,30 +76,7 @@ int main()
bais1 = subtract(bais1,times_mat(0.001,backward3));
weight2 = subtract(weight2,times_mat(0.001,weight_2_grad));
bais2 = subtract(bais2,times_mat(0.001,output_end));
map<string,int>count_word;
string path_word = "mytest.csv";
str_Matrix data_readed = read_file(path_word);
for(int index_x = 0;index_x<data_readed.row;index_x++)
{
for(int index_y = 0;index_y<data_readed.col;index_y++)
{
string word = data_readed.str_matrix[index_x][index_y];
if(count_word.count(data_readed.str_matrix[index_x][index_y]) == 0)
{
count_word[word] = 1;
}
else
{
count_word[word]=count_word[word]+1;
}
}
}
cout<<"class: "<<count_word["class"]<<endl;
cout<<"classes: "<<count_word["classes"]<<endl;
cout<<"classification: "<<count_word["classification"]<<endl;
cout<<"classifiers: "<<count_word["classifiers"]<<endl;
cout<<"neraul end;"<<endl;
return 0;
}
}
......@@ -331,4 +331,14 @@ Matrix padding(Matrix mid1,int shape1,int shape2)
}
return result;
}
// Extract row `index` of `mid1` as a new 1 x mid1.col matrix.
// @param mid1  source matrix (copied in by value, as elsewhere in this file)
// @param index row to extract; assumed 0 <= index < mid1.row — TODO confirm callers
// @return freshly allocated single-row matrix containing a copy of that row
Matrix get_row(Matrix mid1,int index)
{
    Matrix row_mat = CreateMatrix(1, mid1.col);
    for (int col = 0; col < mid1.col; ++col)
    {
        row_mat.matrix[0][col] = mid1.matrix[index][col];
    }
    return row_mat;
}
#endif
#include<iostream>
#include<time.h>
#include<string>
#include<math.h>
#include<fstream>
#include"./autodiff/node.h"
#include"./matrix/matrix_def.h"
#include"./matrix/matrix_pro.h"
#include"./welcome/score_wel.cpp"
#include"./logistic/logistic_def.h"
#include"./file_pro/data_read.h"
#include"./grad_edge/matrix_grad.h"
using namespace std;
Node z = 1;          // global placeholder node — presumably a seed value for the autodiff graph; TODO confirm it is used elsewhere
Node t1 = 1,a13 = 1; // global target/prediction placeholders matching loss_act's parameter names — not referenced in this file's main; verify intent
// Squared-error loss: L(t1, a13) = 0.5 * (t1 - a13)^2.
// @param t1  target value node
// @param a13 predicted (activated) value node
// @return loss node, differentiable through the Node autodiff type
Node loss_act(Node t1,Node a13)
{
    Node diff = t1 - a13;
    return 0.5 * (pow(diff, 2));
}
// Logistic (sigmoid) activation: 1 / (1 + e^{-z}),
// written here as 1 / (1 + 1/exp(z)) to keep the original autodiff graph shape.
// @param z pre-activation node
// @return activated node in (0, 1)
Node sigmoid_act(Node z)
{
    Node denominator = 1 + (1 / exp(z));
    return 1 / denominator;
}
Node (*loss)(Node,Node) = loss_act; // pluggable loss hook; currently squared error (loss_act above)
Node (*act)(Node) = sigmoid_act;    // pluggable activation hook; currently sigmoid (sigmoid_act above)
int main()
{
cout<<"begin to train"<<endl;
int input_dim = 2;
int output_dim = 2;
edge_network sequaltial(input_dim,output_dim);
Matrix data_mine = CreateMatrix(100,2);
Matrix label_mine = CreateRandMat(100,1);
Matrix data = iloc(data_mine,0,1,0,0);
cout<<"data: ";
cout_mat(data);
Matrix label = iloc(label_mine,0,1,0,0);
cout<<"label: ";
cout_mat(label);
Matrix bias1 = CreateRandMat(2,1);
Matrix weight1 = CreateRandMat(2,data.col);
cout<<"weight1: ";
cout_mat(weight1);
int index = 0;
Matrix output1 = sequaltial.forward(get_T(get_row(data_mine,index)),weight1,bias1);
Matrix weight2 = CreateRandMat(output1.row,2);
cout<<"weight2: "<<endl;
cout_mat(weight2);
Matrix bias2 = CreateRandMat(weight2.row,1);
Matrix output2 = sequaltial.forward(output1,weight2,bias2);
cout_mat(output2);
cout<<"output1:"<<endl;
cout_mat(output1);
}
picture/logo.png

45.1 KB | W: | H:

picture/logo.png

140.2 KB | W: | H:

picture/logo.png
picture/logo.png
picture/logo.png
picture/logo.png
  • 2-up
  • Swipe
  • Onion skin
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册