溫馨提示×

溫馨提示×

您好,登錄后才能下訂單哦!

密碼登錄×
登錄注冊×
其他方式登錄
點擊 登錄注冊 即表示同意《億速云用戶服務條款》

怎么使用C#完成常用神經網絡

發布時間:2021-11-24 09:04:58 來源:億速云 閱讀:181 作者:iii 欄目:大數據

這篇文章主要介紹“怎么使用C#完成常用神經網絡”,在日常操作中,相信很多人在怎么使用C#完成常用神經網絡問題上存在疑惑,小編查閱了各式資料,整理出簡單好用的操作方法,希望對大家解答“怎么使用C#完成常用神經網絡”的疑惑有所幫助!接下來,請跟著小編一起來學習吧!

WeaveNet:C#編寫的用于神經網絡的計算圖框架

這是一個由C#編寫的神經網絡,可以看到內部任何細節的實現,有助于學習神經網絡以及理解其中的計算方式。此架構不包含自動 Backward 向后傳播,為的是展示更多的計算細節。

源碼包含 cnn,bp,fcn,lstm,convlstm,GRU 等示例內容,包含示例所用的數據內容。

  1. LOSS支持:MSELoss,cross-entropy

  2. 激活函數(shù)支持:ReLu,Tanh,Sigmod,Softmax

  3. 數據類型支持: float[][] 與 float[][][,],二維與四維

  4. 池化支持:平均池化Averpooling,最大池化Maxpooling

  5. 其他支持:ConvLayer,Conv2DLayer,MulLayer,ConvTranspose2DLayer  

  6. 每個支持類都包含了向前傳播 Forward 與向后傳播 Backward 的方法

以下是幾個小例子

CNN的訓練實現

 public class CNN
    {
        Conv2DLayer cl;
        
      
       
        Conv2DLayer cl2;
        Conv2DLayer cl3;
        //TanhLayer sl = new TanhLayer();
        //TanhLayer sl2 = new TanhLayer();
        //TanhLayer sl3 = new TanhLayer();
        Maxpooling ap1;
        Maxpooling ap2;
        SigmodLayer sl = new SigmodLayer();
        SigmodLayer sl2 = new SigmodLayer();
        //SigmodLayer sl3 = new SigmodLayer();

        Softmax sl3 = new Softmax();
        //Averpooling ap2;
        //Averpooling ap1;



        public CNN()
        {
              cl = new Conv2DLayer(1, 0, 5, 1, 6);
              //ap1 = new Averpooling(2);
            ap1 = new Maxpooling(2);
            cl2 = new Conv2DLayer(1, 0, 5, 6, 12);
            // ap2 = new Averpooling(2);
            ap2 = new Maxpooling(2);
              cl3 = new Conv2DLayer(in_channels: 12, out_channels: 10, _inSize: 4,_full:true );
        }
        public dynamic Forward(float[][][,] matrices)
        {
            dynamic data = cl.Forward(matrices);
            data = sl.Forward(data);
            data = ap1.Forward(data);
            data = cl2.Forward(data);
            data = sl2.Forward(data);
            data = ap2.Forward(data);
            data = cl3.Forward(data);
            data = sl3.Forward(data);
            return data;
        }
        dynamic cl3grid;
        dynamic cl2grid;
        dynamic clgrid;
        public void backward(dynamic grid)
        {

            dynamic grid2 = sl3.Backward(grid);

            cl3grid = cl3.backweight(grid2);//獲取cl3的權(quán)重

            //--------------------------------
              

            grid2 = cl3.Backward(grid2);
            grid2 =ap2.Backward(grid2);
            grid2 = sl2.Backward(grid2);

            cl2grid = cl2.backweight(grid2);//獲取cl2的權(quán)重
            //-------------------------------------

            grid2 = cl2.Backward(grid2);
            grid2 = ap1.Backward(grid2);
            grid2 = sl.Backward(grid2);

            clgrid = cl.backweight(grid2);//獲取cl的權(quán)重


        }
        float lr = 1.0f;
        public void update()
        {
            
        //    int channl = cl3grid.grid.Length;

            cl3.wdata = Matrix.MatrixSub(cl3.wdata, Matrix.multiply(cl3grid.grid, lr)); 
            cl3.basicData = Matrix.MatrixSub(cl3.basicData, Matrix.multiply(cl3grid.basic, lr));

            cl2.weights = Matrix.MatrixSub(cl2.weights, Matrix.multiply(cl2grid.grid, lr));
            cl2.basicData = Matrix.MatrixSub(cl2.basicData, Matrix.multiply(cl2grid.basic, lr));

            cl.weights = Matrix.MatrixSub(cl.weights, Matrix.multiply(clgrid.grid, lr));
            cl.basicData = Matrix.MatrixSub(cl.basicData, Matrix.multiply(clgrid.basic, lr));
        }
    }

LSTM 實現例子

 public class LSTMCELL
    {
        // A single LSTM cell. The two linear layers each produce all four gate
        // pre-activations at once — hence the hidden_size * 4 output width:
        // convLayerih maps the input x, convLayerhh maps the previous hidden state.
        ConvLayer convLayerih;
        ConvLayer convLayerhh;
        int input_size; int hidden_size;
        // _input_size: width of x; _hidden_size: width of h and c.
        public LSTMCELL(int _input_size, int _hidden_size)
        {
            input_size = _input_size;
            hidden_size = _hidden_size;
            convLayerih = new ConvLayer(input_size, hidden_size * 4 );
            // Commented-out lines below load pretrained weights from disk (debugging aid).
            //convLayerih.weights = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\lstmihw.json"));
            //convLayerih.basicData = JsonConvert.DeserializeObject<float[]>(util.getstr("D:\\lstmihb.json"));
            convLayerhh = new ConvLayer( hidden_size, hidden_size * 4);
            //convLayerhh.weights = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\lstmhhw.json"));
            //convLayerhh.basicData = JsonConvert.DeserializeObject<float[]>(util.getstr("D:\\lstmhhb.json"));
        }
        // Stateful activation nodes, one per gate, reused by backward().
        SigmodLayer input_gate_s = new SigmodLayer();
        SigmodLayer forget_gate_s = new SigmodLayer();
        SigmodLayer output_gate_s = new SigmodLayer();
        TanhLayer cell_memory_tl = new TanhLayer();
        TanhLayer cell_tl = new TanhLayer();
        
        // Stateful elementwise-multiply nodes; each remembers its two operands so
        // Backward()/BackwardY() can return the partial w.r.t. either operand.
        MulLayer c_next_mul = new MulLayer();
        MulLayer mulin_gate_mul = new MulLayer();
        MulLayer h_next_mul = new MulLayer();
        

        // One LSTM time step. input: x_t; h_prev/c_prev: previous hidden/cell
        // state. Returns the tuple (h_next, c_next).
        public dynamic Forward(float[][] input, float[][] h_prev, float[][] c_prev)
        {
            // Reference (NumPy) formulation of the fused gate pre-activation:
            //a_vector = np.dot(x, self.weight_ih.T) + np.dot(h_prev, self.weight_hh.T)
            //a_vector += self.bias_ih + self.bias_hh
            Xinput = input;
            xh_prev = h_prev;
            xc_prev = c_prev;
            var ih = convLayerih.Forward(input);
            var hh = convLayerhh.Forward(h_prev);
            var a_vector = Matrix.MatrixAdd(ih, hh);
           
            // Split the fused pre-activation into the four gates along dim 1.
            // Order: input (i), forget (f), candidate cell (c), output (o).
            List<float[][]> liast = Matrix.chunk(a_vector,4,1);
            var a_i = liast[0];
            var a_f = liast[1];
            var a_c = liast[2];
            var a_o = liast[3];
          
             input_gate = input_gate_s.Forward(a_i);
             forget_gate = forget_gate_s.Forward(a_f);
             cell_memory = cell_memory_tl.Forward(a_c);
             output_gate = output_gate_s.Forward(a_o);
            // c_next = f * c_prev + i * c_candidate
            var c_next_temp = c_next_mul.Forward(forget_gate, c_prev);
            var mulin_gate = mulin_gate_mul.Forward(input_gate, cell_memory);
            var c_next = Matrix.MatrixAdd(c_next_temp, mulin_gate);

            // h_next = o * tanh(c_next)
            var h_next = h_next_mul.Forward(output_gate, cell_tl.Forward(c_next));
            
           // dh_prev = Matrix.zroe(h_next.Length, h_next[0].Length);
            return (h_next,c_next);// next hidden state, next cell memory
        }
        // Values cached by Forward for use in backward (dynamic: mixed types).
        dynamic  Xinput, xh_prev, xc_prev, input_gate, forget_gate, cell_memory, output_gate;
       // dynamic dh_prev;
        // Weight gradients of the two linear layers, captured for update().
        dynamic ihweight, hhweight;
        // Backpropagates grid (dL/dh_next) through the cell; returns dL/dx.
        public dynamic backward(dynamic grid)
        {
             
            // dL/d(tanh(c_next)) via the h_next = o * tanh(c_next) product.
            var dh  = h_next_mul.BackwardY(grid);
            var d_tanh_c = cell_tl.Backward(dh);
             //var dc_prev=c_next_mul.backwardY(d_tanh_c);
            

            // Gradients flowing into each gate's activation output.
            var d_input_gate = mulin_gate_mul.Backward(d_tanh_c);
            var d_forget_gate=c_next_mul.Backward(d_tanh_c);
            var d_cell_memory = mulin_gate_mul.BackwardY(d_tanh_c);

            // NOTE(review): the output-gate branch is fed `grid` (dL/dh_next)
            // directly rather than d_tanh_c — confirm against MulLayer semantics.
            var d_output_gate = h_next_mul.Backward(grid);// d_tanh_c
            // Through each gate's nonlinearity back to the pre-activations.
            var d_ai = input_gate_s.Backward(d_input_gate);
            var d_af = forget_gate_s.Backward(d_forget_gate);
            var d_ao = output_gate_s.Backward(d_output_gate);
            var d_ac = cell_memory_tl.Backward(d_cell_memory);

            // Re-fuse the four gate gradients along dim 1 in i,f,c,o order to
            // mirror the chunk() in Forward.
            var temp=Matrix.cat(d_ai, d_af, 1);
            var temp2 = Matrix.cat( d_ac, d_ao, 1);
            var da= Matrix.cat(temp, temp2, 1);
           // var daT=Matrix.T(da);
             ihweight = convLayerih.backweight(da);
             hhweight = convLayerhh.backweight(da);
            return convLayerih.backward(da);
        }
        float lr = 0.1f;
        // Plain SGD step on both linear layers: w -= lr * grad.
        public void update()
        {
            convLayerih.weights = Matrix.MatrixSub(convLayerih.weights, Matrix.multiply(ihweight.grid, lr));
            convLayerih.basicData = Matrix.MatrixSub(convLayerih.basicData, Matrix.multiply(ihweight.basic, lr));

            convLayerhh.weights = Matrix.MatrixSub(convLayerhh.weights, Matrix.multiply(hhweight.grid, lr));
            convLayerhh.basicData = Matrix.MatrixSub(convLayerhh.basicData, Matrix.multiply(hhweight.basic, lr));

        }
    }

FCN實現例子

 public class FCN
    {
        Conv2DLayer cl; 
        Conv2DLayer cl2;
        Conv2DLayer cl3;
        ConvTranspose2DLayer Tcl1;
        Maxpooling mpl = new Maxpooling();
        Maxpooling mpl2 = new Maxpooling();
        SigmodLayer sl = new SigmodLayer();
        SigmodLayer sl2 = new SigmodLayer();
        SigmodLayer sl3 = new SigmodLayer();
        Softmax sl4 = new Softmax();
        public FCN(int weightssize)
        {
            cl = new Conv2DLayer(1, weightssize / 2, weightssize, 1, 6, bias: false);
            cl2 = new Conv2DLayer(1, weightssize / 2, weightssize, 6, 12, bias: false);
            cl3 = new Conv2DLayer(1, weightssize / 2, weightssize, 12, 24, bias: false);
            Tcl1 = new ConvTranspose2DLayer(2, 1, weightssize + 1, 24, 1, bias: false);
        }
        public dynamic Forward(dynamic data)
        {
            dynamic data2= cl.Forward(data);
            data2=sl.Forward(data2);
            data2=mpl.Forward(data2);
            data2 = cl2.Forward(data2);
            data2 = sl2.Forward(data2);
            data2 = mpl2.Forward(data2);
            data2 = cl3.Forward(data2);
            data2 = sl3.Forward(data2);
            data2=Tcl1.Forward(data2);
            data2 = sl4.Forward(data2);
            return data2;
        }
        public dynamic backward(dynamic grid)
        {
            var grid2 = sl4.Backward(grid);
            grid2= Tcl1.Backward(grid2);
            grid2 = sl3.Backward(grid2);
            grid2 = cl3.Backward(grid2);
            grid2 = mpl2.Backward(grid2);
            grid2 = sl2.Backward(grid2);
            grid2 = cl2.Backward(grid2);
            grid2 = mpl.Backward(grid2);
            grid2 = sl.Backward(grid2);
            grid2 = cl.Backward(grid2);
            return grid2;
        }
    }

到此,關(guān)于“怎么使用C#完成常用神經(jīng)網(wǎng)絡(luò)”的學(xué)習(xí)就結(jié)束了,希望能夠解決大家的疑惑。理論與實(shí)踐的搭配能更好的幫助大家學(xué)習(xí),快去試試吧!若想繼續(xù)學(xué)習(xí)更多相關(guān)知識(shí),請繼續(xù)關(guān)注億速云網(wǎng)站,小編會(huì)繼續(xù)努力為大家?guī)砀鄬?shí)用的文章!

向AI問一下細(xì)節(jié)

免責(zé)聲明:本站發(fā)布的內(nèi)容(圖片、視頻和文字)以原創(chuàng)、轉(zhuǎn)載和分享為主,文章觀點(diǎn)不代表本網(wǎng)站立場,如果涉及侵權(quán)請聯(lián)系站長郵箱:is@yisu.com進(jìn)行舉報(bào),并提供相關(guān)證據(jù),一經(jīng)查實(shí),將立刻刪除涉嫌侵權(quán)內(nèi)容。

AI