C# mlpbase.multilayerperceptron Class Code Examples


This article collects and summarizes typical usage examples of the C# class mlpbase.multilayerperceptron. If you are wondering how to use mlpbase.multilayerperceptron, what it is for, or where to find examples of it, the hand-picked class examples below may help.



A total of 20 code examples of the mlpbase.multilayerperceptron class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
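
Before the individual examples, here is a minimal hedged sketch of the typical life cycle of the class: construct the object, create the network geometry with one of the MLPCreate???() functions, and run a forward pass with MLPProcess(). It only reuses the ref-based calls shown in Examples 1 and 4; the layer sizes and input values are placeholders.

    public static void QuickStart()
    {
        // Empty network object; geometry is assigned by an MLPCreate???() call.
        mlpbase.multilayerperceptron net = new mlpbase.multilayerperceptron();

        // Regression network: 2 inputs, 2 outputs, no hidden layers,
        // weights initialized with small random values (as in Example 1).
        mlpbase.mlpcreate0(2, 2, ref net);

        // Forward pass: y receives the two network outputs.
        double[] x = new double[2];
        double[] y = new double[2];
        x[0] = 0.1;
        x[1] = -0.3;
        mlpbase.mlpprocess(ref net, ref x, ref y);
    }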

Example 1: Main

    public static int Main(string[] args)
    {
        mlpbase.multilayerperceptron net = new mlpbase.multilayerperceptron();
        double[] x = new double[0];
        double[] y = new double[0];

        
        //
        // regression task with 2 inputs (independent variables)
        // and 2 outputs (dependent variables).
        //
        // network weights are initialized with small random values.
        //
        mlpbase.mlpcreate0(2, 2, ref net);
        x = new double[2];
        y = new double[2];
        x[0] = AP.Math.RandomReal()-0.5;
        x[1] = AP.Math.RandomReal()-0.5;
        mlpbase.mlpprocess(ref net, ref x, ref y);
        System.Console.Write("Regression task");
        System.Console.WriteLine();
        System.Console.Write("IN[0]  = ");
        System.Console.Write("{0,5:F2}",x[0]);
        System.Console.WriteLine();
        System.Console.Write("IN[1]  = ");
        System.Console.Write("{0,5:F2}",x[1]);
        System.Console.WriteLine();
        System.Console.Write("OUT[0] = ");
        System.Console.Write("{0,5:F2}",y[0]);
        System.Console.WriteLine();
        System.Console.Write("OUT[1] = ");
        System.Console.Write("{0,5:F2}",y[1]);
        System.Console.WriteLine();
        return 0;
    }
Developer: palefacer, Project: TelescopeOrientation, Lines: 35, Source: _demo_mlp_process.cs


Example 2: Main

    public static int Main(string[] args)
    {
        mlpbase.multilayerperceptron net = new mlpbase.multilayerperceptron();

        mlpbase.mlpcreate0(2, 1, ref net);
        mlpbase.mlprandomize(ref net);
        return 0;
    }
Developer: palefacer, Project: TelescopeOrientation, Lines: 8, Source: _demo_mlp_randomize.cs


Example 3: Main

    public static int Main(string[] args)
    {
        mlpbase.multilayerperceptron network1 = new mlpbase.multilayerperceptron();
        mlpbase.multilayerperceptron network2 = new mlpbase.multilayerperceptron();
        mlpbase.multilayerperceptron network3 = new mlpbase.multilayerperceptron();
        double[] x = new double[0];
        double[] y = new double[0];
        double[] r = new double[0];
        int rlen = 0;
        double v1 = 0;
        double v2 = 0;

        
        //
        // Generate two networks filled with small random values.
        // Use MLPSerialize/MLPUnserialize to make network copy.
        //
        mlpbase.mlpcreate0(1, 1, ref network1);
        mlpbase.mlpcreate0(1, 1, ref network2);
        mlpbase.mlpserialize(ref network1, ref r, ref rlen);
        mlpbase.mlpunserialize(ref r, ref network2);
        
        //
        // Now Network1 and Network2 should be identical.
        // Let's demonstrate it.
        //
        System.Console.Write("Test serialization/unserialization");
        System.Console.WriteLine();
        x = new double[1];
        y = new double[1];
        x[0] = 2*AP.Math.RandomReal()-1;
        mlpbase.mlpprocess(ref network1, ref x, ref y);
        v1 = y[0];
        System.Console.Write("Network1(X) = ");
        System.Console.Write("{0,0:F2}",y[0]);
        System.Console.WriteLine();
        mlpbase.mlpprocess(ref network2, ref x, ref y);
        v2 = y[0];
        System.Console.Write("Network2(X) = ");
        System.Console.Write("{0,0:F2}",y[0]);
        System.Console.WriteLine();
        if( (double)(v1)==(double)(v2) )
        {
            System.Console.Write("Results are equal, OK.");
            System.Console.WriteLine();
        }
        else
        {
            System.Console.Write("Results are not equal... Strange...");
        }
        return 0;
    }
Developer: palefacer, Project: TelescopeOrientation, Lines: 52, Source: _demo_mlp_serialize.cs


Example 4: Main

    public static int Main(string[] args)
    {
        mlpbase.multilayerperceptron net = new mlpbase.multilayerperceptron();
        double[] x = new double[0];
        double[] y = new double[0];

        
        //
        // classification task with 2 inputs and 3 classes.
        //
        // network weights are initialized with small random values.
        //
        mlpbase.mlpcreatec0(2, 3, ref net);
        x = new double[2];
        y = new double[3];
        x[0] = AP.Math.RandomReal()-0.5;
        x[1] = AP.Math.RandomReal()-0.5;
        mlpbase.mlpprocess(ref net, ref x, ref y);
        
        //
        // output results
        //
        System.Console.Write("Classification task");
        System.Console.WriteLine();
        System.Console.Write("IN[0]  = ");
        System.Console.Write("{0,5:F2}",x[0]);
        System.Console.WriteLine();
        System.Console.Write("IN[1]  = ");
        System.Console.Write("{0,5:F2}",x[1]);
        System.Console.WriteLine();
        System.Console.Write("Prob(Class=0|IN) = ");
        System.Console.Write("{0,5:F2}",y[0]);
        System.Console.WriteLine();
        System.Console.Write("Prob(Class=1|IN) = ");
        System.Console.Write("{0,5:F2}",y[1]);
        System.Console.WriteLine();
        System.Console.Write("Prob(Class=2|IN) = ");
        System.Console.Write("{0,5:F2}",y[2]);
        System.Console.WriteLine();
        return 0;
    }
Developer: palefacer, Project: TelescopeOrientation, Lines: 41, Source: _demo_mlp_process_cls.cs


Example 5: PassThroughSerializer

        /*************************************************************************
        Network creation

        This function creates network with desired structure. Network  is  created
        using one of the three methods:
        a) straightforward creation using MLPCreate???()
        b) MLPCreate???() for proxy object, which is copied with PassThroughSerializer()
        c) MLPCreate???() for proxy object, which is copied with MLPCopy()
        One of these methods is chosen with probability 1/3.
        *************************************************************************/
        private static void createnetwork(mlpbase.multilayerperceptron network,
            int nkind,
            double a1,
            double a2,
            int nin,
            int nhid1,
            int nhid2,
            int nout)
        {
            int mkind = 0;
            mlpbase.multilayerperceptron tmp = new mlpbase.multilayerperceptron();

            ap.assert(((nin>0 & nhid1>=0) & nhid2>=0) & nout>0, "CreateNetwork error");
            ap.assert(nhid1!=0 | nhid2==0, "CreateNetwork error");
            ap.assert(nkind!=1 | nout>=2, "CreateNetwork error");
            mkind = math.randominteger(3);
            if( nhid1==0 )
            {
                
                //
                // No hidden layers
                //
                if( nkind==0 )
                {
                    if( mkind==0 )
                    {
                        mlpbase.mlpcreate0(nin, nout, network);
                    }
                    if( mkind==1 )
                    {
                        mlpbase.mlpcreate0(nin, nout, tmp);
                        {
                            //
                            // This code passes data structure through serializers
                            // (serializes it to string and loads back)
                            //
                            serializer _local_serializer;
                            string _local_str;
                            
                            _local_serializer = new serializer();
                            _local_serializer.alloc_start();
                            mlpbase.mlpalloc(_local_serializer, tmp);
                            _local_serializer.sstart_str();
                            mlpbase.mlpserialize(_local_serializer, tmp);
                            _local_serializer.stop();
                            _local_str = _local_serializer.get_string();
                            
                            _local_serializer = new serializer();
                            _local_serializer.ustart_str(_local_str);
                            mlpbase.mlpunserialize(_local_serializer, network);
                            _local_serializer.stop();
                        }
                    }
                    if( mkind==2 )
                    {
                        mlpbase.mlpcreate0(nin, nout, tmp);
                        mlpbase.mlpcopy(tmp, network);
                    }
                }
                else
                {
                    if( nkind==1 )
                    {
                        if( mkind==0 )
                        {
                            mlpbase.mlpcreatec0(nin, nout, network);
                        }
                        if( mkind==1 )
                        {
                            mlpbase.mlpcreatec0(nin, nout, tmp);
                            {
                                //
                                // This code passes data structure through serializers
                                // (serializes it to string and loads back)
                                //
                                serializer _local_serializer;
                                string _local_str;
                                
                                _local_serializer = new serializer();
                                _local_serializer.alloc_start();
                                mlpbase.mlpalloc(_local_serializer, tmp);
                                _local_serializer.sstart_str();
                                mlpbase.mlpserialize(_local_serializer, tmp);
                                _local_serializer.stop();
                                _local_str = _local_serializer.get_string();
                                
                                _local_serializer = new serializer();
                                _local_serializer.ustart_str(_local_str);
                                mlpbase.mlpunserialize(_local_serializer, network);
                                _local_serializer.stop();
//......... (part of the code omitted here) .........
Developer: dmX-Inc, Project: Clustering-Search-Results, Lines: 101, Source: test_c.cs


Example 6: init

 public override void init()
 {
     densexy = new double[0,0];
     sparsexy = new sparse.sparsematrix();
     tnetwork = new mlpbase.multilayerperceptron();
     tstate = new minlbfgs.minlbfgsstate();
     wbest = new double[0];
     wfinal = new double[0];
     subset = new int[0];
     valsubset = new int[0];
 }
Developer: thunder176, Project: HeuristicLab, Lines: 11, Source: dataanalysis.cs


Example 7: mlpecreater1

        /*************************************************************************
        Like MLPCreateR1, but for ensembles.

          -- ALGLIB --
             Copyright 18.02.2009 by Bochkanov Sergey
        *************************************************************************/
        public static void mlpecreater1(int nin,
            int nhid,
            int nout,
            double a,
            double b,
            int ensemblesize,
            mlpensemble ensemble)
        {
            mlpbase.multilayerperceptron net = new mlpbase.multilayerperceptron();

            mlpbase.mlpcreater1(nin, nhid, nout, a, b, net);
            mlpecreatefromnetwork(net, ensemblesize, ensemble);
        }
Developer: lgatto, Project: proteowizard, Lines: 19, Source: dataanalysis.cs
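
A hedged usage sketch for mlpecreater1 as defined above, assuming the call is made from a scope where mlpecreater1 is visible and that mlpensemble has a parameterless constructor; the layer sizes, output range and ensemble size are placeholder values.

            // Hypothetical ensemble: 3 inputs, 5 hidden neurons, 1 output whose
            // activation is bounded to [-1.0, +1.0], replicated over 10 members.
            mlpensemble ensemble = new mlpensemble();
            mlpecreater1(3, 5, 1, -1.0, 1.0, 10, ensemble);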


Example 8: mlpkfoldcvgeneral

        /*************************************************************************
        Internal cross-validation subroutine
        *************************************************************************/
        private static void mlpkfoldcvgeneral(mlpbase.multilayerperceptron n,
            double[,] xy,
            int npoints,
            double decay,
            int restarts,
            int foldscount,
            bool lmalgorithm,
            double wstep,
            int maxits,
            ref int info,
            mlpreport rep,
            mlpcvreport cvrep)
        {
            int i = 0;
            int fold = 0;
            int j = 0;
            int k = 0;
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int nin = 0;
            int nout = 0;
            int rowlen = 0;
            int wcount = 0;
            int nclasses = 0;
            int tssize = 0;
            int cvssize = 0;
            double[,] cvset = new double[0,0];
            double[,] testset = new double[0,0];
            int[] folds = new int[0];
            int relcnt = 0;
            mlpreport internalrep = new mlpreport();
            double[] x = new double[0];
            double[] y = new double[0];
            int i_ = 0;

            info = 0;

            
            //
            // Read network geometry, test parameters
            //
            mlpbase.mlpproperties(n, ref nin, ref nout, ref wcount);
            if( mlpbase.mlpissoftmax(n) )
            {
                nclasses = nout;
                rowlen = nin+1;
            }
            else
            {
                nclasses = -nout;
                rowlen = nin+nout;
            }
            if( (npoints<=0 || foldscount<2) || foldscount>npoints )
            {
                info = -1;
                return;
            }
            mlpbase.mlpcopy(n, network);
            
            //
            // K-fold cross-validation.
            // First, estimate generalization error
            //
            testset = new double[npoints-1+1, rowlen-1+1];
            cvset = new double[npoints-1+1, rowlen-1+1];
            x = new double[nin-1+1];
            y = new double[nout-1+1];
            mlpkfoldsplit(xy, npoints, nclasses, foldscount, false, ref folds);
            cvrep.relclserror = 0;
            cvrep.avgce = 0;
            cvrep.rmserror = 0;
            cvrep.avgerror = 0;
            cvrep.avgrelerror = 0;
            rep.ngrad = 0;
            rep.nhess = 0;
            rep.ncholesky = 0;
            relcnt = 0;
            for(fold=0; fold<=foldscount-1; fold++)
            {
                
                //
                // Separate set
                //
                tssize = 0;
                cvssize = 0;
                for(i=0; i<=npoints-1; i++)
                {
                    if( folds[i]==fold )
                    {
                        for(i_=0; i_<=rowlen-1;i_++)
                        {
                            testset[tssize,i_] = xy[i,i_];
                        }
                        tssize = tssize+1;
                    }
                    else
                    {
                        for(i_=0; i_<=rowlen-1;i_++)
//......... (part of the code omitted here) .........
Developer: lgatto, Project: proteowizard, Lines: 101, Source: dataanalysis.cs
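
In the loop above, rows with folds[i]==fold go to the test set and all remaining rows go to the cross-validation training set. The following self-contained sketch illustrates the same separation idea with a simple round-robin fold assignment; the real mlpkfoldsplit presumably randomizes the assignment, so this is only an illustration.

            // Round-robin assignment of npoints rows to foldscount folds,
            // then count how many rows would be held out as the test set for fold 0.
            int npoints = 10;
            int foldscount = 5;
            int[] folds = new int[npoints];
            for(int i=0; i<npoints; i++)
            {
                folds[i] = i%foldscount;
            }
            int testsize = 0;
            for(int i=0; i<npoints; i++)
            {
                if( folds[i]==0 )
                {
                    testsize = testsize+1;
                }
            }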


Example 9: multilayerperceptron

 public multilayerperceptron(mlpbase.multilayerperceptron obj)
 {
     _innerobj = obj;
 }
Developer: lgatto, Project: proteowizard, Lines: 4, Source: dataanalysis.cs


Example 10: testgradient

        /*************************************************************************
        Gradient functions test
        *************************************************************************/
        private static void testgradient(int nkind,
            int nin,
            int nhid1,
            int nhid2,
            int nout,
            int passcount,
            ref bool err)
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int n1 = 0;
            int n2 = 0;
            int wcount = 0;
            double h = 0;
            double etol = 0;
            double a1 = 0;
            double a2 = 0;
            int pass = 0;
            int i = 0;
            int j = 0;
            int ssize = 0;
            double[,] xy = new double[0,0];
            double[] grad1 = new double[0];
            double[] grad2 = new double[0];
            double[] x = new double[0];
            double[] y = new double[0];
            double[] x1 = new double[0];
            double[] x2 = new double[0];
            double[] y1 = new double[0];
            double[] y2 = new double[0];
            double v = 0;
            double e = 0;
            double e1 = 0;
            double e2 = 0;
            double v1 = 0;
            double v2 = 0;
            double v3 = 0;
            double v4 = 0;
            double wprev = 0;
            int i_ = 0;
            int i1_ = 0;

            ap.assert(passcount>=2, "PassCount<2!");
            a1 = 0;
            a2 = 0;
            if( nkind==2 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = 2*math.randomreal()-1;
            }
            if( nkind==3 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = a1+(2*math.randominteger(2)-1)*(0.1+0.9*math.randomreal());
            }
            createnetwork(network, nkind, a1, a2, nin, nhid1, nhid2, nout);
            mlpbase.mlpproperties(network, ref n1, ref n2, ref wcount);
            h = 0.0001;
            etol = 0.01;
            
            //
            // Initialize
            //
            x = new double[nin-1+1];
            x1 = new double[nin-1+1];
            x2 = new double[nin-1+1];
            y = new double[nout-1+1];
            y1 = new double[nout-1+1];
            y2 = new double[nout-1+1];
            grad1 = new double[wcount-1+1];
            grad2 = new double[wcount-1+1];
            
            //
            // Process
            //
            for(pass=1; pass<=passcount; pass++)
            {
                mlpbase.mlprandomizefull(network);
                
                //
                // Test error/gradient calculation (least squares)
                //
                xy = new double[0+1, nin+nout-1+1];
                for(i=0; i<=nin-1; i++)
                {
                    x[i] = 4*math.randomreal()-2;
                }
                for(i_=0; i_<=nin-1;i_++)
                {
                    xy[0,i_] = x[i_];
                }
                if( mlpbase.mlpissoftmax(network) )
                {
                    for(i=0; i<=nout-1; i++)
                    {
                        y[i] = 0;
                    }
                    xy[0,nin] = math.randominteger(nout);
//......... (part of the code omitted here) .........
Developer: dmX-Inc, Project: Clustering-Search-Results, Lines: 101, Source: test_c.cs
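
The test above compares analytic gradients against finite differences, using step h = 0.0001 and tolerance etol = 0.01. Below is a minimal, self-contained sketch of such a central-difference check for a one-dimensional function; it is plain C# (requires using System;), not ALGLIB code, and the helper name CheckGradient is hypothetical.

        // Returns true if the analytic derivative df agrees with the
        // central-difference estimate of f at point w, within tolerance etol.
        private static bool CheckGradient(Func<double,double> f, Func<double,double> df,
            double w, double h, double etol)
        {
            double numeric = (f(w+h)-f(w-h))/(2*h);   // central difference
            double analytic = df(w);
            return Math.Abs(numeric-analytic)<=etol;
        }

        // Example: f(w) = w*w has derivative 2*w.
        // bool ok = CheckGradient(t => t*t, t => 2*t, 0.7, 0.0001, 0.01);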


Example 11: init

 public override void init()
 {
     bestparameters = new double[0];
     network = new mlpbase.multilayerperceptron();
     optimizer = new minlbfgs.minlbfgsstate();
     optimizerrep = new minlbfgs.minlbfgsreport();
     wbuf0 = new double[0];
     wbuf1 = new double[0];
     allminibatches = new int[0];
     currentminibatch = new int[0];
     rstate = new rcommstate();
     generator = new hqrnd.hqrndstate();
 }
Developer: Kerbas-ad-astra, Project: MechJeb2, Lines: 13, Source: dataanalysis.cs


Example 12: testinformational

        /*************************************************************************
        Informational functions test
        *************************************************************************/
        private static void testinformational(int nkind,
            int nin,
            int nhid1,
            int nhid2,
            int nout,
            int passcount,
            ref bool err)
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int n1 = 0;
            int n2 = 0;
            int wcount = 0;

            createnetwork(network, nkind, 0.0, 0.0, nin, nhid1, nhid2, nout);
            mlpbase.mlpproperties(network, ref n1, ref n2, ref wcount);
            err = ((err | n1!=nin) | n2!=nout) | wcount<=0;
        }
Developer: palefacer, Project: TelescopeOrientation, Lines: 20, Source: test_c.cs


Example 13: testinformational

        /*************************************************************************
        Informational functions test
        *************************************************************************/
        private static void testinformational(int nkind,
            int nin,
            int nhid1,
            int nhid2,
            int nout,
            int passcount,
            ref bool err)
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int n1 = 0;
            int n2 = 0;
            int wcount = 0;
            int i = 0;
            int j = 0;
            int k = 0;
            double threshold = 0;
            int nlayers = 0;
            int nmax = 0;
            bool issoftmax = new bool();
            double[,] neurons = new double[0,0];
            double[] x = new double[0];
            double[] y = new double[0];
            double mean = 0;
            double sigma = 0;
            int fkind = 0;
            double c = 0;
            double f = 0;
            double df = 0;
            double d2f = 0;
            double s = 0;

            threshold = 100000*math.machineepsilon;
            createnetwork(network, nkind, 0.0, 0.0, nin, nhid1, nhid2, nout);
            
            //
            // test MLPProperties()
            //
            mlpbase.mlpproperties(network, ref n1, ref n2, ref wcount);
            err = ((err | n1!=nin) | n2!=nout) | wcount<=0;
            
            //
            // Test network geometry functions
            //
            // In order to do this we calculate neural network output using
            // informational functions only, and compare results with ones
            // obtained with MLPProcess():
            // 1. we allocate 2-dimensional array of neurons and fill it by zeros
            // 2. we full first layer of neurons by input values
            // 3. we move through array, calculating values of subsequent layers
            // 4. if we have classification network, we SOFTMAX-normalize output layer
            // 5. we apply scaling to the outputs
            // 6. we compare results with ones obtained by MLPProcess()
            //
            // NOTE: it is important to do (4) before (5), because on SOFTMAX network
            //       MLPGetOutputScaling() must return Mean=0 and Sigma=1. In order
            //       to test it implicitly, we apply it to the classifier results
            //       (already normalized). If one of the coefficients deviates from
            //       expected values, we will get error during (6).
            //
            nlayers = 2;
            nmax = Math.Max(nin, nout);
            issoftmax = nkind==1;
            if( nhid1!=0 )
            {
                nlayers = 3;
                nmax = Math.Max(nmax, nhid1);
            }
            if( nhid2!=0 )
            {
                nlayers = 4;
                nmax = Math.Max(nmax, nhid2);
            }
            neurons = new double[nlayers, nmax];
            for(i=0; i<=nlayers-1; i++)
            {
                for(j=0; j<=nmax-1; j++)
                {
                    neurons[i,j] = 0;
                }
            }
            x = new double[nin];
            for(i=0; i<=nin-1; i++)
            {
                x[i] = 2*math.randomreal()-1;
            }
            y = new double[nout];
            for(i=0; i<=nout-1; i++)
            {
                y[i] = 2*math.randomreal()-1;
            }
            for(j=0; j<=nin-1; j++)
            {
                mlpbase.mlpgetinputscaling(network, j, ref mean, ref sigma);
                neurons[0,j] = (x[j]-mean)/sigma;
            }
            for(i=1; i<=nlayers-1; i++)
            {
//......... (part of the code omitted here) .........
Developer: dmX-Inc, Project: Clustering-Search-Results, Lines: 101, Source: test_c.cs


Example 14: testprocessing

        /*************************************************************************
        Processing functions test
        *************************************************************************/
        private static void testprocessing(int nkind,
            int nin,
            int nhid1,
            int nhid2,
            int nout,
            int passcount,
            ref bool err)
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            mlpbase.multilayerperceptron network2 = new mlpbase.multilayerperceptron();
            int n1 = 0;
            int n2 = 0;
            int wcount = 0;
            bool zeronet = new bool();
            double a1 = 0;
            double a2 = 0;
            int pass = 0;
            int i = 0;
            bool allsame = new bool();
            double[] x1 = new double[0];
            double[] x2 = new double[0];
            double[] y1 = new double[0];
            double[] y2 = new double[0];
            double v = 0;
            int i_ = 0;

            ap.assert(passcount>=2, "PassCount<2!");
            
            //
            // Prepare network
            //
            a1 = 0;
            a2 = 0;
            if( nkind==2 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = 2*math.randomreal()-1;
            }
            if( nkind==3 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = a1+(2*math.randominteger(2)-1)*(0.1+0.9*math.randomreal());
            }
            createnetwork(network, nkind, a1, a2, nin, nhid1, nhid2, nout);
            mlpbase.mlpproperties(network, ref n1, ref n2, ref wcount);
            
            //
            // Initialize arrays
            //
            x1 = new double[nin-1+1];
            x2 = new double[nin-1+1];
            y1 = new double[nout-1+1];
            y2 = new double[nout-1+1];
            
            //
            // Main cycle
            //
            for(pass=1; pass<=passcount; pass++)
            {
                
                //
                // Last run is made on zero network
                //
                mlpbase.mlprandomizefull(network);
                zeronet = false;
                if( pass==passcount )
                {
                    for(i_=0; i_<=wcount-1;i_++)
                    {
                        network.weights[i_] = 0*network.weights[i_];
                    }
                    zeronet = true;
                }
                
                //
                // Same inputs leads to same outputs
                //
                for(i=0; i<=nin-1; i++)
                {
                    x1[i] = 2*math.randomreal()-1;
                    x2[i] = x1[i];
                }
                for(i=0; i<=nout-1; i++)
                {
                    y1[i] = 2*math.randomreal()-1;
                    y2[i] = 2*math.randomreal()-1;
                }
                mlpbase.mlpprocess(network, x1, ref y1);
                mlpbase.mlpprocess(network, x2, ref y2);
                allsame = true;
                for(i=0; i<=nout-1; i++)
                {
                    allsame = allsame & (double)(y1[i])==(double)(y2[i]);
                }
                err = err | !allsame;
                
                //
//......... (part of the code omitted here) .........
Developer: dmX-Inc, Project: Clustering-Search-Results, Lines: 101, Source: test_c.cs


Example 15: testhessian

        /*************************************************************************
        Hessian functions test
        *************************************************************************/
        private static void testhessian(int nkind,
            int nin,
            int nhid1,
            int nhid2,
            int nout,
            int passcount,
            ref bool err)
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int hkind = 0;
            int n1 = 0;
            int n2 = 0;
            int wcount = 0;
            double h = 0;
            double etol = 0;
            int pass = 0;
            int i = 0;
            int j = 0;
            int ssize = 0;
            double a1 = 0;
            double a2 = 0;
            double[,] xy = new double[0,0];
            double[,] h1 = new double[0,0];
            double[,] h2 = new double[0,0];
            double[] grad1 = new double[0];
            double[] grad2 = new double[0];
            double[] grad3 = new double[0];
            double[] x = new double[0];
            double[] y = new double[0];
            double[] x1 = new double[0];
            double[] x2 = new double[0];
            double[] y1 = new double[0];
            double[] y2 = new double[0];
            double v = 0;
            double e1 = 0;
            double e2 = 0;
            double wprev = 0;
            int i_ = 0;
            int i1_ = 0;

            ap.assert(passcount>=2, "PassCount<2!");
            a1 = 0;
            a2 = 0;
            if( nkind==2 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = 2*math.randomreal()-1;
            }
            if( nkind==3 )
            {
                a1 = 1000*math.randomreal()-500;
                a2 = a1+(2*math.randominteger(2)-1)*(0.1+0.9*math.randomreal());
            }
            createnetwork(network, nkind, a1, a2, nin, nhid1, nhid2, nout);
            mlpbase.mlpproperties(network, ref n1, ref n2, ref wcount);
            h = 0.0001;
            etol = 0.05;
            
            //
            // Initialize
            //
            x = new double[nin-1+1];
            x1 = new double[nin-1+1];
            x2 = new double[nin-1+1];
            y = new double[nout-1+1];
            y1 = new double[nout-1+1];
            y2 = new double[nout-1+1];
            grad1 = new double[wcount-1+1];
            grad2 = new double[wcount-1+1];
            grad3 = new double[wcount-1+1];
            h1 = new double[wcount-1+1, wcount-1+1];
            h2 = new double[wcount-1+1, wcount-1+1];
            
            //
            // Process
            //
            for(pass=1; pass<=passcount; pass++)
            {
                mlpbase.mlprandomizefull(network);
                
                //
                // Test Hessian calculation.
                // E1 contains total error (calculated using MLPGrad/MLPGradN)
                // Grad1 contains total gradient (calculated using MLPGrad/MLPGradN)
                // H1 contains Hessian calculated using differences of gradients
                //
                // E2, Grad2 and H2 contain the corresponding values calculated using MLPHessianBatch/MLPHessianNBatch
                //
                for(hkind=0; hkind<=1; hkind++)
                {
                    ssize = 1+math.randominteger(10);
                    xy = new double[ssize-1+1, nin+nout-1+1];
                    for(i=0; i<=wcount-1; i++)
                    {
                        grad1[i] = 0;
                    }
                    for(i=0; i<=wcount-1; i++)
//......... (part of the code omitted here) .........
Developer: dmX-Inc, Project: Clustering-Search-Results, Lines: 101, Source: test_c.cs


Example 16: mlpetraines

        /*************************************************************************
        Training neural networks ensemble using early stopping.

        INPUT PARAMETERS:
            Ensemble    -   model with initialized geometry
            XY          -   training set
            NPoints     -   training set size
            Decay       -   weight decay coefficient, >=0.001
            Restarts    -   restarts, >0.

        OUTPUT PARAMETERS:
            Ensemble    -   trained model
            Info        -   return code:
                            * -2, if there is a point with class number
                                  outside of [0..NClasses-1].
                            * -1, if incorrect parameters were passed
                                  (NPoints<0, Restarts<1).
                            *  6, if task has been solved.
            Rep         -   training report.
            OOBErrors   -   out-of-bag generalization error estimate

          -- ALGLIB --
             Copyright 10.03.2009 by Bochkanov Sergey
        *************************************************************************/
        public static void mlpetraines(mlpensemble ensemble,
            double[,] xy,
            int npoints,
            double decay,
            int restarts,
            ref int info,
            mlptrain.mlpreport rep)
        {
            int i = 0;
            int k = 0;
            int ccount = 0;
            int pcount = 0;
            double[,] trnxy = new double[0,0];
            double[,] valxy = new double[0,0];
            int trnsize = 0;
            int valsize = 0;
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int tmpinfo = 0;
            mlptrain.mlpreport tmprep = new mlptrain.mlpreport();
            int i_ = 0;
            int i1_ = 0;

            info = 0;

            if( (npoints<2 | restarts<1) | (double)(decay)<(double)(0) )
            {
                info = -1;
                return;
            }
            if( ensemble.issoftmax )
            {
                for(i=0; i<=npoints-1; i++)
                {
                    if( (int)Math.Round(xy[i,ensemble.nin])<0 | (int)Math.Round(xy[i,ensemble.nin])>=ensemble.nout )
                    {
                        info = -2;
                        return;
                    }
                }
            }
            info = 6;
            
            //
            // allocate
            //
            if( ensemble.issoftmax )
            {
                ccount = ensemble.nin+1;
                pcount = ensemble.nin;
            }
            else
            {
                ccount = ensemble.nin+ensemble.nout;
                pcount = ensemble.nin+ensemble.nout;
            }
            trnxy = new double[npoints-1+1, ccount-1+1];
            valxy = new double[npoints-1+1, ccount-1+1];
            mlpbase.mlpunserializeold(ensemble.serializedmlp, network);
            rep.ngrad = 0;
            rep.nhess = 0;
            rep.ncholesky = 0;
            
            //
            // train networks
            //
            for(k=0; k<=ensemble.ensemblesize-1; k++)
            {
                
                //
                // Split set
                //
                do
                {
                    trnsize = 0;
                    valsize = 0;
                    for(i=0; i<=npoints-1; i++)
//......... (part of the code omitted here) .........
Developer: Ring-r, Project: opt, Lines: 101, Source: dataanalysis.cs
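
A hedged usage sketch for mlpetraines, assuming a regression ensemble created with mlpecreater1 (Example 7), a training set laid out as NIn input columns followed by NOut target columns as described above, and that the call is made from a scope where both functions are visible; all sizes and training parameters are placeholders.

            // Hypothetical setup: 1 input, 5 hidden neurons, 1 output bounded to [-1, 1],
            // 10 ensemble members, trained on 100 points with decay 0.001 and 3 restarts.
            mlpensemble ensemble = new mlpensemble();
            mlpecreater1(1, 5, 1, -1.0, 1.0, 10, ensemble);
            double[,] xy = new double[100, 2];   // column 0: input, column 1: target
            // ... fill xy with training data ...
            int info = 0;
            mlptrain.mlpreport rep = new mlptrain.mlpreport();
            mlpetraines(ensemble, xy, 100, 0.001, 3, ref info, rep);
            // info==6 means the task has been solved (see the comments above).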


Example 17: mnltrainh

        /*************************************************************************
        This subroutine trains logit model.

        INPUT PARAMETERS:
            XY          -   training set, array[0..NPoints-1,0..NVars]
                            First NVars columns store values of independent
                            variables, next column stores number of class (from 0
                            to NClasses-1) which dataset element belongs to. Fractional
                            values are rounded to nearest integer.
            NPoints     -   training set size, NPoints>=1
            NVars       -   number of independent variables, NVars>=1
            NClasses    -   number of classes, NClasses>=2

        OUTPUT PARAMETERS:
            Info        -   return code:
                            * -2, if there is a point with class number
                                  outside of [0..NClasses-1].
                            * -1, if incorrect parameters were passed
                                  (NPoints<NVars+2, NVars<1, NClasses<2).
                            *  1, if task has been solved
            LM          -   model built
            Rep         -   training report

          -- ALGLIB --
             Copyright 10.09.2008 by Bochkanov Sergey
        *************************************************************************/
        public static void mnltrainh(double[,] xy,
            int npoints,
            int nvars,
            int nclasses,
            ref int info,
            logitmodel lm,
            mnlreport rep)
        {
            int i = 0;
            int j = 0;
            int k = 0;
            int ssize = 0;
            bool allsame = new bool();
            int offs = 0;
            double threshold = 0;
            double wminstep = 0;
            double decay = 0;
            int wdim = 0;
            int expoffs = 0;
            double v = 0;
            double s = 0;
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int nin = 0;
            int nout = 0;
            int wcount = 0;
            double e = 0;
            double[] g = new double[0];
            double[,] h = new double[0,0];
            bool spd = new bool();
            double[] x = new double[0];
            double[] y = new double[0];
            double[] wbase = new double[0];
            double wstep = 0;
            double[] wdir = new double[0];
            double[] work = new double[0];
            int mcstage = 0;
            logitmcstate mcstate = new logitmcstate();
            int mcinfo = 0;
            int mcnfev = 0;
            int solverinfo = 0;
            densesolver.densesolverreport solverrep = new densesolver.densesolverreport();
            int i_ = 0;
            int i1_ = 0;

            info = 0;

            threshold = 1000*math.machineepsilon;
            wminstep = 0.001;
            decay = 0.001;
            
            //
            // Test for inputs
            //
            if( (npoints<nvars+2 || nvars<1) || nclasses<2 )
            {
                info = -1;
                return;
            }
            for(i=0; i<=npoints-1; i++)
            {
                if( (int)Math.Round(xy[i,nvars])<0 || (int)Math.Round(xy[i,nvars])>=nclasses )
                {
                    info = -2;
                    return;
                }
            }
            info = 1;
            
            //
            // Initialize data
            //
            rep.ngrad = 0;
            rep.nhess = 0;
            
//......... (part of the code omitted here) .........
Developer: lgatto, Project: proteowizard, Lines: 101, Source: dataanalysis.cs
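
A hedged usage sketch for mnltrainh, following the dataset layout described in the comments above (NVars feature columns followed by one class-index column) and assuming logitmodel and mnlreport have parameterless constructors; the sizes are placeholders.

            // Hypothetical call: 100 points, 2 independent variables, 3 classes.
            double[,] xy = new double[100, 3];   // columns 0..1: features, column 2: class in 0..2
            // ... fill xy with training data ...
            int info = 0;
            logitmodel lm = new logitmodel();
            mnlreport rep = new mnlreport();
            mnltrainh(xy, 100, 2, 3, ref info, lm, rep);
            // info==1 means the task has been solved (see the comments above).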


Example 18: mlpebagginginternal

        /*************************************************************************
        Internal bagging subroutine.

          -- ALGLIB --
             Copyright 19.02.2009 by Bochkanov Sergey
        *************************************************************************/
        private static void mlpebagginginternal(mlpensemble ensemble,
            double[,] xy,
            int npoints,
            double decay,
            int restarts,
            double wstep,
            int maxits,
            bool lmalgorithm,
            ref int info,
            mlptrain.mlpreport rep,
            mlptrain.mlpcvreport ooberrors)
        {
            double[,] xys = new double[0,0];
            bool[] s = new bool[0];
            double[,] oobbuf = new double[0,0];
            int[] oobcntbuf = new int[0];
            double[] x = new double[0];
            double[] y = new double[0];
            double[] dy = new double[0];
            double[] dsbuf = new double[0];
            int nin = 0;
            int nout = 0;
            int ccnt = 0;
            int pcnt = 0;
            int i = 0;
            int j = 0;
            int k = 0;
            double v = 0;
            mlptrain.mlpreport tmprep = new mlptrain.mlpreport();
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            int i_ = 0;
            int i1_ = 0;

            info = 0;

            
            //
            // Test for inputs
            //
            if( (!lmalgorithm & (double)(wstep)==(double)(0)) & maxits==0 )
            {
                info = -8;
                return;
            }
            if( ((npoints<=0 | restarts<1) | (double)(wstep)<(double)(0)) | maxits<0 )
            {
                info = -1;
                return;
            }
            if( ensemble.issoftmax )
            {
                for(i=0; i<=npoints-1; i++)
                {
                    if( (int)Math.Round(xy[i,ensemble.nin])<0 | (int)Math.Round(xy[i,ensemble.nin])>=ensemble.nout )
                    {
                        info = -2;
                        return;
                    }
                }
            }
            
            //
            // allocate temporaries
            //
//......... (part of the code omitted here) .........