Java InMemoryStatsStorage Class Code Examples


This article collects typical usage examples of the Java class org.deeplearning4j.ui.storage.InMemoryStatsStorage. If you have been wondering what InMemoryStatsStorage does, how to use it, or where to find working examples, the curated class code examples below may help.



The InMemoryStatsStorage class belongs to the org.deeplearning4j.ui.storage package. Eleven code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
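Before the individual examples, the pattern they all share can be summarized in a short sketch: create an InMemoryStatsStorage, attach it to the DL4J training UI, and register a StatsListener on the model so that training statistics flow into the storage. The sketch below is a minimal illustration, not taken from any of the listed projects; it assumes the deeplearning4j-ui module is on the classpath, and buildAndInitModel is a hypothetical placeholder for your own network setup.

import org.deeplearning4j.api.storage.StatsStorage;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.ui.api.UIServer;
import org.deeplearning4j.ui.stats.StatsListener;
import org.deeplearning4j.ui.storage.InMemoryStatsStorage;

public class UiQuickStart {
    public static void main(String[] args) {
        // Hold training statistics in memory; they are lost when the JVM exits
        StatsStorage statsStorage = new InMemoryStatsStorage();

        // Start (or reuse) the DL4J training UI and attach the storage so it is visualized
        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(statsStorage);

        // Hypothetical helper standing in for your own configured and initialized network
        MultiLayerNetwork model = buildAndInitModel();

        // Route per-iteration statistics from the model into the attached storage
        model.setListeners(new StatsListener(statsStorage));

        // Call model.fit(...) as usual; the UI is served on port 9000 by default
    }

    private static MultiLayerNetwork buildAndInitModel() {
        throw new UnsupportedOperationException("Replace with your own network configuration");
    }
}

InMemoryStatsStorage is convenient for quick experiments; if the statistics need to survive a JVM restart, the file-backed FileStatsStorage from the same module plays the same role.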

Example 1: testListenersViaModel

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
public void testListenersViaModel() {
    TestListener.clearCounts();

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list().layer(0,
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                    .activation(Activation.TANH).build());

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    StatsStorage ss = new InMemoryStatsStorage();
    model.setListeners(new TestListener(), new StatsListener(ss));

    testListenersForModel(model, null);

    assertEquals(1, ss.listSessionIDs().size());
    assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 21, Source: TestListeners.java


Example 2: testListenersViaModelGraph

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
public void testListenersViaModelGraph() {
    TestListener.clearCounts();

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("in").addLayer("0",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                                    .activation(Activation.TANH).build(),
                                    "in")
                    .setOutputs("0").build();

    ComputationGraph model = new ComputationGraph(conf);
    model.init();

    StatsStorage ss = new InMemoryStatsStorage();
    model.setListeners(new TestListener(), new StatsListener(ss));

    testListenersForModel(model, null);

    assertEquals(1, ss.listSessionIDs().size());
    assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 23, Source: TestListeners.java


Example 3: enableRemoteListener

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Override
public void enableRemoteListener() {
    if (remoteReceiverModule == null)
        remoteReceiverModule = new RemoteReceiverModule();
    if (remoteReceiverModule.isEnabled())
        return;
    enableRemoteListener(new InMemoryStatsStorage(), true);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 9, Source: PlayUIServer.java


Example 4: testUIMultipleSessions

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
@Ignore
public void testUIMultipleSessions() throws Exception {

    for (int session = 0; session < 3; session++) {

        StatsStorage ss = new InMemoryStatsStorage();

        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                        .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                        .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                        .pretrain(false).backprop(true).build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
            Thread.sleep(100);
        }
    }


    Thread.sleep(1000000);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 34, Source: TestPlayUI.java


Example 5: testUICompGraph

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
@Ignore
public void testUICompGraph() throws Exception {

    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
                    .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(),
                                    "in")
                    .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
                    .pretrain(false).backprop(true).setOutputs("L1").build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 100; i++) {
        net.fit(iter);
        Thread.sleep(100);
    }

    Thread.sleep(100000);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 31, Source: TestPlayUI.java


Example 6: testParallelStatsListenerCompatibility

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
@Ignore //To be run manually
public void testParallelStatsListenerCompatibility() throws Exception {
    UIServer uiServer = UIServer.getInstance();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new OutputLayer.Builder().nIn(3).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);

    // it's important that the UI can report results from parallel training
    // there's potential for StatsListener to fail if certain properties aren't set in the model
    StatsStorage statsStorage = new InMemoryStatsStorage();
    net.setListeners(new StatsListener(statsStorage));
    uiServer.attach(statsStorage);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 500);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(500))
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                                    .evaluateEveryNEpochs(2).modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
                    new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 3, 6, 2);

    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 37, Source: TestParallelEarlyStoppingUI.java


Example 7: main

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
public static void main(String[] args) {
    UIServer server = UIServer.getInstance();
    StatsStorage statsStorage = new InMemoryStatsStorage();
    server.attach(statsStorage);
    server.enableRemoteListener();
}
 
Developer: buybrain, Project: docker-dl4j-ui, Lines: 7, Source: Server.java
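Example 7 only sets up the receiving side: a UI server that accepts statistics posted over HTTP. As a hedged sketch of the sending side (assuming the RemoteUIStatsStorageRouter class from the DL4J UI module and the server's default port 9000; the exact package path may differ between DL4J versions), a training process could stream its stats to that server like this:

import org.deeplearning4j.api.storage.StatsStorageRouter;
import org.deeplearning4j.api.storage.impl.RemoteUIStatsStorageRouter;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.ui.stats.StatsListener;

public class RemoteStatsClient {
    // Attach a listener that posts statistics over HTTP to the UI server from Example 7
    public static void attachRemoteListener(MultiLayerNetwork model, String serverUrl) {
        StatsStorageRouter remote = new RemoteUIStatsStorageRouter(serverUrl); // e.g. "http://localhost:9000"
        model.setListeners(new StatsListener(remote));
        // Subsequent model.fit(...) calls will then stream their stats to the remote UI
    }
}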


Example 8: LSTMTrainer

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
/**
 * Constructor
 * @param trainingSet Text file containing several ABC music files
 * @param seed        Seed used to initialize the random number generator
 * @throws IOException
 */
public LSTMTrainer(String trainingSet, int seed) throws IOException {
    lstmLayerSize_ = 200; // original 200
    batchSize_ = 32; // original 32
    truncatedBackPropThroughTimeLength_ = 50;
    nbEpochs_ = 100;
    learningRate_ = 0.04; // 0.1 original // best 0.05 3epochs
    generateSamplesEveryNMinibatches_ = 200;
    generationInitialization_ = "X";
    seed_ = seed;
    random_ = new Random(seed);
    output_ = null;

    trainingSetIterator_ = new ABCIterator(trainingSet, Charset.forName("ASCII"), batchSize_, random_);
    charToInt_ = trainingSetIterator_.getCharToInt();
    intToChar_ = trainingSetIterator_.getIntToChar();
    exampleLength_ = trainingSetIterator_.getExampleLength();

    int nOut = trainingSetIterator_.totalOutcomes();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
            .learningRate(learningRate_)
            .rmsDecay(0.95) // 0.95 original
            .seed(seed_)
            .regularization(true) // true original
            .l2(0.001)
            .weightInit(WeightInit.XAVIER)
            .updater(Updater.RMSPROP)
            .list()
            .layer(0, new GravesLSTM.Builder().nIn(trainingSetIterator_.inputColumns()).nOut(lstmLayerSize_)
                    .activation("tanh").build())
            .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize_).nOut(lstmLayerSize_)
                    .activation("tanh").build())
            .layer(2, new GravesLSTM.Builder().nIn(lstmLayerSize_).nOut(lstmLayerSize_)
                    .activation("tanh").build())
            .layer(3, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax")
                    .nIn(lstmLayerSize_).nOut(nOut).build())
            .backpropType(BackpropType.TruncatedBPTT)
                .tBPTTForwardLength(truncatedBackPropThroughTimeLength_)
                .tBPTTBackwardLength(truncatedBackPropThroughTimeLength_)
            .pretrain(false).backprop(true)
            .build();

    lstmNet_ = new MultiLayerNetwork(conf);
    lstmNet_.init();
    //lstmNet_.setListeners(new ScoreIterationListener(1));
    //lstmNet_.setListeners(new HistogramIterationListener(1));
    UIServer uiServer = UIServer.getInstance();
    StatsStorage statsStorage = new InMemoryStatsStorage();
    uiServer.attach(statsStorage);
    lstmNet_.setListeners(new StatsListener(statsStorage));

    if (ExecutionParameters.verbose) {
        Layer[] layers = lstmNet_.getLayers();
        int totalNumParams = 0;
        for (int i = 0; i < layers.length; i++) {
            int nParams = layers[i].numParams();
            System.out.println("Number of parameters in layer " + i + ": " + nParams);
            totalNumParams += nParams;
        }
        System.out.println("Total number of network parameters: " + totalNumParams);
    }
}
 
Developer: paveyry, Project: LyreLand, Lines: 69, Source: LSTMTrainer.java


Example 9: train

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
private static void train(CommandLine c) {
    int nEpochs = Integer.parseInt(c.getOptionValue("e"));
    String modelName = c.getOptionValue("o");
    DataIterator<NormalizerStandardize> it = DataIterator.irisCsv(c.getOptionValue("i"));
    RecordReaderDataSetIterator trainData = it.getIterator();
    NormalizerStandardize normalizer = it.getNormalizer();

    log.info("Data Loaded");

    MultiLayerConfiguration conf = net(4, 3);
    MultiLayerNetwork model = new MultiLayerNetwork(conf);

    model.init();

    UIServer uiServer = UIServer.getInstance();

    StatsStorage statsStorage = new InMemoryStatsStorage();
    uiServer.attach(statsStorage);
    model.setListeners(Arrays.asList(new ScoreIterationListener(1), new StatsListener(statsStorage)));

    for (int i = 0; i < nEpochs; i++) {
        log.info("Starting epoch {} of {}", i, nEpochs);

        while (trainData.hasNext()) {
            model.fit(trainData.next());
        }

        log.info("Finished epoch {}", i);
        trainData.reset();
    }

    try {
        ModelSerializer.writeModel(model, modelName, true);

        normalizer.save(
                new File(modelName + ".norm1"),
                new File(modelName + ".norm2"),
                new File(modelName + ".norm3"),
                new File(modelName + ".norm4")
        );
    } catch (IOException e) {
        e.printStackTrace();
    }

    log.info("Model saved to: {}", modelName);
}
 
Developer: wmeddie, Project: dl4j-trainer-archetype, Lines: 47, Source: Train.java


Example 10: testUI

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
@Ignore
public void testUI() throws Exception {

    StatsStorage ss = new InMemoryStatsStorage();

    PlayUIServer uiServer = (PlayUIServer) UIServer.getInstance();
    assertEquals(9000, uiServer.getPort());
    uiServer.stop();
    PlayUIServer playUIServer = new PlayUIServer();
    playUIServer.runMain(new String[] {"--uiPort", "9100", "-r", "true"});

    assertEquals(9100, playUIServer.getPort());
    playUIServer.stop();


    //        uiServer.attach(ss);
    //
    //        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
    //                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
    //                .list()
    //                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
    //                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
    //                .pretrain(false).backprop(true).build();
    //
    //        MultiLayerNetwork net = new MultiLayerNetwork(conf);
    //        net.init();
    //        net.setListeners(new StatsListener(ss, 3), new ScoreIterationListener(1));
    //
    //        DataSetIterator iter = new IrisDataSetIterator(150, 150);
    //
    //        for (int i = 0; i < 500; i++) {
    //            net.fit(iter);
    ////            Thread.sleep(100);
    //            Thread.sleep(100);
    //        }
    //
    ////        uiServer.stop();

    Thread.sleep(100000);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 42, Source: TestPlayUI.java


Example 11: testUI_VAE

import org.deeplearning4j.ui.storage.InMemoryStatsStorage; // import the required package/class
@Test
@Ignore
public void testUI_VAE() throws Exception {
    //Variational autoencoder - for unsupervised layerwise pretraining

    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(1e-5))
                    .list().layer(0,
                                    new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(10, 11)
                                                    .decoderLayerSizes(12, 13).weightInit(WeightInit.XAVIER)
                                                    .pzxActivationFunction(Activation.IDENTITY)
                                                    .reconstructionDistribution(
                                                                    new GaussianReconstructionDistribution())
                                                    .activation(Activation.LEAKYRELU).build())
                    .layer(1, new VariationalAutoencoder.Builder().nIn(3).nOut(3).encoderLayerSizes(7)
                                    .decoderLayerSizes(8).weightInit(WeightInit.XAVIER)
                                    .pzxActivationFunction(Activation.IDENTITY)
                                    .reconstructionDistribution(new GaussianReconstructionDistribution())
                                    .activation(Activation.LEAKYRELU).build())
                    .layer(2, new OutputLayer.Builder().nIn(3).nOut(3).build()).pretrain(true).backprop(true)
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 50; i++) {
        net.fit(iter);
        Thread.sleep(100);
    }


    Thread.sleep(100000);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 43, Source: TestPlayUI.java



Note: The org.deeplearning4j.ui.storage.InMemoryStatsStorage class examples in this article were compiled from source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors, who retain copyright of the source code; please consult the corresponding project's license before distributing or using it. Do not republish without permission.

