This article collects typical usage examples of the Java class org.datavec.api.split.FileSplit. If you are unsure what FileSplit does or how to use it in practice, the curated examples below should help.
The FileSplit class belongs to the org.datavec.api.split package. Twenty code examples are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps surface better Java code examples.
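Before the individual examples, here is a minimal, self-contained sketch of the typical FileSplit pattern: wrap a file (or directory) in a FileSplit and hand it to a RecordReader via initialize(...). This is an illustrative sketch, not code from any of the projects below; the file name data.csv is a placeholder assumption.

import java.io.File;
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;

public class FileSplitSketch {
    public static void main(String[] args) throws Exception {
        // A FileSplit describes a single file, or a directory whose contents are enumerated
        FileSplit split = new FileSplit(new File("data.csv"));  // hypothetical path
        RecordReader reader = new CSVRecordReader(0, ',');      // skip 0 lines, comma-delimited
        reader.initialize(split);                               // the reader iterates the locations in the split
        while (reader.hasNext()) {
            System.out.println(reader.next());                  // each record is a List<Writable>
        }
        reader.close();
    }
}

The same split/reader pairing appears throughout the examples that follow, with CSV, TF-IDF, video codec, regex, SVMLight, and image record readers.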
Example 1: test
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void test() throws Exception {
    CSVRecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));
    CSVRecordReader rr2 = new CSVRecordReader(0, ',');
    rr2.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));

    RecordReader rrC = new ConcatenatingRecordReader(rr, rr2);

    int count = 0;
    while (rrC.hasNext()) {
        rrC.next();
        count++;
    }

    assertEquals(300, count);
}
Developer: deeplearning4j, Project: DataVec, Lines: 20, Source: TestConcatenatingRecordReader.java
Example 2: load
import org.datavec.api.split.FileSplit; // import the required package/class
public void load(int batchSize, int numExamples, int numLabels, PathLabelGenerator labelGenerator,
                 double splitTrainTest, Random rng) {
    if (!imageFilesExist()) {
        if (!fullDir.exists() || fullDir.listFiles() == null || fullDir.listFiles().length == 0) {
            fullDir.mkdir();

            if (useSubset) {
                log.info("Downloading {} subset...", localDir);
                downloadAndUntar(lfwSubsetData, fullDir);
            } else {
                log.info("Downloading {}...", localDir);
                downloadAndUntar(lfwData, fullDir);
                downloadAndUntar(lfwLabel, fullDir);
            }
        }
    }

    FileSplit fileSplit = new FileSplit(fullDir, ALLOWED_FORMATS, rng);
    BalancedPathFilter pathFilter = new BalancedPathFilter(rng, ALLOWED_FORMATS, labelGenerator, numExamples,
                    numLabels, 0, batchSize, null);
    inputSplit = fileSplit.sample(pathFilter, numExamples * splitTrainTest, numExamples * (1 - splitTrainTest));
}
Developer: deeplearning4j, Project: DataVec, Lines: 22, Source: LFWLoader.java
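In the load(...) method above, fileSplit.sample(pathFilter, ...) returns an array of InputSplit objects whose sizes follow the given weights (training portion first, then test). Below is a minimal sketch of how such sampled splits are commonly consumed; the directory path, the ParentPathLabelGenerator, the 80/20 weights, and the 28x28 RGB image dimensions are illustrative assumptions, not part of the LFWLoader code.

import java.io.File;
import java.util.Random;
import org.datavec.api.io.filters.BalancedPathFilter;
import org.datavec.api.io.labels.ParentPathLabelGenerator;
import org.datavec.api.split.FileSplit;
import org.datavec.api.split.InputSplit;
import org.datavec.image.loader.BaseImageLoader;
import org.datavec.image.recordreader.ImageRecordReader;

public class SampledSplitSketch {
    public static void main(String[] args) throws Exception {
        Random rng = new Random(123);
        File imageDir = new File("/path/to/images");   // hypothetical; one sub-directory per label
        ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();

        FileSplit fileSplit = new FileSplit(imageDir, BaseImageLoader.ALLOWED_FORMATS, rng);
        BalancedPathFilter pathFilter = new BalancedPathFilter(rng, BaseImageLoader.ALLOWED_FORMATS, labelMaker);

        // 80/20 train/test weights, mirroring numExamples * splitTrainTest above
        InputSplit[] splits = fileSplit.sample(pathFilter, 80, 20);
        InputSplit trainSplit = splits[0];
        InputSplit testSplit = splits[1];   // consumed the same way as trainSplit

        // Feed the training split to an ImageRecordReader (28x28, 3 channels assumed)
        ImageRecordReader trainReader = new ImageRecordReader(28, 28, 3, labelMaker);
        trainReader.initialize(trainSplit);
    }
}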
Example 3: testReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testReader() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));

    int count = 0;
    int[] labelAssertions = new int[3];
    while (reader.hasNext()) {
        Collection<Writable> record = reader.next();
        Iterator<Writable> recordIter = record.iterator();
        NDArrayWritable writable = (NDArrayWritable) recordIter.next();
        labelAssertions[count] = recordIter.next().toInt();
        count++;
    }

    assertArrayEquals(new int[] {0, 1, 2}, labelAssertions);
    assertEquals(3, reader.getLabels().size());
    assertEquals(3, count);
}
Developer: deeplearning4j, Project: DataVec, Lines: 24, Source: TfidfRecordReaderTest.java
Example 4: testRecordMetaData
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testRecordMetaData() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));

    while (reader.hasNext()) {
        Record record = reader.nextRecord();
        assertNotNull(record.getMetaData().getURI());
        assertEquals(record.getMetaData().getReaderClass(), TfidfRecordReader.class);
    }
}
Developer: deeplearning4j, Project: DataVec, Lines: 17, Source: TfidfRecordReaderTest.java
Example 5: testReadRecordFromMetaData
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testReadRecordFromMetaData() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));

    Record record = reader.nextRecord();
    Record reread = reader.loadFromMetaData(record.getMetaData());

    assertEquals(record.getRecord().size(), 2);
    assertEquals(reread.getRecord().size(), 2);
    assertEquals(record.getRecord().get(0), reread.getRecord().get(0));
    assertEquals(record.getRecord().get(1), reread.getRecord().get(1));
    assertEquals(record.getMetaData(), reread.getMetaData());
}
Developer: deeplearning4j, Project: DataVec, Lines: 21, Source: TfidfRecordReaderTest.java
Example 6: testCodecReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testCodecReader() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new CodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());

    List<List<Writable>> record = reader.sequenceRecord();
    // System.out.println(record.size());

    Iterator<List<Writable>> it = record.iterator();
    List<Writable> first = it.next();
    // System.out.println(first);

    // Expected size: 80x46x3
    assertEquals(1, first.size());
    assertEquals(80 * 46 * 3, ((ArrayWritable) first.iterator().next()).length());
}
Developer: deeplearning4j, Project: DataVec, Lines: 25, Source: CodecReaderTest.java
Example 7: testCodecReaderMeta
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testCodecReaderMeta() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new CodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());

    List<List<Writable>> record = reader.sequenceRecord();
    assertEquals(500, record.size()); // 500 frames

    reader.reset();
    SequenceRecord seqR = reader.nextSequence();
    assertEquals(record, seqR.getSequenceRecord());
    RecordMetaData meta = seqR.getMetaData();
    // System.out.println(meta);
    assertTrue(meta.getURI().toString().endsWith("fire_lowres.mp4"));

    SequenceRecord fromMeta = reader.loadSequenceFromMetaData(meta);
    assertEquals(seqR, fromMeta);
}
Developer: deeplearning4j, Project: DataVec, Lines: 27, Source: CodecReaderTest.java
Example 8: testNativeCodecReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Ignore
@Test
public void testNativeCodecReader() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new NativeCodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());

    List<List<Writable>> record = reader.sequenceRecord();
    // System.out.println(record.size());

    Iterator<List<Writable>> it = record.iterator();
    List<Writable> first = it.next();
    // System.out.println(first);

    // Expected size: 80x46x3
    assertEquals(1, first.size());
    assertEquals(80 * 46 * 3, ((ArrayWritable) first.iterator().next()).length());
}
Developer: deeplearning4j, Project: DataVec, Lines: 26, Source: CodecReaderTest.java
Example 9: simpleTransformTest
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void simpleTransformTest() throws Exception {
    Schema schema = new Schema.Builder().addColumnDouble("0").addColumnDouble("1").addColumnDouble("2")
                    .addColumnDouble("3").addColumnDouble("4").build();
    TransformProcess transformProcess = new TransformProcess.Builder(schema).removeColumns("0").build();
    CSVRecordReader csvRecordReader = new CSVRecordReader();
    csvRecordReader.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));
    TransformProcessRecordReader transformProcessRecordReader =
                    new TransformProcessRecordReader(csvRecordReader, transformProcess);
    assertEquals(4, transformProcessRecordReader.next().size());
}
Developer: deeplearning4j, Project: DataVec, Lines: 13, Source: TransformProcessRecordReaderTests.java
Example 10: testRegexLineRecordReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testRegexLineRecordReader() throws Exception {
    String regex = "(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}) (\\d+) ([A-Z]+) (.*)";

    RecordReader rr = new RegexLineRecordReader(regex, 1);
    rr.initialize(new FileSplit(new ClassPathResource("/logtestdata/logtestfile0.txt").getFile()));

    List<Writable> exp0 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.001"), new Text("1"),
                    new Text("DEBUG"), new Text("First entry message!"));
    List<Writable> exp1 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.002"), new Text("2"),
                    new Text("INFO"), new Text("Second entry message!"));
    List<Writable> exp2 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.003"), new Text("3"),
                    new Text("WARN"), new Text("Third entry message!"));
    assertEquals(exp0, rr.next());
    assertEquals(exp1, rr.next());
    assertEquals(exp2, rr.next());
    assertFalse(rr.hasNext());

    // Test reset:
    rr.reset();
    assertEquals(exp0, rr.next());
    assertEquals(exp1, rr.next());
    assertEquals(exp2, rr.next());
    assertFalse(rr.hasNext());
}
Developer: deeplearning4j, Project: DataVec, Lines: 26, Source: RegexRecordReaderTest.java
Example 11: testReset
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testReset() throws Exception {
    CSVRecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));

    int nResets = 5;
    for (int i = 0; i < nResets; i++) {

        int lineCount = 0;
        while (rr.hasNext()) {
            List<Writable> line = rr.next();
            assertEquals(5, line.size());
            lineCount++;
        }
        assertFalse(rr.hasNext());
        assertEquals(150, lineCount);
        rr.reset();
    }
}
Developer: deeplearning4j, Project: DataVec, Lines: 20, Source: CSVRecordReaderTest.java
Example 12: testResetWithSkipLines
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testResetWithSkipLines() throws Exception {
    CSVRecordReader rr = new CSVRecordReader(10, ',');
    rr.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));
    int lineCount = 0;
    while (rr.hasNext()) {
        rr.next();
        ++lineCount;
    }
    assertEquals(140, lineCount);

    rr.reset();
    lineCount = 0;
    while (rr.hasNext()) {
        rr.next();
        ++lineCount;
    }
    assertEquals(140, lineCount);
}
Developer: deeplearning4j, Project: DataVec, Lines: 19, Source: CSVRecordReaderTest.java
Example 13: testCsvSkipAllLines
import org.datavec.api.split.FileSplit; // import the required package/class
@Test(expected = NoSuchElementException.class)
public void testCsvSkipAllLines() throws IOException, InterruptedException {
    final int numLines = 4;
    final List<Writable> lineList = Arrays.asList((Writable) new IntWritable(numLines - 1),
                    (Writable) new Text("one"), (Writable) new Text("two"), (Writable) new Text("three"));
    String header = ",one,two,three";
    List<String> lines = new ArrayList<>();
    for (int i = 0; i < numLines; i++)
        lines.add(Integer.toString(i) + header);
    File tempFile = File.createTempFile("csvSkipLines", ".csv");
    FileUtils.writeLines(tempFile, lines);

    CSVRecordReader rr = new CSVRecordReader(numLines, ',');
    rr.initialize(new FileSplit(tempFile));
    rr.reset();
    assertTrue(!rr.hasNext());
    rr.next();
}
Developer: deeplearning4j, Project: DataVec, Lines: 19, Source: CSVRecordReaderTest.java
Example 14: testCsvSkipAllButOneLine
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testCsvSkipAllButOneLine() throws IOException, InterruptedException {
    final int numLines = 4;
    final List<Writable> lineList = Arrays.<Writable>asList(new Text(Integer.toString(numLines - 1)),
                    new Text("one"), new Text("two"), new Text("three"));
    String header = ",one,two,three";
    List<String> lines = new ArrayList<>();
    for (int i = 0; i < numLines; i++)
        lines.add(Integer.toString(i) + header);
    File tempFile = File.createTempFile("csvSkipLines", ".csv");
    FileUtils.writeLines(tempFile, lines);

    CSVRecordReader rr = new CSVRecordReader(numLines - 1, ',');
    rr.initialize(new FileSplit(tempFile));
    rr.reset();
    assertTrue(rr.hasNext());
    assertEquals(rr.next(), lineList);
}
Developer: deeplearning4j, Project: DataVec, Lines: 19, Source: CSVRecordReaderTest.java
Example 15: testNextRecord
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testNextRecord() throws IOException, InterruptedException {
    SVMLightRecordReader rr = new SVMLightRecordReader();
    Configuration config = new Configuration();
    config.setBoolean(SVMLightRecordReader.ZERO_BASED_INDEXING, false);
    config.setInt(SVMLightRecordReader.NUM_FEATURES, 10);
    config.setBoolean(SVMLightRecordReader.APPEND_LABEL, false);
    rr.initialize(config, new FileSplit(new ClassPathResource("svmlight/basic.txt").getFile()));

    Record record = rr.nextRecord();
    List<Writable> recordList = record.getRecord();
    assertEquals(new DoubleWritable(1.0), recordList.get(1));
    assertEquals(new DoubleWritable(3.0), recordList.get(5));
    assertEquals(new DoubleWritable(4.0), recordList.get(7));

    record = rr.nextRecord();
    recordList = record.getRecord();
    assertEquals(new DoubleWritable(0.1), recordList.get(0));
    assertEquals(new DoubleWritable(6.6), recordList.get(5));
    assertEquals(new DoubleWritable(80.0), recordList.get(7));
}
Developer: deeplearning4j, Project: DataVec, Lines: 22, Source: SVMLightRecordReaderTest.java
Example 16: testCsvRRSerializationResults
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testCsvRRSerializationResults() throws Exception {
    int skipLines = 3;
    RecordReader r1 = new CSVRecordReader(skipLines, '\t');

    // Round-trip the reader through Java serialization
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream os = new ObjectOutputStream(baos);
    os.writeObject(r1);
    byte[] bytes = baos.toByteArray();
    ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
    RecordReader r2 = (RecordReader) ois.readObject();

    File f = new ClassPathResource("iris_tab_delim.txt").getFile();
    r1.initialize(new FileSplit(f));
    r2.initialize(new FileSplit(f));

    int count = 0;
    while (r1.hasNext()) {
        List<Writable> n1 = r1.next();
        List<Writable> n2 = r2.next();
        assertEquals(n1, n2);
        count++;
    }

    assertEquals(150 - skipLines, count);
}
Developer: deeplearning4j, Project: DataVec, Lines: 27, Source: TestSerialization.java
Example 17: testCsvRecordReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testCsvRecordReader() throws Exception {
    SerializerInstance si = sc.env().serializer().newInstance();
    assertTrue(si instanceof KryoSerializerInstance);

    RecordReader r1 = new CSVRecordReader(1, '\t');
    RecordReader r2 = serDe(r1, si);

    File f = new ClassPathResource("iris_tab_delim.txt").getFile();
    r1.initialize(new FileSplit(f));
    r2.initialize(new FileSplit(f));

    while (r1.hasNext()) {
        assertEquals(r1.next(), r2.next());
    }
    assertFalse(r2.hasNext());
}
Developer: deeplearning4j, Project: DataVec, Lines: 18, Source: TestKryoSerialization.java
Example 18: getRecordReader
import org.datavec.api.split.FileSplit; // import the required package/class
@Override
public RecordReader getRecordReader(long rngSeed, int[] imgDim, DataSetType set, ImageTransform imageTransform) {
    try {
        Random rng = new Random(rngSeed);
        File datasetPath = getDataSetPath(set);

        FileSplit data = new FileSplit(datasetPath, BaseImageLoader.ALLOWED_FORMATS, rng);
        ObjectDetectionRecordReader recordReader = new ObjectDetectionRecordReader(imgDim[1], imgDim[0], imgDim[2],
                        imgDim[4], imgDim[3], null);

        recordReader.initialize(data);
        return recordReader;
    } catch (IOException e) {
        throw new RuntimeException("Could not download SVHN", e);
    }
}
Developer: deeplearning4j, Project: deeplearning4j, Lines: 17, Source: SvhnDataFetcher.java
Example 19: testLRN
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testLRN() throws Exception {
    List<String> labels = new ArrayList<>(Arrays.asList("Zico", "Ziwang_Xu"));
    String rootDir = new ClassPathResource("lfwtest").getFile().getAbsolutePath();

    RecordReader reader = new ImageRecordReader(28, 28, 3);
    reader.initialize(new FileSplit(new File(rootDir)));
    DataSetIterator recordReader = new RecordReaderDataSetIterator(reader, 10, 1, labels.size());
    labels.remove("lfwtest");

    NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLRN();
    builder.setInputType(InputType.convolutional(28, 28, 3));

    MultiLayerConfiguration conf = builder.build();

    ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(3).getLayer();
    assertEquals(6, layer2.getNIn());
}
Developer: deeplearning4j, Project: deeplearning4j, Lines: 19, Source: ConvolutionLayerSetupTest.java
Example 20: testRRDSIwithAsync
import org.datavec.api.split.FileSplit; // import the required package/class
@Test
public void testRRDSIwithAsync() throws Exception {
    RecordReader csv = new CSVRecordReader();
    csv.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));

    int batchSize = 10;
    int labelIdx = 4;
    int numClasses = 3;

    RecordReaderDataSetIterator rrdsi = new RecordReaderDataSetIterator(csv, batchSize, labelIdx, numClasses);
    AsyncDataSetIterator adsi = new AsyncDataSetIterator(rrdsi, 8, true);
    while (adsi.hasNext()) {
        DataSet ds = adsi.next();
    }
}
Developer: deeplearning4j, Project: deeplearning4j, Lines: 18, Source: RecordReaderDataSetiteratorTest.java
Note: The org.datavec.api.split.FileSplit examples in this article were collected from source-code and documentation hosting platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Consult the corresponding project's license before redistributing or using the code, and do not republish without permission.