本文整理汇总了Java中org.apache.spark.serializer.SerializerInstance类的典型用法代码示例。如果您正苦于以下问题:Java SerializerInstance类的具体用法?Java SerializerInstance怎么用?Java SerializerInstance使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
SerializerInstance类属于org.apache.spark.serializer包,在下文中一共展示了SerializerInstance类的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Java代码示例。
示例1: testCsvRecordReader
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testCsvRecordReader() throws Exception {
    // This test is only meaningful when Kryo is the active Spark serializer.
    SerializerInstance serializer = sc.env().serializer().newInstance();
    assertTrue(serializer instanceof KryoSerializerInstance);

    RecordReader original = new CSVRecordReader(1, '\t');
    RecordReader roundTripped = serDe(original, serializer);

    File dataFile = new ClassPathResource("iris_tab_delim.txt").getFile();
    original.initialize(new FileSplit(dataFile));
    roundTripped.initialize(new FileSplit(dataFile));

    // A serialized-then-deserialized reader must yield an identical record stream.
    while (original.hasNext()) {
        assertEquals(original.next(), roundTripped.next());
    }
    assertFalse(roundTripped.hasNext());
}
开发者ID:deeplearning4j,项目名称:DataVec,代码行数:18,代码来源:TestKryoSerialization.java
示例2: testJavaTypes
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testJavaTypes() {
    // Verify that Kryo round-trips the common JDK collection types and the
    // non-serializable-by-default wrappers (synchronized/unmodifiable/empty/singleton views).
    Map<Object, Object> m = new HashMap<>();
    m.put("key", "value");
    SerializerInstance si = sc.env().serializer().newInstance();

    // Maps
    testSerialization(Collections.singletonMap("key", "value"), si);
    testSerialization(Collections.synchronizedMap(m), si);
    testSerialization(Collections.emptyMap(), si);
    testSerialization(new ConcurrentHashMap<>(m), si);
    testSerialization(Collections.unmodifiableMap(m), si);

    // Lists
    testSerialization(Arrays.asList("s"), si);
    testSerialization(Collections.synchronizedList(Arrays.asList("s")), si);
    testSerialization(Collections.emptyList(), si);
    testSerialization(new CopyOnWriteArrayList<>(Arrays.asList("s")), si);
    testSerialization(Collections.unmodifiableList(Arrays.asList("s")), si);

    // Sets (note: the original tested Collections.singleton twice; duplicate removed)
    testSerialization(Collections.singleton("s"), si);
    testSerialization(Collections.synchronizedSet(new HashSet<>(Arrays.asList("s"))), si);
    testSerialization(Collections.emptySet(), si);
    testSerialization(Collections.unmodifiableSet(new HashSet<>(Arrays.asList("s"))), si);
}
开发者ID:deeplearning4j,项目名称:deeplearning4j,代码行数:27,代码来源:TestKryo.java
示例3: testSerializationPrimitives
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testSerializationPrimitives() {
    // Verify that Kryo round-trips the small utility/value types used across the codebase:
    // atomics, counters, and the pair/triple/quad tuple variants.
    Counter<Integer> c = new Counter<>();
    c.incrementCount(5, 3.0);
    CounterMap<Integer, Double> cm = new CounterMap<>();
    cm.setCount(7, 3.0, 4.5);

    Object[] objs = new Object[] {
            new AtomicBoolean(true),
            new AtomicBoolean(false),
            new AtomicDouble(5.0),
            c,
            cm,
            new ImmutablePair<>(5, 3.0),
            new ImmutableQuad<>(1, 2.0, 3.0f, 4L),
            new ImmutableTriple<>(1, 2.0, 3.0f),
            new Pair<>(5, 3.0),
            new Quad<>(1, 2.0, 3.0f, 4L),
            new Triple<>(1, 2.0, 3.0f)};

    SerializerInstance si = sc.env().serializer().newInstance();
    for (Object o : objs) {
        // Removed debug println and commented-out dead code; a failure message from
        // testSerialization already identifies the offending class.
        testSerialization(o, si);
    }
}
开发者ID:deeplearning4j,项目名称:nd4j,代码行数:32,代码来源:TestNd4jKryoSerialization.java
示例4: testSerialization
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Round-trips {@code in} through the given serializer and asserts that the
 * deserialized copy equals the original (per {@link Object#equals}).
 *
 * @param in value to serialize; must implement a meaningful {@code equals}
 * @param si serializer instance under test
 */
@SuppressWarnings("unchecked") // the round trip returns the same runtime type that went in
private <T> void testSerialization(T in, SerializerInstance si) {
    ByteBuffer bb = si.serialize(in, null);
    T deserialized = (T) si.deserialize(bb, null);
    // Include the concrete class in the failure message so the offending type is obvious.
    // (Removed commented-out dead assertEquals; redundant explicit toString() dropped.)
    assertTrue(in.getClass() + "\t" + in, in.equals(deserialized));
}
开发者ID:deeplearning4j,项目名称:nd4j,代码行数:9,代码来源:TestNd4jKryoSerialization.java
示例5: testSerialization
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Serializes {@code in} with {@code si}, deserializes it back, and asserts the
 * restored object equals the original.
 */
private <T> void testSerialization(T in, SerializerInstance si) {
    ByteBuffer serialized = si.serialize(in, null);
    @SuppressWarnings("unchecked")
    T restored = (T) si.deserialize(serialized, null);
    boolean roundTripEquals = in.equals(restored);
    assertTrue(in.getClass() + "\t" + in.toString(), roundTripEquals);
}
开发者ID:deeplearning4j,项目名称:deeplearning4j,代码行数:8,代码来源:TestKryo.java
示例6: testSerializationEvaluation
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testSerializationEvaluation() {
    // Build one "used" instance of each IEvaluation implementation (state populated
    // via eval(...)) so that both empty and populated evaluators are round-tripped.
    Evaluation e = new Evaluation();
    e.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.5, 0.3}));
    EvaluationBinary eb = new EvaluationBinary();
    eb.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.6, 0.3}));
    // ROC with explicit threshold-step count (30) and with the default constructor.
    ROC roc = new ROC(30);
    roc.eval(Nd4j.create(new double[] {1}), Nd4j.create(new double[] {0.2}));
    ROC roc2 = new ROC();
    roc2.eval(Nd4j.create(new double[] {1}), Nd4j.create(new double[] {0.2}));
    ROCMultiClass rocM = new ROCMultiClass(30);
    rocM.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.5, 0.3}));
    ROCMultiClass rocM2 = new ROCMultiClass();
    rocM2.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.5, 0.3}));
    ROCBinary rocB = new ROCBinary(30);
    rocB.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.6, 0.3}));
    ROCBinary rocB2 = new ROCBinary();
    rocB2.eval(Nd4j.create(new double[] {1, 0, 0}), Nd4j.create(new double[] {0.2, 0.6, 0.3}));
    RegressionEvaluation re = new RegressionEvaluation();
    re.eval(Nd4j.rand(1, 5), Nd4j.rand(1, 5));
    // The array interleaves a fresh (empty) evaluator with its populated counterpart
    // for every implementation, so both states are exercised.
    IEvaluation[] evaluations = new IEvaluation[] {new Evaluation(), e, new EvaluationBinary(), eb, new ROC(), roc,
                    roc2, new ROCMultiClass(), rocM, rocM2, new ROCBinary(), rocB, rocB2,
                    new RegressionEvaluation(), re};
    SerializerInstance si = sc.env().serializer().newInstance();
    for (IEvaluation ie : evaluations) {
        //System.out.println(ie.getClass());
        testSerialization(ie, si);
    }
}
开发者ID:deeplearning4j,项目名称:deeplearning4j,代码行数:40,代码来源:TestKryo.java
示例7: testScalaCollections
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testScalaCollections() {
    // Spark + Kryo is expected to handle Scala collections out of the box;
    // these are basic sanity checks that this remains the case.
    SerializerInstance serializer = sc.env().serializer().newInstance();

    scala.collection.immutable.Map<Integer, String> emptyScalaMap =
                    scala.collection.immutable.Map$.MODULE$.empty();
    testSerialization(emptyScalaMap, serializer);

    Map<Integer, Double> javaMap = new HashMap<>();
    javaMap.put(0, 1.0);
    scala.collection.Map<Integer, Double> scalaView = JavaConversions.mapAsScalaMap(javaMap);
    testSerialization(scalaView, serializer);
}
开发者ID:deeplearning4j,项目名称:deeplearning4j,代码行数:16,代码来源:TestKryo.java
示例8: serDe
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Serializes {@code in} with the given serializer and returns the deserialized copy.
 */
@SuppressWarnings("unchecked")
private <T> T serDe(T in, SerializerInstance si) {
    ByteBuffer serialized = si.serialize(in, null);
    return (T) si.deserialize(serialized, null);
}
开发者ID:deeplearning4j,项目名称:DataVec,代码行数:5,代码来源:TestKryoSerialization.java
示例9: newInstance
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Creates a fresh {@link SerializerInstance} backed by this Gryo serializer.
 */
@Override
public SerializerInstance newInstance() {
    final GryoSerializerInstance instance = new GryoSerializerInstance(this);
    return instance;
}
开发者ID:PKUSilvester,项目名称:LiteGraph,代码行数:5,代码来源:GryoSerializer.java
示例10: testSerializationConfigurations
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
@Test
public void testSerializationConfigurations() {
    // Kryo round-trip coverage for DL4J configuration objects: network configs,
    // individual layers, and graph vertices.
    SerializerInstance si = sc.env().serializer().newInstance();
    //Check network configurations:
    // Learning-rate schedule map (iteration -> rate) used by the updaters below.
    Map<Integer, Double> m = new HashMap<>();
    m.put(0, 0.5);
    m.put(10, 0.1);
    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder()
                    .updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
                    .build();
    testSerialization(mlc, si);
    // Graph configuration exercises a different updater (Adam) plus weight-init distribution.
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().weightInit(WeightInit.DISTRIBUTION)
                    .dist(new UniformDistribution(-1, 1))
                    .updater(new Adam(new MapSchedule(ScheduleType.ITERATION,m)))
                    .graphBuilder()
                    .addInputs("in").addLayer("out", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in")
                    .setOutputs("out").build();
    testSerialization(cgc, si);
    //Check main layers:
    // One instance of each commonly-used layer type.
    Layer[] layers = new Layer[] {new OutputLayer.Builder().nIn(10).nOut(10).build(),
                    new RnnOutputLayer.Builder().nIn(10).nOut(10).build(), new LossLayer.Builder().build(),
                    new CenterLossOutputLayer.Builder().nIn(10).nOut(10).build(),
                    new DenseLayer.Builder().nIn(10).nOut(10).build(),
                    new ConvolutionLayer.Builder().nIn(10).nOut(10).build(), new SubsamplingLayer.Builder().build(),
                    new Convolution1DLayer.Builder(2, 2).nIn(10).nOut(10).build(),
                    new ActivationLayer.Builder().activation(Activation.TANH).build(),
                    new GlobalPoolingLayer.Builder().build(), new GravesLSTM.Builder().nIn(10).nOut(10).build(),
                    new LSTM.Builder().nIn(10).nOut(10).build(), new DropoutLayer.Builder(0.5).build(),
                    new BatchNormalization.Builder().build(), new LocalResponseNormalization.Builder().build()};
    for (Layer l : layers) {
        testSerialization(l, si);
    }
    //Check graph vertices
    // One instance of each GraphVertex implementation (with minimal constructor args).
    GraphVertex[] vertices = new GraphVertex[] {new ElementWiseVertex(ElementWiseVertex.Op.Add),
                    new L2NormalizeVertex(), new LayerVertex(null, null), new MergeVertex(), new PoolHelperVertex(),
                    new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 1)),
                    new ReshapeVertex(new int[] {1, 1}), new ScaleVertex(1.0), new ShiftVertex(1.0),
                    new SubsetVertex(1, 1), new UnstackVertex(0, 2), new DuplicateToTimeSeriesVertex("in1"),
                    new LastTimeStepVertex("in1")};
    for (GraphVertex gv : vertices) {
        testSerialization(gv, si);
    }
}
开发者ID:deeplearning4j,项目名称:deeplearning4j,代码行数:55,代码来源:TestKryo.java
示例11: newInstance
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Creates a fresh {@link SerializerInstance} backed by this Splice Kryo serializer.
 */
@Override
public SerializerInstance newInstance() {
    final SpliceKryoSerializerInstance instance = new SpliceKryoSerializerInstance(this);
    return instance;
}
开发者ID:splicemachine,项目名称:spliceengine,代码行数:5,代码来源:SpliceKryoSerializer.java
示例12: roundTripInKryo
import org.apache.spark.serializer.SerializerInstance; //导入依赖的package包/类
/**
 * Serializes {@code input} with Kryo under the given Spark configuration, deserializes
 * it again, and returns the restored value.
 *
 * <p>The class of the input must be passed explicitly because the erased type of a
 * generic parameter cannot be recovered reflectively.
 *
 * @param input non-null instance of {@code inputClazz}
 * @param inputClazz runtime class used to build the Scala {@link ClassTag}
 * @param conf Spark configuration whose Kryo settings are under test
 * @param <T> type of the value being round-tripped (same as or subtype of {@code inputClazz})
 * @return the deserialized copy; throws if the serialization round trip fails
 */
public static <T> T roundTripInKryo(final T input, final Class<?> inputClazz, final SparkConf conf) {
    Utils.nonNull(input);
    final KryoSerializer kryo = new KryoSerializer(conf);
    final SerializerInstance serializer = kryo.newInstance();
    final ClassTag<T> classTag = ClassTag$.MODULE$.apply(inputClazz);
    return serializer.deserialize(serializer.serialize(input, classTag), classTag);
}
开发者ID:broadinstitute,项目名称:gatk,代码行数:19,代码来源:SparkTestUtils.java
注:本文中的org.apache.spark.serializer.SerializerInstance类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论