This article collects typical usage examples of the Java class org.jcodec.codecs.h264.H264Utils. If you are wondering what the H264Utils class is for, how to use it, or what real-world H264Utils code looks like, the examples selected below may help.
The H264Utils class belongs to the org.jcodec.codecs.h264 package. Twenty code examples of the class are shown below, sorted by popularity by default.
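Before the individual examples, here is a minimal, self-contained sketch of the encode-and-mux workflow that most of the encoder examples below follow: encode a picture, pull the SPS/PPS NAL units out of the stream with H264Utils.wipePS, convert the Annex B payload to the length-prefixed MP4 layout with H264Utils.encodeMOVPacket, and finally store the parameter sets in the track's sample entry with H264Utils.createMOVSampleEntry. This sketch assumes the JCodec 0.1.x-style API that the examples appear to use (the argument order of encodeFrame and the available createMOVSampleEntry overloads differ between JCodec releases); the class name, output file name, frame size, and buffer size are placeholders.

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.jcodec.codecs.h264.H264Encoder;
import org.jcodec.codecs.h264.H264Utils;
import org.jcodec.common.NIOUtils;
import org.jcodec.common.SeekableByteChannel;
import org.jcodec.common.model.ColorSpace;
import org.jcodec.common.model.Picture;
import org.jcodec.containers.mp4.Brand;
import org.jcodec.containers.mp4.MP4Packet;
import org.jcodec.containers.mp4.TrackType;
import org.jcodec.containers.mp4.muxer.FramesMP4MuxerTrack;
import org.jcodec.containers.mp4.muxer.MP4Muxer;

public class H264UtilsWorkflowSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder output file and track timescale
        SeekableByteChannel ch = NIOUtils.writableFileChannel(new File("out.mp4"));
        MP4Muxer muxer = new MP4Muxer(ch, Brand.MP4);
        FramesMP4MuxerTrack outTrack = muxer.addTrack(TrackType.VIDEO, 25);

        H264Encoder encoder = new H264Encoder();
        ByteBuffer _out = ByteBuffer.allocate(640 * 480 * 6);
        List<ByteBuffer> spsList = new ArrayList<ByteBuffer>();
        List<ByteBuffer> ppsList = new ArrayList<ByteBuffer>();

        // In a real encoder this picture would be filled with fresh YUV420 data for every frame
        Picture toEncode = Picture.create(640, 480, ColorSpace.YUV420);

        for (int frameNo = 0; frameNo < 25; frameNo++) {
            _out.clear();
            // Encode one picture into an Annex B H.264 access unit
            ByteBuffer result = encoder.encodeFrame(toEncode, _out);
            spsList.clear();
            ppsList.clear();
            // Remove SPS/PPS NAL units from the stream and collect them separately
            H264Utils.wipePS(result, spsList, ppsList);
            // Replace Annex B start codes with MP4-style length prefixes
            H264Utils.encodeMOVPacket(result);
            outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
        }

        // Store the collected SPS/PPS in the track's avcC sample entry, then finalize the file
        outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
        muxer.writeHeader();
        NIOUtils.closeQuietly(ch);
    }
}

The decoding examples further down go the other way: H264Utils.parseAVCC reads the avcC box out of a video sample entry, and H264Utils.splitMOVPacket turns a length-prefixed MP4 sample back into the NAL units that H264Decoder consumes.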
Example 1: encodeNativeFrame

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    try {
        transform.transform(pic, toEncode);
    } catch (Exception e) {
        return;
    }
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 5, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: hiliving, Project: P2Video-master, Lines: 27, Source: SequenceEncoderMp4.java
Example 2: transcode

import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public List<ByteBuffer> transcode() throws IOException {
    H264Decoder decoder = new H264Decoder();
    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());
    Picture buf = Picture.create(mbW << 4, mbH << 4, ColorSpace.YUV420);
    Frame dec = null;
    for (VirtualPacket virtualPacket : head) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(virtualPacket.getData(), avcC), buf.getData());
    }
    H264Encoder encoder = new H264Encoder(rc);
    ByteBuffer tmp = ByteBuffer.allocate(frameSize);
    List<ByteBuffer> result = new ArrayList<ByteBuffer>();
    for (VirtualPacket pkt : tail) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(pkt.getData(), avcC), buf.getData());
        tmp.clear();
        ByteBuffer res = encoder.encodeFrame(dec, tmp);
        ByteBuffer out = ByteBuffer.allocate(frameSize);
        processFrame(res, out);
        result.add(out);
    }
    return result;
}
Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 27, Source: AVCClipTrack.java
Example 3: encodeNativeFrame

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    transform.transform(pic, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: ynztlxdeai, Project: ImageToVideo, Lines: 24, Source: SequenceEncoderMp4.java
Example 4: encode

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

@Override
public void encode(BufferedImage img) throws IOException {
    JHVRgbToYuv420j8Bit.transform(img, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, fps, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: Helioviewer-Project, Project: JHelioviewer-SWHV, Lines: 17, Source: JCodecExporter.java
Example 5: encodeImage

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeImage(BufferedImage bi) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Perform conversion
    for (int i = 0; i < 3; i++) {
        Arrays.fill(toEncode.getData()[i], 0);
    }
    transform.transform(AWTUtil.fromBufferedImage(bi), toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, toEncode);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: deepakpk009, Project: JScreenRecorder, Lines: 26, Source: SequenceEncoder.java
Example 6: readMBQpDelta

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public int readMBQpDelta(MDecoder decoder, MBType prevMbType) {
    int ctx = 60;
    ctx += prevMbType == null || prevMbType == I_PCM || (prevMbType != I_16x16 && prevCBP == 0)
            || prevMbQpDelta == 0 ? 0 : 1;
    int val = 0;
    if (decoder.decodeBin(ctx) == 1) {
        val++;
        if (decoder.decodeBin(62) == 1) {
            val++;
            while (decoder.decodeBin(63) == 1)
                val++;
        }
    }
    prevMbQpDelta = H264Utils.golomb2Signed(val);
    return prevMbQpDelta;
}

Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 19, Source: CABAC.java
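The call to H264Utils.golomb2Signed in the example above maps the unary-decoded magnitude to the signed mb_qp_delta value using the standard H.264 signed Exp-Golomb mapping (0 → 0, 1 → +1, 2 → -1, 3 → +2, 4 → -2, ...). A tiny stand-alone sketch of that mapping, not necessarily the library's exact code:

static int golomb2Signed(int val) {
    // odd magnitudes become positive, even magnitudes negative: 0, +1, -1, +2, -2, ...
    int sign = ((val & 0x1) << 1) - 1;
    return ((val >> 1) + (val & 0x1)) * sign;
}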
Example 7: calcBufferSize

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

private void calcBufferSize() {
    int w = Integer.MIN_VALUE, h = Integer.MIN_VALUE;
    for (SampleEntry se : ses) {
        if ("avc1".equals(se.getFourcc())) {
            AvcCBox avcC = H264Utils.parseAVCC((VideoSampleEntry) se);
            for (SeqParameterSet sps : H264Utils.readSPS(avcC.getSpsList())) {
                int ww = sps.pic_width_in_mbs_minus1 + 1;
                if (ww > w)
                    w = ww;
                int hh = H264Utils.getPicHeightInMbs(sps);
                if (hh > h)
                    h = hh;
            }
        }
    }
    size = new Size(w << 4, h << 4);
}

Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 19, Source: AVCMP4Adaptor.java
Example 8: encodeNativeFrame

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    transform.transform(pic, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 24, Source: SequenceEncoder.java
Example 9: Transcode2AVCTrack

import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public Transcode2AVCTrack(VirtualTrack src, Size frameDim) {
    checkFourCC(src);
    this.src = src;
    ConstantRateControl rc = new ConstantRateControl(TARGET_RATE);
    H264Encoder encoder = new H264Encoder(rc);
    scaleFactor = selectScaleFactor(frameDim);
    thumbWidth = frameDim.getWidth() >> scaleFactor;
    thumbHeight = (frameDim.getHeight() >> scaleFactor) & ~1;
    mbW = (thumbWidth + 15) >> 4;
    mbH = (thumbHeight + 15) >> 4;
    se = H264Utils.createMOVSampleEntry(encoder.initSPS(new Size(thumbWidth, thumbHeight)), encoder.initPPS());
    PixelAspectExt pasp = Box.findFirst(src.getSampleEntry(), PixelAspectExt.class, "pasp");
    if (pasp != null)
        se.add(pasp);
    frameSize = rc.calcFrameSize(mbW * mbH);
    frameSize += frameSize >> 4;
}
Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 22, Source: Transcode2AVCTrack.java
Example 10: replaySps

import org.jcodec.codecs.h264.H264Utils; // import the required package/class
private boolean replaySps() {
    int inputIndex = dequeueInputBuffer();
    if (inputIndex < 0) {
        return false;
    }
    ByteBuffer inputBuffer = getEmptyInputBuffer(inputIndex);
    if (inputBuffer == null) {
        return false;
    }
    // Write the Annex B header
    inputBuffer.put(new byte[]{0x00, 0x00, 0x00, 0x01, 0x67});
    // Switch the H264 profile back to high
    savedSps.profileIdc = 100;
    // Patch the SPS constraint flags
    doProfileSpecificSpsPatching(savedSps);
    // The H264Utils.writeSPS function safely handles
    // Annex B NALUs (including NALUs with escape sequences)
    ByteBuffer escapedNalu = H264Utils.writeSPS(savedSps, 128);
    inputBuffer.put(escapedNalu);
    // No need for the SPS anymore
    savedSps = null;
    // Queue the new SPS
    return queueInputBuffer(inputIndex,
            0, inputBuffer.position(),
            System.nanoTime() / 1000,
            MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
}
Developer: moonlight-stream, Project: moonlight-android, Lines: 35, Source: MediaCodecDecoderRenderer.java
Example 11: close

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

/**
 * Close the file and release resources.
 * @throws IOException
 */
public void close() throws IOException {
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
    outBuffer.clear();
    spsList.clear();
    ppsList.clear();
    outBuffer = null;
    transform = null;
    muxer = null;
    encoder = null;
    spsList = null;
    ppsList = null;
}

Developer: shadoq, Project: s3gdxcodec, Lines: 21, Source: PixmapEncoder.java
Example 12: finish

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void finish() throws IOException {
    // Push saved SPS/PPS to a special storage in MP4
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    // Write MP4 header and finalize recording
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
}

Developer: hiliving, Project: P2Video-master, Lines: 9, Source: SequenceEncoderMp4.java
Example 13: encodeImage

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeImage(Bitmap bi, int timeEachFrame) throws IOException {
    setTimeEachFrame(timeEachFrame);
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Perform conversion
    for (int i = 0; i < 3; i++)
        Arrays.fill(toEncode.getData()[i], 0);
    transform.transform(fromBufferedImage(bi), toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, toEncode);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    outTrack.addFrame(new MP4Packet(result,
            frameNo,   // frameNo * this.timeEachFrame, e.g. 5 s: the image is shown until second 5, then the next image appears
            timescale, // defaults to 1; frames per duration unit: timescale = 2, duration = 1 => each image is shown for 0.5 s
            duration,  // increased automatically each call: current duration = duration + elapsed duration
            frameNo,
            true,
            null,
            frameNo,
            0));
}

Developer: rafaelaaraujo, Project: Face-detect-framework, Lines: 33, Source: SequenceImagesEncoder.java
Example 14: close

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

@Override
public void close() throws IOException {
    // Push saved SPS/PPS to a special storage in MP4
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    // Write MP4 header and finalize recording
    muxer.writeHeader();
    ch.close();
}

Developer: Helioviewer-Project, Project: JHelioviewer-SWHV, Lines: 9, Source: JCodecExporter.java
Example 15: encodeImage

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void encodeImage(BufferedImage bi) throws IOException {
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, makeFrame(bi));
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

Developer: kamil-karkus, Project: EasySnap, Lines: 16, Source: Encoder.java
Example 16: finish

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public void finish() throws IOException {
    // Push saved SPS/PPS to a special storage in MP4
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));
    // Write MP4 header and finalize recording
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
}

Developer: kamil-karkus, Project: EasySnap, Lines: 9, Source: Encoder.java
Example 17: decodeFrame

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

public Picture decodeFrame(Packet packet, int[][] data) {
    updateState(packet);
    Picture pic = ((H264Decoder) decoder).decodeFrame(H264Utils.splitMOVPacket(packet.getData(), avcCBox), data);
    PixelAspectExt pasp = Box.findFirst(ses[curENo], PixelAspectExt.class, "pasp");
    if (pasp != null) {
        // TODO: transform
    }
    return pic;
}

Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 13, Source: AVCMP4Adaptor.java
Example 18: updateState

import org.jcodec.codecs.h264.H264Utils; // import the required package/class

private void updateState(Packet packet) {
    int eNo = ((MP4Packet) packet).getEntryNo();
    if (eNo != curENo) {
        curENo = eNo;
        avcCBox = H264Utils.parseAVCC((VideoSampleEntry) ses[curENo]);
        decoder = new H264Decoder();
        ((H264Decoder) decoder).addSps(avcCBox.getSpsList());
        ((H264Decoder) decoder).addPps(avcCBox.getPpsList());
    }
}

Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 11, Source: AVCMP4Adaptor.java
Example 19: Mpeg2AVCTrack

import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public Mpeg2AVCTrack(VirtualTrack src) throws IOException {
    checkFourCC(src);
    this.src = src;
    ConstantRateControl rc = new ConstantRateControl(TARGET_RATE);
    H264Encoder encoder = new H264Encoder(rc);
    nextPacket = src.nextPacket();
    Size frameDim = MPEGDecoder.getSize(nextPacket.getData());
    scaleFactor = selectScaleFactor(frameDim);
    thumbWidth = frameDim.getWidth() >> scaleFactor;
    thumbHeight = (frameDim.getHeight() >> scaleFactor) & ~1;
    mbW = (thumbWidth + 15) >> 4;
    mbH = (thumbHeight + 15) >> 4;
    se = H264Utils.createMOVSampleEntry(encoder.initSPS(new Size(thumbWidth, thumbHeight)), encoder.initPPS());
    PixelAspectExt pasp = Box.findFirst(src.getSampleEntry(), PixelAspectExt.class, "pasp");
    if (pasp != null)
        se.add(pasp);
    frameSize = rc.calcFrameSize(mbW * mbH);
    frameSize += frameSize >> 4;
}
Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 25, Source: Mpeg2AVCTrack.java
Example 20: transcodeFrame

import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public ByteBuffer transcodeFrame(ByteBuffer src, ByteBuffer dst) throws IOException {
    if (src == null)
        return null;
    Picture decoded = decoder.decodeFrame(src, pic0.getData());
    if (pic1 == null) {
        pic1 = Picture.create(decoded.getWidth(), decoded.getHeight(), encoder.getSupportedColorSpaces()[0]);
        transform = ColorUtil.getTransform(decoded.getColor(), encoder.getSupportedColorSpaces()[0]);
    }
    transform.transform(decoded, pic1);
    pic1.setCrop(new Rect(0, 0, thumbWidth, thumbHeight));
    int rate = TARGET_RATE;
    do {
        try {
            encoder.encodeFrame(pic1, dst);
            break;
        } catch (BufferOverflowException ex) {
            System.out.println("Abandon frame!!!");
            rate -= 10;
            rc.setRate(rate);
        }
    } while (rate > 10);
    rc.setRate(TARGET_RATE);
    H264Utils.encodeMOVPacket(dst);
    return dst;
}
Developer: PenoaksDev, Project: OpenSpaceDVR, Lines: 28, Source: Transcode2AVCTrack.java
Note: the org.jcodec.codecs.h264.H264Utils examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets come from open-source projects contributed by their authors; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's license. Please do not republish without permission.