Java source code examples: org.jcodec.scale.ColorUtil
Example 1
public SequenceEncoderMp4(File out) throws IOException {
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, timeScale);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
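For context, the fields initialized above (transform, encoder, _out, outTrack, spsList, ppsList) are typically consumed once per frame, as in the classic jcodec 0.1.x SequenceEncoder sample. The sketch below is not part of the original example; it assumes that API plus an additional frameNo counter field, and the exact H264Utils/MP4Packet signatures vary between jcodec versions.

// Minimal per-frame sketch, assuming the jcodec 0.1.x API (not part of the original class)
public void encodeNativeFrame(Picture pic) throws IOException {
    // Convert the RGB picture into the encoder's native color space using 'transform'
    Picture toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    transform.transform(pic, toEncode);

    // Encode the picture into an H.264 frame, reusing the preallocated '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);

    // Move SPS/PPS out of the stream and rewrite the NAL units into the MP4 layout
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);

    // Add the packet to the video track ('frameNo' is an assumed counter field)
    outTrack.addFrame(new MP4Packet(result, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}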
Example 2
public static int[] toColorArray(Picture src) {
    // Convert the picture to RGB first if it is in another color space
    if (src.getColor() != ColorSpace.RGB) {
        Transform transform = ColorUtil.getTransform(src.getColor(), ColorSpace.RGB);
        Picture rgb = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB, src.getCrop());
        transform.transform(src, rgb);
        src = rgb;
    }
    // Pack the interleaved RGB plane (3 samples per pixel) into one int per pixel
    int[] _return = new int[src.getCroppedWidth() * src.getCroppedHeight()];
    int[] data = src.getPlaneData(0);
    for (int i = 0; i < _return.length; ++i) {
        _return[i] = ReadableRGBContainer.toIntColor(data[3 * i + 2], data[3 * i + 1], data[3 * i]);
    }
    return _return;
}
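toColorArray returns one packed int per pixel of the cropped area. A hedged usage sketch follows; the 'decoded' Picture and the assumption that toIntColor packs the channels in the 0xRRGGBB layout expected by TYPE_INT_RGB are illustrative, not from the original source.

// Hypothetical usage: wrap the packed pixels in a java.awt.image.BufferedImage
int[] pixels = toColorArray(decoded);
BufferedImage img = new BufferedImage(decoded.getCroppedWidth(), decoded.getCroppedHeight(),
        BufferedImage.TYPE_INT_RGB);
img.setRGB(0, 0, img.getWidth(), img.getHeight(), pixels, 0, img.getWidth());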
Example 3
public ImageToH264MP4Encoder(SeekableByteChannel ch, AudioFormat af) throws IOException {
    this.ch = ch;
    this.af = af;
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
    // Optionally add a PCM audio track
    if (af != null)
        audioTrack = muxer.addPCMAudioTrack(af);
}
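When encoding finishes, the SPS/PPS lists collected by such an encoder still have to be written into the video track's sample entry before the MP4 header is flushed. A minimal finalization sketch, assuming the jcodec 0.1.x API (not part of the original class):

// Finalize the MP4 once all frames (and audio samples) have been added
public void finish() throws IOException {
    // Push the collected SPS/PPS into the video track's sample entry
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    // Write the MP4 header and close the channel
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
}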