如何提高由ComponentColorModel在Java语言中创建的BufferedImage的绘制性能?
我正在开发一个Java应用程序,它从dvi帧捕获器捕获视频。 我想渲染帧,这是我从这个帧抓取器中实时获得的。但我面临着生产力的问题——渲染速度非常慢,我的计算机每秒只能渲染5-6帧。尽管该设备能够每秒捕获40帧。我分析了我的代码,发现drawImage方法相对较慢。若我用BMP图像调用drawImage方法,我通过ImageIO.readImage加载,然后用drawImage绘制它需要20毫秒。有了这些图像,我从帧抓取器中获得,绘制它们需要100多毫秒。我研究了frame grabber库的代码,发现BuffereImage是通过以下方式创建的-如何提高由ComponentColorModel在Java语言中创建的BuffereImage的绘制性能?,java,performance,bufferedimage,drawimage,Java,Performance,Bufferedimage,Drawimage,我正在开发一个Java应用程序,它从dvi帧捕获器捕获视频。 我想渲染帧,这是我从这个帧抓取器中实时获得的。但我面临着生产力的问题——渲染速度非常慢,我的计算机每秒只能渲染5-6帧。尽管该设备能够每秒捕获40帧。我分析了我的代码,发现drawImage方法相对较慢。若我用BMP图像调用drawImage方法,我通过ImageIO.readImage加载,然后用drawImage绘制它需要20毫秒。有了这些图像,我从帧抓取器中获得,绘制它们需要100多毫秒。我研究了frame grabber库的代
// Build a ColorModel/Raster pair directly around the raw pixel bytes that the
// frame grabber delivers.  Only 24-bit interleaved RGB input is supported.
ColorModel cm;
if (format == PixelFormat.RGB24) {
// Three 8-bit components, no alpha, opaque, byte samples, sRGB color space.
cm = new ComponentColorModel(
ColorSpace.getInstance(ColorSpace.CS_sRGB), new int[] {8,8,8},
false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
} else {
throw new UnsupportedOperationException();
}
// Wrap the grabber's byte array (no copy) in a raster compatible with the
// color model, then expose it as a BufferedImage.
// NOTE(review): an image backed by ComponentColorModel + interleaved byte
// samples is not one of the standard TYPE_INT_* layouts; presumably that is
// why drawImage falls off the fast path here — confirm with a profiler.
SampleModel sm = cm.createCompatibleSampleModel(width, height);
DataBuffer db = new DataBufferByte(pixels, length);
WritableRaster raster = Raster.createWritableRaster(sm, db, null);
return new BufferedImage(cm, raster, false, null);
像素是由帧捕获器提供的字节数组。
是否有任何方法可以创建不同的BufferedImage,以加快对drawImage方法的调用。我知道颜色空间,我不需要保留正确的颜色空间。速度更重要
提前感谢我对jai针对ImageIO API的tiff实现有一个问题,它听起来很相似(我不知道这是否有用) 基本上,它会将颜色模型转换为“单像素压缩样本模型”:p 这不是我的代码,我不相信它,我在网上找到了它,我恐怕不记得在哪里(我尝试过搜索它,但没有找到合适的参考)
/*******************************************************************************
 *
 * It seems that SinglePixelPackedSampleModel is the only fast mode when a
 * color profile is converted. This is probably a bug (that has nothing to do
 * with bugs 4886071 and 4705399).
 * Note that grayscale images (TYPE_GRAY) are not converted.
 *
 ******************************************************************************/
/**
 * Converts a pixel-interleaved image into an equivalent image backed by a
 * {@code SinglePixelPackedSampleModel} ({@code TYPE_INT}, 0x00RRGGBB), which
 * renders much faster via {@code drawImage}.
 *
 * <p>Images whose color space is {@code TYPE_GRAY}, or whose sample model is
 * not {@code PixelInterleavedSampleModel}, are returned unchanged.
 *
 * @param image the source image; must use an {@code ICC_ColorSpace}
 * @return the converted image, or {@code image} itself when no conversion applies
 * @throws IllegalArgumentException if the source data buffer is neither
 *         byte- nor ushort-backed
 */
public static BufferedImage convertToSinglePixelPackedSampleModel(BufferedImage image) {
    long time = System.currentTimeMillis();
    WritableRaster sourceRaster = image.getRaster();
    ColorModel colorModel = image.getColorModel();
    ICC_ColorSpace colorSpace = (ICC_ColorSpace) colorModel.getColorSpace();
    final SampleModel ssmd = sourceRaster.getSampleModel();
    if (colorSpace.getType() == ColorSpace.TYPE_GRAY) {
        logger.info(">>>> TYPE_GRAY, not converting");
    } else if (!(ssmd instanceof PixelInterleavedSampleModel)) {
        logger.info(">>>> sourceSampleModel is " + ssmd.getClass() + ", not converting");
    } else {
        PixelInterleavedSampleModel sourceSampleModel = (PixelInterleavedSampleModel) ssmd;
        // Packed layout: 8 bits each for R, G, B in the low 24 bits of an int.
        int[] bitMasks = new int[]{0x00ff0000, 0x0000ff00, 0x000000ff};
        SinglePixelPackedSampleModel sampleModel = new SinglePixelPackedSampleModel(DataBuffer.TYPE_INT, image.getWidth(),
                image.getHeight(), bitMasks);
        WritableRaster destRaster = Raster.createWritableRaster(sampleModel, null);
        DataBufferInt destDataBuffer = (DataBufferInt) destRaster.getDataBuffer();
        int[] destBuffer = destDataBuffer.getData();
        // Fold the raster's translation into the band offsets so the copy loops
        // can index the backing array directly from position 0.
        int[] bandOffsets = sourceSampleModel.getBandOffsets();
        for (int i = 0; i < bandOffsets.length; i++) {
            bandOffsets[i] += ((-sourceRaster.getSampleModelTranslateX() * sourceSampleModel.getPixelStride())
                    - (sourceRaster.getSampleModelTranslateY() * sourceSampleModel.getScanlineStride()));
        }
        DataBuffer sourceDataBuffer = sourceRaster.getDataBuffer();
        if (sourceDataBuffer instanceof DataBufferUShort) {
            convertUShortDataBuffer(image, (DataBufferUShort) sourceDataBuffer, sourceSampleModel, bandOffsets, destBuffer);
        } else if (sourceDataBuffer instanceof DataBufferByte) {
            convertByteDataBuffer(image, (DataBufferByte) sourceDataBuffer, sourceSampleModel, bandOffsets, destBuffer);
        } else {
            throw new IllegalArgumentException("Cannot deal with " + sourceDataBuffer.getClass());
        }
        String sourceProfileName = getICCProfileName(colorSpace.getProfile());
        // Constant-first equals() also guards against a null profile name.
        if ("Nikon sRGB 4.0.0.3001".equals(sourceProfileName)) {
            logger.warn(">>>> Workaround #1094403: using sRGB instead of " + sourceProfileName);
            // FIX: the log message promises sRGB, but the code previously
            // installed CS_LINEAR_RGB, which washes out the rendered colors.
            colorSpace = new ICC_ColorSpace(ICC_Profile.getInstance(ColorSpace.CS_sRGB));
        }
        colorModel = new DirectColorModel(colorSpace, 24, bitMasks[0], bitMasks[1], bitMasks[2], 0, false, DataBuffer.TYPE_INT);
        image = new BufferedImage(colorModel, destRaster, false, null);
    }
    time = System.currentTimeMillis() - time;
    logger.info(">>>> convertToSinglePixelPackedSampleModel() completed ok in " + time + " msec");
    return image;
}
/**
 * Copies a byte-interleaved source raster into an int-packed 0x00RRGGBB
 * destination buffer, one row at a time.
 *
 * @param image             supplies the width/height of the region to copy
 * @param sourceDataBuffer  byte-backed source samples
 * @param sourceSampleModel describes pixel and scanline strides of the source
 * @param bandOffsets       byte offsets of the R, G, B bands within a pixel
 * @param destBuffer        receives width*height packed RGB ints
 */
protected static void convertByteDataBuffer(BufferedImage image, DataBufferByte sourceDataBuffer,
        PixelInterleavedSampleModel sourceSampleModel, int[] bandOffsets, int[] destBuffer) {
    final byte[] src = sourceDataBuffer.getData();
    final int pixelStride = sourceSampleModel.getPixelStride();
    final int scanlineStride = sourceSampleModel.getScanlineStride();
    final int width = image.getWidth();
    final int height = image.getHeight();
    final int rOff = bandOffsets[0];
    final int gOff = bandOffsets[1];
    final int bOff = bandOffsets[2];
    int out = 0;
    for (int row = 0; row < height; row++) {
        int in = row * scanlineStride;
        for (int col = 0; col < width; col++, in += pixelStride) {
            // Widen each unsigned byte sample and pack as 0x00RRGGBB.
            destBuffer[out++] = ((src[in + rOff] & 0xff) << 16)
                    | ((src[in + gOff] & 0xff) << 8)
                    | (src[in + bOff] & 0xff);
        }
    }
}
/**
 * Copies an unsigned-short-interleaved source raster into an int-packed
 * 0x00RRGGBB destination buffer, truncating each 16-bit sample to its high
 * 8 bits.
 *
 * @param image             supplies the width/height of the region to copy
 * @param sourceDataBuffer  ushort-backed source samples
 * @param sourceSampleModel describes pixel and scanline strides of the source
 * @param bandOffsets       sample offsets of the R, G, B bands within a pixel
 * @param destBuffer        receives width*height packed RGB ints
 */
protected static void convertUShortDataBuffer(BufferedImage image, DataBufferUShort sourceDataBuffer,
        PixelInterleavedSampleModel sourceSampleModel, int[] bandOffsets, int[] destBuffer) {
    int base = 0;
    int i = 0;
    short[] sourceBuffer = sourceDataBuffer.getData();
    // FIX: advance by the sample model's pixel stride instead of a hard-coded 3,
    // matching convertByteDataBuffer and handling padded pixel layouts.
    int pixelStride = sourceSampleModel.getPixelStride();
    for (int y = 0; y < image.getHeight(); y++) {
        int j = base;
        for (int x = 0; x < image.getWidth(); x++) {
            // Keep only the most significant byte of each 16-bit sample.
            int r = (sourceBuffer[j + bandOffsets[0]] & 0xffff) >> 8;
            int g = (sourceBuffer[j + bandOffsets[1]] & 0xffff) >> 8;
            int b = (sourceBuffer[j + bandOffsets[2]] & 0xffff) >> 8;
            destBuffer[i++] = (r << 16) | (g << 8) | b;
            j += pixelStride;
        }
        base += sourceSampleModel.getScanlineStride();
    }
}
// public static ICC_Profile getICCProfile(RenderedImage image) {
//
// ColorSpace colorSpace = image.getColorModel().getColorSpace();
//
// if (colorSpace instanceof ICC_ColorSpace) {
//
// ICC_ColorSpace iccColorSpace = (ICC_ColorSpace) colorSpace;
//
// return iccColorSpace.getProfile();
//
// }
//
// return null;
//
// }
/**
 * Extracts the human-readable profile name from an ICC profile's
 * description tag.
 *
 * <p>For a classic {@code 'desc'} tag the ASCII name starts at byte offset
 * 12 and is NUL-terminated; this method reproduces the original scan that
 * begins counting at the second character.
 *
 * @param profile the profile to inspect, may be {@code null}
 * @return the profile name, or {@code null} when the profile or its
 *         description tag is absent
 */
public static String getICCProfileName(ICC_Profile profile) {
    if (profile == null) {
        return null;
    }
    byte[] desc = profile.getData(ICC_Profile.icSigProfileDescriptionTag);
    if (desc == null) {
        // Profile has no description tag (getData returns null in that case).
        return null;
    }
    int offset = 12;
    int count;
    // Scan up to the NUL terminator; bounded so a malformed tag cannot
    // cause an ArrayIndexOutOfBoundsException.
    for (count = 1; offset + count < desc.length && desc[offset + count] != 0; count++) {
        ;
    }
    // FIX: replaces the deprecated String(byte[], int hibyte, int, int)
    // constructor; with hibyte == 0 it is exactly ISO-8859-1 decoding.
    return new String(desc, offset, count, java.nio.charset.StandardCharsets.ISO_8859_1);
}
/*******************************************************************************
*
*单像素PackedSampleModel似乎是
*颜色配置文件已转换。这可能是一个bug(与此无关)
*带有bug 4886071和4705399)。
*请注意,灰度图像(灰度类型)不会转换。
*
******************************************************************************/
公共静态BuffereImage转换为SinglePixelPackedSampleModel(BuffereImage映像){
长时间=System.currentTimeMillis();
WritableRaster sourceRaster=image.getRaster();
ColorModel ColorModel=image.getColorModel();
ICC_ColorSpace ColorSpace=(ICC_ColorSpace)colorModel.getColorSpace();
final SampleModel ssmd=sourceRaster.getSampleModel();
if(colorSpace.getType()==colorSpace.TYPE\u GRAY){
logger.info(“>>>>类型_灰色,不转换”);
}else if(!(像素交错采样模型的ssmd实例)){
info(“>>>>sourceSampleModel为“+ssmd.getClass()+”,未转换”);
}否则{
PixelInterleavedSampleModel sourceSampleModel=(PixelInterleavedSampleModel)ssmd;
int[]位掩码=新的int[]{0x00ff0000,0x0000ff00,0x000000ff};
SinglePixelPackedSampleModel=新的SinglePixelPackedSampleModel(DataBuffer.TYPE_INT,image.getWidth(),
image.getHeight(),位掩码);
WritableRaster destRaster=Raster.createWritableRaster(sampleModel,null);
DataBufferInt destDataBuffer=(DataBufferInt)destRaster.getDataBuffer();
int[]destBuffer=destDataBuffer.getData();
int[]bandOffsets=sourceSampleModel.getBandOffsets();
对于(int i=0;i>>>解决方案#1094403:使用sRGB而不是“+sourceProfileName”);
colorSpace=newicc_colorSpace(ICC_Profile.getInstance(colorSpace.CS_LINEAR_RGB));
}
colorModel=新的DirectColorModel(颜色空间,24,位掩码[0],位掩码[1],位掩码[2],0,false,数据缓冲区.TYPE_INT);
image=newbufferedimage(colorModel、desmaster、false、null);
}
时间=System.currentTimeMillis()-时间;
logger.info(“>>>>>convertToSinglePixelPackedSampleModel()在“+时间+”毫秒”内完成正常;
返回图像;
}
/**
*@param图像
*@param sourceDataBuffer
*@param sourceSampleModel
*@param带偏移量
*@param destBuffer
*/
受保护的静态void convertByteDataBuffer(BuffereImage映像、DataBufferByte sourceDataBuffer、,
像素交错采样模型sourceSampleModel,int[]带偏移量,int[]数据缓冲区){
int base=0;
int i=0;
字节[]sourceBuffer=sourceDataBuffer.getData();
int pixelStride=sourceSampleModel.getPixelStride();
对于(int y=0;y>8;
intb=(sourceBuffer[j+带偏移量[2]]&0xffff)>>8;
destBuffer[i++]=(r我对jai针对ImageIO API的tiff实现有一个问题,它听起来很相似(我不知道这是否有用)
基本上,它是w