想问下用过 JavaCV 的大兄弟,为啥我这边抽取出来的图片里总是有灰色(花屏)的图片?
// Extract images from an RTSP stream with JavaCV
/**
 * Pulls decoded video frames from the RTSP source, converts each one to a
 * JPEG, Base64-encodes it and pushes it onto a per-job Redis list for the
 * downstream AI analysis workers.
 *
 * Runs until the stream ends ({@code grabImage()} returns {@code null}) or
 * an exception escapes; the native grabber is always released in the
 * finally block.
 *
 * @throws Exception if grabbing, JPEG encoding, or the Redis push fails
 */
public void AiVideoAnalysisProcessor() throws Exception{
    // RTSP source address
    FFmpegFrameGrabber grabber = FFmpegFrameGrabber.createDefault(url);
    // Use TCP transport — UDP drops packets badly on this stream
    grabber.setOption("rtsp_transport", "tcp");
    // Start pulling the stream
    grabber.start();
    try {
        // Kept across iterations so createBufferedImage() can actually reuse
        // the backing raster instead of reallocating one per frame.
        BufferedImage image = null;
        while (true) {
            // Grab the next decoded video frame (audio packets are skipped).
            Frame frame = grabber.grabImage();
            if (frame == null) {
                // End of stream: grabImage() returns null here, so testing
                // frame.image first would throw a NullPointerException.
                break;
            }
            if (frame.image == null) {
                continue;
            }
            image = createBufferedImage(frame, image);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            // JPEG-encode, then Base64 so the payload survives Redis transport.
            ImageIO.write(image, "jpg", stream);
            String base64 = Base64.encode(stream.toByteArray());
            // Push onto the per-job queue consumed by the analysis workers.
            redisService.lPush(RedisUtils.PJOB_LIST_CACHE_NAME + context.getJobId(), base64);//TODO also carry the AI analysis result returned for this frame, for encoding
        }
    } finally {
        // Always release native FFmpeg resources, on both normal exit and error.
        grabber.close();
    }
}
/**
 * Converts a JavaCV {@link Frame} into a {@link BufferedImage}.
 *
 * Assumes the frame carries 8-bit BGR pixel data in a byte
 * {@link ByteBuffer} (the usual {@code grabImage()} output) — hence the
 * {2, 1, 0} band offsets below.
 *
 * When an image from a previous call is passed in AND its geometry still
 * matches the incoming frame, its backing byte array is reused to avoid a
 * per-frame allocation. The original code reused the cached image blindly,
 * which corrupts the pixels (or throws) when the frame size changes.
 *
 * @param frame source frame; {@code frame.image[0]} must be a ByteBuffer
 * @param image cached image from a previous call, or {@code null}
 * @return a BufferedImage containing the frame's pixel data
 */
private static BufferedImage createBufferedImage(Frame frame, BufferedImage image) {
    ByteBuffer buffer = (ByteBuffer) frame.image[0].position(0);
    // Only reuse the cached image when its geometry still matches the frame;
    // otherwise rebuild the raster from the frame's own width/height/stride.
    if (image == null
            || image.getWidth() != frame.imageWidth
            || image.getHeight() != frame.imageHeight) {
        ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
        ColorModel cm = new ComponentColorModel(cs, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
        // This assumes BGR byte order.
        DataBuffer dataBuffer = new DataBufferByte(buffer.limit());
        WritableRaster wr = Raster.createWritableRaster(
                new ComponentSampleModel(DataBuffer.TYPE_BYTE, frame.imageWidth, frame.imageHeight,
                        frame.imageChannels, frame.imageStride, new int[] {2, 1, 0}),
                dataBuffer, null);
        image = new BufferedImage(cm, wr, false, null);
    }
    byte[] bufferPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    // Copy no more than both sides can hold — a short frame buffer would
    // otherwise throw BufferUnderflowException mid-stream.
    buffer.get(bufferPixels, 0, Math.min(bufferPixels.length, buffer.remaining()));
    return image;
}
// Plays the analyzed images back out as an RTSP stream
/**
 * Continuously pops Base64-encoded JPEG frames from a Redis list, draws the
 * AI-reported bounding boxes onto each frame, and records the result to an
 * RTSP endpoint via FFmpegFrameRecorder.
 *
 * NOTE(review): {@code resultInfo} is read below (null check and
 * {@code getInfo()}) but is never assigned in this method — presumably it
 * should be deserialized from the popped Redis payload; as written this
 * method does not compile. Confirm against the producer side
 * (AiVideoAnalysisProcessor pushes only the Base64 image today).
 *
 * @throws Exception if recording, decoding, or drawing fails
 */
public void AiVideoBrowseRtspProcessor() throws Exception{
// RTSP push target built from this node's address/name
String pushPath = "rtsp://" + nodeConfig.getAddress() + "/" + nodeConfig.getName();
// Fixed output resolution — assumes the incoming images are 1920x1080; TODO confirm
int width = 1920, height = 1080;
// FFmpeg log level (left disabled)
//avutil.av_log_set_level(avutil.AV_LOG_ERROR);
//FFmpegLogCallback.set();
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(pushPath, width, height, 0);// output address, resolution (width, height), audio channels (0: none / 1: record audio)
recorder.setVideoCodec(AV_CODEC_ID_H264);
recorder.setFormat("rtsp"); // "flv" for rtmp targets, "rtsp" for rtsp targets
//recorder.setPixelFormat(0);
//recorder.setVideoOption("tune", "zerolatency");// reduce encoding latency
//recorder.setVideoOption("preset", "ultrafast");// speed up encoding
recorder.setVideoOption("crf", "28");// video quality parameter (see https://trac.ffmpeg.org/wiki/Encode/H.264)
recorder.start();
ByteArrayInputStream bais = null;
//Frame frame = null;
IplImage img = null;
Frame frame = null;
OpenCVFrameConverter converter = new OpenCVFrameConverter.ToIplImage();
while (true) {
try {
Long start = System.currentTimeMillis();
// Poll the per-job queue for the next analyzed image (Base64 JPEG)
String image = (String) redisService.lPop
(RedisUtils.PJOB_LIST_CACHE_NAME + saveAiVideRtspSaveJobRequest.getTaskPJobId());
// NOTE(review): the popped value was assigned to 'image' above, so this
// null check almost certainly should test 'image == null' — verify.
if (resultInfo == null) {//TODO confirm an empty lPop is detectable via a null return like this
//Thread.sleep(1000);
continue;
}
byte[] bytes = Base64.decode(image);
bais = new ByteArrayInputStream(bytes);
BufferedImage bufferedImage = ImageIO.read(bais);
frame = createFrame(bufferedImage,frame);
if(frame == null){
continue;
}
Map map = resultInfo.getInfo();
// Detection boxes: 2-D array of coordinates
JSONArray boxxes = (JSONArray) map.get("bboxes"); // [[503, 246, 110, 37]]
//json to object
//'image' is the picture payload popped from the redis list (by key)
// Draw the boxes
img = converter.convertToIplImage(frame);
for (int i = 0; i < boxxes.size(); i++) {
JSONArray array = (JSONArray) boxxes.get(i);
// NOTE(review): the sample bbox above looks like [x, y, w, h], but
// cvRectangle takes two CORNER points — the second point should then be
// (x + w, y + h) rather than the raw (w, h) used here. Confirm the
// bbox format with the AI side before changing.
cvRectangle(img, cvPoint(Integer.parseInt(array.get(0).toString()),
Integer.parseInt(array.get(1).toString())),
cvPoint(Integer.parseInt(array.get(2).toString()),
Integer.parseInt(array.get(3).toString())),
CV_RGB(255, 0, 0), 5, 4, 0); //TODO drawing parameters unclear — check with Wei Wanghao
}
//recorder.record(Java2DFrameUtils.toFrame(img)); // push a single picture
recorder.record(converter.convert(img));
Long end = System.currentTimeMillis();
} catch (Exception e) {
// NOTE(review): resources are only released on the exception path; on a
// normal shutdown the recorder would leak. Consider try-with-resources.
if(recorder != null) {
recorder.close();
}
if (bais != null) {
bais.close();
}
if(img != null){
img.close();
}
throw e;
}
}
}
/**
 * Copies the pixel bytes of a {@link BufferedImage} into a JavaCV
 * {@link Frame}, reusing the previous Frame when its geometry still matches.
 *
 * NOTE(review): assumes the image raster is byte-backed (e.g.
 * TYPE_3BYTE_BGR, which ImageIO typically produces for JPEG); an int-backed
 * image would throw ClassCastException on the cast below — confirm the
 * decoder output if other formats appear.
 *
 * @param image source image whose raster is a {@link DataBufferByte}
 * @param frame frame from a previous call to reuse, or {@code null}
 * @return a Frame filled with the image's pixel bytes
 */
private Frame createFrame(BufferedImage image, Frame frame) {
    DataBufferByte imageBuffer = (DataBufferByte) image.getRaster().getDataBuffer();
    byte[] pixels = imageBuffer.getData();
    // Bytes per row, assuming the byte array covers exactly 'height' rows.
    int stride = pixels.length / image.getHeight();
    // Recreate when the size OR stride changed — reusing a frame with a stale
    // stride would make the put() below overflow or misalign the rows.
    if (frame == null
            || frame.imageWidth != image.getWidth()
            || frame.imageHeight != image.getHeight()
            || frame.imageStride != stride) {
        frame = new Frame(image.getWidth(), image.getHeight(), 8, 3, stride);
    }
    ByteBuffer frameBuffer = (ByteBuffer) frame.image[0].position(0);
    frameBuffer.put(pixels);
    // Rewind so downstream consumers read from the start of the pixel data.
    frameBuffer.position(0);
    return frame;
}