简要介绍
ImageReader 作为Camera2 API数据接收的承载方,需要搞清楚数据如何从Camera过来的,才能保证正确使用。
PS:本文写完之后发现ImageReader 只是一个单纯的运行在APP 进程中的 API,不涉及Camera的进程,下篇文章介绍Camera数据如何从Camera进程传送到APP进程
1:ImageReader 创建
ImageReader构造函数是 protected,外部只能通过newInstance 来构建,内部重载2个构造函数,主要构建工作如下:
// ImageReader constructor (protected; callers must use ImageReader.newInstance()).
// Validates the arguments, initializes the native peer, and fetches the Surface
// backed by the native BufferQueue's producer end.
protected ImageReader(int width, int height, int format, int maxImages, long usage) {
mWidth = width;
mHeight = height;
mFormat = format;
mMaxImages = maxImages;
// Dimensions must be strictly positive.
if (width < 1 || height < 1) {
throw new IllegalArgumentException(
"The image dimensions must be positive");
}
// At least one image must be acquirable at a time.
if (mMaxImages < 1) {
throw new IllegalArgumentException(
"Maximum outstanding image count must be at least 1");
}
// NV21 is explicitly rejected by ImageReader.
if (format == ImageFormat.NV21) {
throw new IllegalArgumentException(
"NV21 format is not supported");
}
mNumPlanes = ImageUtils.getNumPlanesForFormat(mFormat);
// Pass a WeakReference so the native side does not keep this Java object
// alive; native stores it to post events back via postEventFromNative().
nativeInit(new WeakReference<>(this), width, height, format, maxImages, usage);
// The Surface wraps the IGraphicBufferProducer created during nativeInit.
mSurface = nativeGetSurface();
mIsReaderValid = true;
// Estimate the native buffer allocation size and register it so it gets accounted for
// during GC. Note that this doesn't include the buffers required by the buffer queue
// itself and the buffers requested by the producer.
// Only include memory for 1 buffer, since actually accounting for the memory used is
// complex, and 1 buffer is enough for the VM to treat the ImageReader as being of some
// size.
mEstimatedNativeAllocBytes = ImageUtils.getEstimatedNativeAllocBytes(
width, height, format, /*buffer count*/ 1);
VMRuntime.getRuntime().registerNativeAllocation(mEstimatedNativeAllocBytes);
}
由构造函数可知,主要干了2件事情:
1:传了一个弱引用(WeakReference),在native层去初始化
2:从native 层构造了一个Surface
我们先去看下第一件事情:native 层对ImageReader进行的初始化,代码位置:
system\frameworks\base\media\jni\android_media_ImageReader.cpp
// JNI backend of ImageReader.nativeInit(): builds the BufferQueue pair, wraps
// the consumer end in a BufferItemConsumer, and stores everything in a
// JNIImageReaderContext attached to the Java ImageReader object.
static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz, jint width, jint height,
jint format, jint maxImages, jlong ndkUsage)
{
status_t res;
int nativeFormat;
android_dataspace nativeDataspace;
ALOGV("%s: width:%d, height: %d, format: 0x%x, maxImages:%d",
__FUNCTION__, width, height, format, maxImages);
// Translate the public (Java-side) format into the HAL format/dataspace pair.
PublicFormat publicFormat = static_cast<PublicFormat>(format);
nativeFormat = android_view_Surface_mapPublicFormatToHalFormat(
publicFormat);
nativeDataspace = android_view_Surface_mapPublicFormatToHalDataspace(
publicFormat);
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
jniThrowRuntimeException(env, "Can't find android/graphics/ImageReader");
return;
}
// The context holds the weak Java ref plus maxImages preallocated BufferItems.
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
// Create both ends of the BufferQueue: the producer is later wrapped in the
// Surface handed back to Java; the consumer end receives the frames.
sp<IGraphicBufferProducer> gbProducer;
sp<IGraphicBufferConsumer> gbConsumer;
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
sp<BufferItemConsumer> bufferConsumer;
String8 consumerName = String8::format("ImageReader-%dx%df%xm%d-%d-%d",
width, height, format, maxImages, getpid(),
createProcessUniqueId());
// Default usage: CPU reads the buffers (software access).
uint32_t consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
bool needUsageOverride = ndkUsage != CONSUMER_BUFFER_USAGE_UNKNOWN;
uint64_t outProducerUsage = 0;
uint64_t outConsumerUsage = 0;
android_hardware_HardwareBuffer_convertToGrallocUsageBits(&outProducerUsage, &outConsumerUsage,
ndkUsage, 0);
if (isFormatOpaque(nativeFormat)) {
// Use the SW_READ_NEVER usage to tell producer that this format is not for preview or video
// encoding. The only possibility will be ZSL output.
consumerUsage = GRALLOC_USAGE_SW_READ_NEVER;
if (needUsageOverride) {
consumerUsage = android_convertGralloc1To0Usage(0, outConsumerUsage);
}
} else if (needUsageOverride) {
ALOGW("Consumer usage override for non-opaque format is not implemented yet, "
"ignore the provided usage from the application");
}
// Wrap the raw consumer end; the context is registered as its frame listener
// below so onFrameAvailable() can forward events up to Java.
bufferConsumer = new BufferItemConsumer(gbConsumer, consumerUsage, maxImages,
/*controlledByApp*/true);
if (bufferConsumer == nullptr) {
jniThrowExceptionFmt(env, "java/lang/RuntimeException",
"Failed to allocate native buffer consumer for format 0x%x and usage 0x%x",
nativeFormat, consumerUsage);
return;
}
ctx->setBufferConsumer(bufferConsumer);
bufferConsumer->setName(consumerName);
ctx->setProducer(gbProducer);
bufferConsumer->setFrameAvailableListener(ctx);
// Stash the context on the Java object so later JNI calls can retrieve it.
ImageReader_setNativeContext(env, thiz, ctx);
ctx->setBufferFormat(nativeFormat);
ctx->setBufferDataspace(nativeDataspace);
ctx->setBufferWidth(width);
ctx->setBufferHeight(height);
// Set the width/height/format/dataspace to the bufferConsumer.
res = bufferConsumer->setDefaultBufferSize(width, height);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default size (%dx%d) for format 0x%x",
width, height, nativeFormat);
return;
}
res = bufferConsumer->setDefaultBufferFormat(nativeFormat);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default format 0x%x", nativeFormat);
}
res = bufferConsumer->setDefaultBufferDataSpace(nativeDataspace);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default dataSpace 0x%x", nativeDataspace);
}
}
从上述代码可知,jni层主要做了以下工作:
1:创建生产者 gbProducer 和消费者 gbConsumer
sp<IGraphicBufferProducer> gbProducer;
sp<IGraphicBufferConsumer> gbConsumer;
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
2:基于gbConsumer创建实际干活、负责向高层返回数据的消费者 bufferConsumer
bufferConsumer = new BufferItemConsumer(gbConsumer, consumerUsage, maxImages,/*controlledByApp*/true);
3:创建了JNI 层的上下文JNIImageReaderContext,并对其进行配置
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
ctx->setBufferConsumer(bufferConsumer);
bufferConsumer->setName(consumerName);
ctx->setProducer(gbProducer);
bufferConsumer->setFrameAvailableListener(ctx);
ImageReader_setNativeContext(env, thiz, ctx);
ctx->setBufferFormat(nativeFormat);
ctx->setBufferDataspace(nativeDataspace);
ctx->setBufferWidth(width);
ctx->setBufferHeight(height);
// Set the width/height/format/dataspace to the bufferConsumer.
res = bufferConsumer->setDefaultBufferSize(width, height);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default size (%dx%d) for format 0x%x",
width, height, nativeFormat);
return;
}
res = bufferConsumer->setDefaultBufferFormat(nativeFormat);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default format 0x%x", nativeFormat);
}
res = bufferConsumer->setDefaultBufferDataSpace(nativeDataspace);
在native层init完成之后,继续执行java 的构造,获取surface
// JNI backend of ImageReader.nativeGetSurface(): wraps the BufferQueue's
// producer end (stored in the JNI context) in a Java-language Surface.
static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
{
ALOGV("%s: ", __FUNCTION__);
IGraphicBufferProducer* gbp = ImageReader_getProducer(env, thiz);
if (gbp == NULL) {
jniThrowRuntimeException(env, "Buffer consumer is uninitialized");
return NULL;
}
// Wrap the IGBP in a Java-language Surface.
return android_view_Surface_createFromIGraphicBufferProducer(env, gbp);
}
// Fetches the IGraphicBufferProducer saved in the JNIImageReaderContext during
// ImageReader_init(); returns NULL (and throws) if the context is missing.
static IGraphicBufferProducer* ImageReader_getProducer(JNIEnv* env, jobject thiz)
{
ALOGV("%s:", __FUNCTION__);
JNIImageReaderContext* const ctx = ImageReader_getContext(env, thiz);
if (ctx == NULL) {
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
return ctx->getProducer();
}
// Builds a native Surface around the given producer and converts it into a
// Java android.view.Surface object; NULL producer yields NULL.
jobject android_view_Surface_createFromIGraphicBufferProducer(JNIEnv* env,
const sp<IGraphicBufferProducer>& bufferProducer) {
if (bufferProducer == NULL) {
return NULL;
}
// 'true' marks the producer as controlled by the app process.
sp<Surface> surface(new Surface(bufferProducer, true));
return android_view_Surface_createFromSurface(env, surface);
}
获取native的surface 就简单很多了,就是在native 的JNIImageReaderContext初始化完成之后,从中获取出BufferQueue 构造出来的gbProducer,基于此构造出来Surface,这样ImageReader就和Surface联系起来,相当于把ImageReader的surface和相机采集数据后输送的Surface联系起来,这样高层在配置相机参数时将此surface传给相机的请求当中,相机采集完数据输送到此surface,ImageReader就能收到数据了
简单总结:
Java 层在创建了ImageReader之后,native层实际创建了生产者和消费者,基于消费者创建了给高层使用的消费者bufferConsumer,然后在jni层的上下文进行配置,配置完成之后,在native继续创建和此生产者gbProducer 联系的surface,创建完成,高层java就有了surface的引用,可以将数据配置给相机的输出target 列表,此时相机采集完数据来了就会通过此回调给native,进一步回调java 层数据
2:ImageReader 数据接收
ImageReader 在java APP端创建到接收数据主要包括3步:
1:创建ImageReader,指定接收参数
2:将创建的ImageReader的surface传入Camera的request的target中,使得camera处理数据后将数据回传给surface
3:给创建的ImageReader 设置一个OnImageAvailableListener接口对象,当数据可用时,native 层会通过此接口回调 java APP 进程
上一章节搞定了前2步,本小节主要详细介绍native 层如何将可用数据回传.
Java APP 中设置OnImageAvailableListener接口对象
2.1何时回调通知由可用数据
// Registers (or clears, when listener is null) the callback invoked from
// native code when a new frame is queued. The callback runs on the given
// handler's looper, or the calling thread's looper if handler is null.
public void setOnImageAvailableListener(OnImageAvailableListener listener, Handler handler) {
synchronized (mListenerLock) {
if (listener != null) {
Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
if (looper == null) {
throw new IllegalArgumentException(
"handler is null but the current thread is not a looper");
}
// Reuse the existing handler when it already targets the same looper.
if (mListenerHandler == null || mListenerHandler.getLooper() != looper) {
mListenerHandler = new ListenerHandler(looper);
}
mListener = listener;
} else {
// Null listener unregisters both the callback and its handler.
mListener = null;
mListenerHandler = null;
}
}
}
代码很简单,只做了2件事情,
1:将传进来的listener保存到mListener中
2:创建了一个内部类对象ListenerHandler,用作向APP 传递数据
// Called from native (JNIImageReaderContext::onFrameAvailable) when a frame
// is ready. Resolves the WeakReference passed to nativeInit; if the reader is
// still alive, posts an empty message to the listener handler's looper.
private static void postEventFromNative(Object selfRef) {
@SuppressWarnings("unchecked")
WeakReference<ImageReader> weakSelf = (WeakReference<ImageReader>)selfRef;
final ImageReader ir = weakSelf.get();
if (ir == null) {
// Reader was garbage collected; drop the event.
return;
}
final Handler handler;
synchronized (ir.mListenerLock) {
handler = ir.mListenerHandler;
}
if (handler != null) {
// Payload-free message: the handler only needs the "frame ready" signal.
handler.sendEmptyMessage(0);
}
}
// Handler that delivers the "frame available" signal posted by
// postEventFromNative() to the registered OnImageAvailableListener.
private final class ListenerHandler extends Handler {
public ListenerHandler(Looper looper) {
// Async messages are not subject to the looper's sync barriers.
super(looper, null, true /*async*/);
}
@Override
public void handleMessage(Message msg) {
OnImageAvailableListener listener;
synchronized (mListenerLock) {
listener = mListener;
}
// It's dangerous to fire onImageAvailable() callback when the ImageReader is being
// closed, as application could acquire next image in the onImageAvailable() callback.
boolean isReaderValid = false;
synchronized (mCloseLock) {
isReaderValid = mIsReaderValid;
}
if (listener != null && isReaderValid) {
listener.onImageAvailable(ImageReader.this);
}
}
}
此处逻辑也很简单,native向APP 通知数据好了,通过之前创建的handler 发一个空消息,ListenerHandler 收到之后就告诉APP 拿数据
// Frame-available callback from the BufferQueue consumer side: attaches the
// current thread to the JVM if needed, then invokes the static Java method
// ImageReader.postEventFromNative with the stored weak Java reference.
void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
{
ALOGV("%s: frame available", __FUNCTION__);
bool needsDetach = false;
JNIEnv* env = getJNIEnv(&needsDetach);
if (env != NULL) {
env->CallStaticVoidMethod(mClazz, gImageReaderClassInfo.postEventFromNative, mWeakThiz);
} else {
ALOGW("onFrameAvailable event will not posted");
}
// Detach only if this call had to attach the thread itself.
if (needsDetach) {
detachJNI();
}
}
根据native 代码可知,native收到存在可用的帧时,直接回调java 层 APP 静态方法
postEventFromNative,将数据从native 传递到了 APP,
到此,出现了ImageReader接收数据的2个关键问题之一:native层什么时候通知高层存在了可用帧?native 层是通过 onFrameAvailable jni方法直接调用的java方法,那么native方法是何时收到的消息?
查找过程思路:
首先第一步想到的就是 谁回调的onFrameAvailable,结果同名函数太多,经过筛查发现大部分的onFrameAvailable函数其实是定义,不是真正的调用,调用的地方只有一个 就是 在BufferQueueProducer中的queueBuffer的时候会调用
virtual status_t queueBuffer(int slot,
const QueueBufferInput& input, QueueBufferOutput* output);
从整体理解上,似乎说的通,生产者生产出数据之后,通知消费者数据好了,那此BufferQueueProducer怎么和我们的ImageReader联系起来的?
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
// Creates one BufferQueueCore plus its two interfaces: a BufferQueueProducer
// and a BufferQueueConsumer, both sharing the same core state.
void BufferQueue::createBufferQueue(sp<IGraphicBufferProducer>* outProducer,
sp<IGraphicBufferConsumer>* outConsumer,
bool consumerIsSurfaceFlinger) {
LOG_ALWAYS_FATAL_IF(outProducer == NULL,
"BufferQueue: outProducer must not be NULL");
LOG_ALWAYS_FATAL_IF(outConsumer == NULL,
"BufferQueue: outConsumer must not be NULL");
// The core owns the slots/queue shared by both ends.
sp<BufferQueueCore> core(new BufferQueueCore());
LOG_ALWAYS_FATAL_IF(core == NULL,
"BufferQueue: failed to create BufferQueueCore");
sp<IGraphicBufferProducer> producer(new BufferQueueProducer(core, consumerIsSurfaceFlinger));
LOG_ALWAYS_FATAL_IF(producer == NULL,
"BufferQueue: failed to create BufferQueueProducer");
sp<IGraphicBufferConsumer> consumer(new BufferQueueConsumer(core));
LOG_ALWAYS_FATAL_IF(consumer == NULL,
"BufferQueue: failed to create BufferQueueConsumer");
*outProducer = producer;
*outConsumer = consumer;
}
看到上面的代码就应该明白,原来在ImageReader构造的时候 创建的gbProducer实际上就是一个BufferQueueProducer,因此整体流程就串起来了:在ImageReader 的native创建的过程中,创建的生产者会在Camera采集到数据之后,将数据入队的同时通知消费者来使用。此处还遗留一个问题,生产者的listener来自哪?
sp<IConsumerListener> frameAvailableListener;
sp<IConsumerListener> frameReplacedListener;
if (mCore->mQueue.empty()) {
// When the queue is empty, we can ignore mDequeueBufferCannotBlock
// and simply queue this buffer
mCore->mQueue.push_back(item);
frameAvailableListener = mCore->mConsumerListener;
} else {
// When the queue is not empty, we need to look at the last buffer
// in the queue to see if we need to replace it
const BufferItem& last = mCore->mQueue.itemAt(
mCore->mQueue.size() - 1);
if (last.mIsDroppable) {
if (!last.mIsStale) {
mSlots[last.mSlot].mBufferState.freeQueued();
// After leaving shared buffer mode, the shared buffer will
// still be around. Mark it as no longer shared if this
// operation causes it to be free.
if (!mCore->mSharedBufferMode &&
mSlots[last.mSlot].mBufferState.isFree()) {
mSlots[last.mSlot].mBufferState.mShared = false;
}
// Don't put the shared buffer on the free list.
if (!mSlots[last.mSlot].mBufferState.isShared()) {
mCore->mActiveBuffers.erase(last.mSlot);
mCore->mFreeBuffers.push_back(last.mSlot);
output->bufferReplaced = true;
}
}
// Overwrite the droppable buffer with the incoming one
mCore->mQueue.editItemAt(mCore->mQueue.size() - 1) = item;
frameReplacedListener = mCore->mConsumerListener;
} else {
mCore->mQueue.push_back(item);
frameAvailableListener = mCore->mConsumerListener;
}
}
if (frameAvailableListener != NULL) {
frameAvailableListener->onFrameAvailable(item);
} else if (frameReplacedListener != NULL) {
frameReplacedListener->onFrameReplaced(item);
}
由上面三段代码可知,listener来自mCore对象,就是说mCore有listener的引用,那mCore的listener又如何来的?
首先知道mCore是一个BufferQueueCore对象,并且在构造的时候并没有对mConsumerListener进行赋值
sp<BufferQueueCore> mCore;
system\frameworks\native\libs\gui\BufferQueueConsumer.cpp
// Connects a consumer listener to the queue: this is where
// mCore->mConsumerListener (used by queueBuffer to fire onFrameAvailable)
// finally gets assigned.
status_t BufferQueueConsumer::connect(
const sp<IConsumerListener>& consumerListener, bool controlledByApp) {
ATRACE_CALL();
if (consumerListener == NULL) {
BQ_LOGE("connect: consumerListener may not be NULL");
return BAD_VALUE;
}
BQ_LOGV("connect: controlledByApp=%s",
controlledByApp ? "true" : "false");
Mutex::Autolock lock(mCore->mMutex);
if (mCore->mIsAbandoned) {
BQ_LOGE("connect: BufferQueue has been abandoned");
return NO_INIT;
}
mCore->mConsumerListener = consumerListener;
mCore->mConsumerControlledByApp = controlledByApp;
return NO_ERROR;
}
system\frameworks\native\include\gui\BufferQueueConsumer.h
// Thin inline forwarder to connect(); part of the IGraphicBufferConsumer API.
virtual status_t consumerConnect(const sp<IConsumerListener>& consumer, bool controlledByApp) {
return connect(consumer, controlledByApp);
}
system\frameworks\native\libs\gui\ConsumerBase.cpp
// ConsumerBase constructor: registers *itself* (via a ProxyConsumerListener
// holding a weak pointer) as the queue's consumer listener, so frame events
// reach onFrameAvailable() of the derived consumer (e.g. BufferItemConsumer).
ConsumerBase::ConsumerBase(const sp<IGraphicBufferConsumer>& bufferQueue, bool controlledByApp) :
mAbandoned(false),
mConsumer(bufferQueue),
mPrevFinalReleaseFence(Fence::NO_FENCE) {
// Choose a name using the PID and a process-unique ID.
mName = String8::format("unnamed-%d-%d", getpid(), createProcessUniqueId());
// Note that we can't create an sp<...>(this) in a ctor that will not keep a
// reference once the ctor ends, as that would cause the refcount of 'this'
// dropping to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
sp<IConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
status_t err = mConsumer->consumerConnect(proxy, controlledByApp);
if (err != NO_ERROR) {
CB_LOGE("ConsumerBase: error connecting to BufferQueue: %s (%d)",
strerror(-err), err);
} else {
mConsumer->setConsumerName(mName);
}
}
system\frameworks\native\libs\gui\BufferItemConsumer.cpp
// BufferItemConsumer constructor: delegates listener hookup to ConsumerBase,
// then applies the usage bits and max-acquired-buffer count (maxImages).
BufferItemConsumer::BufferItemConsumer(
const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
int bufferCount, bool controlledByApp) :
ConsumerBase(consumer, controlledByApp)
{
status_t err = mConsumer->setConsumerUsageBits(consumerUsage);
LOG_ALWAYS_FATAL_IF(err != OK,
"Failed to set consumer usage bits to %#x", consumerUsage);
// DEFAULT_MAX_BUFFERS means "keep the queue's default"; skip the call.
if (bufferCount != DEFAULT_MAX_BUFFERS) {
err = mConsumer->setMaxAcquiredBufferCount(bufferCount);
LOG_ALWAYS_FATAL_IF(err != OK,
"Failed to set max acquired buffer count to %d", bufferCount);
}
}
system\frameworks\base\media\jni\android_media_ImageReader.cpp
bufferConsumer = new BufferItemConsumer(gbConsumer, consumerUsage, maxImages, /*controlledByApp*/true);
最终赋值是在BufferQueueConsumer.cpp中进行的,简单说下:其实就是在ImageReader的构造函数中,创建BufferItemConsumer消费者的时候,其基类ConsumerBase会把自身作为一个ConsumerListener包装成ProxyConsumerListener传给BufferQueueConsumer::connect,此ProxyConsumerListener会在生产者生产完数据后调用onFrameAvailable方法通知ImageReader的jni层,jni会调用java的方法通知APP可以读数据了
Camera2使用ImageReader接收数据流程.png 此处调用会涉及到binder通信:在生产者端生产数据,通过Binder通知消费者获取数据,消费者会创建共享内存,和APP通信
2.2 APP 读取数据
App读取数据时需要2小步:
1:获取最新的数据image
2:关闭image
具体如下:
// Typical usage: acquire the next (or latest) Image inside the callback and
// close it when done, so its BufferItem returns to the free pool.
mCameraData.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage();
//Image image = reader.acquireLatestImage();
// NOTE(review): acquireNextImage() may return null — production code
// should null-check before calling close().
image.close();
}
},null);
我们首先看下如何获取数据
// Drains the queue: repeatedly acquires the next image, closing each stale
// one, until no newer image exists; returns the newest (or null if none).
public Image acquireLatestImage() {
Image image = acquireNextImage();
if (image == null) {
return null;
}
try {
for (;;) {
Image next = acquireNextImageNoThrowISE();
if (next == null) {
// 'image' is the newest; null it out so finally doesn't close it.
Image result = image;
image = null;
return result;
}
// A newer image exists: discard the older one and keep draining.
image.close();
image = next;
}
} finally {
// Close the in-flight image on any early exit (e.g. exception).
if (image != null) {
image.close();
}
}
}
// Acquires the next available image from the native queue, or null if no
// buffer is ready. Throws if the caller already holds maxImages images.
public Image acquireNextImage() {
// Initialize with reader format, but can be overwritten by native if the image
// format is different from the reader format.
SurfaceImage si = new SurfaceImage(mFormat);
int status = acquireNextSurfaceImage(si);
switch (status) {
case ACQUIRE_SUCCESS:
return si;
case ACQUIRE_NO_BUFS:
return null;
case ACQUIRE_MAX_IMAGES:
throw new IllegalStateException(
String.format(
"maxImages (%d) has already been acquired, " +
"call #close before acquiring more.", mMaxImages));
default:
throw new AssertionError("Unknown nativeImageSetup return code " + status);
}
}
// Calls into native to bind a buffer to the given SurfaceImage; returns one
// of the ACQUIRE_* status codes. Holds mCloseLock so a concurrent close()
// cannot race with the acquisition.
private int acquireNextSurfaceImage(SurfaceImage si) {
synchronized (mCloseLock) {
// A null image will eventually be returned if ImageReader is already closed.
int status = ACQUIRE_NO_BUFS;
if (mIsReaderValid) {
status = nativeImageSetup(si);
}
switch (status) {
case ACQUIRE_SUCCESS:
si.mIsImageValid = true;
// Deliberate fall-through: SUCCESS/NO_BUFS/MAX_IMAGES are all
// expected codes and require no further handling here.
case ACQUIRE_NO_BUFS:
case ACQUIRE_MAX_IMAGES:
break;
default:
throw new AssertionError("Unknown nativeImageSetup return code " + status);
}
// Only keep track the successfully acquired image, as the native buffer is only mapped
// for such case.
if (status == ACQUIRE_SUCCESS) {
mAcquiredImages.add(si);
}
return status;
}
}
static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz, jobject image) {
ALOGV("%s:", __FUNCTION__);
JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
if (ctx == NULL) {
jniThrowException(env, "java/lang/IllegalStateException",
"ImageReader is not initialized or was already closed");
return -1;
}
BufferItemConsumer* bufferConsumer = ctx->getBufferConsumer();
BufferItem* buffer = ctx->getBufferItem();
if (buffer == NULL) {
ALOGW("Unable to acquire a buffer item, very likely client tried to acquire more than"
" maxImages buffers");
return ACQUIRE_MAX_IMAGES;
}
通过上面代码可知,其实在App层获取最新的相机采集回来的帧,就是用户在帧可用事件时一步一步调用下来的,最后其实就是BufferItemConsumer对象最终调用acquireBuffer去获取的数据,数据在jni层获取的数据格式是 BufferItem,然后将此BufferItem 转换成了 image,然后通知java层,用户就可以对数据进行操作了。流程就是一直函数调用,全部在APP 进程中.
读取数据的时候需要2个问题注意下:
问题1:我们在构建ImageReader的时候进行指定了最大的缓存量,如下:
// Public factory for ImageReader: validates the format/usage combination and
// delegates to the protected constructor.
public static ImageReader newInstance(int width, int height, int format, int maxImages,
long usage) {
if (!isFormatUsageCombinationAllowed(format, usage)) {
throw new IllegalArgumentException("Format usage combination is not supported:"
+ " format = " + format + ", usage = " + usage);
}
return new ImageReader(width, height, format, maxImages, usage);
}
那么在获取这个Image的时候 这个最大缓存体现在什么地方?
答案1:
首先在创建ImageReader的时候,创建了一个JNIImageReaderContext,这个在创建的时候就指定了最大缓存量
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
// JNI context constructor: pins global refs to the weak Java ImageReader and
// its class, then preallocates exactly maxImages empty BufferItems — this
// pool is what enforces the maxImages acquisition limit later on.
JNIImageReaderContext::JNIImageReaderContext(JNIEnv* env,
jobject weakThiz, jclass clazz, int maxImages) :
mWeakThiz(env->NewGlobalRef(weakThiz)),
mClazz((jclass)env->NewGlobalRef(clazz)),
mFormat(0),
mDataSpace(HAL_DATASPACE_UNKNOWN),
mWidth(-1),
mHeight(-1) {
// One reusable BufferItem per allowed outstanding image.
for (int i = 0; i < maxImages; i++) {
BufferItem* buffer = new BufferItem;
mBuffers.push_back(buffer);
}
}
List<BufferItem*> mBuffers;
这样就在创建的时候就创建了最大缓存数量的list,但是此时每一个BufferItem的属性都为default,并未将相机的参数取回来
其次java在收到帧可用通知(onImageAvailable回调)时,通过
acquireNextImage/acquireLatestImage 函数获取 image的时候需要首先获取此list中的BufferItem,你能获取到的最多也就是构造的时候传入的最大数量的item,当你获取超过数量的数据时,就会因为拿不到BufferItem承载采集回来的数据而失败
// Pops one free BufferItem from the preallocated pool; NULL when all
// maxImages items are already outstanding (caller maps this to
// ACQUIRE_MAX_IMAGES).
BufferItem* JNIImageReaderContext::getBufferItem() {
if (mBuffers.empty()) {
return NULL;
}
// Return a BufferItem pointer and remove it from the list
List<BufferItem*>::iterator it = mBuffers.begin();
BufferItem* buffer = *it;
mBuffers.erase(it);
return buffer;
}
static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz, jobject image) {
ALOGV("%s:", __FUNCTION__);
JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
if (ctx == NULL) {
jniThrowException(env, "java/lang/IllegalStateException",
"ImageReader is not initialized or was already closed");
return -1;
}
BufferItemConsumer* bufferConsumer = ctx->getBufferConsumer();
BufferItem* buffer = ctx->getBufferItem();
if (buffer == NULL) {
ALOGW("Unable to acquire a buffer item, very likely client tried to acquire more than"
" maxImages buffers");
return ACQUIRE_MAX_IMAGES;
}
最后,当你使用完成此image的时候,会在java层调用close,此时就会将BufferItem释放出来,继续供用来承载数据
// SurfaceImage.close(): hands the image back to the owning ImageReader,
// which releases the native buffer and recycles the BufferItem.
@Override
public void close() {
ImageReader.this.releaseImage(this);
}
// Validates that the image belongs to this reader and is still valid, then
// releases its native buffer and removes it from the acquired-image list.
private void releaseImage(Image i) {
if (! (i instanceof SurfaceImage) ) {
throw new IllegalArgumentException(
"This image was not produced by an ImageReader");
}
SurfaceImage si = (SurfaceImage) i;
// Closing an already-closed image is a harmless no-op.
if (si.mIsImageValid == false) {
return;
}
if (si.getReader() != this || !mAcquiredImages.contains(i)) {
throw new IllegalArgumentException(
"This image was not produced by this ImageReader");
}
// Drop the mapped plane buffers before releasing the native buffer.
si.clearSurfacePlanes();
nativeReleaseImage(i);
si.mIsImageValid = false;
mAcquiredImages.remove(i);
}
// JNI backend of nativeReleaseImage(): releases the graphic buffer back to
// the BufferQueue and returns the BufferItem to the context's free pool so a
// subsequent acquire can reuse it.
static void ImageReader_imageRelease(JNIEnv* env, jobject thiz, jobject image)
{
ALOGV("%s:", __FUNCTION__);
JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
if (ctx == NULL) {
ALOGW("ImageReader#close called before Image#close, consider calling Image#close first");
return;
}
BufferItemConsumer* bufferConsumer = ctx->getBufferConsumer();
BufferItem* buffer = Image_getBufferItem(env, image);
if (buffer == nullptr) {
// Release an already closed image is harmless.
return;
}
// Unlock CPU access first; the fence tells the producer when reuse is safe.
sp<Fence> releaseFence = Image_unlockIfLocked(env, image);
bufferConsumer->releaseBuffer(*buffer, releaseFence);
Image_setBufferItem(env, image, NULL);
ctx->returnBufferItem(buffer);
ALOGV("%s: Image (format: 0x%x) has been released", __FUNCTION__, ctx->getBufferFormat());
}
// Puts a released BufferItem back on the free pool, dropping its reference to
// the graphic buffer so the buffer itself can be recycled by the queue.
void JNIImageReaderContext::returnBufferItem(BufferItem* buffer) {
buffer->mGraphicBuffer = nullptr;
mBuffers.push_back(buffer);
}
问题2:ImageReader对象在收到帧可用时,获取下一帧数据的API存在2个函数,acquireNextImage/acquireLatestImage 这2个函数都可以拿到数据,那么有什么区别?
答案2:此问题在函数的实现中即可以看到
// Acquires the next available image from the native queue, or null if no
// buffer is ready. Throws if the caller already holds maxImages images.
public Image acquireNextImage() {
// Initialize with reader format, but can be overwritten by native if the image
// format is different from the reader format.
SurfaceImage si = new SurfaceImage(mFormat);
int status = acquireNextSurfaceImage(si);
switch (status) {
case ACQUIRE_SUCCESS:
return si;
case ACQUIRE_NO_BUFS:
return null;
case ACQUIRE_MAX_IMAGES:
throw new IllegalStateException(
String.format(
"maxImages (%d) has already been acquired, " +
"call #close before acquiring more.", mMaxImages));
default:
throw new AssertionError("Unknown nativeImageSetup return code " + status);
}
}
获取下一帧可用图像,直接去jni获取
// Drains the queue: repeatedly acquires the next image, closing each stale
// one, until no newer image exists; returns the newest (or null if none).
public Image acquireLatestImage() {
Image image = acquireNextImage();
if (image == null) {
return null;
}
try {
for (;;) {
Image next = acquireNextImageNoThrowISE();
if (next == null) {
// 'image' is the newest; null it out so finally doesn't close it.
Image result = image;
image = null;
return result;
}
// A newer image exists: discard the older one and keep draining.
image.close();
image = next;
}
} finally {
// Close the in-flight image on any early exit (e.g. exception).
if (image != null) {
image.close();
}
}
}
获取最新的可用image:如果你在规定时间内没处理完,则系统替你把当前等待获取的image close掉,再获取一个新的image等待你来拿,一直循环,直到拿到的是最新的(后面没有了),循环结束
网友评论