/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <inttypes.h> // for the PRIu64 format macros used in the logging below

#include <utils/Log.h>
#include <utils/Trace.h>
#include "Camera3OutputStream.h"

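/**
 * Local fallback definition of container_of: recover a pointer to the enclosing
 * struct from a pointer to one of its members. It is used below to map the
 * buffer_handle_t* returned by the HAL back to its owning ANativeWindowBuffer.
 */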
#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace android {

namespace camera3 {

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        int setId) :
        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, setId),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(0),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

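    // Release notifications from the producer listener are only needed when this
    // stream can belong to a valid stream set, i.e. when Camera3BufferManager may
    // end up managing its buffers (see BufferProducerListener::onBufferReleased()).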
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, setId),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(0),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
        const String8& physicalCameraId, int setId) :
        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, setId),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {
    // A deferred consumer only supports the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Sanity check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
                __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

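    // The real consumer Surface is not known yet; it is expected to be attached
    // later through setConsumers().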
    mConsumerName = String8("Deferred");
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera3_stream_rotation_t rotation,
        const String8& physicalCameraId,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, setId),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses expected to initialize mConsumer themselves
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
        ANativeWindowBuffer* buffer, int anwReleaseFence,
        const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
    ATRACE_CALL();

    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    (void)output;
    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                    " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }

        /* Certain consumers (such as AudioSource or HardwareComposer) use
         * MONOTONIC time, causing time misalignment if camera timestamp is
         * in BOOTTIME. Do the conversion if necessary. */
        res = native_window_set_buffers_timestamp(mConsumer.get(),
                mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }

        res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                    " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    }
    mLock.lock();

    // Once a valid buffer has been returned to the queue, we can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

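    // cancelBuffer/queueBuffer only take ownership of the dup'd release fence fd
    // on success, so close it here if returning the buffer to the window failed.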
    if (res != OK) {
        close(anwReleaseFence);
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
    (void) args;
    String8 lines;
    lines.appendFormat("    Stream[%d]: Output\n", mId);
    lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
    write(fd, lines.string(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
        "      DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;
    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked()) != OK) {
        return res;
    }

    // Set the dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw
    // texture. We need to skip these cases as a timeout would disable the non-blocking
    // (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When the buffer manager is handling the buffers, we should have available
            // buffers in the buffer queue before we call into dequeueBuffer, because the
            // buffer manager is tracking free buffers.
            // There are however some consumer-side features (ImageReader::discardFreeBuffers)
            // that can discard free buffers without notifying the buffer manager. We want the
            // timeout to happen immediately here so the buffer manager can update its internal
            // state and try to allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

status_t Camera3OutputStream::configureConsumerQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify the buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    mConsumerName = mConsumer->getConsumerName();

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera3_stream::width, camera3_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera3_stream::width, camera3_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera3_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera3_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera3_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera3_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera3_stream::max_buffers);
    if (camera3_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera3_stream::max_buffers);
        return INVALID_OPERATION;
    }

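    // Keep enough buffers for both sides to run in parallel: the HAL's requested
    // max_buffers plus the buffers the consumer keeps un-dequeued.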
    mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;
    mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());

    res = native_window_set_buffer_count(mConsumer.get(),
            mTotalBufferCount);
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup in the code below is skipped for older HALs. Note that HAL3.2 is also
     * excluded here, as some HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams, as they have their
     * own buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        StreamInfo streamInfo(
                getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true);
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue; the buffer manager will take
            // over the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                    "(error %d %s), fall back to BufferQueue for buffer management!",
                    __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock for the scenario below:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         *           This thread acquired the StreamingProcessor lock and tries to lock the
         *           Camera3Stream lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         *           This thread acquired the Camera3Stream lock and bufferQueue lock, and
         *           tries to lock the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread acquired the Camera3Stream lock
         *           and tries to lock the bufferQueue lock.
         * Then there is a circular locking dependency.
         */
        sp<ANativeWindow> currentConsumer = mConsumer;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
        res = currentConsumer->dequeueBuffer(currentConsumer.get(), anb, fenceFd);
        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

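        // The dequeue timeout is zero when the buffer manager is in use (see
        // configureQueueLocked), so TIMED_OUT here just means no free buffer is
        // currently attached to the queue; ask the buffer manager for one directly.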
        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state
                // after a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // Stream configuration was not finished (the stream can only be in STATE_IN_CONFIG or
    // STATE_CONSTRUCTED state), so there is no need to change the stream state; return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
            NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client calling process dies, the window will
     * also die and all calls to it will return DEAD_OBJECT, thus it's already
     * "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
                " native window died from under us", __FUNCTION__, mId);
    } else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
                "(error %d %s)",
                __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there is no getBuffer call to the buffer manager, so
    // unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                    "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {

    status_t res;

    if (mConsumer == nullptr) {
        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
        *usage = mConsumerUsage;
        return OK;
    }

    res = getEndpointUsageForSurface(usage, mConsumer);

    return res;
}

void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
    if (consumerUsage == nullptr) {
        return;
    }

    // If an opaque output stream's endpoint is ImageReader, add
    // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so the HAL knows it will be used
    // for the ZSL use case.
    // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
    //     1. GRALLOC_USAGE_HW_TEXTURE
    //     2. GRALLOC_USAGE_HW_RENDER
    //     3. GRALLOC_USAGE_HW_COMPOSER
    //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
            (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
            GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
        *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
    }
}

status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
        const sp<Surface>& surface) const {
    status_t res;
    uint64_t u = 0;

    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
    applyZSLUsageQuirk(camera3_stream::format, &u);
    *usage = u;
    return res;
}

bool Camera3OutputStream::isVideoStream() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
}

status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
    Mutex::Autolock l(mLock);
    if (mState != STATE_CONSTRUCTED) {
        ALOGE("%s: this method can only be called when the stream is in the CONSTRUCTED state.",
                __FUNCTION__);
        return INVALID_OPERATION;
    }
    mBufferManager = bufferManager;

    return OK;
}

status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
        const std::vector<OutputStreamInfo> &/*outputInfo*/,
        const std::vector<size_t> &/*removedSurfaceIds*/,
        KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
    ALOGE("%s: this method is not supported!", __FUNCTION__);
    return INVALID_OPERATION;
}

void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(stream->mLock);
    if (!(stream->mUseBufferManager)) {
        return;
    }

    ALOGV("Stream %d: Buffer released", stream->getId());
    bool shouldFreeBuffer = false;
    status_t res = stream->mBufferManager->onBufferReleased(
            stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
    if (res != OK) {
        ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
                strerror(-res), res);
        stream->mState = STATE_ERROR;
    }

    if (shouldFreeBuffer) {
        sp<GraphicBuffer> buffer;
        // Detach and free a buffer (when buffer goes out of scope)
        stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
        if (buffer.get() != nullptr) {
            stream->mBufferManager->notifyBufferRemoved(
                    stream->getId(), stream->getStreamSetId());
        }
    }
}

void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
        const std::vector<sp<GraphicBuffer>>& buffers) {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    if (buffers.size() > 0) {
        Mutex::Autolock l(stream->mLock);
        stream->onBuffersRemovedLocked(buffers);
        if (stream->mUseBufferManager) {
            stream->mBufferManager->onBuffersRemoved(stream->getId(),
                    stream->getStreamSetId(), buffers.size());
        }
        ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
    }
}

void Camera3OutputStream::onBuffersRemovedLocked(
        const std::vector<sp<GraphicBuffer>>& removedBuffers) {
    sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
    if (callback != nullptr) {
        for (const auto& gb : removedBuffers) {
            callback->onBufferFreed(mId, gb->handle);
        }
    }
}

status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
    Mutex::Autolock l(mLock);
    return detachBufferLocked(buffer, fenceFd);
}

status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
    ALOGV("Stream %d: detachBuffer", getId());
    if (buffer == nullptr) {
        return BAD_VALUE;
    }

    sp<Fence> fence;
    status_t res = mConsumer->detachNextBuffer(buffer, &fence);
    if (res == NO_MEMORY) {
        // This may rarely happen, which indicates that the released buffer was freed by another
        // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify
        // the buffer manager that this buffer has been freed. It's not fatal, but should be
        // avoided, therefore log a warning.
        *buffer = 0;
        ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
    } else if (res != OK) {
        // Treat other errors as abandonment
        if (shouldLogError(res, mState)) {
            ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        }
        mState = STATE_ABANDONED;
        return res;
    }

    if (fenceFd != nullptr) {
        if (fence != 0 && fence->isValid()) {
            *fenceFd = fence->dup();
        } else {
            *fenceFd = -1;
        }
    }

    // Here we assume detachBuffer is called by the buffer manager, so it doesn't need to be
    // notified.
    checkRemovedBuffersLocked(/*notifyBufferManager*/false);
    return res;
}

status_t Camera3OutputStream::dropBuffers(bool dropping) {
    Mutex::Autolock l(mLock);
    mDropBuffers = dropping;
    return OK;
}

const String8& Camera3OutputStream::getPhysicalCameraId() const {
    Mutex::Autolock l(mLock);
    return physicalCameraId();
}

status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
    return OK;
}

bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
    Mutex::Autolock l(mLock);

    if (surface_id != 0) {
        ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
    }
    return mConsumer == nullptr;
}

status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
    Mutex::Autolock l(mLock);
    if (consumers.size() != 1) {
        ALOGE("%s: it's illegal to set %zu consumer surfaces!",
                __FUNCTION__, consumers.size());
        return INVALID_OPERATION;
    }
    if (consumers[0] == nullptr) {
        ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (mConsumer != nullptr) {
        ALOGE("%s: consumer surface was already set!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mConsumer = consumers[0];
    return OK;
}

bool Camera3OutputStream::isConsumedByHWComposer() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
}

bool Camera3OutputStream::isConsumedByHWTexture() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}

}; // namespace camera3

}; // namespace android