/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#define LOG_TAG "ExtCamDevSsn@3.4"
//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_CAMERA
#include <log/log.h>

#include <inttypes.h>
#include "ExternalCameraDeviceSession.h"

#include "android-base/macros.h"
#include <utils/Timers.h>
#include <utils/Trace.h>
#include <linux/videodev2.h>
#include <sync/sync.h>

#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>

#include <jpeglib.h>


namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

const int kBadFramesAfterStreamOn = 1; // drop the first x frames after streamOn to get rid of
                                       // some initial bad frames. TODO: develop a better bad
                                       // frame detection method
constexpr int MAX_RETRY = 15; // Allow retrying some ioctl failures a few times to account for
                              // webcams that show transient ioctl failures.
constexpr int IOCTL_RETRY_SLEEP_US = 33000; // 33ms * MAX_RETRY ~= 0.5 seconds

// Constants for tryLock during dumpstate
static constexpr int kDumpLockRetries = 50;
static constexpr int kDumpLockSleep = 60000;

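// Best-effort lock helpers used only by dumpState: retry for up to
// kDumpLockRetries * kDumpLockSleep (~3 seconds) and report whether the lock
// was actually acquired, so the dump can proceed even on a wedged session.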
bool tryLock(Mutex& mutex)
{
    bool locked = false;
    for (int i = 0; i < kDumpLockRetries; ++i) {
        if (mutex.tryLock() == NO_ERROR) {
            locked = true;
            break;
        }
        usleep(kDumpLockSleep);
    }
    return locked;
}

bool tryLock(std::mutex& mutex)
{
    bool locked = false;
    for (int i = 0; i < kDumpLockRetries; ++i) {
        if (mutex.try_lock()) {
            locked = true;
            break;
        }
        usleep(kDumpLockSleep);
    }
    return locked;
}

buffer_handle_t sEmptyBuffer = nullptr;

} // Anonymous namespace

// Static instances
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
HandleImporter ExternalCameraDeviceSession::sHandleImporter;

ExternalCameraDeviceSession::ExternalCameraDeviceSession(
        const sp<ICameraDeviceCallback>& callback,
        const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats,
        const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars,
        const std::string& cameraId,
        unique_fd v4l2Fd) :
        mCallback(callback),
        mCfg(cfg),
        mCameraCharacteristics(chars),
        mSupportedFormats(sortedFormats),
        mCroppingType(croppingType),
        mCameraId(cameraId),
        mV4l2Fd(std::move(v4l2Fd)),
        mMaxThumbResolution(getMaxThumbResolution()),
        mMaxJpegResolution(getMaxJpegResolution()) {}

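// Perform one-time session initialization. Note the inverted return
// convention: returns true on failure and false on success, matching the
// mInitFail flag consumed by isInitFailed().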
bool ExternalCameraDeviceSession::initialize() {
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
        return true;
    }

    struct v4l2_capability capability;
    int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
    std::string make, model;
    if (ret < 0) {
        ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__);
        make = "Generic UVC webcam";
        model = "Generic UVC webcam";
    } else {
        // capability.card is UTF-8 encoded
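        // Keep only ASCII bytes; if nothing printable remains or the string
        // is not NUL-terminated, fall back to a generic make/model below.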
        char card[32];
        int j = 0;
        for (int i = 0; i < 32; i++) {
            if (capability.card[i] < 128) {
                card[j++] = capability.card[i];
            }
            if (capability.card[i] == '\0') {
                break;
            }
        }
        if (j == 0 || card[j - 1] != '\0') {
            make = "Generic UVC webcam";
            model = "Generic UVC webcam";
        } else {
            make = card;
            model = card;
        }
    }

    initOutputThread();
    if (mOutputThread == nullptr) {
        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
        return true;
    }
    mOutputThread->setExifMakeModel(make, model);

    status_t status = initDefaultRequests();
    if (status != OK) {
        ALOGE("%s: init default requests failed!", __FUNCTION__);
        return true;
    }

    mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mRequestMetadataQueue->isValid()) {
        ALOGE("%s: invalid request fmq", __FUNCTION__);
        return true;
    }
    mResultMetadataQueue = std::make_shared<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }

    // TODO: check whether PRIORITY_DISPLAY is enough
    mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY);
    return false;
}

bool ExternalCameraDeviceSession::isInitFailed() {
    Mutex::Autolock _l(mLock);
    if (!mInitialized) {
        mInitFail = initialize();
        mInitialized = true;
    }
    return mInitFail;
}

void ExternalCameraDeviceSession::initOutputThread() {
    mOutputThread = new OutputThread(this, mCroppingType);
}

void ExternalCameraDeviceSession::closeOutputThread() {
    closeOutputThreadImpl();
}

void ExternalCameraDeviceSession::closeOutputThreadImpl() {
    if (mOutputThread) {
        mOutputThread->flush();
        mOutputThread->requestExit();
        mOutputThread->join();
        mOutputThread.clear();
    }
}

Status ExternalCameraDeviceSession::initStatus() const {
    Mutex::Autolock _l(mLock);
    Status status = Status::OK;
    if (mInitFail || mClosed) {
        ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed);
        status = Status::INTERNAL_ERROR;
    }
    return status;
}

ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {
    if (!isClosed()) {
        ALOGE("ExternalCameraDeviceSession deleted before close!");
        close(/*callerIsDtor*/true);
    }
}


void ExternalCameraDeviceSession::dumpState(const native_handle_t* handle) {
    if (handle->numFds != 1 || handle->numInts != 0) {
        ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints",
                __FUNCTION__, handle->numFds, handle->numInts);
        return;
    }
    int fd = handle->data[0];

    bool intfLocked = tryLock(mInterfaceLock);
    if (!intfLocked) {
        dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n");
    }

    if (isClosed()) {
        dprintf(fd, "External camera %s is closed\n", mCameraId.c_str());
        return;
    }

    bool streaming = false;
    size_t v4L2BufferCount = 0;
    SupportedV4L2Format streamingFmt;
    {
        bool sessionLocked = tryLock(mLock);
        if (!sessionLocked) {
            dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n");
        }
        streaming = mV4l2Streaming;
        streamingFmt = mV4l2StreamingFmt;
        v4L2BufferCount = mV4L2BufferCount;

        if (sessionLocked) {
            mLock.unlock();
        }
    }

    std::unordered_set<uint32_t> inflightFrames;
    {
        bool iffLocked = tryLock(mInflightFramesLock);
        if (!iffLocked) {
            dprintf(fd,
                    "!! ExternalCameraDeviceSession mInflightFramesLock may be deadlocked !!\n");
        }
        inflightFrames = mInflightFrames;
        if (iffLocked) {
            mInflightFramesLock.unlock();
        }
    }

    dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n",
            mCameraId.c_str(), mV4l2Fd.get(),
            (mCroppingType == VERTICAL) ? "vertical" : "horizontal",
            streaming ? "streaming" : "not streaming");
    if (streaming) {
        // TODO: dump fps later
        dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d @ %ffps\n",
                streamingFmt.fourcc & 0xFF,
                (streamingFmt.fourcc >> 8) & 0xFF,
                (streamingFmt.fourcc >> 16) & 0xFF,
                (streamingFmt.fourcc >> 24) & 0xFF,
                streamingFmt.width, streamingFmt.height,
                mV4l2StreamingFps);

        size_t numDequeuedV4l2Buffers = 0;
        {
            std::lock_guard<std::mutex> lk(mV4l2BufferLock);
            numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers;
        }
        dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n",
                v4L2BufferCount, numDequeuedV4l2Buffers);
    }

    dprintf(fd, "In-flight frames (not sorted):");
    for (const auto& frameNumber : inflightFrames) {
        dprintf(fd, "%d, ", frameNumber);
    }
    dprintf(fd, "\n");
    mOutputThread->dump(fd);
    dprintf(fd, "\n");

    if (intfLocked) {
        mInterfaceLock.unlock();
    }

    return;
}

Return<void> ExternalCameraDeviceSession::constructDefaultRequestSettings(
        V3_2::RequestTemplate type,
        V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
    V3_2::CameraMetadata outMetadata;
    Status status = constructDefaultRequestSettingsRaw(
            static_cast<RequestTemplate>(type), &outMetadata);
    _hidl_cb(status, outMetadata);
    return Void();
}

Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type,
        V3_2::CameraMetadata *outMetadata) {
    CameraMetadata emptyMd;
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    switch (type) {
        case RequestTemplate::PREVIEW:
        case RequestTemplate::STILL_CAPTURE:
        case RequestTemplate::VIDEO_RECORD:
        case RequestTemplate::VIDEO_SNAPSHOT: {
            *outMetadata = mDefaultRequests[type];
            break;
        }
        case RequestTemplate::MANUAL:
        case RequestTemplate::ZERO_SHUTTER_LAG:
            // Don't support MANUAL, ZSL templates
            status = Status::ILLEGAL_ARGUMENT;
            break;
        default:
            ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(type));
            status = Status::ILLEGAL_ARGUMENT;
            break;
    }
    return status;
}

Return<void> ExternalCameraDeviceSession::configureStreams(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_cb _hidl_cb) {
    V3_2::HalStreamConfiguration outStreams;
    V3_3::HalStreamConfiguration outStreams_v33;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams_v33);
    size_t size = outStreams_v33.streams.size();
    outStreams.streams.resize(size);
    for (size_t i = 0; i < size; i++) {
        outStreams.streams[i] = outStreams_v33.streams[i].v3_2;
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::configureStreams_3_3(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) {
    V3_3::HalStreamConfiguration outStreams;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams);
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::configureStreams_3_4(
        const V3_4::StreamConfiguration& requestedConfiguration,
        ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb)  {
    V3_2::StreamConfiguration config_v32;
    V3_3::HalStreamConfiguration outStreams_v33;
    V3_4::HalStreamConfiguration outStreams;
    Mutex::Autolock _il(mInterfaceLock);

    config_v32.operationMode = requestedConfiguration.operationMode;
    config_v32.streams.resize(requestedConfiguration.streams.size());
    uint32_t blobBufferSize = 0;
    int numStallStream = 0;
    for (size_t i = 0; i < config_v32.streams.size(); i++) {
        config_v32.streams[i] = requestedConfiguration.streams[i].v3_2;
        if (config_v32.streams[i].format == PixelFormat::BLOB) {
            blobBufferSize = requestedConfiguration.streams[i].bufferSize;
            numStallStream++;
        }
    }

    // Fail early if there are multiple BLOB streams
    if (numStallStream > kMaxStallStream) {
        ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__,
                kMaxStallStream, numStallStream);
        _hidl_cb(Status::ILLEGAL_ARGUMENT, outStreams);
        return Void();
    }

    Status status = configureStreams(config_v32, &outStreams_v33, blobBufferSize);

    outStreams.streams.resize(outStreams_v33.streams.size());
    for (size_t i = 0; i < outStreams.streams.size(); i++) {
        outStreams.streams[i].v3_3 = outStreams_v33.streams[i];
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureRequestMetadataQueue(
    ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mRequestMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureResultMetadataQueue(
    ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mResultMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest(
        const hidl_vec<CaptureRequest>& requests,
        const hidl_vec<BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i]);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest_3_4(
        const hidl_vec<V3_4::CaptureRequest>& requests,
        const hidl_vec<V3_2::BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i].v3_2);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<Status> ExternalCameraDeviceSession::flush() {
    ATRACE_CALL();
    Mutex::Autolock _il(mInterfaceLock);
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }
    mOutputThread->flush();
    return Status::OK;
}

Return<void> ExternalCameraDeviceSession::close(bool callerIsDtor) {
    Mutex::Autolock _il(mInterfaceLock);
    bool closed = isClosed();
    if (!closed) {
        if (callerIsDtor) {
            closeOutputThreadImpl();
        } else {
            closeOutputThread();
        }

        Mutex::Autolock _l(mLock);
        // free all buffers
        {
            Mutex::Autolock _l(mCbsLock);
            for(auto pair : mStreamMap) {
                cleanupBuffersLocked(/*Stream ID*/pair.first);
            }
        }
        v4l2StreamOffLocked();
        ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get());
        mV4l2Fd.reset();
        mClosed = true;
    }
    return Void();
}

Status ExternalCameraDeviceSession::importRequestLocked(
    const CaptureRequest& request,
    hidl_vec<buffer_handle_t*>& allBufPtrs,
    hidl_vec<int>& allFences) {
    return importRequestLockedImpl(request, allBufPtrs, allFences);
}

Status ExternalCameraDeviceSession::importBuffer(int32_t streamId,
        uint64_t bufId, buffer_handle_t buf,
        /*out*/buffer_handle_t** outBufPtr,
        bool allowEmptyBuf) {
    Mutex::Autolock _l(mCbsLock);
    return importBufferLocked(streamId, bufId, buf, outBufPtr, allowEmptyBuf);
}

Status ExternalCameraDeviceSession::importBufferLocked(int32_t streamId,
        uint64_t bufId, buffer_handle_t buf,
        /*out*/buffer_handle_t** outBufPtr,
        bool allowEmptyBuf) {

    if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) {
        if (allowEmptyBuf) {
            *outBufPtr = &sEmptyBuffer;
            return Status::OK;
        } else {
            ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
            return Status::ILLEGAL_ARGUMENT;
        }
    }

    CirculatingBuffers& cbs = mCirculatingBuffers[streamId];
    if (cbs.count(bufId) == 0) {
        if (buf == nullptr) {
            ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
            return Status::ILLEGAL_ARGUMENT;
        }
        // Register a newly seen buffer
        buffer_handle_t importedBuf = buf;
        sHandleImporter.importBuffer(importedBuf);
        if (importedBuf == nullptr) {
            ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId);
            return Status::INTERNAL_ERROR;
        } else {
            cbs[bufId] = importedBuf;
        }
    }
    *outBufPtr = &cbs[bufId];
    return Status::OK;
}

Status ExternalCameraDeviceSession::importRequestLockedImpl(
        const CaptureRequest& request,
        hidl_vec<buffer_handle_t*>& allBufPtrs,
        hidl_vec<int>& allFences,
        bool allowEmptyBuf) {
    size_t numOutputBufs = request.outputBuffers.size();
    size_t numBufs = numOutputBufs;
    // Validate all I/O buffers
    hidl_vec<buffer_handle_t> allBufs;
    hidl_vec<uint64_t> allBufIds;
    allBufs.resize(numBufs);
    allBufIds.resize(numBufs);
    allBufPtrs.resize(numBufs);
    allFences.resize(numBufs);
    std::vector<int32_t> streamIds(numBufs);

    for (size_t i = 0; i < numOutputBufs; i++) {
        allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle();
        allBufIds[i] = request.outputBuffers[i].bufferId;
        allBufPtrs[i] = &allBufs[i];
        streamIds[i] = request.outputBuffers[i].streamId;
    }

    {
        Mutex::Autolock _l(mCbsLock);
        for (size_t i = 0; i < numBufs; i++) {
            Status st = importBufferLocked(
                    streamIds[i], allBufIds[i], allBufs[i], &allBufPtrs[i],
                    allowEmptyBuf);
            if (st != Status::OK) {
                // Detailed error logs printed in importBuffer
                return st;
            }
        }
    }

    // All buffers are imported. Now validate output buffer acquire fences
    for (size_t i = 0; i < numOutputBufs; i++) {
        if (!sHandleImporter.importFence(
                request.outputBuffers[i].acquireFence, allFences[i])) {
            ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i);
            cleanupInflightFences(allFences, i);
            return Status::INTERNAL_ERROR;
        }
    }
    return Status::OK;
}

void ExternalCameraDeviceSession::cleanupInflightFences(
        hidl_vec<int>& allFences, size_t numFences) {
    for (size_t j = 0; j < numFences; j++) {
        sHandleImporter.closeFence(allFences[j]);
    }
}

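// Wait until the OutputThread returns a V4L2 buffer, or time out after
// kBufferWaitTimeoutSec. Called with mLock held and lk holding
// mV4l2BufferLock; mLock is temporarily released while waiting.
// Returns 0 on success, -1 on timeout.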
int ExternalCameraDeviceSession::waitForV4L2BufferReturnLocked(std::unique_lock<std::mutex>& lk) {
    ATRACE_CALL();
    std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec);
    mLock.unlock();
    auto st = mV4L2BufferReturned.wait_for(lk, timeout);
    // Here we introduce a lock order where mV4l2BufferLock is acquired before
    // mLock, while the normal acquisition order is the reverse. This is fine
    // because in most cases we are protected by mInterfaceLock. The only
    // thread that could cause a deadlock is the OutputThread, where we must
    // make sure we never acquire mLock and then mV4l2BufferLock.
    mLock.lock();
    if (st == std::cv_status::timeout) {
        ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__);
        return -1;
    }
    return 0;
}

Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request)  {
    ATRACE_CALL();
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    if (request.inputBuffer.streamId != -1) {
        ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    Mutex::Autolock _l(mLock);
    if (!mV4l2Streaming) {
        ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    const camera_metadata_t *rawSettings = nullptr;
    bool converted = true;
    CameraMetadata settingsFmq;  // settings from FMQ
    if (request.fmqSettingsSize > 0) {
        // non-blocking read; client must write metadata before calling
        // processOneCaptureRequest
        settingsFmq.resize(request.fmqSettingsSize);
        bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize);
        if (read) {
            converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings);
        } else {
            ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__);
            converted = false;
        }
    } else {
        converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings);
    }

    if (converted && rawSettings != nullptr) {
        mLatestReqSetting = rawSettings;
    }

    if (!converted) {
        ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (mFirstRequest && rawSettings == nullptr) {
        ALOGE("%s: capture request settings must not be null for first request!",
                __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    hidl_vec<buffer_handle_t*> allBufPtrs;
    hidl_vec<int> allFences;
    size_t numOutputBufs = request.outputBuffers.size();

    if (numOutputBufs == 0) {
        ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

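    // If the request carries an AE target FPS range, find the supported frame
    // rate closest to the requested max FPS, and reconfigure the V4L2 stream
    // when it differs from the current streaming FPS (draining the pipeline
    // first).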
    camera_metadata_entry fpsRange = mLatestReqSetting.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
    if (fpsRange.count == 2) {
        double requestFpsMax = fpsRange.data.i32[1];
        double closestFps = 0.0;
        double fpsError = 1000.0;
        bool fpsSupported = false;
        for (const auto& fr : mV4l2StreamingFmt.frameRates) {
            double f = fr.getDouble();
            if (std::fabs(requestFpsMax - f) < 1.0) {
                fpsSupported = true;
                break;
            }
            if (std::fabs(requestFpsMax - f) < fpsError) {
                fpsError = std::fabs(requestFpsMax - f);
                closestFps = f;
            }
        }
        if (!fpsSupported) {
            /* This can happen in a few scenarios:
             * 1. The application is sending an FPS range not supported by the configured outputs.
             * 2. The application is sending a valid FPS range for all configured outputs, but
             *    the selected V4L2 size can only run at a slower speed. This should be very rare
             *    though: for this to happen a sensor needs to support at least 3 different aspect
             *    ratio outputs, and when (at least) two outputs are both not the main aspect ratio
             *    of the webcam, a third size that's larger might be picked and runs into this
             *    issue.
             */
            ALOGW("%s: cannot reach fps %d! Will do %f instead",
                    __FUNCTION__, fpsRange.data.i32[1], closestFps);
            requestFpsMax = closestFps;
        }

        if (requestFpsMax != mV4l2StreamingFps) {
            {
                std::unique_lock<std::mutex> lk(mV4l2BufferLock);
                while (mNumDequeuedV4l2Buffers != 0) {
                    // Wait until pipeline is idle before reconfiguring the stream
                    int waitRet = waitForV4L2BufferReturnLocked(lk);
                    if (waitRet != 0) {
                        ALOGE("%s: wait for pipeline idle failed!", __FUNCTION__);
                        return Status::INTERNAL_ERROR;
                    }
                }
            }
            configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax);
        }
    }

    status = importRequestLocked(request, allBufPtrs, allFences);
    if (status != Status::OK) {
        return status;
    }

    nsecs_t shutterTs = 0;
    sp<V4L2Frame> frameIn = dequeueV4l2FrameLocked(&shutterTs);
    if (frameIn == nullptr) {
        ALOGE("%s: V4L2 dequeue frame failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    std::shared_ptr<HalRequest> halReq = std::make_shared<HalRequest>();
    halReq->frameNumber = request.frameNumber;
    halReq->setting = mLatestReqSetting;
    halReq->frameIn = frameIn;
    halReq->shutterTs = shutterTs;
    halReq->buffers.resize(numOutputBufs);
    for (size_t i = 0; i < numOutputBufs; i++) {
        HalStreamBuffer& halBuf = halReq->buffers[i];
        int streamId = halBuf.streamId = request.outputBuffers[i].streamId;
        halBuf.bufferId = request.outputBuffers[i].bufferId;
        const Stream& stream = mStreamMap[streamId];
        halBuf.width = stream.width;
        halBuf.height = stream.height;
        halBuf.format = stream.format;
        halBuf.usage = stream.usage;
        halBuf.bufPtr = allBufPtrs[i];
        halBuf.acquireFence = allFences[i];
        halBuf.fenceTimeout = false;
    }
    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        mInflightFrames.insert(halReq->frameNumber);
    }
    // Send request to OutputThread for the rest of processing
    mOutputThread->submitRequest(halReq);
    mFirstRequest = false;
    return Status::OK;
}

void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) {
    NotifyMsg msg;
    msg.type = MsgType::SHUTTER;
    msg.msg.shutter.frameNumber = frameNumber;
    msg.msg.shutter.timestamp = shutterTs;
    mCallback->notify({msg});
}

void ExternalCameraDeviceSession::notifyError(
        uint32_t frameNumber, int32_t streamId, ErrorCode ec) {
    NotifyMsg msg;
    msg.type = MsgType::ERROR;
    msg.msg.error.frameNumber = frameNumber;
    msg.msg.error.errorStreamId = streamId;
    msg.msg.error.errorCode = ec;
    mCallback->notify({msg});
}

// TODO: refactor with processCaptureResult
Status ExternalCameraDeviceSession::processCaptureRequestError(
        const std::shared_ptr<HalRequest>& req) {
    ATRACE_CALL();
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req->frameIn);

    // NotifyShutter
    notifyShutter(req->frameNumber, req->shutterTs);

    notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        result.outputBuffers[i].streamId = req->buffers[i].streamId;
        result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
        result.outputBuffers[i].status = BufferStatus::ERROR;
        if (req->buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
            handle->data[0] = req->buffers[i].acquireFence;
            result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
        }
    }

    // update inflight records
    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        mInflightFrames.erase(req->frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}

Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
    ATRACE_CALL();
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req->frameIn);

    // NotifyShutter
    notifyShutter(req->frameNumber, req->shutterTs);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        result.outputBuffers[i].streamId = req->buffers[i].streamId;
        result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
        if (req->buffers[i].fenceTimeout) {
            result.outputBuffers[i].status = BufferStatus::ERROR;
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req->buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        } else {
            result.outputBuffers[i].status = BufferStatus::OK;
            // TODO: refactor
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req->buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req->setting, req->shutterTs);
    const camera_metadata_t *rawResult = req->setting.getAndLock();
    V3_2::implementation::convertToHidl(rawResult, &result.result);
    req->setting.unlock(rawResult);

    // update inflight records
    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        mInflightFrames.erase(req->frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}

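// Serialize calls into ICameraDeviceCallback::processCaptureResult. When
// tryWriteFmq is set and the result FMQ has space, result metadata is sent
// through the FMQ; otherwise it falls back to the hwbinder payload.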
void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback(
        hidl_vec<CaptureResult> &results, bool tryWriteFmq) {
    if (mProcessCaptureResultLock.tryLock() != OK) {
        const nsecs_t NS_TO_SECOND = 1000000000;
        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, cannot proceed",
                    __FUNCTION__);
            return;
        }
    }
    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
        for (CaptureResult &result : results) {
            if (result.result.size() > 0) {
                if (mResultMetadataQueue->write(result.result.data(), result.result.size())) {
                    result.fmqResultSize = result.result.size();
                    result.result.resize(0);
                } else {
                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
                    result.fmqResultSize = 0;
                }
            } else {
                result.fmqResultSize = 0;
            }
        }
    }
    auto status = mCallback->processCaptureResult(results);
    if (!status.isOk()) {
        ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__,
              status.description().c_str());
    }

    mProcessCaptureResultLock.unlock();
}

void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec<CaptureResult>& results) {
    for (auto& result : results) {
        if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) {
            native_handle_t* handle = const_cast<native_handle_t*>(
                    result.inputBuffer.releaseFence.getNativeHandle());
            native_handle_close(handle);
            native_handle_delete(handle);
        }
        for (auto& buf : result.outputBuffers) {
            if (buf.releaseFence.getNativeHandle() != nullptr) {
                native_handle_t* handle = const_cast<native_handle_t*>(
                        buf.releaseFence.getNativeHandle());
                native_handle_close(handle);
                native_handle_delete(handle);
            }
        }
    }
    return;
}

ExternalCameraDeviceSession::OutputThread::OutputThread(
        wp<ExternalCameraDeviceSession> parent,
        CroppingType ct) : mParent(parent), mCroppingType(ct) {}

ExternalCameraDeviceSession::OutputThread::~OutputThread() {}

void ExternalCameraDeviceSession::OutputThread::setExifMakeModel(
        const std::string& make, const std::string& model) {
    mExifMake = make;
    mExifModel = model;
}

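// Infer the V4L2 fourcc of a flexible YCbCr layout from its chroma plane
// pointers and chroma step: a chroma step of 2 with adjacent cb/cr pointers
// means interleaved NV12/NV21, a chroma step of 1 means planar YU12/YV12,
// and anything else is treated as a generic flexible layout.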
uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout(
        const YCbCrLayout& layout) {
    intptr_t cb = reinterpret_cast<intptr_t>(layout.cb);
    intptr_t cr = reinterpret_cast<intptr_t>(layout.cr);
    if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) {
        // Interleaved format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_NV21;
        } else {
            return V4L2_PIX_FMT_NV12;
        }
    } else if (layout.chromaStep == 1) {
        // Planar format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_YVU420; // YV12
        } else {
            return V4L2_PIX_FMT_YUV420; // YU12
        }
    } else {
        return FLEX_YUV_GENERIC;
    }
}

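// Compute the centered crop rectangle that matches the output aspect ratio.
// Example: with VERTICAL cropping, a 640x480 input and a 640x360 output give
// a crop rect of 640x360 at left 0, top 60.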
int ExternalCameraDeviceSession::OutputThread::getCropRect(
        CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return -1;
    }

    uint32_t inW = inSize.width;
    uint32_t inH = inSize.height;
    uint32_t outW = outSize.width;
    uint32_t outH = outSize.height;

    // Handle special case where aspect ratio is close to input but scaled
    // dimension is slightly larger than input
    float arIn = ASPECT_RATIO(inSize);
    float arOut = ASPECT_RATIO(outSize);
    if (isAspectRatioClose(arIn, arOut)) {
        out->left = 0;
        out->top = 0;
        out->width = inW;
        out->height = inH;
        return 0;
    }

    if (ct == VERTICAL) {
        uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW;
        if (scaledOutH > inH) {
            ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutH = scaledOutH & ~0x1; // make it a multiple of 2

        out->left = 0;
        out->top = ((inH - scaledOutH) / 2) & ~0x1;
        out->width = inW;
        out->height = static_cast<int32_t>(scaledOutH);
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutH));
    } else {
        uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH;
        if (scaledOutW > inW) {
            ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutW = scaledOutW & ~0x1; // make it a multiple of 2

        out->left = ((inW - scaledOutW) / 2) & ~0x1;
        out->top = 0;
        out->width = static_cast<int32_t>(scaledOutW);
        out->height = inH;
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutW));
    }

    return 0;
}

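// Crop the input frame to the output aspect ratio, then scale it to outSz
// using a cached intermediate YU12 buffer; the scale step is skipped entirely
// when the cropped size already matches the output.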
int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked(
        sp<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
    Size inSz = {in->mWidth, in->mHeight};

    int ret;
    if (inSz == outSz) {
        ret = in->getLayout(out);
        if (ret != 0) {
            ALOGE("%s: failed to get input image layout", __FUNCTION__);
            return ret;
        }
        return ret;
    }

    // Cropping to output aspect ratio
    IMapper::Rect inputCrop;
    ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop);
    if (ret != 0) {
        ALOGE("%s: failed to compute crop rect for output size %dx%d",
                __FUNCTION__, outSz.width, outSz.height);
        return ret;
    }

    YCbCrLayout croppedLayout;
    ret = in->getCroppedLayout(inputCrop, &croppedLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input image %dx%d to output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        return ret;
    }

    if ((mCroppingType == VERTICAL && inSz.width == outSz.width) ||
            (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) {
        // No scale is needed
        *out = croppedLayout;
        return 0;
    }

    auto it = mScaledYu12Frames.find(outSz);
    sp<AllocatedFrame> scaledYu12Buf;
    if (it != mScaledYu12Frames.end()) {
        scaledYu12Buf = it->second;
    } else {
        it = mIntermediateBuffers.find(outSz);
        if (it == mIntermediateBuffers.end()) {
            ALOGE("%s: failed to find intermediate buffer size %dx%d",
                    __FUNCTION__, outSz.width, outSz.height);
            return -1;
        }
        scaledYu12Buf = it->second;
    }
    // Scale
    YCbCrLayout outLayout;
    ret = scaledYu12Buf->getLayout(&outLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }

    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(croppedLayout.y),
            croppedLayout.yStride,
            static_cast<uint8_t*>(croppedLayout.cb),
            croppedLayout.cStride,
            static_cast<uint8_t*>(croppedLayout.cr),
            croppedLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outLayout.y),
            outLayout.yStride,
            static_cast<uint8_t*>(outLayout.cb),
            outLayout.cStride,
            static_cast<uint8_t*>(outLayout.cr),
            outLayout.cStride,
            outSz.width,
            outSz.height,
            // TODO: b/72261744 see if we can use better filter without losing too much perf
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d",
                __FUNCTION__, inputCrop.width, inputCrop.height,
                outSz.width, outSz.height, ret);
        return ret;
    }

    *out = outLayout;
    mScaledYu12Frames.insert({outSz, scaledYu12Buf});
    return 0;
}


int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked(
        sp<AllocatedFrame>& in, const Size &outSz, YCbCrLayout* out) {
    Size inSz  {in->mWidth, in->mHeight};

    if ((outSz.width * outSz.height) >
        (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) {
        ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)",
              __FUNCTION__, outSz.width, outSz.height,
              mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight);
        return -1;
    }

    int ret;

    /* This will crop-and-zoom the input YUV frame to the thumbnail size
     * based on the following logic:
     *  1) Square pixels come in, square pixels come out, therefore a single
     *  scale factor is computed to either make the input bigger or smaller
     *  depending on whether we are upscaling or downscaling
     *  2) That single scale factor would either make the height too tall or
     *  the width too wide, so we need to crop the input either horizontally
     *  or vertically, but not both
     */

    /* Convert the input and output dimensions into floats for ease of math */
    float fWin = static_cast<float>(inSz.width);
    float fHin = static_cast<float>(inSz.height);
    float fWout = static_cast<float>(outSz.width);
    float fHout = static_cast<float>(outSz.height);

    /* Compute the one scale factor from (1) above, it will be the smaller of
     * the two possibilities. */
    float scaleFactor = std::min( fHin / fHout, fWin / fWout );

    /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can
     * simply multiply the output by our scaleFactor to get the cropped input
     * size. Note that at least one of {fWcrop, fHcrop} is going to wind up
     * being {fWin, fHin} respectively because fHout or fWout cancels out the
     * scaleFactor calculation above.
     *
     * Specifically:
     *  if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off
     * input, in which case
     *    scaleFactor = fHin / fHout
     *    fWcrop = fHin / fHout * fWout
     *    fHcrop = fHin
     *
     * Note that fWcrop <= fWin (because ( fHin / fHout ) * fWout < fWin, which
     * is just the inequality above with both sides multiplied by fWout).
     *
     * On the other hand, if ( fWin / fWout ) < ( fHin / fHout ) we crop the top
     * and the bottom off of the input, and
     *    scaleFactor = fWin / fWout
     *    fWcrop = fWin
     *    fHcrop = fWin / fWout * fHout
     */
    float fWcrop = scaleFactor * fWout;
    float fHcrop = scaleFactor * fHout;

    /* Convert to integer and truncate to an even number */
    Size cropSz = { 2*static_cast<uint32_t>(fWcrop/2.0f),
                    2*static_cast<uint32_t>(fHcrop/2.0f) };

    /* Convert to a centered rectangle with even top/left */
    IMapper::Rect inputCrop {
        2*static_cast<int32_t>((inSz.width - cropSz.width)/4),
        2*static_cast<int32_t>((inSz.height - cropSz.height)/4),
        static_cast<int32_t>(cropSz.width),
        static_cast<int32_t>(cropSz.height) };
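
    /* Worked example (illustration only): a 640x480 input and a 320x180
     * thumbnail give scaleFactor = min(480/180, 640/320) = 2.0, so
     * cropSz = 640x360 and inputCrop = +0,+60 640x360. */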

    if ((inputCrop.top < 0) ||
        (inputCrop.top >= static_cast<int32_t>(inSz.height)) ||
        (inputCrop.left < 0) ||
        (inputCrop.left >= static_cast<int32_t>(inSz.width)) ||
        (inputCrop.width <= 0) ||
        (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) ||
        (inputCrop.height <= 0) ||
        (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height)))
    {
        ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__);
        ALOGE("%s: input layout %dx%d for output size %dx%d",
             __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
             __FUNCTION__, inputCrop.left, inputCrop.top,
             inputCrop.width, inputCrop.height);
        return -1;
    }

    YCbCrLayout inputLayout;
    ret = in->getCroppedLayout(inputCrop, &inputLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input layout %dx%d for output size %dx%d",
             __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
             __FUNCTION__, inputCrop.left, inputCrop.top,
             inputCrop.width, inputCrop.height);
        return ret;
    }
    ALOGV("%s: crop input layout %dx%d for output size %dx%d",
          __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
    ALOGV("%s: computed input crop +%d,+%d %dx%d",
          __FUNCTION__, inputCrop.left, inputCrop.top,
          inputCrop.width, inputCrop.height);


    // Scale
    YCbCrLayout outFullLayout;

    ret = mYu12ThumbFrame->getLayout(&outFullLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }


    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(inputLayout.y),
            inputLayout.yStride,
            static_cast<uint8_t*>(inputLayout.cb),
            inputLayout.cStride,
            static_cast<uint8_t*>(inputLayout.cr),
            inputLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outFullLayout.y),
            outFullLayout.yStride,
            static_cast<uint8_t*>(outFullLayout.cb),
            outFullLayout.cStride,
            static_cast<uint8_t*>(outFullLayout.cr),
            outFullLayout.cStride,
            outSz.width,
            outSz.height,
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d",
                __FUNCTION__, inputCrop.width, inputCrop.height,
                outSz.width, outSz.height, ret);
        return ret;
    }

    *out = outFullLayout;
    return 0;
}

int ExternalCameraDeviceSession::OutputThread::formatConvertLocked(
        const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) {
    int ret = 0;
    switch (format) {
        case V4L2_PIX_FMT_NV21:
            ret = libyuv::I420ToNV21(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cr),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: convert to NV21 buffer failed! ret %d",
                            __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_NV12:
            ret = libyuv::I420ToNV12(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cb),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: convert to NV12 buffer failed! ret %d",
                            __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_YVU420: // YV12
        case V4L2_PIX_FMT_YUV420: // YU12
            // TODO: maybe we can speed this up by somehow avoiding this copy?
            ret = libyuv::I420Copy(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cb),
                    out.cStride,
                    static_cast<uint8_t*>(out.cr),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d",
                            __FUNCTION__, ret);
                return ret;
            }
            break;
        case FLEX_YUV_GENERIC:
            // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow.
            ALOGE("%s: unsupported flexible yuv layout"
                    " y %p cb %p cr %p y_str %d c_str %d c_step %d",
                    __FUNCTION__, out.y, out.cb, out.cr,
                    out.yStride, out.cStride, out.chromaStep);
            return -1;
        default:
            ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format);
            return -1;
    }
    return 0;
}

1359 int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12(
1360         const Size & inSz, const YCbCrLayout& inLayout,
1361         int jpegQuality, const void *app1Buffer, size_t app1Size,
1362         void *out, const size_t maxOutSize, size_t &actualCodeSize)
1363 {
1364     /* libjpeg is a C library so we use C-style "inheritance" by
1365      * putting libjpeg's jpeg_destination_mgr first in our custom
1366      * struct. This allows us to cast jpeg_destination_mgr* to
1367      * CustomJpegDestMgr* when we get it passed to us in a callback */
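    /* For illustration: because mgr is the first member, a callback that
     * receives a j_compress_ptr can recover our state with
     *     auto& dmgr = *reinterpret_cast<CustomJpegDestMgr*>(cinfo->dest);
     * which is exactly what the destination manager lambdas below do. */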
1368     struct CustomJpegDestMgr {
1369         struct jpeg_destination_mgr mgr;
1370         JOCTET *mBuffer;
1371         size_t mBufferSize;
1372         size_t mEncodedSize;
1373         bool mSuccess;
1374     } dmgr;
1375 
1376     jpeg_compress_struct cinfo = {};
1377     jpeg_error_mgr jerr;
1378 
1379     /* Initialize error handling with standard callbacks, but
1380      * then override output_message (to print to ALOG) and
1381      * error_exit to set a flag and print a message instead
1382      * of killing the whole process */
1383     cinfo.err = jpeg_std_error(&jerr);
1384 
1385     cinfo.err->output_message = [](j_common_ptr cinfo) {
1386         char buffer[JMSG_LENGTH_MAX];
1387 
1388         /* Create the message */
1389         (*cinfo->err->format_message)(cinfo, buffer);
1390         ALOGE("libjpeg error: %s", buffer);
1391     };
1392     cinfo.err->error_exit = [](j_common_ptr cinfo) {
1393         (*cinfo->err->output_message)(cinfo);
1394         if(cinfo->client_data) {
1395             auto & dmgr =
1396                 *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data);
1397             dmgr.mSuccess = false;
1398         }
1399     };
1400     /* Now that we initialized some callbacks, let's create our compressor */
1401     jpeg_create_compress(&cinfo);
1402 
1403     /* Initialize our destination manager */
1404     dmgr.mBuffer = static_cast<JOCTET*>(out);
1405     dmgr.mBufferSize = maxOutSize;
1406     dmgr.mEncodedSize = 0;
1407     dmgr.mSuccess = true;
1408     cinfo.client_data = static_cast<void*>(&dmgr);
1409 
1410     /* These lambdas become C-style function pointers and as per C++11 spec
1411      * may not capture anything */
1412     dmgr.mgr.init_destination = [](j_compress_ptr cinfo) {
1413         auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
1414         dmgr.mgr.next_output_byte = dmgr.mBuffer;
1415         dmgr.mgr.free_in_buffer = dmgr.mBufferSize;
1416         ALOGV("%s:%d jpeg start: %p [%zu]",
1417               __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
1418     };
1419 
1420     dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
1421         ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
1422         return 0;
1423     };
1424 
1425     dmgr.mgr.term_destination = [](j_compress_ptr cinfo) {
1426         auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
1427         dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer;
1428         ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
1429     };
1430     cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr);
1431 
1432     /* We are going to be using JPEG in raw data mode, so we are passing
1433      * straight subsampled planar YCbCr and it will not touch our pixel
1434      * data or do any scaling or anything */
1435     cinfo.image_width = inSz.width;
1436     cinfo.image_height = inSz.height;
1437     cinfo.input_components = 3;
1438     cinfo.in_color_space = JCS_YCbCr;
1439 
1440     /* Initialize defaults and then override what we want */
1441     jpeg_set_defaults(&cinfo);
1442 
1443     jpeg_set_quality(&cinfo, jpegQuality, 1);
1444     jpeg_set_colorspace(&cinfo, JCS_YCbCr);
1445     cinfo.raw_data_in = 1;
1446     cinfo.dct_method = JDCT_IFAST;
1447 
1448     /* Configure sampling factors. The sampling factor is JPEG subsampling 420
1449      * because the source format is YUV420. Note that libjpeg sampling factors
1450      * are... a little weird. Sampling of Y=2,U=1,V=1 means there is one U and
1451      * one V sample for each 2x2 block of Y samples */
1452     cinfo.comp_info[0].h_samp_factor = 2;
1453     cinfo.comp_info[0].v_samp_factor = 2;
1454     cinfo.comp_info[1].h_samp_factor = 1;
1455     cinfo.comp_info[1].v_samp_factor = 1;
1456     cinfo.comp_info[2].h_samp_factor = 1;
1457     cinfo.comp_info[2].v_samp_factor = 1;
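    /* For illustration: with these factors a 640x480 Y plane pairs with
     * 320x240 Cb and Cr planes, i.e. one chroma sample per 2x2 block of
     * luma samples (4:2:0 subsampling). */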
1458 
1459     /* Let's not hardcode YUV420 in 6 places... 5 was enough */
1460     int maxVSampFactor = std::max( {
1461         cinfo.comp_info[0].v_samp_factor,
1462         cinfo.comp_info[1].v_samp_factor,
1463         cinfo.comp_info[2].v_samp_factor
1464     });
1465     int cVSubSampling = cinfo.comp_info[0].v_samp_factor /
1466                         cinfo.comp_info[1].v_samp_factor;
1467 
1468     /* Start the compressor */
1469     jpeg_start_compress(&cinfo, TRUE);
1470 
1471     /* Compute our macroblock height, so we can pad our input to be vertically
1472      * macroblock aligned.
1473      * TODO: Does it need to be horizontally MCU aligned too? */
1474 
1475     size_t mcuV = DCTSIZE*maxVSampFactor;
1476     size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV);
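    /* Worked example: with maxVSampFactor = 2, mcuV = 8 * 2 = 16, so an input
     * height of 1080 is padded up to 1088 (68 MCU rows); heights that are
     * already a multiple of 16 are left unchanged. */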
1477 
1478     /* libjpeg uses arrays of row pointers, which makes it really easy to pad
1479      * data vertically (unfortunately doesn't help horizontally) */
1480     std::vector<JSAMPROW> yLines (paddedHeight);
1481     std::vector<JSAMPROW> cbLines(paddedHeight/cVSubSampling);
1482     std::vector<JSAMPROW> crLines(paddedHeight/cVSubSampling);
1483 
1484     uint8_t *py = static_cast<uint8_t*>(inLayout.y);
1485     uint8_t *pcr = static_cast<uint8_t*>(inLayout.cr);
1486     uint8_t *pcb = static_cast<uint8_t*>(inLayout.cb);
1487 
1488     for(uint32_t i = 0; i < paddedHeight; i++)
1489     {
1490         /* Once we are in the padding territory we still point to the last line
1491          * effectively replicating it several times ~ CLAMP_TO_EDGE */
1492         int li = std::min(i, inSz.height - 1);
1493         yLines[i]  = static_cast<JSAMPROW>(py + li * inLayout.yStride);
1494         if(i < paddedHeight / cVSubSampling)
1495         {
1496             crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride);
1497             cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride);
1498         }
1499     }
1500 
1501     /* If APP1 data was passed in, use it */
1502     if(app1Buffer && app1Size)
1503     {
1504         jpeg_write_marker(&cinfo, JPEG_APP0 + 1,
1505              static_cast<const JOCTET*>(app1Buffer), app1Size);
1506     }
1507 
1508     /* While we still have padded height left to go, keep giving it one
1509      * macroblock at a time. */
1510     while (cinfo.next_scanline < cinfo.image_height) {
1511         const uint32_t batchSize = DCTSIZE * maxVSampFactor;
1512         const uint32_t nl = cinfo.next_scanline;
1513         JSAMPARRAY planes[3]{ &yLines[nl],
1514                               &cbLines[nl/cVSubSampling],
1515                               &crLines[nl/cVSubSampling] };
1516 
1517         uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize);
1518 
1519         if (done != batchSize) {
1520             ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
1521               __FUNCTION__, done, batchSize, cinfo.next_scanline,
1522               cinfo.image_height);
1523             return -1;
1524         }
1525     }
1526 
1527     /* This will flush everything */
1528     jpeg_finish_compress(&cinfo);
1529 
1530     /* Grab the actual code size and set it */
1531     actualCodeSize = dmgr.mEncodedSize;
1532 
1533     return 0;
1534 }
1535 
1536 /*
1537  * TODO: There needs to be a mechanism to discover allocated buffer size
1538  * in the HAL.
1539  *
1540  * This is very fragile because it is duplicated computation from:
1541  * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
1542  *
1543  */
1544 
1545 /* This assumes mSupportedFormats have all been declared as supporting
1546  * HAL_PIXEL_FORMAT_BLOB to the framework */
1547 Size ExternalCameraDeviceSession::getMaxJpegResolution() const {
1548     Size ret { 0, 0 };
1549     for(auto & fmt : mSupportedFormats) {
1550         if(fmt.width * fmt.height > ret.width * ret.height) {
1551             ret = Size { fmt.width, fmt.height };
1552         }
1553     }
1554     return ret;
1555 }
1556 
1557 Size ExternalCameraDeviceSession::getMaxThumbResolution() const {
1558     Size thumbSize { 0, 0 };
1559     camera_metadata_ro_entry entry =
1560         mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
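    // ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES is a flat int32 list of
    // (width, height) pairs, e.g. {0,0, 176,144, 240,180} (illustrative
    // values only), hence the stride-2 scan for the largest area below.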
1561     for(uint32_t i = 0; i < entry.count; i += 2) {
1562         Size sz { static_cast<uint32_t>(entry.data.i32[i]),
1563                   static_cast<uint32_t>(entry.data.i32[i+1]) };
1564         if(sz.width * sz.height > thumbSize.width * thumbSize.height) {
1565             thumbSize = sz;
1566         }
1567     }
1568 
1569     if (thumbSize.width * thumbSize.height == 0) {
1570         ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__);
1571     }
1572 
1573     return thumbSize;
1574 }
1575 
1576 
1577 ssize_t ExternalCameraDeviceSession::getJpegBufferSize(
1578         uint32_t width, uint32_t height) const {
1579     // Constant from camera3.h
1580     const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob);
1581     // Get max jpeg size (area-wise).
1582     if (mMaxJpegResolution.width == 0) {
1583         ALOGE("%s: no supported JPEG stream resolution available",
1584                 __FUNCTION__);
1585         return BAD_VALUE;
1586     }
1587 
1588     // Get max jpeg buffer size
1589     ssize_t maxJpegBufferSize = 0;
1590     camera_metadata_ro_entry jpegBufMaxSize =
1591             mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE);
1592     if (jpegBufMaxSize.count == 0) {
1593         ALOGE("%s: Can't find maximum JPEG size in static metadata!",
1594               __FUNCTION__);
1595         return BAD_VALUE;
1596     }
1597     maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
1598 
1599     if (maxJpegBufferSize <= kMinJpegBufferSize) {
1600         ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)",
1601               __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize);
1602         return BAD_VALUE;
1603     }
1604 
1605     // Calculate final jpeg buffer size for the given resolution.
1606     float scaleFactor = ((float) (width * height)) /
1607             (mMaxJpegResolution.width * mMaxJpegResolution.height);
1608     ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
1609             kMinJpegBufferSize;
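    // Worked example with hypothetical numbers: if ANDROID_JPEG_MAX_SIZE is
    // 3MB for the max resolution and a request covers a quarter of that area
    // (scaleFactor = 0.25), this yields roughly 256KB + 0.25 * (3MB - 256KB)
    // ~ 960KB, clamped below to never exceed maxJpegBufferSize.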
1610     if (jpegBufferSize > maxJpegBufferSize) {
1611         jpegBufferSize = maxJpegBufferSize;
1612     }
1613 
1614     return jpegBufferSize;
1615 }
1616 
1617 int ExternalCameraDeviceSession::OutputThread::createJpegLocked(
1618         HalStreamBuffer &halBuf,
1619         const std::shared_ptr<HalRequest>& req)
1620 {
1621     ATRACE_CALL();
1622     int ret;
1623     auto lfail = [&](auto... args) {
1624         ALOGE(args...);
1625 
1626         return 1;
1627     };
1628     auto parent = mParent.promote();
1629     if (parent == nullptr) {
1630        ALOGE("%s: session has been disconnected!", __FUNCTION__);
1631        return 1;
1632     }
1633 
1634     ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u",
1635           __FUNCTION__, halBuf.streamId, static_cast<uint64_t>(halBuf.bufferId),
1636           halBuf.width, halBuf.height);
1637     ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p",
1638           __FUNCTION__, halBuf.format, static_cast<uint64_t>(halBuf.usage),
1639           halBuf.bufPtr);
1640     ALOGV("%s: YU12 buffer %d x %d",
1641           __FUNCTION__,
1642           mYu12Frame->mWidth, mYu12Frame->mHeight);
1643 
1644     int jpegQuality, thumbQuality;
1645     Size thumbSize;
1646     bool outputThumbnail = true;
1647 
1648     if (req->setting.exists(ANDROID_JPEG_QUALITY)) {
1649         camera_metadata_entry entry =
1650             req->setting.find(ANDROID_JPEG_QUALITY);
1651         jpegQuality = entry.data.u8[0];
1652     } else {
1653         return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__);
1654     }
1655 
1656     if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
1657         camera_metadata_entry entry =
1658             req->setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
1659         thumbQuality = entry.data.u8[0];
1660     } else {
1661         return lfail(
1662             "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set",
1663             __FUNCTION__);
1664     }
1665 
1666     if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
1667         camera_metadata_entry entry =
1668             req->setting.find(ANDROID_JPEG_THUMBNAIL_SIZE);
1669         thumbSize = Size { static_cast<uint32_t>(entry.data.i32[0]),
1670                            static_cast<uint32_t>(entry.data.i32[1])
1671         };
1672         if (thumbSize.width == 0 && thumbSize.height == 0) {
1673             outputThumbnail = false;
1674         }
1675     } else {
1676         return lfail(
1677             "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__);
1678     }
1679 
1680     /* Cropped and scaled YU12 buffer for main and thumbnail */
1681     YCbCrLayout yu12Main;
1682     Size jpegSize { halBuf.width, halBuf.height };
1683 
1684     /* Compute temporary buffer sizes accounting for the following:
1685      * thumbnail can't exceed APP1 size of 64K
1686      * main image needs to hold APP1, headers, and at most a poorly
1687      * compressed image */
1688     const ssize_t maxThumbCodeSize = 64 * 1024;
1689     const ssize_t maxJpegCodeSize = mBlobBufferSize == 0 ?
1690             parent->getJpegBufferSize(jpegSize.width, jpegSize.height) :
1691             mBlobBufferSize;
1692 
1693     /* Check that getJpegBufferSize did not return an error */
1694     if (maxJpegCodeSize < 0) {
1695         return lfail(
1696             "%s: getJpegBufferSize returned %zd",__FUNCTION__,maxJpegCodeSize);
1697     }
1698 
1699 
1700     /* Hold actual thumbnail and main image code sizes */
1701     size_t thumbCodeSize = 0, jpegCodeSize = 0;
1702     /* Temporary thumbnail code buffer */
1703     std::vector<uint8_t> thumbCode(outputThumbnail ? maxThumbCodeSize : 0);
1704 
1705     YCbCrLayout yu12Thumb;
1706     if (outputThumbnail) {
1707         ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb);
1708 
1709         if (ret != 0) {
1710             return lfail(
1711                 "%s: crop and scale thumbnail failed!", __FUNCTION__);
1712         }
1713     }
1714 
1715     /* Scale and crop main jpeg */
1716     ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main);
1717 
1718     if (ret != 0) {
1719         return lfail("%s: crop and scale main failed!", __FUNCTION__);
1720     }
1721 
1722     /* Encode the thumbnail image */
1723     if (outputThumbnail) {
1724         ret = encodeJpegYU12(thumbSize, yu12Thumb,
1725                 thumbQuality, 0, 0,
1726                 &thumbCode[0], maxThumbCodeSize, thumbCodeSize);
1727 
1728         if (ret != 0) {
1729             return lfail("%s: thumbnail encodeJpegYU12 failed with %d",__FUNCTION__, ret);
1730         }
1731     }
1732 
1733     /* Combine camera characteristics with request settings to form EXIF
1734      * metadata */
1735     common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics);
1736     meta.append(req->setting);
1737 
1738     /* Generate EXIF object */
1739     std::unique_ptr<ExifUtils> utils(ExifUtils::create());
1740     /* Make sure it's initialized */
1741     utils->initialize();
1742 
1743     utils->setFromMetadata(meta, jpegSize.width, jpegSize.height);
1744     utils->setMake(mExifMake);
1745     utils->setModel(mExifModel);
1746 
1747     ret = utils->generateApp1(outputThumbnail ? &thumbCode[0] : 0, thumbCodeSize);
1748 
1749     if (!ret) {
1750         return lfail("%s: generating APP1 failed", __FUNCTION__);
1751     }
1752 
1753     /* Get internal buffer */
1754     size_t exifDataSize = utils->getApp1Length();
1755     const uint8_t* exifData = utils->getApp1Buffer();
1756 
1757     /* Lock the HAL jpeg code buffer */
1758     void *bufPtr = sHandleImporter.lock(
1759             *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize);
1760 
1761     if (!bufPtr) {
1762         return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize);
1763     }
1764 
1765     /* Encode the main jpeg image */
1766     ret = encodeJpegYU12(jpegSize, yu12Main,
1767             jpegQuality, exifData, exifDataSize,
1768             bufPtr, maxJpegCodeSize, jpegCodeSize);
1769 
1770     /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
1771      * and do this when returning buffer to parent */
1772     CameraBlob blob { CameraBlobId::JPEG, static_cast<uint32_t>(jpegCodeSize) };
1773     void *blobDst =
1774         reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) +
1775                            maxJpegCodeSize -
1776                            sizeof(CameraBlob));
1777     memcpy(blobDst, &blob, sizeof(CameraBlob));
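    /* The locked buffer now holds (sizes not to scale):
     *     [ JPEG code (jpegCodeSize) | unused | CameraBlob footer ]
     * The footer sits at the very end of the maxJpegCodeSize region so the
     * framework can locate it and read back the actual JPEG size. */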
1778 
1779     /* Unlock the HAL jpeg code buffer */
1780     int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
1781     if (relFence >= 0) {
1782         halBuf.acquireFence = relFence;
1783     }
1784 
1785     /* Check if our JPEG actually succeeded */
1786     if (ret != 0) {
1787         return lfail(
1788             "%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret);
1789     }
1790 
1791     ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu",
1792           __FUNCTION__, ret, jpegQuality, maxJpegCodeSize);
1793 
1794     return 0;
1795 }
1796 
1797 bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
1798     std::shared_ptr<HalRequest> req;
1799     auto parent = mParent.promote();
1800     if (parent == nullptr) {
1801        ALOGE("%s: session has been disconnected!", __FUNCTION__);
1802        return false;
1803     }
1804 
1805     // TODO: maybe we need to set up a sensor thread to dq/enq v4l frames
1806     //       regularly to prevent the v4l buffer queue from filling with stale buffers
1807     //       when the app doesn't program a preview request
1808     waitForNextRequest(&req);
1809     if (req == nullptr) {
1810         // No new request, wait again
1811         return true;
1812     }
1813 
1814     auto onDeviceError = [&](auto... args) {
1815         ALOGE(args...);
1816         parent->notifyError(
1817                 req->frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE);
1818         signalRequestDone();
1819         return false;
1820     };
1821 
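    // V4L2 FourCCs pack four ASCII characters little-endian; e.g.
    // V4L2_PIX_FMT_MJPEG is 'MJPG' = 0x47504A4D, which the shifts below
    // unpack for logging.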
1822     if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
1823         return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
1824                 req->frameIn->mFourcc & 0xFF,
1825                 (req->frameIn->mFourcc >> 8) & 0xFF,
1826                 (req->frameIn->mFourcc >> 16) & 0xFF,
1827                 (req->frameIn->mFourcc >> 24) & 0xFF);
1828     }
1829 
1830     int res = requestBufferStart(req->buffers);
1831     if (res != 0) {
1832         ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
1833         return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
1834     }
1835 
1836     std::unique_lock<std::mutex> lk(mBufferLock);
1837     // Convert input V4L2 frame to YU12 of the same size
1838     // TODO: see if we can save some computation by converting to YV12 here
1839     uint8_t* inData;
1840     size_t inDataSize;
1841     if (req->frameIn->map(&inData, &inDataSize) != 0) {
1842         lk.unlock();
1843         return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
1844     }
1845 
1846     // TODO: in some special case maybe we can decode jpg directly to gralloc output?
1847     if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
1848         ATRACE_BEGIN("MJPGtoI420");
1849         int res = libyuv::MJPGToI420(
1850             inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
1851             static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
1852             static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride,
1853             mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight);
1854         ATRACE_END();
1855 
1856         if (res != 0) {
1857             // For some webcams, the first few V4L2 frames might be malformed...
1858             ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
1859             lk.unlock();
1860             Status st = parent->processCaptureRequestError(req);
1861             if (st != Status::OK) {
1862                 return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
1863             }
1864             signalRequestDone();
1865             return true;
1866         }
1867     }
1868 
1869     ATRACE_BEGIN("Wait for BufferRequest done");
1870     res = waitForBufferRequestDone(&req->buffers);
1871     ATRACE_END();
1872 
1873     if (res != 0) {
1874         ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
1875         lk.unlock();
1876         return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__);
1877     }
1878 
1879     ALOGV("%s processing new request", __FUNCTION__);
1880     const int kSyncWaitTimeoutMs = 500;
1881     for (auto& halBuf : req->buffers) {
1882         if (*(halBuf.bufPtr) == nullptr) {
1883             ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
1884             halBuf.fenceTimeout = true;
1885         } else if (halBuf.acquireFence >= 0) {
1886             int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
1887             if (ret) {
1888                 halBuf.fenceTimeout = true;
1889             } else {
1890                 ::close(halBuf.acquireFence);
1891                 halBuf.acquireFence = -1;
1892             }
1893         }
1894 
1895         if (halBuf.fenceTimeout) {
1896             continue;
1897         }
1898 
1899         // Lock the output buffer (lockYCbCr for YUV formats) and fill it per stream format
1900         switch (halBuf.format) {
1901             case PixelFormat::BLOB: {
1902                 int ret = createJpegLocked(halBuf, req);
1903 
1904                 if(ret != 0) {
1905                     lk.unlock();
1906                     return onDeviceError("%s: createJpegLocked failed with %d",
1907                           __FUNCTION__, ret);
1908                 }
1909             } break;
1910             case PixelFormat::Y16: {
1911                 void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);
1912 
1913                 std::memcpy(outLayout, inData, inDataSize);
1914 
1915                 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
1916                 if (relFence >= 0) {
1917                     halBuf.acquireFence = relFence;
1918                 }
1919             } break;
1920             case PixelFormat::YCBCR_420_888:
1921             case PixelFormat::YV12: {
1922                 IMapper::Rect outRect {0, 0,
1923                         static_cast<int32_t>(halBuf.width),
1924                         static_cast<int32_t>(halBuf.height)};
1925                 YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
1926                         *(halBuf.bufPtr), halBuf.usage, outRect);
1927                 ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d",
1928                         __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr,
1929                         outLayout.yStride, outLayout.cStride, outLayout.chromaStep);
1930 
1931                 // Convert to output buffer size/format
1932                 uint32_t outputFourcc = getFourCcFromLayout(outLayout);
1933                 ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__,
1934                         outputFourcc & 0xFF,
1935                         (outputFourcc >> 8) & 0xFF,
1936                         (outputFourcc >> 16) & 0xFF,
1937                         (outputFourcc >> 24) & 0xFF);
1938 
1939                 YCbCrLayout cropAndScaled;
1940                 ATRACE_BEGIN("cropAndScaleLocked");
1941                 int ret = cropAndScaleLocked(
1942                         mYu12Frame,
1943                         Size { halBuf.width, halBuf.height },
1944                         &cropAndScaled);
1945                 ATRACE_END();
1946                 if (ret != 0) {
1947                     lk.unlock();
1948                     return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
1949                 }
1950 
1951                 Size sz {halBuf.width, halBuf.height};
1952                 ATRACE_BEGIN("formatConvertLocked");
1953                 ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc);
1954                 ATRACE_END();
1955                 if (ret != 0) {
1956                     lk.unlock();
1957                     return onDeviceError("%s: format conversion failed!", __FUNCTION__);
1958                 }
1959                 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
1960                 if (relFence >= 0) {
1961                     halBuf.acquireFence = relFence;
1962                 }
1963             } break;
1964             default:
1965                 lk.unlock();
1966                 return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
1967         }
1968     } // for each buffer
1969     mScaledYu12Frames.clear();
1970 
1971     // Don't hold the lock while calling back to parent
1972     lk.unlock();
1973     Status st = parent->processCaptureResult(req);
1974     if (st != Status::OK) {
1975         return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
1976     }
1977     signalRequestDone();
1978     return true;
1979 }
1980 
1981 Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
1982         const Size& v4lSize, const Size& thumbSize,
1983         const hidl_vec<Stream>& streams,
1984         uint32_t blobBufferSize) {
1985     std::lock_guard<std::mutex> lk(mBufferLock);
1986     if (mScaledYu12Frames.size() != 0) {
1987         ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)",
1988                 __FUNCTION__, mScaledYu12Frames.size());
1989         return Status::INTERNAL_ERROR;
1990     }
1991 
1992     // Allocating intermediate YU12 frame
1993     if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
1994             mYu12Frame->mHeight != v4lSize.height) {
1995         mYu12Frame.clear();
1996         mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height);
1997         int ret = mYu12Frame->allocate(&mYu12FrameLayout);
1998         if (ret != 0) {
1999             ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
2000             return Status::INTERNAL_ERROR;
2001         }
2002     }
2003 
2004     // Allocating intermediate YU12 thumbnail frame
2005     if (mYu12ThumbFrame == nullptr ||
2006         mYu12ThumbFrame->mWidth != thumbSize.width ||
2007         mYu12ThumbFrame->mHeight != thumbSize.height) {
2008         mYu12ThumbFrame.clear();
2009         mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height);
2010         int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
2011         if (ret != 0) {
2012             ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
2013             return Status::INTERNAL_ERROR;
2014         }
2015     }
2016 
2017     // Allocating scaled buffers
2018     for (const auto& stream : streams) {
2019         Size sz = {stream.width, stream.height};
2020         if (sz == v4lSize) {
2021             continue; // No intermediate buffer needed when the stream matches v4lSize
2022         }
2023         if (mIntermediateBuffers.count(sz) == 0) {
2024             // Create new intermediate buffer
2025             sp<AllocatedFrame> buf = new AllocatedFrame(stream.width, stream.height);
2026             int ret = buf->allocate();
2027             if (ret != 0) {
2028                 ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!",
2029                             __FUNCTION__, stream.width, stream.height);
2030                 return Status::INTERNAL_ERROR;
2031             }
2032             mIntermediateBuffers[sz] = buf;
2033         }
2034     }
2035 
2036     // Remove unconfigured buffers
2037     auto it = mIntermediateBuffers.begin();
2038     while (it != mIntermediateBuffers.end()) {
2039         bool configured = false;
2040         auto sz = it->first;
2041         for (const auto& stream : streams) {
2042             if (stream.width == sz.width && stream.height == sz.height) {
2043                 configured = true;
2044                 break;
2045             }
2046         }
2047         if (configured) {
2048             it++;
2049         } else {
2050             it = mIntermediateBuffers.erase(it);
2051         }
2052     }
2053 
2054     mBlobBufferSize = blobBufferSize;
2055     return Status::OK;
2056 }
2057 
2058 Status ExternalCameraDeviceSession::OutputThread::submitRequest(
2059         const std::shared_ptr<HalRequest>& req) {
2060     std::unique_lock<std::mutex> lk(mRequestListLock);
2061     mRequestList.push_back(req);
2062     lk.unlock();
2063     mRequestCond.notify_one();
2064     return Status::OK;
2065 }
2066 
2067 void ExternalCameraDeviceSession::OutputThread::flush() {
2068     ATRACE_CALL();
2069     auto parent = mParent.promote();
2070     if (parent == nullptr) {
2071        ALOGE("%s: session has been disconnected!", __FUNCTION__);
2072        return;
2073     }
2074 
2075     std::unique_lock<std::mutex> lk(mRequestListLock);
2076     std::list<std::shared_ptr<HalRequest>> reqs = std::move(mRequestList);
2077     mRequestList.clear();
2078     if (mProcessingRequest) {
2079         std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
2080         auto st = mRequestDoneCond.wait_for(lk, timeout);
2081         if (st == std::cv_status::timeout) {
2082             ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
2083         }
2084     }
2085 
2086     ALOGV("%s: flushing inflight requests", __FUNCTION__);
2087     lk.unlock();
2088     for (const auto& req : reqs) {
2089         parent->processCaptureRequestError(req);
2090     }
2091 }
2092 
2093 void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(
2094         std::shared_ptr<HalRequest>* out) {
2095     ATRACE_CALL();
2096     if (out == nullptr) {
2097         ALOGE("%s: out is null", __FUNCTION__);
2098         return;
2099     }
2100 
2101     std::unique_lock<std::mutex> lk(mRequestListLock);
2102     int waitTimes = 0;
2103     while (mRequestList.empty()) {
2104         if (exitPending()) {
2105             return;
2106         }
2107         std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
2108         auto st = mRequestCond.wait_for(lk, timeout);
2109         if (st == std::cv_status::timeout) {
2110             waitTimes++;
2111             if (waitTimes == kReqWaitTimesMax) {
2112                 // no new request, return
2113                 return;
2114             }
2115         }
2116     }
2117     *out = mRequestList.front();
2118     mRequestList.pop_front();
2119     mProcessingRequest = true;
2120     mProcessingFrameNumer = (*out)->frameNumber;
2121 }
2122 
2123 void ExternalCameraDeviceSession::OutputThread::signalRequestDone() {
2124     std::unique_lock<std::mutex> lk(mRequestListLock);
2125     mProcessingRequest = false;
2126     mProcessingFrameNumer = 0;
2127     lk.unlock();
2128     mRequestDoneCond.notify_one();
2129 }
2130 
2131 void ExternalCameraDeviceSession::OutputThread::dump(int fd) {
2132     std::lock_guard<std::mutex> lk(mRequestListLock);
2133     if (mProcessingRequest) {
2134         dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumer);
2135     } else {
2136         dprintf(fd, "OutputThread not processing any frames\n");
2137     }
2138     dprintf(fd, "OutputThread request list contains frames: ");
2139     for (const auto& req : mRequestList) {
2140         dprintf(fd, "%d, ", req->frameNumber);
2141     }
2142     dprintf(fd, "\n");
2143 }
2144 
2145 void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) {
2146     for (auto& pair : mCirculatingBuffers.at(id)) {
2147         sHandleImporter.freeBuffer(pair.second);
2148     }
2149     mCirculatingBuffers[id].clear();
2150     mCirculatingBuffers.erase(id);
2151 }
2152 
2153 void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec<BufferCache>& cachesToRemove) {
2154     Mutex::Autolock _l(mCbsLock);
2155     for (auto& cache : cachesToRemove) {
2156         auto cbsIt = mCirculatingBuffers.find(cache.streamId);
2157         if (cbsIt == mCirculatingBuffers.end()) {
2158             // The stream could have been removed
2159             continue;
2160         }
2161         CirculatingBuffers& cbs = cbsIt->second;
2162         auto it = cbs.find(cache.bufferId);
2163         if (it != cbs.end()) {
2164             sHandleImporter.freeBuffer(it->second);
2165             cbs.erase(it);
2166         } else {
2167             ALOGE("%s: stream %d buffer %" PRIu64 " is not cached",
2168                     __FUNCTION__, cache.streamId, cache.bufferId);
2169         }
2170     }
2171 }
2172 
2173 bool ExternalCameraDeviceSession::isSupported(const Stream& stream,
2174         const std::vector<SupportedV4L2Format>& supportedFormats,
2175         const ExternalCameraConfig& devCfg) {
2176     int32_t ds = static_cast<int32_t>(stream.dataSpace);
2177     PixelFormat fmt = stream.format;
2178     uint32_t width = stream.width;
2179     uint32_t height = stream.height;
2180     // TODO: check usage flags
2181 
2182     if (stream.streamType != StreamType::OUTPUT) {
2183         ALOGE("%s: does not support non-output stream type", __FUNCTION__);
2184         return false;
2185     }
2186 
2187     if (stream.rotation != StreamRotation::ROTATION_0) {
2188         ALOGE("%s: does not support stream rotation", __FUNCTION__);
2189         return false;
2190     }
2191 
2192     switch (fmt) {
2193         case PixelFormat::BLOB:
2194             if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
2195                 ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds);
2196                 return false;
2197             }
2198             break;
2199         case PixelFormat::IMPLEMENTATION_DEFINED:
2200         case PixelFormat::YCBCR_420_888:
2201         case PixelFormat::YV12:
2202             // TODO: check what dataspace we can support here.
2203             // intentional no-ops.
2204             break;
2205         case PixelFormat::Y16:
2206             if (!devCfg.depthEnabled) {
2207                 ALOGI("%s: depth is not enabled", __FUNCTION__);
2208                 return false;
2209             }
2210             if (!(ds & Dataspace::DEPTH)) {
2211                 ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
2212                 return false;
2213             }
2214             break;
2215         default:
2216             ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
2217             return false;
2218     }
2219 
2220     // Assume we can convert any V4L2 format to any of the supported output formats for now,
2221     // i.e., ignore v4l2Fmt.fourcc. A more subtle check might be needed if we support more
2222     // V4L2 formats in the future.
2223     for (const auto& v4l2Fmt : supportedFormats) {
2224         if (width == v4l2Fmt.width && height == v4l2Fmt.height) {
2225             return true;
2226         }
2227     }
2228     ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height);
2229     return false;
2230 }
2231 
2232 int ExternalCameraDeviceSession::v4l2StreamOffLocked() {
2233     if (!mV4l2Streaming) {
2234         return OK;
2235     }
2236 
2237     {
2238         std::lock_guard<std::mutex> lk(mV4l2BufferLock);
2239         if (mNumDequeuedV4l2Buffers != 0)  {
2240             ALOGE("%s: there are %zu inflight V4L buffers",
2241                 __FUNCTION__, mNumDequeuedV4l2Buffers);
2242             return -1;
2243         }
2244     }
2245     mV4L2BufferCount = 0;
2246 
2247     // VIDIOC_STREAMOFF
2248     v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2249     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) {
2250         ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno));
2251         return -errno;
2252     }
2253 
2254     // VIDIOC_REQBUFS: clear buffers
2255     v4l2_requestbuffers req_buffers{};
2256     req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2257     req_buffers.memory = V4L2_MEMORY_MMAP;
2258     req_buffers.count = 0;
2259     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
2260         ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno));
2261         return -errno;
2262     }
2263 
2264     mV4l2Streaming = false;
2265     return OK;
2266 }
2267 
2268 int ExternalCameraDeviceSession::setV4l2FpsLocked(double fps) {
2269     // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
2270     v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
2271     // The following line checks that the driver knows about framerate get/set.
2272     int ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm));
2273     if (ret != 0) {
2274         if (errno == EINVAL) {
2275             ALOGW("%s: device does not support VIDIOC_G_PARM", __FUNCTION__);
2276         }
2277         return -errno;
2278     }
2279     // Now check if the device is able to accept a capture framerate set.
2280     if (!(streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
2281         ALOGW("%s: device does not support V4L2_CAP_TIMEPERFRAME", __FUNCTION__);
2282         return -EINVAL;
2283     }
2284 
2285     // fps is float, approximate by a fraction.
2286     const int kFrameRatePrecision = 10000;
2287     streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision;
2288     streamparm.parm.capture.timeperframe.denominator =
2289         (fps * kFrameRatePrecision);
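    // e.g. fps = 29.97 yields timeperframe = 10000/299700, i.e. 1/29.97
    // seconds per frame.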
2290 
2291     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
2292         ALOGE("%s: failed to set framerate to %f: %s", __FUNCTION__, fps, strerror(errno));
2293         return -1;
2294     }
2295 
2296     double retFps = streamparm.parm.capture.timeperframe.denominator /
2297             static_cast<double>(streamparm.parm.capture.timeperframe.numerator);
2298     if (std::fabs(fps - retFps) > 1.0) {
2299         ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps);
2300         return -1;
2301     }
2302     mV4l2StreamingFps = fps;
2303     return 0;
2304 }
2305 
2306 int ExternalCameraDeviceSession::configureV4l2StreamLocked(
2307         const SupportedV4L2Format& v4l2Fmt, double requestFps) {
2308     ATRACE_CALL();
2309     int ret = v4l2StreamOffLocked();
2310     if (ret != OK) {
2311         ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
2312         return ret;
2313     }
2314 
2315     // VIDIOC_S_FMT w/h/fmt
2316     v4l2_format fmt;
2317     fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2318     fmt.fmt.pix.width = v4l2Fmt.width;
2319     fmt.fmt.pix.height = v4l2Fmt.height;
2320     fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;
2321     ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
2322     if (ret < 0) {
2323         int numAttempt = 0;
2324         while (ret < 0) {
2325             ALOGW("%s: VIDIOC_S_FMT failed, wait 33ms and try again", __FUNCTION__);
2326             usleep(IOCTL_RETRY_SLEEP_US); // sleep and try again
2327             ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
2328             if (numAttempt == MAX_RETRY) {
2329                 break;
2330             }
2331             numAttempt++;
2332         }
2333         if (ret < 0) {
2334             ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
2335             return -errno;
2336         }
2337     }
2338 
2339     if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
2340             v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
2341         ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
2342                 v4l2Fmt.fourcc & 0xFF,
2343                 (v4l2Fmt.fourcc >> 8) & 0xFF,
2344                 (v4l2Fmt.fourcc >> 16) & 0xFF,
2345                 (v4l2Fmt.fourcc >> 24) & 0xFF,
2346                 v4l2Fmt.width, v4l2Fmt.height,
2347                 fmt.fmt.pix.pixelformat & 0xFF,
2348                 (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
2349                 (fmt.fmt.pix.pixelformat >> 16) & 0xFF,
2350                 (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
2351                 fmt.fmt.pix.width, fmt.fmt.pix.height);
2352         return -EINVAL;
2353     }
2354     uint32_t bufferSize = fmt.fmt.pix.sizeimage;
2355     ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);
2356     uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height;
2357     if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) {
2358         ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__,
2359                 bufferSize, expectedMaxBufferSize);
2360         return -EINVAL;
2361     }
2362     mMaxV4L2BufferSize = bufferSize;
2363 
2364     const double kDefaultFps = 30.0;
2365     double fps = 1000.0;
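    // 1000.0 is an "unset" sentinel: if no supported frame rate reaches
    // kDefaultFps, the loop below leaves fps untouched and we fall back to
    // the fastest advertised rate (maxFps).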
2366     if (requestFps != 0.0) {
2367         fps = requestFps;
2368     } else {
2369         double maxFps = -1.0;
2370         // Try to pick the slowest fps that is at least 30
2371         for (const auto& fr : v4l2Fmt.frameRates) {
2372             double f = fr.getDouble();
2373             if (maxFps < f) {
2374                 maxFps = f;
2375             }
2376             if (f >= kDefaultFps && f < fps) {
2377                 fps = f;
2378             }
2379         }
2380         if (fps == 1000.0) {
2381             fps = maxFps;
2382         }
2383     }
2384 
2385     int fpsRet = setV4l2FpsLocked(fps);
2386     if (fpsRet != 0 && fpsRet != -EINVAL) {
2387         ALOGE("%s: set fps failed: %s", __FUNCTION__, strerror(fpsRet));
2388         return fpsRet;
2389     }
2390 
2391     uint32_t v4lBufferCount = (fps >= kDefaultFps) ?
2392             mCfg.numVideoBuffers : mCfg.numStillBuffers;
2393     // VIDIOC_REQBUFS: create buffers
2394     v4l2_requestbuffers req_buffers{};
2395     req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2396     req_buffers.memory = V4L2_MEMORY_MMAP;
2397     req_buffers.count = v4lBufferCount;
2398     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
2399         ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno));
2400         return -errno;
2401     }
2402 
2403     // The driver may legitimately return more buffers than requested; fewer is an error
2404     if (req_buffers.count < v4lBufferCount) {
2405         ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead",
2406                 __FUNCTION__, v4lBufferCount, req_buffers.count);
2407         return NO_MEMORY;
2408     }
2409 
2410     // VIDIOC_QUERYBUF:  get buffer offset in the V4L2 fd
2411     // VIDIOC_QBUF: send buffer to driver
2412     mV4L2BufferCount = req_buffers.count;
2413     for (uint32_t i = 0; i < req_buffers.count; i++) {
2414         v4l2_buffer buffer = {
2415                 .index = i, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, .memory = V4L2_MEMORY_MMAP};
2416 
2417         if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
2418             ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i,  strerror(errno));
2419             return -errno;
2420         }
2421 
2422         if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
2423             ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i,  strerror(errno));
2424             return -errno;
2425         }
2426     }
2427 
2428     // VIDIOC_STREAMON: start streaming
2429     v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2430     ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type));
2431     if (ret < 0) {
2432         int numAttempt = 0;
2433         while (ret < 0) {
2434             ALOGW("%s: VIDIOC_STREAMON failed, wait 33ms and try again", __FUNCTION__);
2435             usleep(IOCTL_RETRY_SLEEP_US); // sleep 33ms and try again
2436             ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type));
2437             if (numAttempt == MAX_RETRY) {
2438                 break;
2439             }
2440             numAttempt++;
2441         }
2442         if (ret < 0) {
2443             ALOGE("%s: VIDIOC_STREAMON ioctl failed: %s", __FUNCTION__, strerror(errno));
2444             return -errno;
2445         }
2446     }
2447 
2448     // Swallow first few frames after streamOn to account for bad frames from some devices
2449     for (int i = 0; i < kBadFramesAfterStreamOn; i++) {
2450         v4l2_buffer buffer{};
2451         buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2452         buffer.memory = V4L2_MEMORY_MMAP;
2453         if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
2454             ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
2455             return -errno;
2456         }
2457 
2458         if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
2459             ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno));
2460             return -errno;
2461         }
2462     }
2463 
2464     ALOGI("%s: start V4L2 streaming %dx%d@%ffps",
2465                 __FUNCTION__, v4l2Fmt.width, v4l2Fmt.height, fps);
2466     mV4l2StreamingFmt = v4l2Fmt;
2467     mV4l2Streaming = true;
2468     return OK;
2469 }
2470 
2471 sp<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs) {
2472     ATRACE_CALL();
2473     sp<V4L2Frame> ret = nullptr;
2474 
2475     if (shutterTs == nullptr) {
2476         ALOGE("%s: shutterTs must not be null!", __FUNCTION__);
2477         return ret;
2478     }
2479 
2480     {
2481         std::unique_lock<std::mutex> lk(mV4l2BufferLock);
2482         if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) {
2483             int waitRet = waitForV4L2BufferReturnLocked(lk);
2484             if (waitRet != 0) {
2485                 return ret;
2486             }
2487         }
2488     }
2489 
2490     ATRACE_BEGIN("VIDIOC_DQBUF");
2491     v4l2_buffer buffer{};
2492     buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2493     buffer.memory = V4L2_MEMORY_MMAP;
2494     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
2495         ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
2496         return ret;
2497     }
2498     ATRACE_END();
2499 
2500     if (buffer.index >= mV4L2BufferCount) {
2501         ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index);
2502         return ret;
2503     }
2504 
2505     if (buffer.flags & V4L2_BUF_FLAG_ERROR) {
2506         ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags);
2507         // TODO: try to dequeue again
2508     }
2509 
2510     if (buffer.bytesused > mMaxV4L2BufferSize) {
2511         ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused,
2512                 mMaxV4L2BufferSize);
2513         return ret;
2514     }
2515 
2516     if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) {
2517         // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but
2518         // even V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capturing a timestamp now
2519         *shutterTs = static_cast<nsecs_t>(buffer.timestamp.tv_sec)*1000000000LL +
2520                 buffer.timestamp.tv_usec * 1000LL;
2521     } else {
2522         *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC);
2523     }
2524 
2525     {
2526         std::lock_guard<std::mutex> lk(mV4l2BufferLock);
2527         mNumDequeuedV4l2Buffers++;
2528     }
2529     return new V4L2Frame(
2530             mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc,
2531             buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset);
2532 }
2533 
2534 void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) {
2535     ATRACE_CALL();
2536     frame->unmap();
2537     ATRACE_BEGIN("VIDIOC_QBUF");
2538     v4l2_buffer buffer{};
2539     buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2540     buffer.memory = V4L2_MEMORY_MMAP;
2541     buffer.index = frame->mBufferIndex;
2542     if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
2543         ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__,
2544                 frame->mBufferIndex, strerror(errno));
2545         return;
2546     }
2547     ATRACE_END();
2548 
2549     {
2550         std::lock_guard<std::mutex> lk(mV4l2BufferLock);
2551         mNumDequeuedV4l2Buffers--;
2552     }
2553     mV4L2BufferReturned.notify_one();
2554 }
2555 
2556 Status ExternalCameraDeviceSession::isStreamCombinationSupported(
2557         const V3_2::StreamConfiguration& config,
2558         const std::vector<SupportedV4L2Format>& supportedFormats,
2559         const ExternalCameraConfig& devCfg) {
2560     if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
2561         ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
2562         return Status::ILLEGAL_ARGUMENT;
2563     }
2564 
2565     if (config.streams.size() == 0) {
2566         ALOGE("%s: cannot configure zero streams", __FUNCTION__);
2567         return Status::ILLEGAL_ARGUMENT;
2568     }
2569 
2570     int numProcessedStream = 0;
2571     int numStallStream = 0;
2572     for (const auto& stream : config.streams) {
2573         // Check if the format/width/height combo is supported
2574         if (!isSupported(stream, supportedFormats, devCfg)) {
2575             return Status::ILLEGAL_ARGUMENT;
2576         }
2577         if (stream.format == PixelFormat::BLOB) {
2578             numStallStream++;
2579         } else {
2580             numProcessedStream++;
2581         }
2582     }
2583 
2584     if (numProcessedStream > kMaxProcessedStream) {
2585         ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__,
2586                 kMaxProcessedStream, numProcessedStream);
2587         return Status::ILLEGAL_ARGUMENT;
2588     }
2589 
2590     if (numStallStream > kMaxStallStream) {
2591         ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__,
2592                 kMaxStallStream, numStallStream);
2593         return Status::ILLEGAL_ARGUMENT;
2594     }
2595 
2596     return Status::OK;
2597 }
2598 
2599 Status ExternalCameraDeviceSession::configureStreams(
2600         const V3_2::StreamConfiguration& config,
2601         V3_3::HalStreamConfiguration* out,
2602         uint32_t blobBufferSize) {
2603     ATRACE_CALL();
2604 
2605     Status status = isStreamCombinationSupported(config, mSupportedFormats, mCfg);
2606     if (status != Status::OK) {
2607         return status;
2608     }
2609 
2610     status = initStatus();
2611     if (status != Status::OK) {
2612         return status;
2613     }
2614 
2615 
2616     {
2617         std::lock_guard<std::mutex> lk(mInflightFramesLock);
2618         if (!mInflightFrames.empty()) {
2619             ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!",
2620                     __FUNCTION__, mInflightFrames.size());
2621             return Status::INTERNAL_ERROR;
2622         }
2623     }
2624 
2625     Mutex::Autolock _l(mLock);
2626     {
2627         Mutex::Autolock _l(mCbsLock);
2628         // Add new streams
2629         for (const auto& stream : config.streams) {
2630             if (mStreamMap.count(stream.id) == 0) {
2631                 mStreamMap[stream.id] = stream;
2632                 mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{});
2633             }
2634         }
2635 
2636         // Cleanup removed streams
2637         for(auto it = mStreamMap.begin(); it != mStreamMap.end();) {
2638             int id = it->first;
2639             bool found = false;
2640             for (const auto& stream : config.streams) {
2641                 if (id == stream.id) {
2642                     found = true;
2643                     break;
2644                 }
2645             }
2646             if (!found) {
2647                 // Unmap all buffers of deleted stream
2648                 cleanupBuffersLocked(id);
2649                 it = mStreamMap.erase(it);
2650             } else {
2651                 ++it;
2652             }
2653         }
2654     }
2655 
2656     // Now select a V4L2 format to produce all output streams
2657     float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio;
2658     uint32_t maxDim = 0;
2659     for (const auto& stream : config.streams) {
2660         float aspectRatio = ASPECT_RATIO(stream);
2661         ALOGI("%s: request stream %dx%d", __FUNCTION__, stream.width, stream.height);
2662         if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
2663                 (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
2664             desiredAr = aspectRatio;
2665         }
2666 
2667         // The dimension that's not cropped
2668         uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height;
2669         if (dim > maxDim) {
2670             maxDim = dim;
2671         }
2672     }
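    // For illustration: with VERTICAL cropping, 1920x1080 and 640x480 streams
    // yield desiredAr = 4:3 (the smaller ratio) and maxDim = 1920, so a 4:3
    // format at least 1920 wide (e.g. a hypothetical 1920x1440 mode) would be
    // selected below.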
    // Find the smallest format that matches the desired aspect ratio and is wide/high enough
    SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0};
    for (const auto& fmt : mSupportedFormats) {
        uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
        if (dim >= maxDim) {
            float aspectRatio = ASPECT_RATIO(fmt);
            if (isAspectRatioClose(aspectRatio, desiredAr)) {
                v4l2Fmt = fmt;
                // Since mSupportedFormats is sorted by width then height, the first
                // matching fmt will be the smallest one with a matching aspect ratio
                break;
            }
        }
    }
    if (v4l2Fmt.width == 0) {
        // No exact aspect ratio match; fall back to a format whose aspect ratio
        // errs toward the cropping direction, so the requested streams can still
        // be cropped out of it
        for (const auto& fmt : mSupportedFormats) {
            uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
            if (dim >= maxDim) {
                float aspectRatio = ASPECT_RATIO(fmt);
                if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
                        (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
                    v4l2Fmt = fmt;
                    break;
                }
            }
        }
    }

    if (v4l2Fmt.width == 0) {
        ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)",
                __FUNCTION__, (mCroppingType == VERTICAL) ? "width" : "height",
                maxDim, desiredAr);
        return Status::ILLEGAL_ARGUMENT;
    }

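    // V4L2 fourcc codes pack four ASCII characters little-endian (e.g. 'MJPG'),
    // which is why the failure log below prints the code byte by byte.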
    if (configureV4l2StreamLocked(v4l2Fmt) != 0) {
        ALOGE("V4L configuration failed! format: %c%c%c%c, w %d, h %d",
            v4l2Fmt.fourcc & 0xFF,
            (v4l2Fmt.fourcc >> 8) & 0xFF,
            (v4l2Fmt.fourcc >> 16) & 0xFF,
            (v4l2Fmt.fourcc >> 24) & 0xFF,
            v4l2Fmt.width, v4l2Fmt.height);
        return Status::INTERNAL_ERROR;
    }

    Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
    Size thumbSize { 0, 0 };
    camera_metadata_ro_entry entry =
        mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    for (uint32_t i = 0; i < entry.count; i += 2) {
        Size sz { static_cast<uint32_t>(entry.data.i32[i]),
                  static_cast<uint32_t>(entry.data.i32[i+1]) };
        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }
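    // Note: thumbSize found above is only used for the sanity check below; the
    // intermediate thumbnail buffer is sized from mMaxThumbResolution instead.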

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    status = mOutputThread->allocateIntermediateBuffers(v4lSize,
                mMaxThumbResolution, config.streams, blobBufferSize);
    if (status != Status::OK) {
        ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__);
        return status;
    }

    out->streams.resize(config.streams.size());
    for (size_t i = 0; i < config.streams.size(); i++) {
        out->streams[i].overrideDataSpace = config.streams[i].dataSpace;
        out->streams[i].v3_2.id = config.streams[i].id;
        // TODO: double check whether we should add these CAMERA usage flags
        mStreamMap[config.streams[i].id].usage =
                out->streams[i].v3_2.producerUsage = config.streams[i].usage |
                BufferUsage::CPU_WRITE_OFTEN |
                BufferUsage::CAMERA_OUTPUT;
        out->streams[i].v3_2.consumerUsage = 0;
        out->streams[i].v3_2.maxBuffers = mV4L2BufferCount;

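        // Resolve the format the framework will see: fixed formats pass through
        // unchanged, while IMPLEMENTATION_DEFINED must be mapped to a concrete
        // YUV format since this HAL has no opaque private format to offer.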
        switch (config.streams[i].format) {
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: // Used by SurfaceTexture
            case PixelFormat::Y16:
                // No override
                out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                break;
            case PixelFormat::IMPLEMENTATION_DEFINED:
                // Override based on whether the consumer is a video encoder
                out->streams[i].v3_2.overrideFormat =
                        (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ?
                        PixelFormat::YCBCR_420_888 : PixelFormat::YV12;
                // Save the overridden format in mStreamMap
                mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat;
                break;
            default:
                ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format);
                return Status::ILLEGAL_ARGUMENT;
        }
    }

    mFirstRequest = true;
    return Status::OK;
}

bool ExternalCameraDeviceSession::isClosed() {
    Mutex::Autolock _l(mLock);
    return mClosed;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(md, tag, data, size)               \
do {                                              \
    if ((md).update((tag), (data), (size))) {     \
        ALOGE("Update " #tag " failed!");         \
        return BAD_VALUE;                         \
    }                                             \
} while (0)
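// The UPDATE macro above is wrapped in do { ... } while (0) so it expands to a
// single statement and composes safely with if/else; it early-returns BAD_VALUE
// from the enclosing function when a metadata update fails.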

status_t ExternalCameraDeviceSession::initDefaultRequests() {
    ::android::hardware::camera::common::V1_0::helper::CameraMetadata md;

    const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);

    const int32_t exposureCompensation = 0;
    UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1);

    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1);

    const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

    const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1);

    const int32_t thumbnailSize[] = {240, 180};
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    const uint8_t jpegQuality = 90;
    UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);

    const int32_t jpegOrientation = 0;
    UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1);

    const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1);

    const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    UPDATE(md, ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);

    const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1);

    const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1);

    bool support30Fps = false;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t framerateInt = static_cast<int32_t>(fr.getDouble());
            if (maxFps < framerateInt) {
                maxFps = framerateInt;
            }
            if (framerateInt == 30) {
                support30Fps = true;
                break;
            }
        }
        if (support30Fps) {
            break;
        }
    }
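    // Prefer 30fps when available, otherwise the highest advertised rate. The
    // AE target range below spans [rate / 2, rate], e.g. [15, 30] for a
    // 30fps-capable camera, presumably to leave AE room for longer exposures
    // in low light.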
    int32_t defaultFramerate = support30Fps ? 30 : maxFps;
    int32_t defaultFpsRange[] = {defaultFramerate / 2, defaultFramerate};
    UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange));

    const uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1);

    const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1);

    auto requestTemplates = hidl_enum_range<RequestTemplate>();
    for (RequestTemplate type : requestTemplates) {
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md;
        uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        switch (type) {
            case RequestTemplate::PREVIEW:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case RequestTemplate::STILL_CAPTURE:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case RequestTemplate::VIDEO_RECORD:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            case RequestTemplate::VIDEO_SNAPSHOT:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
                break;
            default:
                ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type);
                continue;
        }
        UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);

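        // release() hands over ownership of the raw metadata buffer and
        // setToExternal() only wraps it without copying, but the assignment into
        // mDefaultRequests deep-copies the hidl_vec, so the raw buffer can be
        // freed immediately afterwards.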
        camera_metadata_t* rawMd = mdCopy.release();
        CameraMetadata hidlMd;
        hidlMd.setToExternal(
                (uint8_t*) rawMd, get_camera_metadata_size(rawMd));
        mDefaultRequests[type] = hidlMd;
        free_camera_metadata(rawMd);
    }

    return OK;
}

status_t ExternalCameraDeviceSession::fillCaptureResult(
        common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) {
    // android.control
    // For a USB camera we don't know the real AE state, so report CONVERGED to
    // indicate the frame should be good to use; apps then don't have to wait
    // for AE to settle.
    const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1);

    const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

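    // The AF trigger is sticky: a START in any request latches mAfTrigger until
    // a later CANCEL clears it, so every result in between keeps reporting a
    // locked focus state.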
    bool afTrigger = false;
    {
        std::lock_guard<std::mutex> lk(mAfTriggerLock);
        afTrigger = mAfTrigger;
        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
            if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
                mAfTrigger = afTrigger = true;
            } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
                mAfTrigger = afTrigger = false;
            }
        }
    }

    // The USB camera handles AF on its own and we have no control over it, so
    // simply fake the AF metadata based on the trigger state tracked above.
    uint8_t afState;
    if (afTrigger) {
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
    } else {
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    }
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    // Set AWB state to converged to indicate the frame should be good to use.
    const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1);

    const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    camera_metadata_ro_entry active_array_size =
        mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    if (active_array_size.count == 0) {
        ALOGE("%s: cannot find active array size!", __FUNCTION__);
        return -EINVAL;
    }

    const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
    UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1);

    // A depth of N means a pipeline latency of N frame intervals; report the
    // maximum of 4 here.
    const uint8_t requestPipelineMaxDepth = 4;
    UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1);

    // android.scaler
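    // A typical USB camera offers no zoom/crop control, so the crop region in
    // every result is pinned to the full active array.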
    const int32_t crop_region[] = {
          active_array_size.data.i32[0], active_array_size.data.i32[1],
          active_array_size.data.i32[2], active_array_size.data.i32[3],
    };
    UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region));

    // android.sensor
    UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

    // android.statistics
    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);

    const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
    UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);

    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

}  // namespace implementation
}  // namespace V3_4
}  // namespace device
}  // namespace camera
}  // namespace hardware
}  // namespace android