/* Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "arc/image_processor.h"

#include <cerrno>
#include <ctime>
#include <string>

#include <libyuv.h>
#include "arc/common.h"
#include "arc/exif_utils.h"
#include "arc/jpeg_compressor.h"

namespace arc {

using android::CameraMetadata;

/*
 * Formats have different names in different header files. Here is the mapping
 * table:
 *
 * android_pixel_format_t          videodev2.h           FOURCC in libyuv
 * -----------------------------------------------------------------------------
 * HAL_PIXEL_FORMAT_YV12          = V4L2_PIX_FMT_YVU420 = FOURCC_YV12
 * HAL_PIXEL_FORMAT_YCrCb_420_SP  = V4L2_PIX_FMT_NV21   = FOURCC_NV21
 * HAL_PIXEL_FORMAT_RGBA_8888     = V4L2_PIX_FMT_RGB32  = FOURCC_BGR4
 * HAL_PIXEL_FORMAT_YCbCr_422_I   = V4L2_PIX_FMT_YUYV   = FOURCC_YUYV
 *                                                      = FOURCC_YUY2
 *                                  V4L2_PIX_FMT_YUV420 = FOURCC_I420
 *                                                      = FOURCC_YU12
 *                                  V4L2_PIX_FMT_MJPEG  = FOURCC_MJPG
 *
 * The camera device generates FOURCC_YUYV and FOURCC_MJPG.
 * Preview needs FOURCC_ARGB format.
 * The software video encoder needs FOURCC_YU12.
 * CTS requires FOURCC_YV12 and FOURCC_NV21 for applications.
 *
 * Android stride requirement:
 * YV12 horizontal stride should be a multiple of 16 pixels. See
 * android.graphics.ImageFormat.YV12.
 * The strides of ARGB, YU12, and NV21 are always equal to the width.
 *
 * Conversion path:
 * MJPG/YUYV (from camera) -> YU12 -> ARGB (preview)
 *                                 -> NV21 (apps)
 *                                 -> YV12 (apps)
 *                                 -> YU12 (video encoder)
 */
// YV12 horizontal stride should be a multiple of 16 pixels for each plane.
// |dst_stride_uv| is the pixel stride of the u or v plane.
static int YU12ToYV12(const void* yu12, void* yv12, int width, int height,
                      int dst_stride_y, int dst_stride_uv);
static int YU12ToNV21(const void* yu12, void* nv21, int width, int height);
static bool ConvertToJpeg(const CameraMetadata& metadata,
                          const FrameBuffer& in_frame, FrameBuffer* out_frame);
static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils);

// The precision of the float-to-rational conversion for EXIF tags.
static const int kRationalPrecision = 10000;

// Default JPEG quality setting.
static const int DEFAULT_JPEG_QUALITY = 80;

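// Rounds |value| up to a multiple of 16 by adding 15 and clearing the low
// four bits; values that are already aligned are returned unchanged.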
inline static size_t Align16(size_t value) { return (value + 15) & ~15; }

size_t ImageProcessor::GetConvertedSize(int fourcc, uint32_t width,
                                        uint32_t height) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return 0;
  }

  switch (fourcc) {
    case V4L2_PIX_FMT_YVU420:  // YV12
      return Align16(width) * height + Align16(width / 2) * height;
    case V4L2_PIX_FMT_YUV420:  // YU12
    // Fall-through.
    case V4L2_PIX_FMT_NV21:  // NV21
      return width * height * 3 / 2;
    case V4L2_PIX_FMT_BGR32:
    case V4L2_PIX_FMT_RGB32:
      return width * height * 4;
    case V4L2_PIX_FMT_JPEG:
      return 0;  // For JPEG, the real size is calculated after conversion.
    default:
      LOGF(ERROR) << "Pixel format " << FormatToString(fourcc)
                  << " is unsupported.";
      return 0;
  }
}

bool ImageProcessor::SupportsConversion(uint32_t from_fourcc,
                                        uint32_t to_fourcc) {
  switch (from_fourcc) {
    case V4L2_PIX_FMT_YUYV:
      return (to_fourcc == V4L2_PIX_FMT_YUV420);
    case V4L2_PIX_FMT_YUV420:
      return (
          to_fourcc == V4L2_PIX_FMT_YUV420 ||
          to_fourcc == V4L2_PIX_FMT_YVU420 || to_fourcc == V4L2_PIX_FMT_NV21 ||
          to_fourcc == V4L2_PIX_FMT_RGB32 || to_fourcc == V4L2_PIX_FMT_BGR32 ||
          to_fourcc == V4L2_PIX_FMT_JPEG);
    case V4L2_PIX_FMT_MJPEG:
      return (to_fourcc == V4L2_PIX_FMT_YUV420);
    default:
      return false;
  }
}

int ImageProcessor::ConvertFormat(const CameraMetadata& metadata,
                                  const FrameBuffer& in_frame,
                                  FrameBuffer* out_frame) {
  if ((in_frame.GetWidth() % 2) || (in_frame.GetHeight() % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << in_frame.GetWidth()
                << " x " << in_frame.GetHeight() << ")";
    return -EINVAL;
  }

  size_t data_size = GetConvertedSize(
      out_frame->GetFourcc(), in_frame.GetWidth(), in_frame.GetHeight());

  if (out_frame->SetDataSize(data_size)) {
    LOGF(ERROR) << "Set data size failed";
    return -EINVAL;
  }

  if (in_frame.GetFourcc() == V4L2_PIX_FMT_YUYV) {
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
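        // YU12 is planar: a full-size Y plane at offset 0, a quarter-size U
        // plane at width * height, and a quarter-size V plane at
        // width * height * 5 / 4. The destination offsets below follow that
        // layout.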
        int res = libyuv::YUY2ToI420(
            in_frame.GetData(),      /* src_yuy2 */
            in_frame.GetWidth() * 2, /* src_stride_yuy2 */
            out_frame->GetData(),    /* dst_y */
            out_frame->GetWidth(),   /* dst_stride_y */
            out_frame->GetData() +
                out_frame->GetWidth() * out_frame->GetHeight(), /* dst_u */
            out_frame->GetWidth() / 2, /* dst_stride_u */
            out_frame->GetData() + out_frame->GetWidth() *
                                       out_frame->GetHeight() * 5 /
                                       4,  /* dst_v */
            out_frame->GetWidth() / 2,     /* dst_stride_v */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "YUY2ToI420() for YU12 returns " << res;
        return res ? -EINVAL : 0;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for YUYV source format.";
        return -EINVAL;
    }
  } else if (in_frame.GetFourcc() == V4L2_PIX_FMT_YUV420) {
    // V4L2_PIX_FMT_YVU420 is YV12. I420 is usually referred to as YU12
    // (V4L2_PIX_FMT_YUV420), and YV12 is similar to YU12 except that the U/V
    // planes are swapped.
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YVU420:  // YV12
      {
        int ystride = Align16(in_frame.GetWidth());
        int uvstride = Align16(in_frame.GetWidth() / 2);
        int res = YU12ToYV12(in_frame.GetData(), out_frame->GetData(),
                             in_frame.GetWidth(), in_frame.GetHeight(), ystride,
                             uvstride);
        LOGF_IF(ERROR, res) << "YU12ToYV12() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
        memcpy(out_frame->GetData(), in_frame.GetData(),
               in_frame.GetDataSize());
        return 0;
      }
      case V4L2_PIX_FMT_NV21:  // NV21
      {
        // TODO(henryhsu): Use libyuv::I420ToNV21.
        int res = YU12ToNV21(in_frame.GetData(), out_frame->GetData(),
                             in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "YU12ToNV21() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_BGR32: {
        int res = libyuv::I420ToABGR(
            in_frame.GetData(),  /* src_y */
            in_frame.GetWidth(), /* src_stride_y */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight(), /* src_u */
            in_frame.GetWidth() / 2, /* src_stride_u */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4, /* src_v */
            in_frame.GetWidth() / 2,   /* src_stride_v */
            out_frame->GetData(),      /* dst_abgr */
            out_frame->GetWidth() * 4, /* dst_stride_abgr */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "I420ToABGR() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_RGB32: {
        int res = libyuv::I420ToARGB(
            in_frame.GetData(),  /* src_y */
            in_frame.GetWidth(), /* src_stride_y */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight(), /* src_u */
            in_frame.GetWidth() / 2, /* src_stride_u */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4, /* src_v */
            in_frame.GetWidth() / 2,   /* src_stride_v */
            out_frame->GetData(),      /* dst_argb */
            out_frame->GetWidth() * 4, /* dst_stride_argb */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "I420ToARGB() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_JPEG: {
        bool res = ConvertToJpeg(metadata, in_frame, out_frame);
        LOGF_IF(ERROR, !res) << "ConvertToJpeg() returns " << res;
        return res ? 0 : -EINVAL;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for YU12 source format.";
        return -EINVAL;
    }
  } else if (in_frame.GetFourcc() == V4L2_PIX_FMT_MJPEG) {
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
        int res = libyuv::MJPGToI420(
            in_frame.GetData(),     /* sample */
            in_frame.GetDataSize(), /* sample_size */
            out_frame->GetData(),   /* dst_y */
            out_frame->GetWidth(),  /* dst_stride_y */
            out_frame->GetData() +
                out_frame->GetWidth() * out_frame->GetHeight(), /* dst_u */
            out_frame->GetWidth() / 2, /* dst_stride_u */
            out_frame->GetData() + out_frame->GetWidth() *
                                       out_frame->GetHeight() * 5 /
                                       4,  /* dst_v */
            out_frame->GetWidth() / 2,     /* dst_stride_v */
            in_frame.GetWidth(), in_frame.GetHeight(), out_frame->GetWidth(),
            out_frame->GetHeight());
        LOGF_IF(ERROR, res) << "MJPGToI420() returns " << res;
        return res ? -EINVAL : 0;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for MJPEG source format.";
        return -EINVAL;
    }
  } else {
    LOGF(ERROR) << "Convert format doesn't support source format "
                << FormatToString(in_frame.GetFourcc());
    return -EINVAL;
  }
}

int ImageProcessor::Scale(const FrameBuffer& in_frame, FrameBuffer* out_frame) {
  if (in_frame.GetFourcc() != V4L2_PIX_FMT_YUV420) {
    LOGF(ERROR) << "Pixel format " << FormatToString(in_frame.GetFourcc())
                << " is unsupported.";
    return -EINVAL;
  }

  size_t data_size = GetConvertedSize(
      in_frame.GetFourcc(), out_frame->GetWidth(), out_frame->GetHeight());

  if (out_frame->SetDataSize(data_size)) {
    LOGF(ERROR) << "Set data size failed";
    return -EINVAL;
  }
  out_frame->SetFourcc(in_frame.GetFourcc());

  VLOGF(1) << "Scale image from " << in_frame.GetWidth() << "x"
           << in_frame.GetHeight() << " to " << out_frame->GetWidth() << "x"
           << out_frame->GetHeight();

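  // Both buffers use the I420 (YU12) layout, so the U and V plane pointers
  // below are derived from the respective Y plane sizes.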
  int ret = libyuv::I420Scale(
      in_frame.GetData(), in_frame.GetWidth(),
      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight(),
      in_frame.GetWidth() / 2,
      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4,
      in_frame.GetWidth() / 2, in_frame.GetWidth(), in_frame.GetHeight(),
      out_frame->GetData(), out_frame->GetWidth(),
      out_frame->GetData() + out_frame->GetWidth() * out_frame->GetHeight(),
      out_frame->GetWidth() / 2,
      out_frame->GetData() +
          out_frame->GetWidth() * out_frame->GetHeight() * 5 / 4,
      out_frame->GetWidth() / 2, out_frame->GetWidth(), out_frame->GetHeight(),
      libyuv::FilterMode::kFilterNone);
  LOGF_IF(ERROR, ret) << "I420Scale failed: " << ret;
  return ret;
}

static int YU12ToYV12(const void* yu12, void* yv12, int width, int height,
                      int dst_stride_y, int dst_stride_uv) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return -EINVAL;
  }
  if (dst_stride_y < width || dst_stride_uv < width / 2) {
    LOGF(ERROR) << "Y plane stride (" << dst_stride_y
                << ") or U/V plane stride (" << dst_stride_uv
                << ") is invalid for width " << width;
    return -EINVAL;
  }

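  // YV12 stores the V plane before the U plane, so the destination V plane
  // starts right after Y (dst_stride_y * height) and the destination U plane
  // follows the V plane (dst_stride_uv * height / 2 further in). Pointing the
  // U/V destinations at those offsets makes I420Copy() produce YV12 output.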
  const uint8_t* src = reinterpret_cast<const uint8_t*>(yu12);
  uint8_t* dst = reinterpret_cast<uint8_t*>(yv12);
  const uint8_t* u_src = src + width * height;
  uint8_t* u_dst = dst + dst_stride_y * height + dst_stride_uv * height / 2;
  const uint8_t* v_src = src + width * height * 5 / 4;
  uint8_t* v_dst = dst + dst_stride_y * height;

  return libyuv::I420Copy(src, width, u_src, width / 2, v_src, width / 2, dst,
                          dst_stride_y, u_dst, dst_stride_uv, v_dst,
                          dst_stride_uv, width, height);
}

static int YU12ToNV21(const void* yu12, void* nv21, int width, int height) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return -EINVAL;
  }

  const uint8_t* src = reinterpret_cast<const uint8_t*>(yu12);
  uint8_t* dst = reinterpret_cast<uint8_t*>(nv21);
  const uint8_t* u_src = src + width * height;
  const uint8_t* v_src = src + width * height * 5 / 4;
  uint8_t* vu_dst = dst + width * height;

  memcpy(dst, src, width * height);

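  // NV21 keeps the Y plane as-is and interleaves the chroma samples into a
  // single VU plane (V byte first, then U), so interleave the planar U/V
  // source here.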
  for (int i = 0; i < height / 2; i++) {
    for (int j = 0; j < width / 2; j++) {
      *vu_dst++ = *v_src++;
      *vu_dst++ = *u_src++;
    }
  }
  return 0;
}

static bool ConvertToJpeg(const CameraMetadata& metadata,
                          const FrameBuffer& in_frame, FrameBuffer* out_frame) {
  ExifUtils utils;
  int jpeg_quality, thumbnail_jpeg_quality;
  camera_metadata_ro_entry entry;

  if (metadata.exists(ANDROID_JPEG_QUALITY)) {
    entry = metadata.find(ANDROID_JPEG_QUALITY);
    jpeg_quality = entry.data.u8[0];
  } else {
    LOGF(ERROR) << "Could not find jpeg quality in metadata, defaulting to "
                << DEFAULT_JPEG_QUALITY;
    jpeg_quality = DEFAULT_JPEG_QUALITY;
  }
  if (metadata.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
    entry = metadata.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
    thumbnail_jpeg_quality = entry.data.u8[0];
  } else {
    thumbnail_jpeg_quality = jpeg_quality;
  }

  if (!utils.Initialize(in_frame.GetData(), in_frame.GetWidth(),
                        in_frame.GetHeight(), thumbnail_jpeg_quality)) {
    LOGF(ERROR) << "ExifUtils initialization failed.";
    return false;
  }
  if (!SetExifTags(metadata, &utils)) {
    LOGF(ERROR) << "Setting Exif tags failed.";
    return false;
  }
  if (!utils.GenerateApp1()) {
    LOGF(ERROR) << "Generating APP1 segment failed.";
    return false;
  }
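  // The APP1 segment generated above carries the EXIF metadata; the compressor
  // embeds it into the output JPEG.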
  JpegCompressor compressor;
  if (!compressor.CompressImage(in_frame.GetData(), in_frame.GetWidth(),
                                in_frame.GetHeight(), jpeg_quality,
                                utils.GetApp1Buffer(), utils.GetApp1Length())) {
    LOGF(ERROR) << "JPEG image compression failed";
    return false;
  }
  size_t buffer_length = compressor.GetCompressedImageSize();
  if (out_frame->SetDataSize(buffer_length)) {
    return false;
  }
  memcpy(out_frame->GetData(), compressor.GetCompressedImagePtr(),
         buffer_length);
  return true;
}

static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils) {
  time_t raw_time = 0;
  struct tm time_info;
  bool time_available = time(&raw_time) != -1;
  localtime_r(&raw_time, &time_info);
  if (!utils->SetDateTime(time_info)) {
    LOGF(ERROR) << "Setting date time failed.";
    return false;
  }

  float focal_length;
  camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH);
  if (entry.count) {
    focal_length = entry.data.f[0];
  } else {
    LOGF(ERROR) << "Cannot find focal length in metadata.";
    return false;
  }
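  // EXIF stores the focal length as a rational, so express it as
  // (focal_length * kRationalPrecision) / kRationalPrecision.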
  if (!utils->SetFocalLength(
          static_cast<uint32_t>(focal_length * kRationalPrecision),
          kRationalPrecision)) {
    LOGF(ERROR) << "Setting focal length failed.";
    return false;
  }

  if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) {
    entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES);
    if (entry.count < 3) {
      LOGF(ERROR) << "GPS coordinates in metadata are incomplete.";
      return false;
    }
    if (!utils->SetGpsLatitude(entry.data.d[0])) {
      LOGF(ERROR) << "Setting gps latitude failed.";
      return false;
    }
    if (!utils->SetGpsLongitude(entry.data.d[1])) {
      LOGF(ERROR) << "Setting gps longitude failed.";
      return false;
    }
    if (!utils->SetGpsAltitude(entry.data.d[2])) {
      LOGF(ERROR) << "Setting gps altitude failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
    entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    std::string method_str(reinterpret_cast<const char*>(entry.data.u8));
    if (!utils->SetGpsProcessingMethod(method_str)) {
      LOGF(ERROR) << "Setting gps processing method failed.";
      return false;
    }
  }

  if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
    entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP);
    time_t timestamp = static_cast<time_t>(entry.data.i64[0]);
    if (gmtime_r(&timestamp, &time_info)) {
      if (!utils->SetGpsTimestamp(time_info)) {
        LOGF(ERROR) << "Setting gps timestamp failed.";
        return false;
      }
    } else {
      LOGF(ERROR) << "Time transformation failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_ORIENTATION)) {
    entry = metadata.find(ANDROID_JPEG_ORIENTATION);
    if (!utils->SetOrientation(entry.data.i32[0])) {
      LOGF(ERROR) << "Setting orientation failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
    entry = metadata.find(ANDROID_JPEG_THUMBNAIL_SIZE);
    if (entry.count < 2) {
      LOGF(ERROR) << "Thumbnail size in metadata is not complete.";
      return false;
    }
    int thumbnail_width = entry.data.i32[0];
    int thumbnail_height = entry.data.i32[1];
    if (thumbnail_width > 0 && thumbnail_height > 0) {
      if (!utils->SetThumbnailSize(static_cast<uint16_t>(thumbnail_width),
                                   static_cast<uint16_t>(thumbnail_height))) {
        LOGF(ERROR) << "Setting thumbnail size failed.";
        return false;
      }
    }
  }
  return true;
}

}  // namespace arc