#include <cinttypes>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>
#include <vector>

#include "fake-pipeline2/Base.h"
#include "fake-pipeline2/Scene.h"
#include "QemuClient.h"
#include <gralloc_cb_bp.h>

#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>

#include <linux/videodev2.h>
#include <utils/Timers.h>
using namespace android;

const nsecs_t kExposureTimeRange[2] =
    {1000L, 300000000L}; // 1 us - 0.3 sec
const nsecs_t kFrameDurationRange[2] =
    {33331760L, 300000000L}; // ~1/30 s - 0.3 sec

const nsecs_t kMinVerticalBlank = 10000L;

const uint8_t kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t kMaxRawValue = 4000;
const uint32_t kBlackLevel  = 1000;

// Sensor sensitivity
const float kSaturationVoltage      = 0.520f;
const uint32_t kSaturationElectrons = 2000;
const float kVoltsPerLuxSecond      = 0.100f;

const float kElectronsPerLuxSecond =
        kSaturationElectrons / kSaturationVoltage
        * kVoltsPerLuxSecond;

const float kBaseGainFactor = (float)kMaxRawValue /
            kSaturationElectrons;

const float kReadNoiseStddevBeforeGain = 1.177f; // in electrons
const float kReadNoiseStddevAfterGain  = 2.100f; // in digital counts
const float kReadNoiseVarBeforeGain =
            kReadNoiseStddevBeforeGain *
            kReadNoiseStddevBeforeGain;
const float kReadNoiseVarAfterGain =
            kReadNoiseStddevAfterGain *
            kReadNoiseStddevAfterGain;

const int32_t kSensitivityRange[2] = {100, 1600};
const uint32_t kDefaultSensitivity = 100;

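// Render one RGBA8888 frame from the synthetic scene. Electron counts from
// the scene are scaled to 8-bit channel values using 6-bit fixed-point math
// (scale64x), and the sWidth x sHeight scene is resampled to width x height
// by nearest-neighbor selection along each row.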
void captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height,
                 Scene &scene, uint32_t sWidth, uint32_t sHeight) {
    float totalGain = gain / 100.0f * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // 10-bit fixed-point scale factors from output to scene coordinates
    unsigned int DivH = (float)sHeight / height * (0x1 << 10);
    unsigned int DivW = (float)sWidth / width * (0x1 << 10);

    for (unsigned int outY = 0; outY < height; outY++) {
        unsigned int y = outY * DivH >> 10;
        uint8_t *px = img + outY * width * 4;
        scene.setReadoutPixel(0, y);
        unsigned int lastX = 0;
        const uint32_t *pixel = scene.getPixelElectrons();
        for (unsigned int outX = 0; outX < width; outX++) {
            uint32_t rCount, gCount, bCount;
            unsigned int x = outX * DivW >> 10;
            // Skip over scene pixels that fall between two output pixels
            if (x > lastX) {
                for (unsigned int k = 0; k < (x - lastX); k++) {
                    pixel = scene.getPixelElectrons();
                }
            }
            lastX = x;
            // TODO: Perfect demosaicing is a cheat
            rCount = pixel[Scene::R]  * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B]  * scale64x;

            *px++ = rCount < 255 * 64 ? rCount / 64 : 255;
            *px++ = gCount < 255 * 64 ? gCount / 64 : 255;
            *px++ = bCount < 255 * 64 ? bCount / 64 : 255;
            *px++ = 255;
        }
        // TODO: Handle this better
        //simulatedTime += mRowReadoutTime;
    }
}

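// Render one YU12 (planar YUV420) frame from the synthetic scene: a
// full-resolution Y plane followed by quarter-resolution U and V planes,
// with chroma sampled at even rows and columns. RGB is converted to YUV
// using fixed-point JFIF coefficients.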
void captureYU12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height,
                 Scene &scene, uint32_t sWidth, uint32_t sHeight) {
    float totalGain = gain / 100.0f * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    float rgbToY[]  = {19.0f, 37.0f, 7.0f, 0.0f};
    float rgbToCb[] = {-10.0f, -21.0f, 32.0f, 524288.0f};
    float rgbToCr[] = {32.0f, -26.0f, -5.0f, 524288.0f};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies
    const double invScaleOutSq = 1.0 / scaleOutSq;
    for (int i = 0; i < 4; ++i) {
        rgbToY[i]  *= invScaleOutSq;
        rgbToCb[i] *= invScaleOutSq;
        rgbToCr[i] *= invScaleOutSq;
    }

    // 10-bit fixed-point scale factors from output to scene coordinates
    unsigned int DivH = (float)sHeight / height * (0x1 << 10);
    unsigned int DivW = (float)sWidth / width * (0x1 << 10);
    for (unsigned int outY = 0; outY < height; outY++) {
        unsigned int y = outY * DivH >> 10;
        uint8_t *pxY = img + outY * width;
        // U and V planes follow the Y plane, each at quarter resolution
        uint8_t *pxU = img + height * width + (outY / 2) * (width / 2);
        uint8_t *pxV = pxU + (height / 2) * (width / 2);
        scene.setReadoutPixel(0, y);
        unsigned int lastX = 0;
        const uint32_t *pixel = scene.getPixelElectrons();
        for (unsigned int outX = 0; outX < width; outX++) {
            int32_t rCount, gCount, bCount;
            unsigned int x = outX * DivW >> 10;
            if (x > lastX) {
                for (unsigned int k = 0; k < (x - lastX); k++) {
                    pixel = scene.getPixelElectrons();
                }
            }
            lastX = x;
            rCount = pixel[Scene::R]  * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B]  * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;
            *pxY++ = (rgbToY[0] * rCount + rgbToY[1] * gCount + rgbToY[2] * bCount);
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxV++ = (rgbToCr[0] * rCount + rgbToCr[1] * gCount + rgbToCr[2] * bCount + rgbToCr[3]);
                *pxU++ = (rgbToCb[0] * rCount + rgbToCb[1] * gCount + rgbToCb[2] * bCount + rgbToCb[3]);
            }
        }
    }
}

// Test the capture speed of the qemu camera (webcam or virtual scene),
// or of the fake software pipeline.
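//
// Usage: <binary> <RGB|NV21|YV12|YU12> <width> <height> <repeat>
//        <web|vir|fak> [sceneWidth sceneHeight | v1]
//   web: capture from the host webcam (/dev/video0)
//   vir: capture from the emulator's virtual scene
//   fak: render locally with the fake pipeline's Scene (needs scene size)
//   v1:  for web/vir, capture into a gralloc buffer via its mmapped offset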
int main(int argc, char *argv[]) {
    using ::android::GraphicBufferAllocator;
    using ::android::GraphicBufferMapper;

    uint32_t pixFmt;
    int uiFmt;
    bool v1 = false;
    bool fake = false;
    std::vector<nsecs_t> report;
    uint32_t sceneWidth = 0;
    uint32_t sceneHeight = 0;

    if (argc < 6) {
        printf("usage: %s <RGB|NV21|YV12|YU12> <width> <height> <repeat> "
               "<web|vir|fak> [sceneWidth sceneHeight | v1]\n", argv[0]);
        return -1;
    }

    if (!strncmp(argv[1], "RGB", 3)) {
        pixFmt = V4L2_PIX_FMT_RGB32;
        uiFmt = HAL_PIXEL_FORMAT_RGBA_8888;
    } else if (!strncmp(argv[1], "NV21", 4)) {
        pixFmt = V4L2_PIX_FMT_NV21;
        uiFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
    } else if (!strncmp(argv[1], "YV12", 4)) {
        pixFmt = V4L2_PIX_FMT_YVU420;
        uiFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
    } else if (!strncmp(argv[1], "YU12", 4)) {
        pixFmt = V4L2_PIX_FMT_YUV420;
        uiFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
    } else {
        printf("format error, use RGB, NV21, YV12 or YU12\n");
        return -1;
    }
    uint32_t width = atoi(argv[2]);
    uint32_t height = atoi(argv[3]);
    uint32_t repeated = atoi(argv[4]);
    std::string deviceName;
    if (!strncmp(argv[5], "web", 3)) {
        deviceName = "name=/dev/video0";
    } else if (!strncmp(argv[5], "vir", 3)) {
        deviceName = "name=virtualscene";
    } else if (!strncmp(argv[5], "fak", 3)) {
        if (argc < 8) {
            printf("fake device requires scene width and height\n");
            return -1;
        }
        fake = true;
        sceneWidth = atoi(argv[6]);
        sceneHeight = atoi(argv[7]);
    } else {
        printf("device error, use web, vir or fak\n");
        return -1;
    }

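    // Fake path: render frames locally with the software Scene; no qemu
    // pipe is involved, so this measures pure frame-generation cost.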
    if (fake) {
        std::vector<uint8_t> buf(width * height * 4);
        Scene scene(width, height, kElectronsPerLuxSecond);
        for (uint32_t i = 0; i < repeated; i++) {
            nsecs_t start = systemTime();
            if (pixFmt == V4L2_PIX_FMT_RGB32) {
                captureRGBA(buf.data(), 0, width, height, scene, sceneWidth, sceneHeight);
            } else {
                captureYU12(buf.data(), 0, width, height, scene, sceneWidth, sceneHeight);
            }
            nsecs_t end = systemTime();
            report.push_back(end - start);
        }
    } else {
        if (argc > 6 && !strncmp(argv[6], "v1", 2)) {
            v1 = true;
        }
        // Open qemu pipe
        CameraQemuClient client;
        int ret = client.connectClient(deviceName.c_str());
        if (ret != NO_ERROR) {
            printf("Failed to connect device\n");
            return -1;
        }
        ret = client.queryConnect();
        if (ret == NO_ERROR) {
            printf("Connected to device\n");
        } else {
            printf("Failed to connect device\n");
            return -1;
        }
        // Capture ASAP
        ret = client.queryStart(pixFmt, width, height);
        if (ret != NO_ERROR) {
            printf("Failed to configure device for query\n");
            return -1;
        }
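        // v1 path: capture into a gralloc buffer, passing its mmapped
        // offset so the host can write frame data directly into the
        // buffer's shared memory.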
        if (v1) {
            const uint64_t usage =
                GRALLOC_USAGE_HW_CAMERA_READ |
                GRALLOC_USAGE_HW_CAMERA_WRITE |
                GRALLOC_USAGE_HW_TEXTURE;
            uint32_t stride;

            buffer_handle_t handle;
            if (GraphicBufferAllocator::get().allocate(
                    width, height, uiFmt, 1, usage,
                    &handle, &stride,
                    0, "EmulatorCameraTest") != ::android::OK) {
                printf("GraphicBufferAllocator::allocate failed\n");
                return -1;
            }

            // Lock the buffer for camera writes; addr itself is unused
            // afterwards since the host fills the buffer via its offset.
            void* addr;
            if (uiFmt == HAL_PIXEL_FORMAT_RGBA_8888) {
                GraphicBufferMapper::get().lock(
                    handle,
                    GRALLOC_USAGE_HW_CAMERA_WRITE,
                    Rect(0, 0, width, height),
                    &addr);
            } else {
                android_ycbcr ycbcr;
                GraphicBufferMapper::get().lockYCbCr(
                    handle,
                    GRALLOC_USAGE_HW_CAMERA_WRITE,
                    Rect(0, 0, width, height),
                    &ycbcr);
                addr = ycbcr.y;
            }

            const cb_handle_t* cbHandle = cb_handle_t::from(handle);
            if (!cbHandle) {
                printf("Failed to get cb_handle_t from buffer handle\n");
                return -1;
            }

            uint64_t offset = cbHandle->getMmapedOffset();
            printf("offset is 0x%" PRIx64 "\n", offset);
            float whiteBalance[] = {1.0f, 1.0f, 1.0f};
            float exposureCompensation = 1.0f;
            for (uint32_t i = 0; i < repeated; i++) {
                nsecs_t start = systemTime();
                client.queryFrame(width, height, pixFmt, offset,
                                  whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                  exposureCompensation, nullptr);
                nsecs_t end = systemTime();
                report.push_back(end - start);
            }
            GraphicBufferMapper::get().unlock(handle);
            GraphicBufferAllocator::get().free(handle);
        } else {
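            // Default path: capture into a plain heap buffer; frame data
            // comes back over the qemu pipe.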
            size_t bufferSize;
            if (pixFmt == V4L2_PIX_FMT_RGB32) {
                bufferSize = width * height * 4;
            } else {
                // YUV420 variants use 12 bits per pixel
                bufferSize = width * height * 12 / 8;
            }
            std::vector<char> buffer(bufferSize, 0);
            float whiteBalance[] = {1.0f, 1.0f, 1.0f};
            float exposureCompensation = 1.0f;
            for (uint32_t i = 0; i < repeated; i++) {
                nsecs_t start = systemTime();
                client.queryFrame(buffer.data(), nullptr, 0, bufferSize,
                                  whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                  exposureCompensation, nullptr);
                nsecs_t end = systemTime();
                report.push_back(end - start);
            }
        }
    }
    // Report
    nsecs_t sum = 0;
    for (uint32_t i = 0; i < repeated; i++) {
        sum += report[i];
    }
    nsecs_t average = repeated ? sum / repeated : 0;
    printf("Report for reading %u frames\n", repeated);
    printf("\ttime total: %" PRId64 " ns\n", sum);
    printf("\tframe average: %" PRId64 " ns\n", average);

    return 0;
}