/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
package android.media;

import android.annotation.Nullable;
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.Rect;
import android.hardware.HardwareBuffer;

import java.nio.ByteBuffer;
26 /**
27  * <p>A single complete image buffer to use with a media source such as a
28  * {@link MediaCodec} or a
29  * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
30  *
31  * <p>This class allows for efficient direct application access to the pixel
32  * data of the Image through one or more
33  * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
34  * {@link Plane} that describes the layout of the pixel data in that plane. Due
35  * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
36  * Images are not directly usable as UI resources.</p>
37  *
38  * <p>Since Images are often directly produced or consumed by hardware
39  * components, they are a limited resource shared across the system, and should
40  * be closed as soon as they are no longer needed.</p>
41  *
42  * <p>For example, when using the {@link ImageReader} class to read out Images
43  * from various media sources, not closing old Image objects will prevent the
44  * availability of new Images once
45  * {@link ImageReader#getMaxImages the maximum outstanding image count} is
46  * reached. When this happens, the function acquiring new Images will typically
47  * throw an {@link IllegalStateException}.</p>
48  *
49  * @see ImageReader
50  */
51 public abstract class Image implements AutoCloseable {
52     /**
53      * @hide
54      */
55     protected boolean mIsImageValid = false;
56 
57     /**
58      * @hide
59      */
60     @UnsupportedAppUsage
Image()61     protected Image() {
62     }
63 
64     /**
65      * Throw IllegalStateException if the image is invalid (already closed).
66      *
67      * @hide
68      */
throwISEIfImageIsInvalid()69     protected void throwISEIfImageIsInvalid() {
70         if (!mIsImageValid) {
71             throw new IllegalStateException("Image is already closed");
72         }
73     }
74     /**
75      * Get the format for this image. This format determines the number of
76      * ByteBuffers needed to represent the image, and the general layout of the
77      * pixel data in each ByteBuffer.
78      *
79      * <p>
80      * The format is one of the values from
81      * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
82      * formats and the planes is as follows:
83      * </p>
84      *
85      * <table>
86      * <tr>
87      *   <th>Format</th>
88      *   <th>Plane count</th>
89      *   <th>Layout details</th>
90      * </tr>
91      * <tr>
92      *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
93      *   <td>1</td>
94      *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
95      *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
96      *   </td>
97      * </tr>
98      * <tr>
99      *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
100      *   <td>3</td>
101      *   <td>A luminance plane followed by the Cb and Cr chroma planes.
102      *     The chroma planes have half the width and height of the luminance
103      *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
104      *     Each plane has its own row stride and pixel stride.</td>
105      * </tr>
106      * <tr>
107      *   <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
108      *   <td>3</td>
109      *   <td>A luminance plane followed by the Cb and Cr chroma planes.
110      *     The chroma planes have half the width and the full height of the luminance
111      *     plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
112      *     Each plane has its own row stride and pixel stride.</td>
113      * </tr>
114      * <tr>
115      *   <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
116      *   <td>3</td>
117      *   <td>A luminance plane followed by the Cb and Cr chroma planes.
118      *     The chroma planes have the same width and height as that of the luminance
119      *     plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
120      *     Each plane has its own row stride and pixel stride.</td>
121      * </tr>
122      * <tr>
123      *   <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
124      *   <td>3</td>
125      *   <td>A R (red) plane followed by the G (green) and B (blue) planes.
126      *     All planes have the same widths and heights.
127      *     Each pixel sample in each plane has 8 bits.
128      *     Each plane has its own row stride and pixel stride.</td>
129      * </tr>
130      * <tr>
131      *   <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
132      *   <td>4</td>
133      *   <td>A R (red) plane followed by the G (green), B (blue), and
134      *     A (alpha) planes. All planes have the same widths and heights.
135      *     Each pixel sample in each plane has 8 bits.
136      *     Each plane has its own row stride and pixel stride.</td>
137      * </tr>
138      * <tr>
139      *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
140      *   <td>1</td>
141      *   <td>A single plane of raw sensor image data, with 16 bits per color
142      *     sample. The details of the layout need to be queried from the source of
143      *     the raw sensor data, such as
144      *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
145      *   </td>
146      * </tr>
147      * <tr>
148      *   <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
149      *   <td>1</td>
150      *   <td>A single plane of raw sensor image data of private layout.
151      *   The details of the layout is implementation specific. Row stride and
152      *   pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
153      *   or {@link Plane#getPixelStride()} on RAW_PRIVATE image will cause
154      *   UnSupportedOperationException being thrown.
155      *   </td>
156      * </tr>
157      * <tr>
158      *   <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
159      *   <td>1</td>
160      *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
161      *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
162      *   </td>
163      * </tr>
164      * </table>
165      *
166      * @see android.graphics.ImageFormat
167      */
getFormat()168     public abstract int getFormat();
169 
170     /**
171      * The width of the image in pixels. For formats where some color channels
172      * are subsampled, this is the width of the largest-resolution plane.
173      */
getWidth()174     public abstract int getWidth();
175 
176     /**
177      * The height of the image in pixels. For formats where some color channels
178      * are subsampled, this is the height of the largest-resolution plane.
179      */
getHeight()180     public abstract int getHeight();
181 
182     /**
183      * Get the timestamp associated with this frame.
184      * <p>
185      * The timestamp is measured in nanoseconds, and is normally monotonically
186      * increasing. The timestamps for the images from different sources may have
187      * different timebases therefore may not be comparable. The specific meaning and
188      * timebase of the timestamp depend on the source providing images. See
189      * {@link android.hardware.Camera Camera},
190      * {@link android.hardware.camera2.CameraDevice CameraDevice},
191      * {@link MediaPlayer} and {@link MediaCodec} for more details.
192      * </p>
193      */
getTimestamp()194     public abstract long getTimestamp();
195 
196     /**
197      * Get the transformation associated with this frame.
198      * @return The window transformation that needs to be applied for this frame.
199      * @hide
200      */
getTransform()201     public abstract int getTransform();
202 
203     /**
204      * Get the scaling mode associated with this frame.
205      * @return The scaling mode that needs to be applied for this frame.
206      * @hide
207      */
getScalingMode()208     public abstract int getScalingMode();
209 
210     /**
211      * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
212      * intended for GPU and/or hardware access.
213      * <p>
214      * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
215      * after  {@link Image#close Image.close()} has been called.
216      * </p>
217      * @return the HardwareBuffer associated with this Image or null if this Image doesn't support
218      * this feature. (Unsupported use cases include Image instances obtained through
219      * {@link android.media.MediaCodec MediaCodec}, and on versions prior to Android P,
220      * {@link android.media.ImageWriter ImageWriter}).
221      */
222     @Nullable
getHardwareBuffer()223     public HardwareBuffer getHardwareBuffer() {
224         throwISEIfImageIsInvalid();
225         return null;
226     }
227 
228     /**
229      * Set the timestamp associated with this frame.
230      * <p>
231      * The timestamp is measured in nanoseconds, and is normally monotonically
232      * increasing. The timestamps for the images from different sources may have
233      * different timebases therefore may not be comparable. The specific meaning and
234      * timebase of the timestamp depend on the source providing images. See
235      * {@link android.hardware.Camera Camera},
236      * {@link android.hardware.camera2.CameraDevice CameraDevice},
237      * {@link MediaPlayer} and {@link MediaCodec} for more details.
238      * </p>
239      * <p>
240      * For images dequeued from {@link ImageWriter} via
241      * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
242      * set the timestamps correctly before sending them back to the
243      * {@link ImageWriter}, or the timestamp will be generated automatically when
244      * {@link ImageWriter#queueInputImage queueInputImage()} is called.
245      * </p>
246      *
247      * @param timestamp The timestamp to be set for this image.
248      */
setTimestamp(long timestamp)249     public void setTimestamp(long timestamp) {
250         throwISEIfImageIsInvalid();
251         return;
252     }
253 
254     private Rect mCropRect;
255 
256     /**
257      * Get the crop rectangle associated with this frame.
258      * <p>
259      * The crop rectangle specifies the region of valid pixels in the image,
260      * using coordinates in the largest-resolution plane.
261      */
getCropRect()262     public Rect getCropRect() {
263         throwISEIfImageIsInvalid();
264 
265         if (mCropRect == null) {
266             return new Rect(0, 0, getWidth(), getHeight());
267         } else {
268             return new Rect(mCropRect); // return a copy
269         }
270     }
271 
272     /**
273      * Set the crop rectangle associated with this frame.
274      * <p>
275      * The crop rectangle specifies the region of valid pixels in the image,
276      * using coordinates in the largest-resolution plane.
277      */
setCropRect(Rect cropRect)278     public void setCropRect(Rect cropRect) {
279         throwISEIfImageIsInvalid();
280 
281         if (cropRect != null) {
282             cropRect = new Rect(cropRect);  // make a copy
283             if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
284                 cropRect.setEmpty();
285             }
286         }
287         mCropRect = cropRect;
288     }
289 
290     /**
291      * Get the array of pixel planes for this Image. The number of planes is
292      * determined by the format of the Image. The application will get an empty
293      * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
294      * PRIVATE}, because the image pixel data is not directly accessible. The
295      * application can check the image format by calling
296      * {@link Image#getFormat()}.
297      */
getPlanes()298     public abstract Plane[] getPlanes();
299 
300     /**
301      * Free up this frame for reuse.
302      * <p>
303      * After calling this method, calling any methods on this {@code Image} will
304      * result in an {@link IllegalStateException}, and attempting to read from
305      * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
306      * {@link Plane#getBuffer} call will have undefined behavior. If the image
307      * was obtained from {@link ImageWriter} via
308      * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
309      * image data filled by the application will be lost and the image will be
310      * returned to {@link ImageWriter} for reuse. Images given to
311      * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
312      * closed.
313      * </p>
314      */
315     @Override
close()316     public abstract void close();
317 
318     /**
319      * <p>
320      * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
321      * </p>
322      * <p>
323      * This is a package private method that is only used internally.
324      * </p>
325      *
326      * @return true if the image is attachable to a new owner, false if the image is still attached
327      *         to its current owner, or the image is a stand-alone image and is not attachable to
328      *         a new owner.
329      */
isAttachable()330     boolean isAttachable() {
331         throwISEIfImageIsInvalid();
332 
333         return false;
334     }
335 
336     /**
337      * <p>
338      * Get the owner of the {@link Image}.
339      * </p>
340      * <p>
341      * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
342      * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
343      * if the image is stand-alone image or the owner is unknown.
344      * </p>
345      * <p>
346      * This is a package private method that is only used internally.
347      * </p>
348      *
349      * @return The owner of the Image.
350      */
getOwner()351     Object getOwner() {
352         throwISEIfImageIsInvalid();
353 
354         return null;
355     }
356 
357     /**
358      * Get native context (buffer pointer) associated with this image.
359      * <p>
360      * This is a package private method that is only used internally. It can be
361      * used to get the native buffer pointer and passed to native, which may be
362      * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
363      * JNI call.
364      * </p>
365      *
366      * @return native context associated with this Image.
367      */
getNativeContext()368     long getNativeContext() {
369         throwISEIfImageIsInvalid();
370 
371         return 0;
372     }
373 
374     /**
375      * <p>A single color plane of image data.</p>
376      *
377      * <p>The number and meaning of the planes in an Image are determined by the
378      * format of the Image.</p>
379      *
380      * <p>Once the Image has been closed, any access to the the plane's
381      * ByteBuffer will fail.</p>
382      *
383      * @see #getFormat
384      */
385     public static abstract class Plane {
386         /**
387          * @hide
388          */
389         @UnsupportedAppUsage
Plane()390         protected Plane() {
391         }
392 
393         /**
394          * <p>The row stride for this color plane, in bytes.</p>
395          *
396          * <p>This is the distance between the start of two consecutive rows of
397          * pixels in the image. Note that row stride is undefined for some formats
398          * such as
399          * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
400          * and calling getRowStride on images of these formats will
401          * cause an UnsupportedOperationException being thrown.
402          * For formats where row stride is well defined, the row stride
403          * is always greater than 0.</p>
404          */
getRowStride()405         public abstract int getRowStride();
406         /**
407          * <p>The distance between adjacent pixel samples, in bytes.</p>
408          *
409          * <p>This is the distance between two consecutive pixel values in a row
410          * of pixels. It may be larger than the size of a single pixel to
411          * account for interleaved image data or padded formats.
412          * Note that pixel stride is undefined for some formats such as
413          * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
414          * and calling getPixelStride on images of these formats will
415          * cause an UnsupportedOperationException being thrown.
416          * For formats where pixel stride is well defined, the pixel stride
417          * is always greater than 0.</p>
418          */
getPixelStride()419         public abstract int getPixelStride();
420         /**
421          * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
422          * containing the frame data.</p>
423          *
424          * <p>In particular, the buffer returned will always have
425          * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
426          * the underlying data could be mapped as a pointer in JNI without doing
427          * any copies with {@code GetDirectBufferAddress}.</p>
428          *
429          * <p>For raw formats, each plane is only guaranteed to contain data
430          * up to the last pixel in the last row. In other words, the stride
431          * after the last row may not be mapped into the buffer. This is a
432          * necessary requirement for any interleaved format.</p>
433          *
434          * @return the byte buffer containing the image data for this plane.
435          */
getBuffer()436         public abstract ByteBuffer getBuffer();
437     }
438 
439 }
440