diff --git a/CMakeLists.txt b/CMakeLists.txt index 6678f52c..9a5e85f6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -32,6 +32,7 @@ set(IIO_DIR ${SRC_ROOT_DIR}/iio) set(ISP_CTRL_DIR ${SRC_ROOT_DIR}/isp_control) # ISP_CONTROL_E set(IUTILS_DIR ${SRC_ROOT_DIR}/iutils) +set(SCHEDULER_DIR ${SRC_ROOT_DIR}/scheduler) set(METADATA_DIR ${SRC_ROOT_DIR}/metadata) set(PLATFORMDATA_DIR ${SRC_ROOT_DIR}/platformdata) set(V4L2_DIR ${SRC_ROOT_DIR}/v4l2) @@ -144,7 +145,7 @@ include_directories(include src/v4l2 src/core src/metadata src/platformdata src/platformdata/gc src/3a src/3a/intel3a src/3a/external - src/fd + src/fd src/scheduler src/core/psysprocessor src/image_process ) @@ -216,6 +217,7 @@ set(LIBCAMHAL_SRCS ${IUTILS_SRCS} ${METADATA_SRCS} ${PLATFORMDATA_SRCS} + ${SCHEDULER_SRCS} ${V4L2_SRCS} ${ALGOWRAPPER_SRCS} ${IMAGE_PROCESS_SRCS} diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..53d86be8 --- /dev/null +++ b/LICENSE @@ -0,0 +1,59 @@ +Apache License +Version 2.0, January 2004 + +http://www.apache.org/licenses/ +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
+ +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. +5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + diff --git a/config/linux/ipu6ep/sensors/lt6911uxc.xml b/config/linux/ipu6ep/sensors/lt6911uxc.xml index 2214db94..0921aeec 100644 --- a/config/linux/ipu6ep/sensors/lt6911uxc.xml +++ b/config/linux/ipu6ep/sensors/lt6911uxc.xml @@ -19,10 +19,10 @@ - + - - + + @@ -32,10 +32,10 @@ - + - - + + @@ -45,10 +45,10 @@ - + - - + + @@ -58,10 +58,10 @@ - + - - + + @@ -71,10 +71,10 @@ - + - - + + @@ -84,11 +84,10 @@ - + - - - + + @@ -116,12 +115,12 @@ - + - - + + - + @@ -129,12 +128,12 @@ - + - - + + - + @@ -142,12 +141,12 @@ - + - - + + - + @@ -155,12 +154,12 @@ - + - - + + - + @@ -168,12 +167,12 @@ - + - - + + - + diff --git a/include/api/ICamera.h b/include/api/ICamera.h index 956a95a2..105b585b 100644 --- a/include/api/ICamera.h +++ b/include/api/ICamera.h @@ -50,7 +50,8 @@ ******************************************************************************* * Version 0.44 Change output parameter of get_frame_size ******************************************************************************* - * Version 0.45 Add two parameters(number of buffers, parameters) for camera_stream_qbuf + * Version 0.45 Add two parameters(number of buffers, parameters) + for camera_stream_qbuf Add one parameter(parameters) for camera_stream_dqbuf ******************************************************************************* * Version 0.46 Add virtual channel camera number for camera_device_open @@ -75,7 +76,7 @@ #include "Parameters.h" -#include 
// For including definition of NULL +#include // For including definition of NULL extern "C" { namespace icamera { @@ -85,9 +86,10 @@ namespace icamera { * \struct vc_info_t: Define the virtual channel information for the device */ typedef struct { - int total_num; /**< the total camera number of virtual channel. 0: the virtual channel is disabled */ - int sequence; /**< the current camera's sequence in all the virtual channel cameras */ - int group; /**< the virtual channel group id */ + int total_num; /**< the total camera number of virtual channel. 0: the virtual channel is + disabled */ + int sequence; /**< the current camera's sequence in all the virtual channel cameras */ + int group; /**< the virtual channel group id */ } vc_info_t; // VIRTUAL_CHANNEL_E @@ -98,16 +100,16 @@ typedef struct { int facing; int orientation; int device_version; - const char* name; /**< Sensor name */ - const char* description; /**< Sensor description */ - const Parameters *capability; /**< camera capability */ + const char* name; /**< Sensor name */ + const char* description; /**< Sensor description */ + const Parameters* capability; /**< camera capability */ // VIRTUAL_CHANNEL_S - vc_info_t vc; /**< Virtual Channel information */ + vc_info_t vc; /**< Virtual Channel information */ /** The following three field are replaced by the vc_info_t, please don't use them */ int vc_total_num; /**deprecated */ - int vc_sequence; /**deprecated */ - int vc_group; /** deprecated */ + int vc_sequence; /**deprecated */ + int vc_group; /** deprecated */ // VIRTUAL_CHANNEL_E } camera_info_t; @@ -204,7 +206,7 @@ int camera_hal_deinit(); * camera_callback_ops_t *callback: callback handle * **/ -void camera_callback_register(int camera_id, const camera_callback_ops_t *callback); +void camera_callback_register(int camera_id, const camera_callback_ops_t* callback); /** * \brief @@ -282,7 +284,7 @@ void camera_device_close(int camera_id); * ret = camera_device_config_sensor_input(camera_id, 
&input_config); * \endcode **/ -int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig); +int camera_device_config_sensor_input(int camera_id, const stream_t* inputConfig); /** * \brief @@ -295,7 +297,8 @@ int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig * \param[in] * int camera_id: ID of the camera * \param[in] - * stream_config_t stream_list: stream configuration list, if success, stream id is filled in streams[] + * stream_config_t stream_list: stream configuration list, if success, stream id is filled in + * streams[] * * \return * 0 succeed to configure streams @@ -317,7 +320,7 @@ int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig * ret = camera_device_config_streams(camera_id, &stream_list); * \endcode **/ -int camera_device_config_streams(int camera_id, stream_config_t *stream_list); +int camera_device_config_streams(int camera_id, stream_config_t* stream_list); /** * \brief @@ -396,7 +399,7 @@ int camera_device_stop(int camera_id); * \endcode * */ -int camera_device_allocate_memory(int camera_id, camera_buffer_t *buffer); +int camera_device_allocate_memory(int camera_id, camera_buffer_t* buffer); /** * \brief @@ -425,8 +428,8 @@ int camera_device_allocate_memory(int camera_id, camera_buffer_t *buffer); * * \see camera_stream_qbuf(); **/ -int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer, - int num_buffers = 1, const Parameters* settings = NULL); +int camera_stream_qbuf(int camera_id, camera_buffer_t** buffer, int num_buffers = 1, + const Parameters* settings = NULL); /** * \brief @@ -454,13 +457,15 @@ int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer, * \code * const int buffer_count = 8; * int bpp = 0; - * int buffer_size = get_frame_size(camera_id, V4L2_PIX_FMT_SGRBG8, 1920, 1080, V4L2_FIELD_ANY, &bpp); + * int buffer_size = get_frame_size(camera_id, V4L2_PIX_FMT_SGRBG8, 1920, 1080, V4L2_FIELD_ANY, + * &bpp); * camera_buffer_t 
buffers[buffer_count]; * camera_buffer_t *buf = nullptr; * for (int i = 0; i < buffer_count; i++) { * buf = &buffers[i]; * posix_memalign(&buf->addr, getpagesize(), buffer_size); - * buf->s = stream; // stream here comes from parameter and result of camera_device_config_streams. + * // stream here comes from parameter and result of camera_device_config_streams. + * buf->s = stream; * } * * for (int i = 0; i < buffer_count; i++) { @@ -477,7 +482,7 @@ int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer, * \endcode * **/ -int camera_stream_dqbuf(int camera_id, int stream_id, camera_buffer_t **buffer, +int camera_stream_dqbuf(int camera_id, int stream_id, camera_buffer_t** buffer, Parameters* settings = NULL); /** @@ -574,7 +579,7 @@ int camera_get_parameters(int camera_id, Parameters& param, int64_t sequence = - * \return * frame size. **/ -int get_frame_size(int camera_id, int format, int width, int height, int field, int *bpp); +int get_frame_size(int camera_id, int format, int width, int height, int field, int* bpp); -} // namespace icamera -} // extern "C" +} // namespace icamera +} // extern "C" diff --git a/include/api/IspControl.h b/include/api/IspControl.h index c863d7c1..1a22a2de 100644 --- a/include/api/IspControl.h +++ b/include/api/IspControl.h @@ -25,8 +25,7 @@ namespace icamera { -typedef enum -{ +typedef enum { camera_control_isp_ctrl_id_wb_gains = 34710, camera_control_isp_ctrl_id_color_correction_matrix = 30009, camera_control_isp_ctrl_id_advanced_color_correction_matrix = 51448, @@ -45,8 +44,7 @@ typedef enum /** * \struct camera_control_isp_wb_gains_t */ -struct camera_control_isp_wb_gains_t -{ +struct camera_control_isp_wb_gains_t { /*!< gr Gr gain.*/ float gr; /*!< r R gain.*/ @@ -55,26 +53,23 @@ struct camera_control_isp_wb_gains_t float b; /*!< gb Gb gain.*/ float gb; - }; /** * \struct camera_control_isp_color_correction_matrix_t */ -struct camera_control_isp_color_correction_matrix_t -{ - /*!< ccm_gains[9] Color correction matrix 
from sensor RGB to sRGB/target color space. Row-major order.*/ +struct camera_control_isp_color_correction_matrix_t { + /*!< ccm_gains[9] Color correction matrix from sensor RGB to sRGB/target color space. Row-major + * order.*/ float ccm_gains[9]; /*!< ccm_gains_media_format colorspace the ccm matrix was calibrated to*/ int32_t ccm_gains_media_format; - }; /** * \struct camera_control_isp_advanced_color_correction_matrix_t */ -struct camera_control_isp_advanced_color_correction_matrix_t -{ +struct camera_control_isp_advanced_color_correction_matrix_t { /*!< bypass bypass*/ int32_t bypass; /*!< number_of_sectors number of sectors (CCM matrices)*/ @@ -85,24 +80,20 @@ struct camera_control_isp_advanced_color_correction_matrix_t float hue_of_sectors[24]; /*!< ccm_matrices_media_format colorspace the ccm matrices where calibrated to*/ int32_t ccm_matrices_media_format; - }; /** * \struct camera_control_isp_bxt_csc_t */ -struct camera_control_isp_bxt_csc_t -{ +struct camera_control_isp_bxt_csc_t { /*!< rgb2yuv_coef[9] Matrix for RGB to YUV conversion*/ int32_t rgb2yuv_coef[9]; - }; /** * \struct camera_control_isp_bxt_demosaic_t */ -struct camera_control_isp_bxt_demosaic_t -{ +struct camera_control_isp_bxt_demosaic_t { /*!< high_frequency_denoise_enable High frequency denoise enbale flag*/ int32_t high_frequency_denoise_enable; /*!< false_color_correction_enable False color correction enable flag*/ @@ -117,14 +108,12 @@ struct camera_control_isp_bxt_demosaic_t int32_t high_frequency_denoise_power; /*!< checkers_removal_w Checkers removal homogeneity weight*/ int32_t checkers_removal_w; - }; /** * \struct camera_control_isp_sc_iefd_t */ -struct camera_control_isp_sc_iefd_t -{ +struct camera_control_isp_sc_iefd_t { /*!< sharpening_power[2] base power of sharpening*/ int32_t sharpening_power[2]; /*!< sharp_power_edge shapening power for direct edge*/ @@ -183,14 +172,12 @@ struct camera_control_isp_sc_iefd_t int32_t vssnlm_y2; /*!< vssnlm_y3 Edge denoising LUT y2*/ int32_t 
vssnlm_y3; - }; /** * \struct camera_control_isp_see_t */ -struct camera_control_isp_see_t -{ +struct camera_control_isp_see_t { /*!< bypass bypass*/ int32_t bypass; /*!< edge_max max edge value for clamping*/ @@ -205,47 +192,58 @@ struct camera_control_isp_see_t int32_t alpha_width; /*!< alpha_bias */ int32_t alpha_bias; - }; /** * \struct camera_control_isp_bnlm_t */ -struct camera_control_isp_bnlm_t -{ +struct camera_control_isp_bnlm_t { /*!< nm_offset NR level for noise model adaptation*/ int32_t nm_offset; /*!< nm_th Threshold for noise model adaptation*/ int32_t nm_th; /*!< bypass bypass for the filter*/ int32_t bypass; - /*!< detailix_x_range[2] detailIx noise model - noise input range - tunned automatically from data - set QNN fit range - [Q14.0] [0..16383]*/ + /*!< detailix_x_range[2] detailIx noise model - noise input range - tunned automatically from + * data - set QNN fit range - [Q14.0] [0..16383]*/ int32_t detailix_x_range[2]; /*!< detailix_radgain detailIx Lens shading radial compensation power - [Q8.16], [0..256%]*/ int32_t detailix_radgain; /*!< detailix_coeffs[3] detailIx SAD noise model - NoiseModel=SQRT(C1*mu^2+C2*mu+C3)*/ int32_t detailix_coeffs[3]; - /*!< sad_spatialrad[4] Neighbors spatial (radial) weight - filter radial bias - allowing reduction of effective filter size [Q3.5] [1:0.25:4]*/ + /*!< sad_spatialrad[4] Neighbors spatial (radial) weight - filter radial bias - allowing + * reduction of effective filter size [Q3.5] [1:0.25:4]*/ int32_t sad_spatialrad[4]; - /*!< sad_mu_x_range[2] detailIx noise model - noise input range - tunned automatically from data - set QNN fit range [Q14.0] [0..16383]*/ + /*!< sad_mu_x_range[2] detailIx noise model - noise input range - tunned automatically from data + * - set QNN fit range [Q14.0] [0..16383]*/ int32_t sad_mu_x_range[2]; /*!< sad_radgain SAD Lens shading radial compensation power - [Q8.16], [0..256%]*/ int32_t sad_radgain; /*!< sad_mu_coeffs[3] SAD noise model - 
NoiseModel=SQRT(C1*mu^2+C2*mu+C3)*/ int32_t sad_mu_coeffs[3]; - /*!< detailth[3] detailTH - 3 THs classifying content(detail) type to {flat/weak texture/texture/edge} based on detailIx - [14.0] [0..16383]*/ + /*!< detailth[3] detailTH - 3 THs classifying content(detail) type to {flat/weak + * texture/texture/edge} based on detailIx - [14.0] [0..16383]*/ int32_t detailth[3]; - /*!< sad_detailixlutx[4] Continuos LUT aligning SAD from different detailIx levels to a common scale before appling exponent scaling - texture to flat & edge discrimination, X axis is currently fixed 0:8:1023 - [14.0] [0..16383]*/ + /*!< sad_detailixlutx[4] Continuos LUT aligning SAD from different detailIx levels to a common + * scale before appling exponent scaling - texture to flat & edge discrimination, X axis is + * currently fixed 0:8:1023 - [14.0] [0..16383]*/ int32_t sad_detailixlutx[4]; - /*!< sad_detailixluty[4] Continuos LUT aligning SAD from different detailIx levels to a common scale before appling exponent scaling - texture to flat & edge discrimination - Yaxis is continuos description of (2^10-1)/QNN_o - [14.0] [0..16383]*/ + /*!< sad_detailixluty[4] Continuos LUT aligning SAD from different detailIx levels to a common + * scale before appling exponent scaling - texture to flat & edge discrimination - Yaxis is + * continuos description of (2^10-1)/QNN_o - [14.0] [0..16383]*/ int32_t sad_detailixluty[4]; - /*!< numcandforavg[4] max num neighbors to average for smoothing per detail type - [4.0] [1..16]*/ + /*!< numcandforavg[4] max num neighbors to average for smoothing per detail type - [4.0] + * [1..16]*/ int32_t numcandforavg[4]; - /*!< blend_power[4] blend alpha(alpha0) - Increase central pixel effect to enhance detail preservation vs smoothing - [8.24], [0..256%]*/ + /*!< blend_power[4] blend alpha(alpha0) - Increase central pixel effect to enhance detail + * preservation vs smoothing - [8.24], [0..256%]*/ int32_t blend_power[4]; - /*!< blend_th[4] blend alpha(alpha0) - 
preserve details based on texture classification of the form alpha0>TH - [0:2^10-1]*/ + /*!< blend_th[4] blend alpha(alpha0) - preserve details based on texture classification of the + * form alpha0>TH - [0:2^10-1]*/ int32_t blend_th[4]; - /*!< blend_texturegain[4] blend alpha(alpha0) - define steepness of blent_th effect for preserve details - alphaRes=2^10-1; gainScale=255; outScale=0.01; curSlope = outScale*tan(pi/2*((fixGain/(gainScale-1))^2))*alphaRes/(alphaRes-curTH) - [0..256]*/ + /*!< blend_texturegain[4] blend alpha(alpha0) - define steepness of blent_th effect for + * preserve details - alphaRes=2^10-1; gainScale=255; outScale=0.01; curSlope = + * outScale*tan(pi/2*((fixGain/(gainScale-1))^2))*alphaRes/(alphaRes-curTH) - [0..256]*/ int32_t blend_texturegain[4]; /*!< blend_radgain blend radial weigh - [8.16], [0..256%]*/ int32_t blend_radgain; @@ -257,14 +255,12 @@ struct camera_control_isp_bnlm_t int32_t wmaxminth; /*!< rad_enable Radial LSC correction*/ int32_t rad_enable; - }; /** * \struct camera_control_isp_tnr5_21_t */ -struct camera_control_isp_tnr5_21_t -{ +struct camera_control_isp_tnr5_21_t { /*!< bypass bypass filter*/ int32_t bypass; /*!< nm_yy_xcu_b[64] base y noise model - y dependency*/ @@ -333,14 +329,12 @@ struct camera_control_isp_tnr5_21_t int32_t bypass_g_mv; /*!< bypass_NS Bypass Noise Stream*/ int32_t bypass_NS; - }; /** * \struct camera_control_isp_xnr_dss_t */ -struct camera_control_isp_xnr_dss_t -{ +struct camera_control_isp_xnr_dss_t { /*!< rad_enable */ int32_t rad_enable; /*!< bypass Bypass all XNR4*/ @@ -413,14 +407,12 @@ struct camera_control_isp_xnr_dss_t int32_t blnd_hf_power_y; /*!< blnd_hf_power_c New in DSS_XNR*/ int32_t blnd_hf_power_c; - }; /** * \struct camera_control_isp_gamma_tone_map_t */ -struct camera_control_isp_gamma_tone_map_t -{ +struct camera_control_isp_gamma_tone_map_t { /*!< gamma[2048] Gamma table for all channels*/ float gamma[2048]; /*!< gamma_lut_size Gamma LUT size*/ @@ -429,14 +421,12 @@ struct 
camera_control_isp_gamma_tone_map_t float tone_map[2048]; /*!< tone_map_lut_size Tone map LUT size*/ uint32_t tone_map_lut_size; - }; /** * \struct camera_control_isp_tnr5_22_t */ -struct camera_control_isp_tnr5_22_t -{ +struct camera_control_isp_tnr5_22_t { /*!< bypass bypass filter*/ int32_t bypass; /*!< nm_yy_xcu_b[64] base y noise model - y dependency*/ @@ -505,14 +495,12 @@ struct camera_control_isp_tnr5_22_t int32_t bypass_g_mv; /*!< bypass_NS Bypass Noise Stream*/ int32_t bypass_NS; - }; /** * \struct camera_control_isp_tnr5_25_t */ -struct camera_control_isp_tnr5_25_t -{ +struct camera_control_isp_tnr5_25_t { /*!< bypass bypass filter*/ int32_t bypass; /*!< nm_yy_xcu_b[64] base y noise model - y dependency*/ @@ -555,7 +543,6 @@ struct camera_control_isp_tnr5_25_t int32_t tbd_sim_gain; /*!< bypass_g_mv Bypass Global Motion Vector*/ int32_t bypass_g_mv; - }; -} // end of icamera +} // namespace icamera diff --git a/include/api/Parameters.h b/include/api/Parameters.h index 29384503..876a8b78 100644 --- a/include/api/Parameters.h +++ b/include/api/Parameters.h @@ -76,6 +76,7 @@ #include #include #include +#include namespace icamera { @@ -96,94 +97,97 @@ typedef struct { * MUST use int if new member added. */ typedef struct { - int format; /**< stream format refer to v4l2 definition https://linuxtv.org/downloads/v4l-dvb-apis/pixfmt.html */ - int width; /**< image width */ - int height; /**< image height */ - int field; /**< refer to v4l2 definition https://linuxtv.org/downloads/v4l-dvb-apis/field-order.html#v4l2-field */ + int format; /**< stream format refer to v4l2 definition + https://linuxtv.org/downloads/v4l-dvb-apis/pixfmt.html */ + int width; /**< image width */ + int height; /**< image height */ + int field; /**< refer to v4l2 definition + https://linuxtv.org/downloads/v4l-dvb-apis/field-order.html#v4l2-field */ -/* -* The buffer geometry introduction. -* The YUV image is formed with Y:Luma and UV:Chroma. 
And there are -* two kinds of styles for YUV format: planar and packed. -* -* YUV420:NV12 -* -* YUV420(720x480) sampling -* -* |<----width+padding=alignedBpl----->| -* Y *-------*-------*-------*-------*....----- -* | | : ^ -* | # UV # | : | -* | | : | -* *-------*-------*-------*-------*.... | -* | | : | -* | # # | : | -* | | : | -* *-------*-------*-------*-------*.... (height * 3 / 2) -* | | : | -* | # # | : | -* | | : | -* *-------*-------*-------*-------*.... | -* | | : | -* | # # | : | -* | | : v -* *-------*-------*-------*-------*....----- -* -* The data stored in memory -* ____________w___________ ..... -* |Y0|Y1 | : -* | | : -* h h : -* | | : -* | | : -* |________________________|....: -* |U|V|U|V | : -* h/2 h/2 : -* |____________w___________|....: -* -* bpp = 12 -* bpl = width; -* stride = align64(bpl): -* -* YUV422:YUY2 -* -* YUV422(720x480) sampling -* -* |<--(width*2)+padding=alignedBpl-->| -* YUV *#----*#-----*#-----*#-----*#....----- -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... (height) -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#.... | -* *#----*#-----*#-----*#-----*#....----- -* -* The data stored in memory -* ____________w___________ ..... -* |Y0|Cb|Y1|Cr | : -* | | : -* | | : -* | | : -* h h : -* | | : -* | | : -* | | : -* |____________w___________|....: -* -* bpp = 16 -* bpl = width * bpp / 8 = width * 2; -* stride = align64(bpl): -* -* Note: The stride defined in HAL is same as aligned bytes per line. -*/ - int stride; /**< stride = aligned bytes per line */ - int size; /**< real buffer size */ - - int id; /**< Id that is filled by HAL. */ - int memType; /**< buffer memory type filled by app, refer to https://linuxtv.org/downloads/v4l-dvb-apis/io.html */ + /* + * The buffer geometry introduction. 
+ * The YUV image is formed with Y:Luma and UV:Chroma. And there are + * two kinds of styles for YUV format: planar and packed. + * + * YUV420:NV12 + * + * YUV420(720x480) sampling + * + * |<----width+padding=alignedBpl----->| + * Y *-------*-------*-------*-------*....----- + * | | : ^ + * | # UV # | : | + * | | : | + * *-------*-------*-------*-------*.... | + * | | : | + * | # # | : | + * | | : | + * *-------*-------*-------*-------*.... (height * 3 / 2) + * | | : | + * | # # | : | + * | | : | + * *-------*-------*-------*-------*.... | + * | | : | + * | # # | : | + * | | : v + * *-------*-------*-------*-------*....----- + * + * The data stored in memory + * ____________w___________ ..... + * |Y0|Y1 | : + * | | : + * h h : + * | | : + * | | : + * |________________________|....: + * |U|V|U|V | : + * h/2 h/2 : + * |____________w___________|....: + * + * bpp = 12 + * bpl = width; + * stride = align64(bpl): + * + * YUV422:YUY2 + * + * YUV422(720x480) sampling + * + * |<--(width*2)+padding=alignedBpl-->| + * YUV *#----*#-----*#-----*#-----*#....----- + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... (height) + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#.... | + * *#----*#-----*#-----*#-----*#....----- + * + * The data stored in memory + * ____________w___________ ..... + * |Y0|Cb|Y1|Cr | : + * | | : + * | | : + * | | : + * h h : + * | | : + * | | : + * | | : + * |____________w___________|....: + * + * bpp = 16 + * bpl = width * bpp / 8 = width * 2; + * stride = align64(bpl): + * + * Note: The stride defined in HAL is same as aligned bytes per line. + */ + int stride; /**< stride = aligned bytes per line */ + int size; /**< real buffer size */ + + int id; /**< Id that is filled by HAL. 
*/ + int memType; /**< buffer memory type filled by app, refer to + https://linuxtv.org/downloads/v4l-dvb-apis/io.html */ /** * The maximum number of buffers the HAL device may need to have dequeued at @@ -192,7 +196,7 @@ typedef struct { */ uint32_t max_buffers; - int usage; /** stream_array_t; * Contains all streams info in this configuration. */ typedef struct { - int num_streams; /**< number of streams in this configuration */ - stream_t *streams; /**< streams list */ + int num_streams; /**< number of streams in this configuration */ + stream_t* streams; /**< streams list */ /** * The operation mode of the streams in this configuration. It should be one of the value * defined in camera_stream_configuration_mode_t. @@ -224,10 +228,10 @@ typedef struct { * The buffer's properties can be one of them or combined with some of them. */ typedef enum { - BUFFER_FLAG_DMA_EXPORT = 1<<0, - BUFFER_FLAG_INTERNAL = 1<<1, - BUFFER_FLAG_SW_READ = 1<<2, - BUFFER_FLAG_SW_WRITE = 1<<3, + BUFFER_FLAG_DMA_EXPORT = 1 << 0, + BUFFER_FLAG_INTERNAL = 1 << 1, + BUFFER_FLAG_SW_READ = 1 << 2, + BUFFER_FLAG_SW_WRITE = 1 << 3, } camera_buffer_flags_t; /** @@ -237,15 +241,17 @@ typedef enum { * according to memory type to allocate memory and queue to device. */ typedef struct { - stream_t s; /**< stream info */ - void *addr; /**< buffer addr for userptr and mmap memory mode */ - int index; /**< buffer index, filled by HAL. it is used for qbuf and dqbuf in order */ - int64_t sequence; /**< buffer sequence, filled by HAL, to record buffer dqueue sequence from device */ - int dmafd; /**< buffer dmafd for DMA import and export mode */ - int flags; /**< buffer flags, its type is camera_buffer_flags_t, used to specify buffer properties */ + stream_t s; /**< stream info */ + void* addr; /**< buffer addr for userptr and mmap memory mode */ + int index; /**< buffer index, filled by HAL. 
it is used for qbuf and dqbuf in order */ + int64_t sequence; /**< buffer sequence, filled by HAL, to record buffer dqueue sequence from + device */ + int dmafd; /**< buffer dmafd for DMA import and export mode */ + int flags; /**< buffer flags, its type is camera_buffer_flags_t, used to specify buffer + properties */ uint64_t timestamp; /**< buffer timestamp, it's a time reference measured in nanosecond */ uint32_t requestId; /**< buffer requestId, it's a request id of buffer */ - int reserved; /**< reserved for future */ + int reserved; /**< reserved for future */ } camera_buffer_t; /** @@ -464,14 +470,14 @@ typedef enum { * \enum camera_features: camera supported features. */ typedef enum { - MANUAL_EXPOSURE, /**< Allow user to control exposure time and ISO manually */ - MANUAL_WHITE_BALANCE, /**< Allow user to control AWB mode, cct range, and gain */ - IMAGE_ENHANCEMENT, /**< Sharpness, Brightness, Contrast, Hue, Saturation */ - NOISE_REDUCTION, /**< Allow user to control NR mode and NR level */ - SCENE_MODE, /**< Allow user to control scene mode */ - WEIGHT_GRID_MODE, /**< Allow user to control custom weight grid mode */ - PER_FRAME_CONTROL, /**< Allow user to control most of parameters for each frame */ - ISP_CONTROL, /**< Allow user to control low level ISP features */ + MANUAL_EXPOSURE, /**< Allow user to control exposure time and ISO manually */ + MANUAL_WHITE_BALANCE, /**< Allow user to control AWB mode, cct range, and gain */ + IMAGE_ENHANCEMENT, /**< Sharpness, Brightness, Contrast, Hue, Saturation */ + NOISE_REDUCTION, /**< Allow user to control NR mode and NR level */ + SCENE_MODE, /**< Allow user to control scene mode */ + WEIGHT_GRID_MODE, /**< Allow user to control custom weight grid mode */ + PER_FRAME_CONTROL, /**< Allow user to control most of parameters for each frame */ + ISP_CONTROL, /**< Allow user to control low level ISP features */ INVALID_FEATURE } camera_features; typedef std::vector camera_features_list_t; @@ -494,10 +500,7 @@ typedef 
enum { AE_MODE_MAX /**< Invalid AE mode, any new mode should be added before this */ } camera_ae_mode_t; -typedef enum { - AE_STATE_NOT_CONVERGED, - AE_STATE_CONVERGED -} camera_ae_state_t; +typedef enum { AE_STATE_NOT_CONVERGED, AE_STATE_CONVERGED } camera_ae_state_t; /** * \enum camera_antibanding_mode_t: Used to control antibanding mode. @@ -528,7 +531,8 @@ typedef enum { } camera_scene_mode_t; /** - * \struct camera_ae_exposure_time_range_t: Provide supported exposure time range info per scene mode. + * \struct camera_ae_exposure_time_range_t: Provide supported exposure time range info per scene + * mode. */ typedef struct { camera_scene_mode_t scene_mode; @@ -565,8 +569,8 @@ typedef enum { * \enum camera_yuv_color_range_mode_t: Specify which YUV color range will be used. */ typedef enum { - CAMERA_FULL_MODE_YUV_COLOR_RANGE, /*!< Full range (0 - 255) YUV data. */ - CAMERA_REDUCED_MODE_YUV_COLOR_RANGE /*!< Reduced range aka. BT.601 (16-235) YUV data range. */ + CAMERA_FULL_MODE_YUV_COLOR_RANGE, /*!< Full range (0 - 255) YUV data. */ + CAMERA_REDUCED_MODE_YUV_COLOR_RANGE /*!< Reduced range aka. BT.601 (16-235) YUV data range. */ } camera_yuv_color_range_mode_t; /** @@ -588,10 +592,7 @@ typedef enum { AWB_MODE_MAX } camera_awb_mode_t; -typedef enum { - AWB_STATE_NOT_CONVERGED, - AWB_STATE_CONVERGED -} camera_awb_state_t; +typedef enum { AWB_STATE_NOT_CONVERGED, AWB_STATE_CONVERGED } camera_awb_state_t; /** * \enum camera_af_mode_t: Used to control af working mode. @@ -653,11 +654,11 @@ typedef enum { * \enum camera_af_state_t: Used to return af state. 
*/ typedef enum { - AF_STATE_IDLE, /*!< Focus is idle */ - AF_STATE_LOCAL_SEARCH, /*!< Focus is in local search state */ - AF_STATE_EXTENDED_SEARCH, /*!< Focus is in extended search state */ - AF_STATE_SUCCESS, /*!< Focus has succeeded */ - AF_STATE_FAIL /*!< Focus has failed */ + AF_STATE_IDLE, /*!< Focus is idle */ + AF_STATE_LOCAL_SEARCH, /*!< Focus is in local search state */ + AF_STATE_EXTENDED_SEARCH, /*!< Focus is in extended search state */ + AF_STATE_SUCCESS, /*!< Focus has succeeded */ + AF_STATE_FAIL /*!< Focus has failed */ } camera_af_state_t; /** @@ -782,7 +783,7 @@ typedef struct { * \struct camera_callback_ops_t */ typedef struct camera_callback_ops { - void (*notify)(const camera_callback_ops* cb, const camera_msg_data_t &data); + void (*notify)(const camera_callback_ops* cb, const camera_msg_data_t& data); } camera_callback_ops_t; /** @@ -930,12 +931,7 @@ typedef struct { /** * \enum camera_converge_speed_t: Used to control AE/AWB converge speed. */ -typedef enum { - CONVERGE_NORMAL, - CONVERGE_MID, - CONVERGE_LOW, - CONVERGE_MAX -} camera_converge_speed_t; +typedef enum { CONVERGE_NORMAL, CONVERGE_MID, CONVERGE_LOW, CONVERGE_MAX } camera_converge_speed_t; /** * \enum camera_converge_speed_mode_t: Used to control AE/AWB converge speed mode. @@ -984,18 +980,12 @@ typedef enum { /** * \enum camera_ldc_mode_t: Used to toggle lens distortion correction. */ -typedef enum { - LDC_MODE_OFF, - LDC_MODE_ON -} camera_ldc_mode_t; +typedef enum { LDC_MODE_OFF, LDC_MODE_ON } camera_ldc_mode_t; /** * \enum camera_rsc_mode_t: Used to toggle rolling shutter correction. */ -typedef enum { - RSC_MODE_OFF, - RSC_MODE_ON -} camera_rsc_mode_t; +typedef enum { RSC_MODE_OFF, RSC_MODE_ON } camera_rsc_mode_t; /** * \enum camera_flip_mode_t: Used to set output slip. @@ -1010,10 +1000,7 @@ typedef enum { /** * \enum camera_mono_downscale_mode_t: Used to enable/disable MONO Downscale. 
*/ -typedef enum { - MONO_DS_MODE_OFF, - MONO_DS_MODE_ON -} camera_mono_downscale_mode_t; +typedef enum { MONO_DS_MODE_OFF, MONO_DS_MODE_ON } camera_mono_downscale_mode_t; /** * \enum camera_video_stabilization_mode_t: Used to control the video stabilization mode. @@ -1033,8 +1020,8 @@ typedef enum { } camera_mount_type_t; /** -* \enum camera_shading_mode_t: camera shading mode type -*/ + * \enum camera_shading_mode_t: camera shading mode type + */ typedef enum { SHADING_MODE_OFF, SHADING_MODE_FAST, @@ -1042,8 +1029,8 @@ typedef enum { } camera_shading_mode_t; /** -* \enum camera_lens_shading_map_mode_type_t: camera lens shading map mode type -*/ + * \enum camera_lens_shading_map_mode_type_t: camera lens shading map mode type + */ typedef enum { LENS_SHADING_MAP_MODE_OFF, LENS_SHADING_MAP_MODE_ON @@ -1073,7 +1060,7 @@ typedef struct { * */ class Parameters { -public: + public: Parameters(); Parameters(const Parameters& other); Parameters& operator=(const Parameters& other); @@ -1132,7 +1119,8 @@ class Parameters { * Camera application MUST check if the feature is supported before trying to enable it. * Otherwise the behavior is undefined currently, HAL may just ignore the request. * - * \param[out] camera_features_list_t& features: All supported feature will be filled in "features" + * \param[out] camera_features_list_t& features: All supported feature will be filled in + * "features" * * \return: If no feature supported, features will be empty */ @@ -1145,7 +1133,8 @@ class Parameters { * Camera application MUST check if the feature is supported before trying to enable it. * Otherwise the behavior is undefined currently, HAL may just ignore the request. * - * \param[out] vector& controls: All supported ISP control features will be filled in it. + * \param[out] vector& controls: All supported ISP control features will be filled in + * it. 
* * \return: If no ISP control supported, the controls will be empty */ @@ -1157,7 +1146,8 @@ class Parameters { * * \param[out] camera_range_t& evRange * - * \return 0 if ae compensation supported, non-0 or evRange equals [0, 0] means ae compensation not supported. + * \return 0 if ae compensation supported, non-0 or evRange equals [0, 0] means ae compensation + * not supported. */ int getAeCompensationRange(camera_range_t& evRange) const; @@ -1190,7 +1180,8 @@ class Parameters { * * \return 0 if exposure time range is filled by HAL. */ - int getSupportedAeExposureTimeRange(std::vector& etRanges) const; + int getSupportedAeExposureTimeRange( + std::vector& etRanges) const; /** * \brief Get supported manual sensor gain range @@ -1272,11 +1263,12 @@ class Parameters { * Camera application MUST check if the video stabilization mode is supported before trying * to enable it. Otherwise one error occurring, HAL may just ignore the request. * - * \param[out] supportedModes: All supported video stabilization mode will be filled in "supportedModes" + * \param[out] supportedModes: All supported video stabilization mode will be filled in + * "supportedModes" * * \return: If no mode supported, supportedModes will be empty */ - int getSupportedVideoStabilizationMode(camera_video_stabilization_list_t &supportedModes) const; + int getSupportedVideoStabilizationMode(camera_video_stabilization_list_t& supportedModes) const; /** * \brief Get supported ae mode @@ -1288,7 +1280,7 @@ class Parameters { * * \return: If no ae mode supported, supportedAeModes will be empty */ - int getSupportedAeMode(std::vector &supportedAeModes) const; + int getSupportedAeMode(std::vector& supportedAeModes) const; /** * \brief Get supported awb mode @@ -1300,7 +1292,7 @@ class Parameters { * * \return: If no awb mode supported, supportedAwbModes will be empty */ - int getSupportedAwbMode(std::vector &supportedAwbModes) const; + int getSupportedAwbMode(std::vector& supportedAwbModes) const; /** * \brief 
Get supported af mode @@ -1312,7 +1304,7 @@ class Parameters { * * \return: If no af mode supported, supportedAfModes will be empty */ - int getSupportedAfMode(std::vector &supportedAfModes) const; + int getSupportedAfMode(std::vector& supportedAfModes) const; /** * \brief Get supported scene mode @@ -1320,23 +1312,26 @@ class Parameters { * Camera application MUST check if the scene mode is supported before trying to enable it. * Otherwise one error occurring, HAL may just ignore the request. * - * \param[out] supportedSceneModes: All supported scene mode will be filled in "supportedSceneModes" + * \param[out] supportedSceneModes: All supported scene mode will be filled in + * "supportedSceneModes" * * \return: If no scene mode supported, supportedSceneModes will be empty */ - int getSupportedSceneMode(std::vector &supportedSceneModes) const; + int getSupportedSceneMode(std::vector& supportedSceneModes) const; /** * \brief Get supported antibanding mode * - * Camera application MUST check if the antibanding mode is supported before trying to enable it. - * Otherwise one error occurring, HAL may just ignore the request. + * Camera application MUST check if the antibanding mode is supported before trying to enable + * it. Otherwise one error occurring, HAL may just ignore the request. * - * \param[out] supportedAntibindingModes: All supported scene mode will be filled in "supportedAntibindingModes" + * \param[out] supportedAntibindingModes: All supported scene mode will be filled in + * "supportedAntibindingModes" * * \return: If no antibanding mode supported, supportedAntibindingModes will be empty */ - int getSupportedAntibandingMode(std::vector &supportedAntibindingModes) const; + int getSupportedAntibandingMode( + std::vector& supportedAntibindingModes) const; /** * \brief Get if ae lock is available @@ -1504,7 +1499,8 @@ class Parameters { /** * \brief Set AE distribution priority. 
* - * \param[in] camera_ae_distribution_priority_t priority: the AE distribution priority to be set. + * \param[in] camera_ae_distribution_priority_t priority: the AE distribution priority to be + * set. * * \return 0 if set successfully, otherwise non-0 value is returned. */ @@ -1598,8 +1594,8 @@ class Parameters { /** * \brief Set white balance mode * - * White balance mode could be one of totally auto, preset cct range, customized cct range, customized - * white area, customize gains. + * White balance mode could be one of totally auto, preset cct range, customized cct range, + * customized white area, customize gains. * * \param[in] camera_awb_mode_t awbMode * @@ -1701,7 +1697,8 @@ class Parameters { * * The range of each gain shift is (0, 255). * - * \param[in] camera_awb_gains_t awb gain shift, which specify r,g,b gains for updating awb result. + * \param[in] camera_awb_gains_t awb gain shift, which specify r,g,b gains for updating awb + * result. * * \return 0 if set successfully, otherwise non-0 value is returned. */ @@ -1725,7 +1722,7 @@ class Parameters { * * \return 0 if set successfully, otherwise non-0 value is returned. */ - int setAwbResult(void *data); + int setAwbResult(void* data); /** * \brief Get awb result currently used. @@ -1736,7 +1733,7 @@ class Parameters { * * \return 0 if get successfully, otherwise non-0 value is returned. */ - int getAwbResult(void *data) const; + int getAwbResult(void* data) const; /** * \brief Set manual white point coordinate. @@ -1762,7 +1759,8 @@ class Parameters { /** * \brief Set customized color transform which is a 3x3 matrix. * - * Manual color transform only takes effect when awb mode set to AWB_MODE_MANUAL_COLOR_TRANSFORM. + * Manual color transform only takes effect when awb mode set to + * AWB_MODE_MANUAL_COLOR_TRANSFORM. * * \param[in] camera_color_transform_t colorTransform: a 3x3 matrix for color convertion. 
* @@ -1782,7 +1780,8 @@ class Parameters { /** * \brief Set customized color correction gains which is a 4 array. * - * Manual color correction gains only takes effect when awb mode set to AWB_MODE_MANUAL_COLOR_TRANSFORM. + * Manual color correction gains only takes effect when awb mode set to + * AWB_MODE_MANUAL_COLOR_TRANSFORM. * * \param[in] camera_color_gains_t colorGains: a 4 array for color correction gains. * @@ -1916,7 +1915,7 @@ class Parameters { * * \return 0 if get successfully, otherwise non-0 value is returned. */ - int getYuvColorRangeMode(camera_yuv_color_range_mode_t & colorRange) const; + int getYuvColorRangeMode(camera_yuv_color_range_mode_t& colorRange) const; /** * \brief Set customized effects. @@ -1946,7 +1945,7 @@ class Parameters { int setIrisLevel(int level); int getIrisLevel(int& level); -// HDR_FEATURE_S + // HDR_FEATURE_S /** * \brief Set WDR mode * @@ -1964,7 +1963,7 @@ class Parameters { * \return 0 if awb mode was set, non-0 means no awb mode was set. */ int getWdrMode(camera_wdr_mode_t& wdrMode) const; -// HDR_FEATURE_E + // HDR_FEATURE_E /** * \brief Set WDR Level @@ -2039,7 +2038,7 @@ class Parameters { * * \return 0 if deinterlace mode was set, non-0 means no deinterlace mode was set. */ - int getDeinterlaceMode(camera_deinterlace_mode_t &deinterlaceMode) const; + int getDeinterlaceMode(camera_deinterlace_mode_t& deinterlaceMode) const; /** * \brief Set Makernote Data @@ -2097,7 +2096,7 @@ class Parameters { * * \return 0 if makernote mode was set, otherwise return non-0 value. */ - int getMakernoteMode(camera_makernote_mode_t &mode) const; + int getMakernoteMode(camera_makernote_mode_t& mode) const; // ISP_CONTROL_S /** @@ -2187,7 +2186,7 @@ class Parameters { * * \return 0 if find the corresponding data, otherwise non-0 value is returned. 
*/ - int getLdcMode(camera_ldc_mode_t &mode) const; + int getLdcMode(camera_ldc_mode_t& mode) const; /** * \brief Set rolling shutter correction mode @@ -2205,7 +2204,7 @@ class Parameters { * * \return 0 if find the corresponding data, otherwise non-0 value is returned. */ - int getRscMode(camera_rsc_mode_t &mode) const; + int getRscMode(camera_rsc_mode_t& mode) const; /** * \brief flip mode @@ -2223,7 +2222,7 @@ class Parameters { * * \return 0 if find the corresponding data, otherwise non-0 value is returned. */ - int getFlipMode(camera_flip_mode_t &mode) const; + int getFlipMode(camera_flip_mode_t& mode) const; /** * \brief set frame interval to run 3A @@ -2241,7 +2240,7 @@ class Parameters { * * \return 0 if find the corresponding data, otherwise non-0 value is returned. */ - int getRun3ACadence(int &cadence) const; + int getRun3ACadence(int& cadence) const; /** * \brief mono downscale mode @@ -2259,7 +2258,7 @@ class Parameters { * * \return 0 if find the corresponding data, otherwise non-0 value is returned. */ - int getMonoDsMode(camera_mono_downscale_mode_t &mode) const; + int getMonoDsMode(camera_mono_downscale_mode_t& mode) const; /** * \brief Set Fisheye Dewarping Mode @@ -2280,42 +2279,42 @@ class Parameters { * * \return 0 if dewarping mode was set, non-0 means no dewarping mode was set. 
*/ - int getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t &dewarpingMode) const; + int getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t& dewarpingMode) const; // Belows are Jpeg related parameters operations - int getJpegQuality(uint8_t *quality) const; + int getJpegQuality(uint8_t* quality) const; int setJpegQuality(uint8_t quality); - int getJpegThumbnailQuality(uint8_t *quality) const; + int getJpegThumbnailQuality(uint8_t* quality) const; int setJpegThumbnailQuality(uint8_t quality); int setJpegThumbnailSize(const camera_resolution_t& res); int getJpegThumbnailSize(camera_resolution_t& res) const; - int getJpegRotation(int &rotation) const; - int setJpegRotation(int rotation); + int getJpegRotation(int& rotation) const; + int setJpegRotation(int rotation); - int setJpegGpsCoordinates(const double *coordinates); - int getJpegGpsLatitude(double &latitude) const; - int getJpegGpsLongitude(double &longitude) const; - int getJpegGpsAltitude(double &altiude) const; + int setJpegGpsCoordinates(const double* coordinates); + int getJpegGpsLatitude(double& latitude) const; + int getJpegGpsLongitude(double& longitude) const; + int getJpegGpsAltitude(double& altiude) const; - int getJpegGpsTimeStamp(int64_t ×tamp) const; - int setJpegGpsTimeStamp(int64_t timestamp); + int getJpegGpsTimeStamp(int64_t& timestamp) const; + int setJpegGpsTimeStamp(int64_t timestamp); - int getJpegGpsProcessingMethod(int &processMethod) const; - int setJpegGpsProcessingMethod(int processMethod); + int getJpegGpsProcessingMethod(int& processMethod) const; + int setJpegGpsProcessingMethod(int processMethod); int getJpegGpsProcessingMethod(int size, char* processMethod) const; int setJpegGpsProcessingMethod(const char* processMethod); - int getImageEffect(camera_effect_mode_t &effect) const; - int setImageEffect(camera_effect_mode_t effect); + int getImageEffect(camera_effect_mode_t& effect) const; + int setImageEffect(camera_effect_mode_t effect); - int 
getVideoStabilizationMode(camera_video_stabilization_mode_t &mode) const; + int getVideoStabilizationMode(camera_video_stabilization_mode_t& mode) const; int setVideoStabilizationMode(camera_video_stabilization_mode_t mode); - int getFocalLength(float &focal) const; + int getFocalLength(float& focal) const; int setFocalLength(float focal); /** @@ -2325,7 +2324,7 @@ class Parameters { * * \return 0 if aperture was set, non=0 means no aperture was set */ - int getAperture(float &aperture) const; + int getAperture(float& aperture) const; /** * \brief Set aperture value * @@ -2342,7 +2341,7 @@ class Parameters { * * \return 0 if distance was set, non-0 means no focus distance was set */ - int getFocusDistance(float &distance) const; + int getFocusDistance(float& distance) const; /** * \brief Set focus distance value * @@ -2448,7 +2447,7 @@ class Parameters { * * \return 0 if set successfully, otherwise non-0 value is returned. */ - int getLensAperture(float &aperture) const; + int getLensAperture(float& aperture) const; /** * \brief Get lens filter density. @@ -2457,7 +2456,7 @@ class Parameters { * * \return 0 if set successfully, otherwise non-0 value is returned. */ - int getLensFilterDensity(float &filterDensity) const; + int getLensFilterDensity(float& filterDensity) const; /** * \brief Get lens min focus distance. @@ -2466,7 +2465,7 @@ class Parameters { * * \return 0 if set successfully, otherwise non-0 value is returned. */ - int getLensMinFocusDistance(float &minFocusDistance) const; + int getLensMinFocusDistance(float& minFocusDistance) const; /** * \brief Get lens hyperfocal distance. @@ -2475,7 +2474,7 @@ class Parameters { * * \return 0 if set successfully, otherwise non-0 value is returned. 
*/ - int getLensHyperfocalDistance(float &hyperfocalDistance) const; + int getLensHyperfocalDistance(float& hyperfocalDistance) const; /** * \brief Set af region @@ -2543,40 +2542,40 @@ class Parameters { int getCropRegion(camera_crop_region_t& cropRegion) const; /** - * \brief Set control scene mode - * - * \param[in] sceneModeValue the control scene mode related parameters - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set control scene mode + * + * \param[in] sceneModeValue the control scene mode related parameters + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setControlSceneMode(uint8_t sceneModeValue); /** - * \brief Set face detect mode - * - * \param[in] faceDetectMode the face detect mode related parameters - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set face detect mode + * + * \param[in] faceDetectMode the face detect mode related parameters + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setFaceDetectMode(uint8_t faceDetectMode); /** - * \brief Get face detect mode - * - * \param[out] faceDetectMode the face detect mode related parameters, 0:OFF 1:SIMPLE 2:FULL - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get face detect mode + * + * \param[out] faceDetectMode the face detect mode related parameters, 0:OFF 1:SIMPLE 2:FULL + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getFaceDetectMode(uint8_t& faceDetectMode) const; /** - * \brief Set face id - * - * \param[in] int *faceIds, int faceNum - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ - int setFaceIds(int *faceIds, int faceNum); + * \brief Set face id + * + * \param[in] int *faceIds, int faceNum + * + * \return 0 if successfully, otherwise non-0 value is returned. 
+ */ + int setFaceIds(int* faceIds, int faceNum); /** * Get sensor active array size @@ -2587,158 +2586,158 @@ class Parameters { int getSensorActiveArraySize(camera_coordinate_system_t& arraySize) const; /** - * \brief Set shading mode - * - * \param[in] shadingMode the shading mode related parameters - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set shading mode + * + * \param[in] shadingMode the shading mode related parameters + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setShadingMode(camera_shading_mode_t shadingMode); /** - * \brief Get shading mode - * - * \param[out] shadingMode the shading mode related parameters, 0:OFF 1:FAST 2:HIGH_QUALITY - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get shading mode + * + * \param[out] shadingMode the shading mode related parameters, 0:OFF 1:FAST 2:HIGH_QUALITY + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getShadingMode(camera_shading_mode_t& shadingMode) const; /** - * \brief Set statistics lens shading map mode - * - * \param[in] lensShadingMapMode the lens shading map mode related parameters - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set statistics lens shading map mode + * + * \param[in] lensShadingMapMode the lens shading map mode related parameters + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setLensShadingMapMode(camera_lens_shading_map_mode_type_t lensShadingMapMode); /** - * \brief Get statistics lens shading map mode - * - * \param[out] lensShadingMapMode the lens shading map mode related parameters, 0:OFF 1:ON - * - * \return 0 if successfully, otherwise non-0 value is returned. 
- */ - int getLensShadingMapMode(camera_lens_shading_map_mode_type_t &lensShadingMapMode) const; + * \brief Get statistics lens shading map mode + * + * \param[out] lensShadingMapMode the lens shading map mode related parameters, 0:OFF 1:ON + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ + int getLensShadingMapMode(camera_lens_shading_map_mode_type_t& lensShadingMapMode) const; /** - * \brief Set lens shading map - * - * \param[in] lensShadingMap the lens shading map - * \param[in] lensShadingMapSize lensShadingMap's size - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ - int setLensShadingMap(const float *lensShadingMap, size_t lensShadingMapSize); + * \brief Set lens shading map + * + * \param[in] lensShadingMap the lens shading map + * \param[in] lensShadingMapSize lensShadingMap's size + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ + int setLensShadingMap(const float* lensShadingMap, size_t lensShadingMapSize); /** - * \brief Get lens shading map - * - * \param[out] lensShadingMap the lens shading map - * \param[out] lensShadingMapSize the lens shading map's size - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ - int getLensShadingMap(float **lensShadingMap, size_t &lensShadingMapSize) const; + * \brief Get lens shading map + * + * \param[out] lensShadingMap the lens shading map + * \param[out] lensShadingMapSize the lens shading map's size + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ + int getLensShadingMap(float** lensShadingMap, size_t& lensShadingMapSize) const; /** - * \brief Get lens shading map size - * - * \param[out] arraySize the lens shading map size related parameters - * - * \return 0 if successfully, otherwise non-0 value is returned. 
- */ - int getLensInfoShadingMapSize(camera_coordinate_t &shadingMapSize) const; + * \brief Get lens shading map size + * + * \param[out] arraySize the lens shading map size related parameters + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ + int getLensInfoShadingMapSize(camera_coordinate_t& shadingMapSize) const; /* - * \brief Set tonemap mode - * - * \param[in] camera_tonemap_mode_t& mode - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set tonemap mode + * + * \param[in] camera_tonemap_mode_t& mode + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setTonemapMode(camera_tonemap_mode_t mode); /** - * \brief Get tonemap mode - * - * \param[out] camera_tonemap_mode_t& mode - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get tonemap mode + * + * \param[out] camera_tonemap_mode_t& mode + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getTonemapMode(camera_tonemap_mode_t& mode) const; /** - * \brief Get supported tonemap modes - * - * \param[out] vector& tonemapModes - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get supported tonemap modes + * + * \param[out] vector& tonemapModes + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getSupportedTonemapMode(std::vector& tonemapModes) const; /** - * \brief Set the type of tonemap preset curve - * - * \param[in] camera_tonemap_preset_curve_t type - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set the type of tonemap preset curve + * + * \param[in] camera_tonemap_preset_curve_t type + * + * \return 0 if successfully, otherwise non-0 value is returned. 
+ */ int setTonemapPresetCurve(camera_tonemap_preset_curve_t type); /** - * \brief Get tonemap gamma - * - * \param[out] camera_tonemap_preset_curve_t& type - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get tonemap gamma + * + * \param[out] camera_tonemap_preset_curve_t& type + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getTonemapPresetCurve(camera_tonemap_preset_curve_t& type) const; /** - * \brief Set tonemap gamma - * - * \param[in] float gamma - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set tonemap gamma + * + * \param[in] float gamma + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int setTonemapGamma(float gamma); /** - * \brief Get tonemap gamma - * - * \param[out] float& gamma - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get tonemap gamma + * + * \param[out] float& gamma + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getTonemapGamma(float& gamma) const; /** - * \brief Get number of tonemap curve points - * - * \param[out] int32_t& number - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get number of tonemap curve points + * + * \param[out] int32_t& number + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getTonemapMaxCurvePoints(int32_t& number) const; /** - * \brief Set tonemap curves - * - * \param[in] const camera_tonemap_curves_t& curve - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Set tonemap curves + * + * \param[in] const camera_tonemap_curves_t& curve + * + * \return 0 if successfully, otherwise non-0 value is returned. 
+ */ int setTonemapCurves(const camera_tonemap_curves_t& curves); /** - * \brief Get tonemap curves - * - * \param[out] camera_tonemap_curves_t& curve - * - * \return 0 if successfully, otherwise non-0 value is returned. - */ + * \brief Get tonemap curves + * + * \param[out] camera_tonemap_curves_t& curve + * + * \return 0 if successfully, otherwise non-0 value is returned. + */ int getTonemapCurves(camera_tonemap_curves_t& curves) const; /** @@ -2757,7 +2756,7 @@ class Parameters { * * \return 0 if power mode was set, otherwise non-0 value is returned. */ - int getPowerMode(camera_power_mode_t &mode) const; + int getPowerMode(camera_power_mode_t& mode) const; /** * \brief Set raw data output mode. @@ -2775,7 +2774,7 @@ class Parameters { * * \return 0 if raw data output mode was set, otherwise non-0 value is returned. */ - int getRawDataOutput(raw_data_output_t &mode) const; + int getRawDataOutput(raw_data_output_t& mode) const; /** * \brief Set total exposure target @@ -2793,7 +2792,7 @@ class Parameters { * * \return 0 if total exposure target was set, otherwise non-0 value is returned. */ - int getTotalExposureTarget(int64_t &totalExposureTarget) const; + int getTotalExposureTarget(int64_t& totalExposureTarget) const; /** * \brief Set user request id @@ -2847,7 +2846,7 @@ class Parameters { * * \return 0 if flag was set, otherwise non-0 value is returned. */ - int getCallbackRgbs(bool *enabled) const; + int getCallbackRgbs(bool* enabled) const; /** * \brief Set callback tonemap curve flags @@ -2865,9 +2864,9 @@ class Parameters { * * \return 0 if flag was set, otherwise non-0 value is returned. */ - int getCallbackTmCurve(bool *enabled) const; + int getCallbackTmCurve(bool* enabled) const; -// ENABLE_EVCP_S + // ENABLE_EVCP_S /** * \brief Set EVCP ECC status * @@ -2941,7 +2940,7 @@ class Parameters { * \return 0 if flag was set, otherwise non-0 value is returned. 
*/ int getEvcpFFMode(uint8_t* mode) const; -// ENABLE_EVCP_E + // ENABLE_EVCP_E /** * \brief Set scale & crop region @@ -2961,10 +2960,10 @@ class Parameters { */ int getZoomRegion(camera_zoom_region_t* region) const; -private: + private: friend class ParameterHelper; - void* mData; // The internal data to save the all of the parameters. -}; // class Parameters + void* mData; // The internal data to save the all of the parameters. +}; // class Parameters /*******************End of Camera Parameters Definition**********************/ } // namespace icamera diff --git a/include/api/intel_vendor_metadata_tags.h b/include/api/intel_vendor_metadata_tags.h index f0f1fc9e..7ef269e6 100644 --- a/include/api/intel_vendor_metadata_tags.h +++ b/include/api/intel_vendor_metadata_tags.h @@ -37,7 +37,7 @@ typedef enum vendor_metadata_section { * Hierarchy positions in enum space. */ typedef enum vendor_metadata_section_start { - INTEL_VENDOR_CAMERA_START = uint32_t(INTEL_VENDOR_CAMERA << 16), + INTEL_VENDOR_CAMERA_START = uint32_t(INTEL_VENDOR_CAMERA << 16), } vendor_metadata_section_start_t; /** @@ -47,32 +47,32 @@ typedef enum vendor_metadata_section_start { * src/metadata/vendor_metadata_tag_info.c */ typedef enum vendor_metadata_tag { - INTEL_VENDOR_CAMERA_CALLBACK_RGBS = // enum | public - INTEL_VENDOR_CAMERA_START, + INTEL_VENDOR_CAMERA_CALLBACK_RGBS = // enum | public + INTEL_VENDOR_CAMERA_START, INTEL_VENDOR_CAMERA_RGBS_GRID_SIZE, // int32[] | public INTEL_VENDOR_CAMERA_SHADING_CORRECTION, // enum | public INTEL_VENDOR_CAMERA_RGBS_STATS_BLOCKS, // byte[] | public INTEL_VENDOR_CAMERA_CALLBACK_TM_CURVE, // enum | public INTEL_VENDOR_CAMERA_TONE_MAP_CURVE, // float[] | public INTEL_VENDOR_CAMERA_POWER_MODE, // enum | public -// ENABLE_EVCP_S + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_CAPS, // int32 | public -// ENABLE_EVCP_E -// ENABLE_EVCP_S + // ENABLE_EVCP_E + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_PEACE_FEATURES, // int32[] | public -// ENABLE_EVCP_E -// ENABLE_EVCP_S + 
// ENABLE_EVCP_E + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_ECC_MODE, // enum | public -// ENABLE_EVCP_E -// ENABLE_EVCP_S + // ENABLE_EVCP_E + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_BC_MODE, // enum | public -// ENABLE_EVCP_E -// ENABLE_EVCP_S + // ENABLE_EVCP_E + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_BR_PARAMETERS, // int32[] | public -// ENABLE_EVCP_E -// ENABLE_EVCP_S + // ENABLE_EVCP_E + // ENABLE_EVCP_S INTEL_VENDOR_CAMERA_IC_FF_MODE, // enum | public -// ENABLE_EVCP_E + // ENABLE_EVCP_E INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET, // int64 | public INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET_RANGE, // int64[] | public INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT, // enum | public @@ -137,4 +137,3 @@ typedef enum vendor_metadata_enum_intel_vendor_camera_raw_data_output { INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT_OFF, INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT_ON, } vendor_metadata_enum_intel_vendor_camera_raw_data_output_t; - diff --git a/include/utils/ScopedAtrace.h b/include/utils/ScopedAtrace.h index ea58c307..312d9692 100644 --- a/include/utils/ScopedAtrace.h +++ b/include/utils/ScopedAtrace.h @@ -33,44 +33,41 @@ namespace icamera { * is enabled. 
*/ class ScopedAtrace { - public: - ScopedAtrace(const int level, const char* func, const char* tag, - const char* note = NULL, long value = -1, - const char* note2 = NULL, int value2 = -1, - const char* note3 = NULL, int value3 = -1); - ~ScopedAtrace(); - static void setTraceLevel(int); - private: - bool mEnableAtraceEnd; + public: + ScopedAtrace(const int level, const char* func, const char* tag, const char* note = NULL, + long value = -1, const char* note2 = NULL, int value2 = -1, + const char* note3 = NULL, int value3 = -1); + ~ScopedAtrace(); + static void setTraceLevel(int); + + private: + bool mEnableAtraceEnd; }; -#define CAMERA_DEBUG_LOG_ATRACE_OS (1<<4) -#define CAMERA_DEBUG_LOG_ATRACE_IMAGING (1<<7) +#define CAMERA_DEBUG_LOG_ATRACE_OS (1 << 4) +#define CAMERA_DEBUG_LOG_ATRACE_IMAGING (1 << 7) -#define PERF_CAMERA_ATRACE() ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, \ - __func__, PERF_LOG_TAG_STR(LOG_TAG)); -#define PERF_CAMERA_ATRACE_PARAM1(note, value) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, \ - PERF_LOG_TAG_STR(LOG_TAG), note, value); -#define PERF_CAMERA_ATRACE_PARAM2(note, value, note2, value2) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), \ - note, value, note2, value2); -#define PERF_CAMERA_ATRACE_PARAM3(note, value, note2, value2, note3, value3) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), \ - note, value, note2, value2, note3, value3); +#define PERF_CAMERA_ATRACE() \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG)); +#define PERF_CAMERA_ATRACE_PARAM1(note, value) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \ + value); +#define PERF_CAMERA_ATRACE_PARAM2(note, value, note2, value2) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \ + value, note2, value2); +#define PERF_CAMERA_ATRACE_PARAM3(note, value, 
note2, value2, note3, value3) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \ + value, note2, value2, note3, value3); #define PERF_CAMERA_ATRACE_IMAGING() \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \ - PERF_LOG_TAG_STR(LOG_TAG)); -#define PERF_CAMERA_ATRACE_PARAM1_IMAGING(note, value) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \ - PERF_LOG_TAG_STR(LOG_TAG), note, value); -#define PERF_CAMERA_ATRACE_PARAM2_IMAGING(note, value, note2, value2) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \ - PERF_LOG_TAG_STR(LOG_TAG), note, value, note2, value2); -#define PERF_CAMERA_ATRACE_PARAM3_IMAGING(note, value, note2, value2, note3, \ - value3) \ - ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \ - PERF_LOG_TAG_STR(LOG_TAG), note, value, note2, value2, note3, \ - value3); -} // namespace icamera + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG)); +#define PERF_CAMERA_ATRACE_PARAM1_IMAGING(note, value) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \ + note, value); +#define PERF_CAMERA_ATRACE_PARAM2_IMAGING(note, value, note2, value2) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \ + note, value, note2, value2); +#define PERF_CAMERA_ATRACE_PARAM3_IMAGING(note, value, note2, value2, note3, value3) \ + ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \ + note, value, note2, value2, note3, value3); +} // namespace icamera diff --git a/modules/algowrapper/IntelCca.cpp b/modules/algowrapper/IntelCca.cpp index 9e4a2bb6..5b907f9f 100644 --- a/modules/algowrapper/IntelCca.cpp +++ b/modules/algowrapper/IntelCca.cpp @@ -33,7 +33,7 @@ IntelCca* IntelCca::getInstance(int cameraId, TuningMode mode) { sCcaInstance.size()); AutoMutex lock(sLock); - for (auto &it : sCcaInstance) { + for 
(auto& it : sCcaInstance) { if (cameraId == it.cameraId) { if (it.ccaHandle.find(mode) == it.ccaHandle.end()) { it.ccaHandle[mode] = new IntelCca(cameraId, mode); @@ -54,9 +54,9 @@ void IntelCca::releaseInstance(int cameraId, TuningMode mode) { LOG2("@%s, tuningMode:%d", cameraId, __func__, mode); AutoMutex lock(sLock); - for (auto &it : sCcaInstance) { + for (auto& it : sCcaInstance) { if (cameraId == it.cameraId && it.ccaHandle.find(mode) != it.ccaHandle.end()) { - IntelCca *cca = it.ccaHandle[mode]; + IntelCca* cca = it.ccaHandle[mode]; it.ccaHandle.erase(mode); delete cca; } @@ -66,8 +66,8 @@ void IntelCca::releaseInstance(int cameraId, TuningMode mode) { void IntelCca::releaseAllInstances() { AutoMutex lock(sLock); LOG2("@%s, cca instance size:%zu", __func__, sCcaInstance.size()); - for (auto &it : sCcaInstance) { - for (auto &oneCcaHandle : it.ccaHandle) { + for (auto& it : sCcaInstance) { + for (auto& oneCcaHandle : it.ccaHandle) { IntelCca* intelCca = oneCcaHandle.second; delete intelCca; } @@ -75,9 +75,7 @@ void IntelCca::releaseAllInstances() { } } -IntelCca::IntelCca(int cameraId, TuningMode mode) : - mCameraId(cameraId), - mTuningMode(mode) { +IntelCca::IntelCca(int cameraId, TuningMode mode) : mCameraId(cameraId), mTuningMode(mode) { mIntelCCA = nullptr; } @@ -140,15 +138,15 @@ ia_err IntelCca::runLTM(uint64_t frameId, const cca::cca_ltm_input_params& param return ret; } -ia_err IntelCca::updateZoom(const cca::cca_dvs_zoom& params) { - ia_err ret = getIntelCCA()->updateZoom(params); +ia_err IntelCca::updateZoom(uint32_t streamId, const cca::cca_dvs_zoom& params) { + ia_err ret = getIntelCCA()->updateZoom(streamId, params); LOG2("@%s, ret:%d", __func__, ret); return ret; } -ia_err IntelCca::runDVS(uint64_t frameId) { - ia_err ret = getIntelCCA()->runDVS(frameId); +ia_err IntelCca::runDVS(uint32_t streamId, uint64_t frameId) { + ia_err ret = getIntelCCA()->runDVS(streamId, frameId); LOG2("@%s, ret:%d", __func__, ret); return ret; @@ -213,7 +211,7 @@ bool 
IntelCca::allocStatsDataMem(unsigned int size) { for (int i = 0; i < kMaxQueueSize; i++) { void* p = malloc(size); CheckAndLogError(!p, false, "failed to malloc stats buffer"); - StatsBufInfo info = { size, p, 0 }; + StatsBufInfo info = {size, p, 0}; int64_t index = i * (-1) - 1; // default index list: -1, -2, -3, ... mMemStatsInfoMap[index] = info; @@ -243,8 +241,8 @@ void* IntelCca::getStatsDataBuffer() { } void IntelCca::decodeHwStatsDone(int64_t sequence, unsigned int byteUsed) { - LOG2("@%s, tuningMode:%d, sequence:%ld, byteUsed:%d", mCameraId, __func__, - mTuningMode, sequence, byteUsed); + LOG2("@%s, tuningMode:%d, sequence:%ld, byteUsed:%d", mCameraId, __func__, mTuningMode, + sequence, byteUsed); AutoMutex l(mMemStatsMLock); if (mMemStatsInfoMap.empty()) return; @@ -296,8 +294,8 @@ uint32_t IntelCca::getPalDataSize(const cca::cca_program_group& programGroup) { } void* IntelCca::allocMem(int streamId, const std::string& name, int index, int size) { - LOG1("@%s, name:%s, index: %d, streamId: %d, size: %d", __func__, - name.c_str(), index, streamId, size); + LOG1("@%s, name:%s, index: %d, streamId: %d, size: %d", __func__, name.c_str(), index, streamId, + size); return calloc(1, size); } diff --git a/modules/algowrapper/IntelCca.h b/modules/algowrapper/IntelCca.h index 642bcde5..8bc56c00 100644 --- a/modules/algowrapper/IntelCca.h +++ b/modules/algowrapper/IntelCca.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2020-2021 Intel Corporation. + * Copyright (C) 2020-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -48,9 +48,9 @@ class IntelCca { ia_err runLTM(uint64_t frameId, const cca::cca_ltm_input_params& params); - ia_err updateZoom(const cca::cca_dvs_zoom& params); + ia_err updateZoom(uint32_t streamId, const cca::cca_dvs_zoom& params); - ia_err runDVS(uint64_t frameId); + ia_err runDVS(uint32_t streamId, uint64_t frameId); ia_err runAIC(uint64_t frameId, const cca::cca_pal_input_params* params, ia_binary_data* pal); @@ -81,8 +81,8 @@ class IntelCca { void freeStatsDataMem(); private: - int mCameraId; - TuningMode mTuningMode; + int mCameraId; + TuningMode mTuningMode; // Only 3 buffers will be held in AiqResultStorage (kAiqResultStorageSize is 3), // So it is safe to use other 3 buffers. diff --git a/modules/algowrapper/IntelEvcp.cpp b/modules/algowrapper/IntelEvcp.cpp index ecf5963d..bcc3d5f2 100644 --- a/modules/algowrapper/IntelEvcp.cpp +++ b/modules/algowrapper/IntelEvcp.cpp @@ -42,16 +42,16 @@ bool IntelEvcp::runEvcpFrame(void* inBufAddr, int size) { const int CACHE_LINE_SIZE_FOR_ADL = 64; const int CACHE_LINE_MASK_FOR_ADL = CACHE_LINE_SIZE_FOR_ADL - 1; - char* p = reinterpret_cast(reinterpret_cast(start) & - ~CACHE_LINE_MASK_FOR_ADL); + char* p = + reinterpret_cast(reinterpret_cast(start) & ~CACHE_LINE_MASK_FOR_ADL); char* end = reinterpret_cast(start) + sz; - asm volatile("mfence" :::"memory"); + asm volatile("mfence" ::: "memory"); while (p < end) { asm volatile("clflush (%0)" ::"r"(p)); p += CACHE_LINE_SIZE_FOR_ADL; } - asm volatile("mfence" :::"memory"); + asm volatile("mfence" ::: "memory"); return true; }; diff --git a/modules/algowrapper/IntelFaceDetection.cpp b/modules/algowrapper/IntelFaceDetection.cpp index 5175bef3..7a60eb8c 100644 --- a/modules/algowrapper/IntelFaceDetection.cpp +++ b/modules/algowrapper/IntelFaceDetection.cpp @@ -26,11 +26,9 @@ #include "iutils/CameraLog.h" namespace icamera { -IntelFaceDetection::IntelFaceDetection() : mFDHandle(nullptr), mMaxFacesNum(0) { -} +IntelFaceDetection::IntelFaceDetection() : mFDHandle(nullptr), 
mMaxFacesNum(0) {} -IntelFaceDetection::~IntelFaceDetection() { -} +IntelFaceDetection::~IntelFaceDetection() {} status_t IntelFaceDetection::init(FaceDetectionInitParams* pData, int dataSize) { CheckAndLogError(!pData, UNKNOWN_ERROR, "pData is nullptr"); @@ -67,7 +65,7 @@ status_t IntelFaceDetection::init(FaceDetectionInitParams* pData, int dataSize) status_t IntelFaceDetection::deinit(FaceDetectionDeinitParams* pData, int dataSize) { CheckAndLogError(!pData, UNKNOWN_ERROR, "pData is nullptr"); CheckAndLogError(dataSize < static_cast(sizeof(FaceDetectionDeinitParams)), UNKNOWN_ERROR, - "buffer is small"); + "buffer is small"); LOG1("@%s", pData->cameraId, __func__); if (mFDHandle) { diff --git a/modules/algowrapper/IntelPGParam.cpp b/modules/algowrapper/IntelPGParam.cpp index 3e47ce86..7c3f375a 100644 --- a/modules/algowrapper/IntelPGParam.cpp +++ b/modules/algowrapper/IntelPGParam.cpp @@ -109,8 +109,7 @@ int IntelPGParam::getFragmentDescriptors(int descCount, ia_p2p_fragment_desc* de CheckAndLogError(!terminal, BAD_VALUE, "terminal is nullptr"); int termIdx = terminal->tm_index; - if (!IS_DATA_TERMINAL(mPgReqs.terminals[termIdx].type)) - continue; + if (!IS_DATA_TERMINAL(mPgReqs.terminals[termIdx].type)) continue; if (mFragmentConfig) { int kernelId = kernel_id_ffs(mPgReqs.terminals[termIdx].kernelBitmap); @@ -244,19 +243,18 @@ int IntelPGParam::prepare(const ia_binary_data* ipuParameters, const ia_css_rbm_ ret = ia_p2p_get_kernel_terminal_requirements(mP2pHandle, mPgId, (uint32_t)kernelId, &mKernel.mSections[kernelId]); CheckAndLogError(ret != ia_err_none, ret, - "%s: failed to get requirements for pg %d kernel %d", __func__, - mPgId, kernelId); + "%s: failed to get requirements for pg %d kernel %d", __func__, mPgId, + kernelId); /* Get payload descriptor */ - ret = ia_p2p_get_kernel_payload_desc( - mP2pHandle, mPgId, (uint32_t)kernelId, + ret = ia_p2p_get_kernel_payload_desc(mP2pHandle, mPgId, (uint32_t)kernelId, #if defined(IPU_SYSVER_IPU6) && 
defined(UNIFIED_PROG_TERM_FRAG_DESC) - 1, + 1, #else - mFragmentCount, + mFragmentCount, #endif - mFragmentConfig->pixel_fragment_descs[kernelId], - &mKernel.mPayloads[kernelId]); + mFragmentConfig->pixel_fragment_descs[kernelId], + &mKernel.mPayloads[kernelId]); CheckAndLogError(ret != ia_err_none, ret, "%s: failed to get payload for pg %d kernel %d, ret %d", __func__, mPgId, kernelId, ret); @@ -456,8 +454,7 @@ int IntelPGParam::allocatePayloads(int payloadCount, ia_binary_data* payloads) { for (int idx = 0; idx < payloadCount; idx++) { ia_binary_data payload = {nullptr, payloads[idx].size}; if (payload.size) { - payload.data = CIPR::mallocAlignedMemory(PAGE_ALIGN(payload.size), - CIPR::getPageSize()); + payload.data = CIPR::mallocAlignedMemory(PAGE_ALIGN(payload.size), CIPR::getPageSize()); CheckAndLogError(!payload.data, BAD_VALUE, "no memory for payload size %d!", payload.size); mAllocatedPayloads.push_back(payload); @@ -469,8 +466,7 @@ int IntelPGParam::allocatePayloads(int payloadCount, ia_binary_data* payloads) { void IntelPGParam::destroyPayloads() { while (!mAllocatedPayloads.empty()) { - if (mAllocatedPayloads.back().data) - CIPR::freeMemory(mAllocatedPayloads.back().data); + if (mAllocatedPayloads.back().data) CIPR::freeMemory(mAllocatedPayloads.back().data); mAllocatedPayloads.pop_back(); } } @@ -533,7 +529,7 @@ int IntelPGParam::encodeTerminal(ia_css_terminal_t* terminal, ia_binary_data pay mP2pHandle, mPgId, mFragmentCount, mFragmentConfig, mPgReqs.terminals[terminalIndex].userParamAddress.get()); CheckAndLogError(ret != ia_err_none, ret, - "Failed to call ia_p2p_get_kernel_user_parameters_v2."); + "Failed to call ia_p2p_get_kernel_user_parameters_v2."); ia_css_kernel_user_param_t* userParam = reinterpret_cast( mPgReqs.terminals[terminalIndex].userParamAddress.get()); @@ -697,9 +693,10 @@ int IntelPGParam::decodeTerminal(ia_css_terminal_t* terminal, ia_binary_data pay /* Use specific ordering of kernels when available */ if 
(mPgReqs.terminals[terminalIndex].kernelOrder) { kernelId = mPgReqs.terminals[terminalIndex].kernelOrder[kernelIndex++].id; - CheckAndLogError(kernelId >= PSYS_MAX_KERNELS_PER_PG, css_err_internal, - "%s: Kernel bitmap for terminal %d covers more kernels than in manifest", - __func__, terminalIndex); + CheckAndLogError( + kernelId >= PSYS_MAX_KERNELS_PER_PG, css_err_internal, + "%s: Kernel bitmap for terminal %d covers more kernels than in manifest", __func__, + terminalIndex); } else { kernelId = getKernelIdByBitmap(kernelBitmap); } @@ -1057,26 +1054,24 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren ia_p2p_payload_desc init = mKernel.mPayloads[kernelId]; /* calculate again the memory requirements for each kernel * and compare it with what we stored at init time. */ - ia_err ia_ret = ia_p2p_get_kernel_payload_desc( - mP2pHandle, mPgId, kernelId, + ia_err ia_ret = + ia_p2p_get_kernel_payload_desc(mP2pHandle, mPgId, kernelId, #if defined(IPU_SYSVER_IPU6) && defined(UNIFIED_PROG_TERM_FRAG_DESC) - 1, + 1, #else - mFragmentCount, + mFragmentCount, #endif - mFragmentConfig->pixel_fragment_descs[kernelId], - current); + mFragmentConfig->pixel_fragment_descs[kernelId], current); CheckAndLogError(ia_ret != ia_err_none, css_err_internal, "Failed to get payload description during sanity check (kernel %d)", kernelId); switch (mPgReqs.terminals[terminalIndex].type) { case IA_CSS_TERMINAL_TYPE_PARAM_CACHED_IN: if (current->param_in_payload_size > init.param_in_payload_size) { - LOGW( - "%s: param-in section size mismatch in pg[%d] kernel[%d]" - " p2p size %d pg_die size %d", - __func__, mPgId, kernelId, current->param_in_payload_size, - init.param_in_payload_size); + LOGW("%s: param-in section size mismatch in pg[%d] kernel[%d]" + " p2p size %d pg_die size %d", + __func__, mPgId, kernelId, current->param_in_payload_size, + init.param_in_payload_size); } else { current->param_in_payload_size = init.param_in_payload_size; } @@ -1084,11 +1079,10 
@@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren break; case IA_CSS_TERMINAL_TYPE_PARAM_CACHED_OUT: if (current->param_out_payload_size > init.param_out_payload_size) { - LOGW( - "%s: param-out section size mismatch in pg[%d] kernel[%d]" - " p2p size %d pg_die size %d", - __func__, mPgId, kernelId, current->param_out_payload_size, - init.param_out_payload_size); + LOGW("%s: param-out section size mismatch in pg[%d] kernel[%d]" + " p2p size %d pg_die size %d", + __func__, mPgId, kernelId, current->param_out_payload_size, + init.param_out_payload_size); } else { current->param_out_payload_size = init.param_out_payload_size; } @@ -1096,11 +1090,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren break; case IA_CSS_TERMINAL_TYPE_PROGRAM: if (current->program_payload_size > init.program_payload_size) { - LOG1( - "%s: program section size mismatch in pg[%d] kernel[%d]" - " p2p size %d pg_die size %d", - __func__, mPgId, kernelId, current->program_payload_size, - init.program_payload_size); + LOG1("%s: program section size mismatch in pg[%d] kernel[%d]" + " p2p size %d pg_die size %d", + __func__, mPgId, kernelId, current->program_payload_size, + init.program_payload_size); } else { current->program_payload_size = init.program_payload_size; } @@ -1108,11 +1101,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren break; case IA_CSS_TERMINAL_TYPE_PARAM_SPATIAL_IN: if (current->spatial_param_in_payload_size > init.spatial_param_in_payload_size) { - LOGW( - "%s: spatial-in section size mismatch in pg[%d] kernel[%d]" - " p2p size %d pg_die size %d", - __func__, mPgId, kernelId, current->spatial_param_in_payload_size, - init.spatial_param_in_payload_size); + LOGW("%s: spatial-in section size mismatch in pg[%d] kernel[%d]" + " p2p size %d pg_die size %d", + __func__, mPgId, kernelId, current->spatial_param_in_payload_size, + init.spatial_param_in_payload_size); } else { 
current->spatial_param_in_payload_size = init.spatial_param_in_payload_size; } @@ -1120,11 +1112,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren break; case IA_CSS_TERMINAL_TYPE_PARAM_SPATIAL_OUT: if (current->spatial_param_out_payload_size > init.spatial_param_out_payload_size) { - LOGW( - "%s: spatial-out section size mismatch in pg[%d] kernel[%d]" - " p2p size %d pg_die size %d", - __func__, mPgId, kernelId, current->spatial_param_out_payload_size, - init.spatial_param_out_payload_size); + LOGW("%s: spatial-out section size mismatch in pg[%d] kernel[%d]" + " p2p size %d pg_die size %d", + __func__, mPgId, kernelId, current->spatial_param_out_payload_size, + init.spatial_param_out_payload_size); } else { current->spatial_param_out_payload_size = init.spatial_param_out_payload_size; } diff --git a/modules/algowrapper/IntelTNR7US.cpp b/modules/algowrapper/IntelTNR7US.cpp index 17998bd2..404d7a8e 100644 --- a/modules/algowrapper/IntelTNR7US.cpp +++ b/modules/algowrapper/IntelTNR7US.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2020-2021 Intel Corporation + * Copyright (C) 2020-2022 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -120,9 +120,9 @@ int IntelTNR7US::runTnrFrame(const void* inBufAddr, void* outBufAddr, uint32_t i struct timespec beginTime = {}; if (Log::isLogTagEnabled(ST_GPU_TNR)) clock_gettime(CLOCK_MONOTONIC, &beginTime); /* call Tnr api to run tnr for the inSurface and store the result in outSurface */ - int ret = run_tnr7us_frame(mWidth, CM_SURFACE_ALIGN_HEIGHT(mHeight), mWidth, inSurface, - outSurface, &tnrParam->scale, &tnrParam->ims, &tnrParam->bc, - &tnrParam->blend, syncUpdate, mTnrType); + int ret = + run_tnr7us_frame(mWidth, mHeight, mWidth, inSurface, outSurface, &tnrParam->scale, + &tnrParam->ims, &tnrParam->bc, &tnrParam->blend, syncUpdate, mTnrType); if (fd >= 0) { destroyCMSurface(outSurface); } @@ -185,8 +185,7 @@ CmSurface2DUP* IntelTNR7US::getBufferCMSurface(void* bufAddr) { CmSurface2DUP* IntelTNR7US::createCMSurface(void* bufAddr) { PERF_CAMERA_ATRACE(); CmSurface2DUP* cmSurface = nullptr; - int32_t ret = createCmSurface2DUP(mWidth, CM_SURFACE_ALIGN_HEIGHT(mHeight), - CM_SURFACE_FORMAT_NV12, bufAddr, cmSurface); + int32_t ret = createCmSurface2DUP(mWidth, mHeight, CM_SURFACE_FORMAT_NV12, bufAddr, cmSurface); CheckAndLogError(ret != 0, nullptr, "failed to create CmSurface2DUP object"); return cmSurface; } diff --git a/modules/algowrapper/graph/GraphConfigImpl.cpp b/modules/algowrapper/graph/GraphConfigImpl.cpp index 6d250077..5a037ddb 100644 --- a/modules/algowrapper/graph/GraphConfigImpl.cpp +++ b/modules/algowrapper/graph/GraphConfigImpl.cpp @@ -93,7 +93,7 @@ void GraphConfigImpl::addCustomKeyMap() { */ #define GCSS_KEY(key, str) std::make_pair(#str, GCSS_KEY_##key), map CUSTOM_GRAPH_KEYS = { - #include "custom_gcss_keys.h" +#include "custom_gcss_keys.h" }; #undef GCSS_KEY @@ -360,9 +360,9 @@ status_t GraphConfigImpl::getRawInputSize(GCSS::IGraphConfig* query, camera_reso return UNKNOWN_ERROR; } -status_t GraphConfigImpl::queryAllMatchedResults(const std::vector& activeStreams, - bool dummyStillSink, - std::map> *queryResults) { +status_t 
GraphConfigImpl::queryAllMatchedResults( + const std::vector& activeStreams, bool dummyStillSink, + std::map>* queryResults) { CheckAndLogError(!queryResults, UNKNOWN_ERROR, "%s, The queryResults is nullptr", __func__); status_t ret = createQueryRule(activeStreams, dummyStillSink); @@ -395,7 +395,7 @@ status_t GraphConfigImpl::queryAllMatchedResults(const std::vector& } bool GraphConfigImpl::queryGraphSettings(const std::vector& activeStreams) { - std::map > useCaseToQueryResults; + std::map> useCaseToQueryResults; status_t ret = queryAllMatchedResults(activeStreams, false, &useCaseToQueryResults); return ret == OK ? true : false; } @@ -407,7 +407,7 @@ status_t GraphConfigImpl::configStreams(const vector& activeStreams, bool dummyStillSink) { HAL_TRACE_CALL(CAMERA_DEBUG_LOG_LEVEL1); - map > useCaseToQueryResults; + map> useCaseToQueryResults; status_t ret = queryAllMatchedResults(activeStreams, dummyStillSink, &useCaseToQueryResults); CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Faild to queryAllMatchedResults", __func__); // Filter the results with same isys output if there are @@ -434,7 +434,7 @@ status_t GraphConfigImpl::configStreams(const vector& activeStreams, camera_resolution_t stillReso; ret = getRawInputSize(still, &stillReso); CheckAndLogError(ret != OK, UNKNOWN_ERROR, - "%s, Failed to get csi ouput resolution for still pipe", __func__); + "%s, Failed to get csi ouput resolution for still pipe", __func__); LOG2("Isys output resolution for still pipe: %dx%d", stillReso.width, stillReso.height); @@ -559,7 +559,7 @@ string GraphConfigImpl::format2GraphBpp(int format) { * Do the secondary filter: configMode and stream format. 
*/ status_t GraphConfigImpl::selectSetting( - int useCase, std::map >* queryResults) { + int useCase, std::map>* queryResults) { CheckAndLogError(!queryResults, UNKNOWN_ERROR, "%s, The queryResults is nullptr", __func__); string opMode; vector internalQueryResults; @@ -603,9 +603,8 @@ status_t GraphConfigImpl::selectSetting( string bpp = format2GraphBpp(s->format()); queryItem[bppKey] = bpp; - LOG2("The stream: %dx%d, format: %s, graphFmt: %s, bpp: %s", - s->width(), s->height(), CameraUtils::format2string(s->format()).c_str(), - fmt.c_str(), bpp.c_str()); + LOG2("The stream: %dx%d, format: %s, graphFmt: %s, bpp: %s", s->width(), s->height(), + CameraUtils::format2string(s->format()).c_str(), fmt.c_str(), bpp.c_str()); } LOG1("dumpQuery with format condition"); @@ -833,11 +832,11 @@ status_t GraphConfigImpl::pipelineGetConnections( std::vector stillScalerInfo, videoScalerInfo; std::vector stillTnrPortFmt, videoTnrPortFmt; - int ret = videoGraphPipe->pipelineGetConnections(pgList, &videoScalerInfo, - &videoConnVector, &videoTnrPortFmt); + int ret = videoGraphPipe->pipelineGetConnections(pgList, &videoScalerInfo, &videoConnVector, + &videoTnrPortFmt); CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the connetction from video pipe"); - ret = stillGraphPipe->pipelineGetConnections(pgList, &stillScalerInfo, - &stillConnVector, &stillTnrPortFmt); + ret = stillGraphPipe->pipelineGetConnections(pgList, &stillScalerInfo, &stillConnVector, + &stillTnrPortFmt); CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the connetction from still pipe"); LOG2("The connetction in video: %zu, in still: %zu; the scalera in video: %zu, in still: %zu", diff --git a/modules/algowrapper/graph/GraphConfigImpl.h b/modules/algowrapper/graph/GraphConfigImpl.h index daee690c..224e65f4 100644 --- a/modules/algowrapper/graph/GraphConfigImpl.h +++ b/modules/algowrapper/graph/GraphConfigImpl.h @@ -44,7 +44,7 @@ namespace icamera { #define GCSS_KEY(key, str) GCSS_KEY_##key, enum 
AndroidGraphConfigKey { GCSS_ANDROID_KEY_START = GCSS_KEY_START_CUSTOM_KEYS, - #include "custom_gcss_keys.h" +#include "custom_gcss_keys.h" }; #undef GCSS_KEY @@ -112,13 +112,14 @@ class GraphConfigImpl { status_t prepareGraphConfig(); bool isVideoStream(HalStream* stream); status_t selectSetting(int useCase, - std::map >* queryResults); + std::map>* queryResults); status_t queryGraphs(const std::vector& activeStreams, bool dummyStillSink); status_t createQueryRule(const std::vector& activeStreams, bool dummyStillSink); status_t getRawInputSize(GCSS::IGraphConfig* query, camera_resolution_t* reso); status_t queryAllMatchedResults(const std::vector& activeStreams, - bool dummyStillSink, std::map> *queryResults); + bool dummyStillSink, + std::map>* queryResults); status_t getGdcKernelSetting(uint32_t* kernelId, ia_isp_bxt_resolution_info_t* resolution); status_t graphGetStreamIds(std::vector* streamIds); int getStreamIdByPgName(std::string pgName); @@ -152,7 +153,7 @@ class GraphConfigImpl { * - The first item of mQuery is stream useCase(VIDEO or STILL), * - and the second is an query rule map(GCSS_KEY_, VALUE). */ - std::map > mQuery; + std::map> mQuery; /** * Map to get the virtual sink id from a client stream pointer. @@ -163,7 +164,7 @@ class GraphConfigImpl { * - The first item is streams useCase(VIDEO or STILL) * - and the second is the stream to virtual sink map */ - std::map > mStreamToSinkIdMap; + std::map> mStreamToSinkIdMap; /* * This vector is used to store the first query result. 
@@ -176,7 +177,7 @@ class GraphConfigImpl { std::map mQueryResult; // The stream useCase to GraphConfigPipe map - std::map > mGraphConfigPipe; + std::map> mGraphConfigPipe; ConfigMode mConfigMode; GraphSettingType mType; diff --git a/modules/algowrapper/graph/GraphConfigPipe.cpp b/modules/algowrapper/graph/GraphConfigPipe.cpp index 2c9845f0..96330cd4 100644 --- a/modules/algowrapper/graph/GraphConfigPipe.cpp +++ b/modules/algowrapper/graph/GraphConfigPipe.cpp @@ -61,9 +61,7 @@ uint32_t pppKernel[PPP_KERNEL_SIZE] = {ia_pal_uuid_isp_sc_outputscaler_ppp, ia_pal_uuid_isp_sc_outputscaler_ppp_1_1}; uint32_t dsKernel[DS_KERNEL_SIZE] = {ia_pal_uuid_isp_b2i_ds_1_0_0, ia_pal_uuid_isp_b2i_ds_1_0_1}; -GraphConfigPipe::GraphConfigPipe(int pipeUseCase) - : mSettings(nullptr), - mPipeUseCase(pipeUseCase) { +GraphConfigPipe::GraphConfigPipe(int pipeUseCase) : mSettings(nullptr), mPipeUseCase(pipeUseCase) { mCsiOutput = {0, 0}; } @@ -216,7 +214,7 @@ status_t GraphConfigPipe::getActiveOutputPorts(const StreamToSinkMap& streamToSi ret = sink->getValue(GCSS_KEY_STREAM_ID, streamId); CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to get stream id", __func__); - Node *outputPort = getOutputPortForSink(sinkName); + Node* outputPort = getOutputPortForSink(sinkName); CheckAndLogError(!outputPort, BAD_VALUE, "%s, No output port found for sink", __func__); LOG2("%s, sink name: %s, stream id: %d, output port name: %s", __func__, sinkName.c_str(), @@ -369,12 +367,12 @@ status_t GraphConfigPipe::getPgRbmValue(string pgName, IGraphType::StageAttr* st if (ret != css_err_none) return NAME_NOT_FOUND; GCSS::GraphCameraUtil mGCSSCameraUtil; - void *rbmAddr = mGCSSCameraUtil.numString2binary(rbmString, &stageAttr->rbm_bytes); + void* rbmAddr = mGCSSCameraUtil.numString2binary(rbmString, &stageAttr->rbm_bytes); CheckAndLogError(!rbmAddr, NO_MEMORY, "%s get rbm value: %s", __func__, rbmString.c_str()); if (stageAttr->rbm_bytes > MAX_RBM_STR_SIZE) { - LOGE("%s, memory is too small to save 
rbm value: %d, %d", __func__, - stageAttr->rbm_bytes, MAX_RBM_STR_SIZE); + LOGE("%s, memory is too small to save rbm value: %d, %d", __func__, stageAttr->rbm_bytes, + MAX_RBM_STR_SIZE); stageAttr->rbm_bytes = 0; return NO_MEMORY; } @@ -392,7 +390,7 @@ status_t GraphConfigPipe::getScalerKernelResolutionRatio(uint32_t* kenerArray, u const ia_isp_bxt_resolution_info_t* resolutionInfo; resolutionInfo = getScalerKernelResolutionInfo(kenerArray, sizeArray); - if (!resolutionInfo) return OK; // no scaling in current setting + if (!resolutionInfo) return OK; // no scaling in current setting *widthRatio = 1.0; *heightRatio = 1.0; @@ -586,8 +584,8 @@ status_t GraphConfigPipe::getPgIdForKernel(const uint32_t streamId, const int32_ if (ret != css_err_none) continue; ret = ndVec->getValue(GCSS_KEY_PG_ID, *pgId); - CheckAndLogError(ret != css_err_none, BAD_VALUE, - "Couldn't get pg id for kernel: %d", kernelId); + CheckAndLogError(ret != css_err_none, BAD_VALUE, "Couldn't get pg id for kernel: %d", + kernelId); LOG2("got the pgid:%d for kernel id:%d in stream:%d", *pgId, kernelId, streamId); return OK; @@ -825,8 +823,8 @@ status_t GraphConfigPipe::getPrivatePortFormat(Node* port, } ia_uid stageId; - status_t status = GCSS::GraphCameraUtil::portGetFourCCInfo(port, stageId, - format.formatSetting.terminalId); + status_t status = + GCSS::GraphCameraUtil::portGetFourCCInfo(port, stageId, format.formatSetting.terminalId); CheckAndLogError(status != OK, INVALID_OPERATION, "Failed to get port uid", __func__); ret = port->getValue(GCSS_KEY_WIDTH, format.formatSetting.width); CheckAndLogError(ret != css_err_none, BAD_VALUE, "Failed to get port width", __func__); @@ -838,8 +836,8 @@ status_t GraphConfigPipe::getPrivatePortFormat(Node* port, CheckAndLogError(ret != css_err_none, BAD_VALUE, "Failed to find port fourcc", __func__); format.formatSetting.fourcc = CameraUtils::string2IaFourccCode(fourccFormat.c_str()); - format.formatSetting.bpl = 
CameraUtils::getBpl(format.formatSetting.fourcc, - format.formatSetting.width); + format.formatSetting.bpl = + CameraUtils::getBpl(format.formatSetting.fourcc, format.formatSetting.width); format.formatSetting.bpp = CameraUtils::getBpp(format.formatSetting.fourcc); LOG2("%s, Tnr ref out: streamId: %d, %dx%d, terminalId: %d, fmt: %s, bpp: %d, bpl: %d", @@ -1112,7 +1110,7 @@ int32_t GraphConfigPipe::getTuningMode(const int32_t streamId) { ret = result->getValue(GCSS_KEY_STREAM_ID, graphStreamId); if (ret == css_err_none && graphStreamId == streamId && graphStreamId != -1) { - GraphConfigNode *tuningModeNode = nullptr; + GraphConfigNode* tuningModeNode = nullptr; ret = result->getDescendant(GCSS_KEY_TUNING_MODE, &tuningModeNode); if (ret == css_err_none && tuningModeNode) { string tuningModeStr; @@ -1215,8 +1213,8 @@ status_t GraphConfigPipe::portGetPeer(Node* port, Node** peer) { CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to get peer attribute", __func__); ret = mSettings->getDescendantByString(peerName, peer); - CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to find peer by name %s", - __func__, peerName.c_str()); + CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to find peer by name %s", __func__, + peerName.c_str()); return OK; } @@ -1263,8 +1261,8 @@ status_t GraphConfigPipe::portGetConnection(Node* port, // input port is the sink in a connection status = GCSS::GraphCameraUtil::portGetFourCCInfo(port, connectionInfo->mSinkStage, connectionInfo->mSinkTerminal); - CheckAndLogError(status != OK, BAD_VALUE, - "%s, Failed to create fourcc info for sink port", __func__); + CheckAndLogError(status != OK, BAD_VALUE, "%s, Failed to create fourcc info for sink port", + __func__); if (*peerPort != nullptr && !portIsVirtual(*peerPort)) { status = GCSS::GraphCameraUtil::portGetFourCCInfo( @@ -1279,8 +1277,8 @@ status_t GraphConfigPipe::portGetConnection(Node* port, // output port is the source in a connection status = 
GCSS::GraphCameraUtil::portGetFourCCInfo(port, connectionInfo->mSourceStage, connectionInfo->mSourceTerminal); - CheckAndLogError(status != OK, BAD_VALUE, - "%s, Failed to create fourcc info for sink port", __func__); + CheckAndLogError(status != OK, BAD_VALUE, "%s, Failed to create fourcc info for sink port", + __func__); if (*peerPort != nullptr && !portIsVirtual(*peerPort)) { status = GCSS::GraphCameraUtil::portGetFourCCInfo(*peerPort, connectionInfo->mSinkStage, @@ -1412,15 +1410,15 @@ int32_t GraphConfigPipe::portGetDirection(Node* port) { * \return BAD_VALUE if any of the graph queries failed. */ status_t GraphConfigPipe::portGetFullName(Node* port, string* fullName) { - CheckAndLogError(!fullName || !port, UNKNOWN_ERROR, - "%s, the fullName or port is nullptr", __func__); + CheckAndLogError(!fullName || !port, UNKNOWN_ERROR, "%s, the fullName or port is nullptr", + __func__); string portName, ancestorName; Node* ancestor; css_err_t ret = css_err_none; ret = port->getAncestor(&ancestor); - CheckAndLogError(ret != css_err_none, BAD_VALUE, - "%s, Failed to retrieve port ancestor", __func__); + CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to retrieve port ancestor", + __func__); ret = ancestor->getValue(GCSS_KEY_NAME, ancestorName); if (ret != css_err_none) { @@ -1430,8 +1428,7 @@ status_t GraphConfigPipe::portGetFullName(Node* port, string* fullName) { } ret = port->getValue(GCSS_KEY_NAME, portName); - CheckAndLogError(ret != css_err_none, BAD_VALUE, - "%s, Failed to retrieve port name", __func__); + CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to retrieve port name", __func__); *fullName = ancestorName + ":" + portName; return OK; @@ -1493,8 +1490,8 @@ bool GraphConfigPipe::portIsVirtual(Node* port) { */ status_t GraphConfigPipe::portGetClientStream(Node* port, HalStream** stream) { CheckAndLogError(!port || !stream, BAD_VALUE, "%s, Invalid parameters", __func__); - CheckAndLogError(!portIsVirtual(port), INVALID_OPERATION, - 
"%s, port is not a virtual port", __func__); + CheckAndLogError(!portIsVirtual(port), INVALID_OPERATION, "%s, port is not a virtual port", + __func__); string portName; css_err_t ret = port->getValue(GCSS_KEY_NAME, portName); @@ -1532,8 +1529,8 @@ bool GraphConfigPipe::portIsEdgePort(Node* port) { LOG2("port is disabled, so it is an edge port"); return true; } - CheckAndLogError(status != OK, false, - "%s, Failed to create fourcc info for source port", __func__); + CheckAndLogError(status != OK, false, "%s, Failed to create fourcc info for source port", + __func__); streamId = portGetStreamId(port); if (streamId < 0) return false; diff --git a/modules/ia_cipr/include/ipu-psys.h b/modules/ia_cipr/include/ipu-psys.h index 657027a5..60fbc241 100644 --- a/modules/ia_cipr/include/ipu-psys.h +++ b/modules/ia_cipr/include/ipu-psys.h @@ -21,40 +21,40 @@ #define _UAPI_IPU_PSYS_H #include struct ipu_psys_capability { -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t version; - uint8_t driver[20]; - uint32_t pg_count; - uint8_t dev_model[32]; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t reserved[17]; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t version; + uint8_t driver[20]; + uint32_t pg_count; + uint8_t dev_model[32]; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t reserved[17]; } __attribute__((packed)); struct ipu_psys_event { - uint32_t type; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint64_t user_token; - uint64_t issue_id; - uint32_t buffer_idx; - uint32_t error; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - int32_t reserved[2]; + uint32_t type; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint64_t user_token; + uint64_t issue_id; + uint32_t buffer_idx; + uint32_t error; + /* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int32_t reserved[2]; } __attribute__((packed)); #define IPU_PSYS_EVENT_TYPE_CMD_COMPLETE 1 #define IPU_PSYS_EVENT_TYPE_BUFFER_COMPLETE 2 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ struct ipu_psys_buffer { - uint64_t len; - union { - int fd; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - void* userptr; - uint64_t reserved; - } base; - uint32_t data_offset; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t bytes_used; - uint32_t flags; - uint32_t reserved[2]; + uint64_t len; + union { + int fd; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + void* userptr; + uint64_t reserved; + } base; + uint32_t data_offset; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t bytes_used; + uint32_t flags; + uint32_t reserved[2]; } __attribute__((packed)); /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ #define IPU_BUFFER_FLAG_INPUT (1 << 0) @@ -70,33 +70,33 @@ struct ipu_psys_buffer { #define IPU_PSYS_CMD_PRIORITY_LOW 2 #define IPU_PSYS_CMD_PRIORITY_NUM 3 struct ipu_psys_command { - uint64_t issue_id; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint64_t user_token; - uint32_t priority; - void* pg_manifest; - struct ipu_psys_buffer* buffers; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - int pg; - uint32_t pg_manifest_size; - uint32_t bufcount; - uint32_t min_psys_freq; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t frame_counter; - uint32_t kernel_enable_bitmap[4]; - uint32_t terminal_enable_bitmap[4]; - uint32_t routing_enable_bitmap[4]; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t rbm[5]; - uint32_t reserved[2]; + uint64_t issue_id; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ + uint64_t user_token; + uint32_t priority; + void* pg_manifest; + struct ipu_psys_buffer* buffers; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int pg; + uint32_t pg_manifest_size; + uint32_t bufcount; + uint32_t min_psys_freq; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t frame_counter; + uint32_t kernel_enable_bitmap[4]; + uint32_t terminal_enable_bitmap[4]; + uint32_t routing_enable_bitmap[4]; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t rbm[5]; + uint32_t reserved[2]; } __attribute__((packed)); struct ipu_psys_manifest { -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ - uint32_t index; - uint32_t size; - void* manifest; - uint32_t reserved[5]; -/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + uint32_t index; + uint32_t size; + void* manifest; + uint32_t reserved[5]; + /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ } __attribute__((packed)); #define IPU_IOC_QUERYCAP _IOR('A', 1, struct ipu_psys_capability) #define IPU_IOC_MAPBUF _IOWR('A', 2, int) diff --git a/modules/ia_cipr/src/Buffer.cpp b/modules/ia_cipr/src/Buffer.cpp index d331dfac..611f0dfb 100644 --- a/modules/ia_cipr/src/Buffer.cpp +++ b/modules/ia_cipr/src/Buffer.cpp @@ -22,8 +22,8 @@ #include "iutils/Utils.h" #include "modules/ia_cipr/include/Context.h" -using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_ERR; +using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_WARNING; namespace icamera { @@ -98,8 +98,7 @@ Result Buffer::validateBuffer(const MemoryDesc* memory) { valid &= false; } - bool haveMemory = mem->flags & MemoryFlag::Allocated || - mem->flags & MemoryFlag::MemoryFromUser; + bool haveMemory = mem->flags & MemoryFlag::Allocated || mem->flags & 
MemoryFlag::MemoryFromUser; if (!haveMemory && ((mem->flags & MemoryFlag::MemoryHandle) || (mem->flags & MemoryFlag::CpuPtr))) { valid &= false; diff --git a/modules/ia_cipr/src/Command.cpp b/modules/ia_cipr/src/Command.cpp index 52f1db3f..1a17ea0e 100644 --- a/modules/ia_cipr/src/Command.cpp +++ b/modules/ia_cipr/src/Command.cpp @@ -23,8 +23,8 @@ #include "iutils/CameraLog.h" #include "iutils/Utils.h" -using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_ERR; +using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_WARNING; namespace icamera { @@ -79,19 +79,19 @@ Result Command::updateKernel(const PSysCommandConfig& cfg, const MemoryDesc& mem ProcessGroupCommand* ppg_command_ext = reinterpret_cast(memory.cpuPtr); CheckAndLogError(ppg_command_ext->header.size != memory.size || - ppg_command_ext->header.offset != sizeof(PSysCmdExtHeader) || - (ppg_command_ext->header.version != psys_command_ext_ppg_0 && - ppg_command_ext->header.version != psys_command_ext_ppg_1), + ppg_command_ext->header.offset != sizeof(PSysCmdExtHeader) || + (ppg_command_ext->header.version != psys_command_ext_ppg_0 && + ppg_command_ext->header.version != psys_command_ext_ppg_1), Result::InvaildArg, "Invalid command extension buffer received! 
(%p)", cfg.extBuf); if (ppg_command_ext->header.version == psys_command_ext_ppg_1) { CheckAndLogError(sizeof(mCmd->iocCmd.kernel_enable_bitmap) != - sizeof(ppg_command_ext->dynamicKernelBitmap), Result::DataError, - "Invalid bitmap buffer size"); - MEMCPY_S( - &(mCmd->iocCmd.kernel_enable_bitmap), sizeof(mCmd->iocCmd.kernel_enable_bitmap), - ppg_command_ext->dynamicKernelBitmap, sizeof(ppg_command_ext->dynamicKernelBitmap)); + sizeof(ppg_command_ext->dynamicKernelBitmap), + Result::DataError, "Invalid bitmap buffer size"); + MEMCPY_S(&(mCmd->iocCmd.kernel_enable_bitmap), sizeof(mCmd->iocCmd.kernel_enable_bitmap), + ppg_command_ext->dynamicKernelBitmap, + sizeof(ppg_command_ext->dynamicKernelBitmap)); } mCmd->iocCmd.frame_counter = static_cast(ppg_command_ext->frameCounter); diff --git a/modules/ia_cipr/src/Context.cpp b/modules/ia_cipr/src/Context.cpp index 85af000e..1896491c 100644 --- a/modules/ia_cipr/src/Context.cpp +++ b/modules/ia_cipr/src/Context.cpp @@ -29,8 +29,8 @@ #include "iutils/CameraLog.h" #include "iutils/Utils.h" -using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_ERR; +using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_WARNING; #include "modules/ia_cipr/include/Context.h" @@ -120,13 +120,13 @@ Result Context::getCapabilities(PSYSCapability* cap) { cap->version = psys_capability.version; CheckAndLogError(sizeof(cap->driver) != sizeof(psys_capability.driver), Result::DataError, "the driver array size wasn't matching"); - MEMCPY_S(cap->driver, sizeof(cap->driver), - psys_capability.driver, sizeof(psys_capability.driver)); + MEMCPY_S(cap->driver, sizeof(cap->driver), psys_capability.driver, + sizeof(psys_capability.driver)); CheckAndLogError(sizeof(cap->devModel) != sizeof(psys_capability.dev_model), Result::DataError, "the dev model array size wasn't matching"); - MEMCPY_S(cap->devModel, sizeof(cap->devModel), - psys_capability.dev_model, sizeof(psys_capability.dev_model)); + MEMCPY_S(cap->devModel, 
sizeof(cap->devModel), psys_capability.dev_model, + sizeof(psys_capability.dev_model)); cap->programGroupCount = psys_capability.pg_count; diff --git a/modules/ia_cipr/src/Event.cpp b/modules/ia_cipr/src/Event.cpp index d14bce35..6e6a08de 100644 --- a/modules/ia_cipr/src/Event.cpp +++ b/modules/ia_cipr/src/Event.cpp @@ -31,8 +31,8 @@ #include "iutils/CameraLog.h" #include "iutils/Utils.h" -using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_ERR; +using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_WARNING; #include "modules/ia_cipr/include/Context.h" diff --git a/modules/ia_cipr/src/Utils.cpp b/modules/ia_cipr/src/Utils.cpp index c5c43d22..003f9a84 100644 --- a/modules/ia_cipr/src/Utils.cpp +++ b/modules/ia_cipr/src/Utils.cpp @@ -23,8 +23,8 @@ #include "iutils/CameraLog.h" #include "iutils/Utils.h" -using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_ERR; +using icamera::CAMERA_DEBUG_LOG_INFO; using icamera::CAMERA_DEBUG_LOG_WARNING; namespace icamera { diff --git a/src/3a/AiqCore.cpp b/src/3a/AiqCore.cpp index 438d138e..7e825ca5 100644 --- a/src/3a/AiqCore.cpp +++ b/src/3a/AiqCore.cpp @@ -20,6 +20,7 @@ #include +#include #include #include @@ -101,8 +102,7 @@ int AiqCore::initAiqPlusParams() { } mGbceParams.gbce_on = (tonemapMaxCurvePoints > 0) ? 
true : false; mGbceParams.athena_mode = PlatformData::getPLCEnable(mCameraId); - LOG1("%s, gbce_on: %d, plc enable: %d", __func__, mGbceParams.gbce_on, - mGbceParams.athena_mode); + LOG1("%s, gbce_on: %d, plc enable: %d", __func__, mGbceParams.gbce_on, mGbceParams.athena_mode); // HDR_FEATURE_S if (PlatformData::getSensorAeEnable(mCameraId)) { @@ -308,7 +308,6 @@ int AiqCore::runAe(long requestId, AiqResult* aiqResult) { CheckAndLogError(!aiqResult, BAD_VALUE, "@%s, aiqResult is nullptr", __func__); LOG2("@%s, aiqResult %p", requestId, __func__, aiqResult); - // run AE return runAEC(requestId, &aiqResult->mAeResults); } @@ -316,7 +315,6 @@ int AiqCore::runAe(long requestId, AiqResult* aiqResult) { int AiqCore::runAiq(long requestId, AiqResult* aiqResult) { CheckAndLogError(!aiqResult, BAD_VALUE, "@%s, aiqResult is nullptr", __func__); - int aaaRunType = IMAGING_ALGO_AWB | IMAGING_ALGO_GBCE | IMAGING_ALGO_PA; if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) { aaaRunType |= IMAGING_ALGO_AF; @@ -696,7 +694,8 @@ bool AiqCore::bypassAe(const aiq_parameter_t& param) { // run AE if manual AE or total exposure target is set if (param.aeMode != AE_MODE_AUTO || param.powerMode != CAMERA_LOW_POWER || - param.totalExposureTarget > 0) return false; + param.totalExposureTarget > 0) + return false; bool converged = mLastAeResult.exposures[0].converged; diff --git a/src/3a/AiqEngine.cpp b/src/3a/AiqEngine.cpp index d39d736e..3944a6fc 100644 --- a/src/3a/AiqEngine.cpp +++ b/src/3a/AiqEngine.cpp @@ -116,8 +116,8 @@ int AiqEngine::run3A(long requestId, int64_t applyingSeq, int64_t* effectSeq) { AutoMutex l(mEngineLock); AiqStatistics* aiqStats = - mFirstAiqRunning ? nullptr - : const_cast(mAiqResultStorage->getAndLockAiqStatistics()); + mFirstAiqRunning ? 
nullptr : + const_cast(mAiqResultStorage->getAndLockAiqStatistics()); AiqState state = AIQ_STATE_IDLE; AiqResult* aiqResult = mAiqResultStorage->acquireAiqResult(); @@ -155,7 +155,6 @@ int AiqEngine::run3A(long requestId, int64_t applyingSeq, int64_t* effectSeq) { mAiqRunningHistory.statsSequnce); } - PlatformData::saveMakernoteData(mCameraId, aiqResult->mAiqParam.makernoteMode, mAiqResultStorage->getAiqResult()->mSequence, aiqResult->mTuningMode); @@ -174,8 +173,8 @@ void AiqEngine::handleEvent(EventData eventData) { mLensManager->handleSofEvent(eventData); } -int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams, - AiqStatistics* aiqStatistics, AiqResult* aiqResult) { +int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams, AiqStatistics* aiqStatistics, + AiqResult* aiqResult) { LOG2("%s, sequence %ld", __func__, aiqStatistics->mSequence); // update face detection related parameters @@ -218,7 +217,7 @@ int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams, if (PlatformData::isDvsSupported(mCameraId) && PlatformData::getGraphConfigNodes(mCameraId)) { std::shared_ptr gc = nullptr; - IGraphConfigManager *GCM = IGraphConfigManager::getInstance(mCameraId); + IGraphConfigManager* GCM = IGraphConfigManager::getInstance(mCameraId); if (GCM) { gc = GCM->getGraphConfig(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL); } @@ -399,9 +398,9 @@ AiqEngine::AiqState AiqEngine::handleAiqResult(AiqResult* aiqResult) { LOG2("%s: aiqResult->mTuningMode = %d", __func__, aiqResult->mTuningMode); // HDR_FEATURE_S - aec_scene_t aecScene = (aiqResult->mAeResults.multiframe == ia_aiq_bracket_mode_ull) - ? AEC_SCENE_ULL - : AEC_SCENE_HDR; + aec_scene_t aecScene = (aiqResult->mAeResults.multiframe == ia_aiq_bracket_mode_ull) ? 
+ AEC_SCENE_ULL : + AEC_SCENE_HDR; mAiqSetting->updateTuningMode(aecScene); // HDR_FEATURE_E @@ -433,8 +432,8 @@ int AiqEngine::applyManualTonemaps(AiqResult* aiqResult) { aiqResult->mAiqParam.tonemapMode == TONEMAP_MODE_HIGH_QUALITY) { aiqResult->mGbceResults.have_manual_settings = false; - if (aiqResult->mAiqParam.aeMode != AE_MODE_AUTO && aiqResult->mAiqParam.manualIso != 0 - && aiqResult->mAiqParam.manualExpTimeUs != 0) { + if (aiqResult->mAiqParam.aeMode != AE_MODE_AUTO && aiqResult->mAiqParam.manualIso != 0 && + aiqResult->mAiqParam.manualExpTimeUs != 0) { aiqResult->mGbceResults.have_manual_settings = true; } } diff --git a/src/3a/AiqResult.cpp b/src/3a/AiqResult.cpp index a7c19013..48a56bbe 100644 --- a/src/3a/AiqResult.cpp +++ b/src/3a/AiqResult.cpp @@ -23,18 +23,18 @@ namespace icamera { -AiqResult::AiqResult(int cameraId) : - mCameraId(cameraId), - mTimestamp(0), - mSequence(-1), - mFrameId(-1), - mTuningMode(TUNING_MODE_VIDEO), - mAfDistanceDiopters(0.0f), - mSkip(false), - mLensPosition(0), - mSceneMode(SCENE_MODE_AUTO), - mFrameDuration(0), - mRollingShutter(0) { +AiqResult::AiqResult(int cameraId) + : mCameraId(cameraId), + mTimestamp(0), + mSequence(-1), + mFrameId(-1), + mTuningMode(TUNING_MODE_VIDEO), + mAfDistanceDiopters(0.0f), + mSkip(false), + mLensPosition(0), + mSceneMode(SCENE_MODE_AUTO), + mFrameDuration(0), + mRollingShutter(0) { CLEAR(mCustomControls); CLEAR(mCustomControlsParams); CLEAR(mAwbResults); @@ -74,7 +74,7 @@ int AiqResult::deinit() { return OK; } -AiqResult &AiqResult::operator=(const AiqResult &other) { +AiqResult& AiqResult::operator=(const AiqResult& other) { mCameraId = other.mCameraId; mSequence = other.mSequence; mFrameId = other.mFrameId; @@ -98,8 +98,8 @@ AiqResult &AiqResult::operator=(const AiqResult &other) { for (int i = 0; i < mCustomControls.count; i++) { mCustomControlsParams[i] = other.mCustomControlsParams[i]; } - MEMCPY_S(mLensShadingMap, sizeof(mLensShadingMap), - other.mLensShadingMap, 
sizeof(other.mLensShadingMap)); + MEMCPY_S(mLensShadingMap, sizeof(mLensShadingMap), other.mLensShadingMap, + sizeof(other.mLensShadingMap)); mAiqParam = other.mAiqParam; mFrameDuration = other.mFrameDuration; diff --git a/src/3a/AiqResult.h b/src/3a/AiqResult.h index 715bfe89..3fb75d9f 100644 --- a/src/3a/AiqResult.h +++ b/src/3a/AiqResult.h @@ -34,8 +34,7 @@ namespace icamera { * Then we can do deep copy of the results */ class AiqResult { - -public: + public: AiqResult(int cameraId); ~AiqResult(); @@ -44,7 +43,7 @@ class AiqResult { AiqResult& operator=(const AiqResult& other); -public: + public: int mCameraId; unsigned long long mTimestamp; int64_t mSequence; @@ -72,10 +71,9 @@ class AiqResult { int64_t mFrameDuration; // us int64_t mRollingShutter; // us -private: + private: /*!< ia_isp_custom_controls pointer content */ float mCustomControlsParams[MAX_CUSTOM_CONTROLS_PARAM_SIZE]; - }; } /* namespace icamera */ diff --git a/src/3a/AiqResultStorage.cpp b/src/3a/AiqResultStorage.cpp index cb4f8984..6e55a96e 100644 --- a/src/3a/AiqResultStorage.cpp +++ b/src/3a/AiqResultStorage.cpp @@ -36,8 +36,7 @@ void AiqResultStorage::releaseAiqResultStorage(int cameraId) { delete storage; } -AiqResultStorage::AiqResultStorage(int cameraId) : - mCameraId(cameraId) { +AiqResultStorage::AiqResultStorage(int cameraId) : mCameraId(cameraId) { for (int i = 0; i < kStorageSize; i++) { mAiqResults[i] = new AiqResult(mCameraId); mAiqResults[i]->init(); @@ -82,11 +81,10 @@ void AiqResultStorage::resetAiqStatistics() { const AiqStatistics* AiqResultStorage::getAndLockAiqStatistics() { AutoRMutex rlock(mDataLock); - if (mCurrentAiqStatsIndex == -1) - return nullptr; + if (mCurrentAiqStatsIndex == -1) return nullptr; - CheckAndLogError(mAiqStatistics[mCurrentAiqStatsIndex].mSequence == -1, - nullptr, "Invalid sequence id -1 of stored aiq statistics"); + CheckAndLogError(mAiqStatistics[mCurrentAiqStatsIndex].mSequence == -1, nullptr, + "Invalid sequence id -1 of stored aiq statistics"); 
mAiqStatistics[mCurrentAiqStatsIndex].mInUse = true; return &mAiqStatistics[mCurrentAiqStatsIndex]; @@ -150,5 +148,4 @@ AiqResultStorage* AiqResultStorage::getInstanceLocked(int cameraId) { return sInstances[cameraId]; } -} //namespace icamera - +} // namespace icamera diff --git a/src/3a/AiqResultStorage.h b/src/3a/AiqResultStorage.h index 9a2f94d3..70c0af4c 100644 --- a/src/3a/AiqResultStorage.h +++ b/src/3a/AiqResultStorage.h @@ -43,7 +43,7 @@ namespace icamera { * its static methods getInstance and releaseAiqResultStorage. */ class AiqResultStorage { -public: + public: /** * \brief Get internal instance for cameraId. * @@ -84,7 +84,8 @@ class AiqResultStorage { * param[in] int64_t sequence: specify which aiq result is needed. * * return 1. when sequence id is -1 or not provided, the lastest result will be returned. - * 2. when sequence id is larger than -1, the result with gaven sequence id will be returned. + * 2. when sequence id is larger than -1, the result with gaven sequence id will be + * returned. * 3. if cannot find in result storage, it means either sequence id is too old and its * result was overrided, or the sequence id is too new, and its result has not been * saved into storage yet. For both cases, nullptr will be returned. @@ -125,27 +126,27 @@ class AiqResultStorage { */ void resetAiqStatistics(); -private: + private: AiqResultStorage(int cameraId); ~AiqResultStorage(); static AiqResultStorage* getInstanceLocked(int cameraId); -private: + private: static std::map sInstances; // Guard for singleton creation. 
static Mutex sLock; int mCameraId; - RWLock mDataLock; // lock for all the data storage below + RWLock mDataLock; // lock for all the data storage below - static const int kStorageSize = MAX_SETTING_COUNT; // Should > MAX_BUFFER_COUNT + sensorLag + static const int kStorageSize = MAX_SETTING_COUNT; // Should > MAX_BUFFER_COUNT + sensorLag int mCurrentIndex = -1; AiqResult* mAiqResults[kStorageSize]; - static const int kAiqStatsStorageSize = 3; // Always use the latest, but may hold for long time + static const int kAiqStatsStorageSize = 3; // Always use the latest, but may hold for long time int mCurrentAiqStatsIndex = -1; AiqStatistics mAiqStatistics[kAiqStatsStorageSize]; }; -} //namespace icamera +} // namespace icamera diff --git a/src/3a/AiqSetting.cpp b/src/3a/AiqSetting.cpp index 12e8159e..0b6d1bef 100644 --- a/src/3a/AiqSetting.cpp +++ b/src/3a/AiqSetting.cpp @@ -27,12 +27,9 @@ namespace icamera { -AiqSetting::AiqSetting(int cameraId) : - mCameraId(cameraId) { -} +AiqSetting::AiqSetting(int cameraId) : mCameraId(cameraId) {} -AiqSetting::~AiqSetting() { -} +AiqSetting::~AiqSetting() {} int AiqSetting::init(void) { AutoWMutex wlock(mParamLock); @@ -53,7 +50,7 @@ int AiqSetting::deinit(void) { return OK; } -int AiqSetting::configure(const stream_config_t *streamList) { +int AiqSetting::configure(const stream_config_t* streamList) { AutoWMutex wlock(mParamLock); camera_resolution_t resolution = {streamList->streams[0].width, streamList->streams[0].height}; @@ -90,21 +87,21 @@ int AiqSetting::configure(const stream_config_t *streamList) { mAiqParam.tuningMode = mTuningModes[0]; } LOG1("%s, tuningMode %d, configMode %x, fame usage %d, res %dx%d", __func__, - mAiqParam.tuningMode, configModes[0], mAiqParam.frameUsage, - mAiqParam.resolution.width, mAiqParam.resolution.height); + mAiqParam.tuningMode, configModes[0], mAiqParam.frameUsage, mAiqParam.resolution.width, + mAiqParam.resolution.height); return OK; } -void AiqSetting::updateFrameUsage(const 
stream_config_t *streamList) { +void AiqSetting::updateFrameUsage(const stream_config_t* streamList) { bool preview = false, still = false, video = false; for (int i = 0; i < streamList->num_streams; i++) { if (streamList->streams[i].usage == CAMERA_STREAM_VIDEO_CAPTURE) { video = true; } else if (streamList->streams[i].usage == CAMERA_STREAM_STILL_CAPTURE) { still = true; - } else if (streamList->streams[i].usage == CAMERA_STREAM_PREVIEW - || streamList->streams[i].usage == CAMERA_STREAM_APP) { + } else if (streamList->streams[i].usage == CAMERA_STREAM_PREVIEW || + streamList->streams[i].usage == CAMERA_STREAM_APP) { preview = true; } } @@ -146,8 +143,8 @@ int AiqSetting::setParameters(const Parameters& params) { mAiqParam.evShift = 0.0; } else { ev = CLIP(ev, mAiqParam.evRange.max, mAiqParam.evRange.min); - mAiqParam.evShift = static_cast(ev) * - mAiqParam.evStep.numerator / mAiqParam.evStep.denominator; + mAiqParam.evShift = + static_cast(ev) * mAiqParam.evStep.numerator / mAiqParam.evStep.denominator; } params.getFrameRate(mAiqParam.fps); @@ -221,8 +218,8 @@ int AiqSetting::setParameters(const Parameters& params) { CheckWarningNoReturn(curves.bSize > DEFAULT_TONEMAP_CURVE_POINT_NUM, "user v curve size is too big %d", curves.bSize); int curveSize = sizeof(float) * DEFAULT_TONEMAP_CURVE_POINT_NUM; - MEMCPY_S(&mAiqParam.tonemapCurveMem[0], curveSize, - curves.rCurve, sizeof(float) * curves.rSize); + MEMCPY_S(&mAiqParam.tonemapCurveMem[0], curveSize, curves.rCurve, + sizeof(float) * curves.rSize); MEMCPY_S(&mAiqParam.tonemapCurveMem[DEFAULT_TONEMAP_CURVE_POINT_NUM], curveSize, curves.gCurve, sizeof(float) * curves.gSize); MEMCPY_S(&mAiqParam.tonemapCurveMem[DEFAULT_TONEMAP_CURVE_POINT_NUM * 2], curveSize, @@ -240,19 +237,19 @@ int AiqSetting::setParameters(const Parameters& params) { uint8_t captureIntent = 0; if (params.getCaptureIntent(captureIntent) == OK) { switch (captureIntent) { - case CAMERA_CONTROL_CAPTUREINTENT_STILL_CAPTURE: - mAiqParam.frameUsage = 
FRAME_USAGE_STILL; - break; - case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_RECORD: - case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_SNAPSHOT: - mAiqParam.frameUsage = FRAME_USAGE_VIDEO; - break; - case CAMERA_CONTROL_CAPTUREINTENT_PREVIEW: - mAiqParam.frameUsage = FRAME_USAGE_PREVIEW; - break; - default: - mAiqParam.frameUsage = FRAME_USAGE_CONTINUOUS; - break; + case CAMERA_CONTROL_CAPTUREINTENT_STILL_CAPTURE: + mAiqParam.frameUsage = FRAME_USAGE_STILL; + break; + case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_RECORD: + case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_SNAPSHOT: + mAiqParam.frameUsage = FRAME_USAGE_VIDEO; + break; + case CAMERA_CONTROL_CAPTUREINTENT_PREVIEW: + mAiqParam.frameUsage = FRAME_USAGE_PREVIEW; + break; + default: + mAiqParam.frameUsage = FRAME_USAGE_CONTINUOUS; + break; } } @@ -264,7 +261,7 @@ int AiqSetting::setParameters(const Parameters& params) { return OK; } -int AiqSetting::getAiqParameter(aiq_parameter_t ¶m) { +int AiqSetting::getAiqParameter(aiq_parameter_t& param) { AutoRMutex rlock(mParamLock); param = mAiqParam; @@ -276,9 +273,8 @@ int AiqSetting::getAiqParameter(aiq_parameter_t ¶m) { based on AE result. Current it only has HDR and ULL mode switching case, this maybe changed if more cases are supported. 
*/ void AiqSetting::updateTuningMode(aec_scene_t aecScene) { - if (!PlatformData::isEnableHDR(mCameraId) - || mTuningModes.size() <= 1 - || mAiqParam.aeMode != AE_MODE_AUTO) { + if (!PlatformData::isEnableHDR(mCameraId) || mTuningModes.size() <= 1 || + mAiqParam.aeMode != AE_MODE_AUTO) { return; } @@ -290,7 +286,7 @@ void AiqSetting::updateTuningMode(aec_scene_t aecScene) { } bool found = false; - for (auto &tMode : mTuningModes) { + for (auto& tMode : mTuningModes) { // Check tuningMode if support or not if (tMode == tuningMode) { found = true; @@ -320,12 +316,12 @@ void aiq_parameter_t::reset() { evStep = {1, 3}; evRange = {-6, 6}; fps = 0; - aeFpsRange = { 0.0, 0.0 }; + aeFpsRange = {0.0, 0.0}; antibandingMode = ANTIBANDING_MODE_AUTO; - cctRange = { 0, 0 }; - whitePoint = { 0, 0 }; - awbManualGain = { 0, 0, 0 }; - awbGainShift = { 0, 0, 0 }; + cctRange = {0, 0}; + whitePoint = {0, 0}; + awbManualGain = {0, 0, 0}; + awbGainShift = {0, 0, 0}; CLEAR(manualColorMatrix); CLEAR(manualColorGains); aeRegions.clear(); @@ -388,38 +384,36 @@ void aiq_parameter_t::dump() { LOG3("converge speed mode: ae %d, awb %d", aeConvergeSpeedMode, awbConvergeSpeedMode); LOG3("converge speed: ae %d, awb %d", aeConvergeSpeed, awbConvergeSpeed); - LOG3("EV:%f, range (%f-%f), step %d/%d", evShift, evRange.min, evRange.max, - evStep.numerator, evStep.denominator); - LOG3("manualExpTimeUs:%ld, time range (%f-%f)", manualExpTimeUs, - exposureTimeRange.min, exposureTimeRange.max); + LOG3("EV:%f, range (%f-%f), step %d/%d", evShift, evRange.min, evRange.max, evStep.numerator, + evStep.denominator); + LOG3("manualExpTimeUs:%ld, time range (%f-%f)", manualExpTimeUs, exposureTimeRange.min, + exposureTimeRange.max); LOG3("manualGain %f, manualIso %d, gain range (%f-%f)", manualGain, manualIso, sensitivityGainRange.min, sensitivityGainRange.max); LOG3("FPS %f, range (%f-%f)", fps, aeFpsRange.min, aeFpsRange.max); - for (auto ®ion : aeRegions) { - LOG3("ae region (%d, %d, %d, %d, %d)", - region.left, 
region.top, region.right, region.bottom, region.weight); + for (auto& region : aeRegions) { + LOG3("ae region (%d, %d, %d, %d, %d)", region.left, region.top, region.right, region.bottom, + region.weight); } LOG3("Antibanding mode:%d", antibandingMode); LOG3("AE Distribution Priority:%d", aeDistributionPriority); LOG3("cctRange:(%f-%f)", cctRange.min, cctRange.max); LOG3("manual awb: white point:(%d,%d)", whitePoint.x, whitePoint.y); - LOG3("manual awb gain:(%d,%d,%d), gain shift:(%d,%d,%d)", - awbManualGain.r_gain, awbManualGain.g_gain, awbManualGain.b_gain, - awbGainShift.r_gain, awbGainShift.g_gain, awbGainShift.b_gain); + LOG3("manual awb gain:(%d,%d,%d), gain shift:(%d,%d,%d)", awbManualGain.r_gain, + awbManualGain.g_gain, awbManualGain.b_gain, awbGainShift.r_gain, awbGainShift.g_gain, + awbGainShift.b_gain); for (int i = 0; i < 3; i++) { - LOG3("manual color matrix: [%.3f %.3f %.3f]", - manualColorMatrix.color_transform[i][0], - manualColorMatrix.color_transform[i][1], - manualColorMatrix.color_transform[i][2]); + LOG3("manual color matrix: [%.3f %.3f %.3f]", manualColorMatrix.color_transform[i][0], + manualColorMatrix.color_transform[i][1], manualColorMatrix.color_transform[i][2]); } - LOG3("manual color gains in rggb:(%.3f,%.3f,%.3f,%.3f)", - manualColorGains.color_gains_rggb[0], manualColorGains.color_gains_rggb[1], - manualColorGains.color_gains_rggb[2], manualColorGains.color_gains_rggb[3]); + LOG3("manual color gains in rggb:(%.3f,%.3f,%.3f,%.3f)", manualColorGains.color_gains_rggb[0], + manualColorGains.color_gains_rggb[1], manualColorGains.color_gains_rggb[2], + manualColorGains.color_gains_rggb[3]); - for (auto ®ion : afRegions) { - LOG3("af region (%d, %d, %d, %d, %d)", - region.left, region.top, region.right, region.bottom, region.weight); + for (auto& region : afRegions) { + LOG3("af region (%d, %d, %d, %d, %d)", region.left, region.top, region.right, region.bottom, + region.weight); } LOG3("manual focus distance: %f, min focus distance: %f", 
focusDistance, minFocusDistance); LOG3("Focus position %d, start timestamp %llu", lensPosition, lensMovementStartTimestamp); @@ -440,14 +434,14 @@ void aiq_parameter_t::dump() { LOG3("DVS mode %d", videoStabilizationMode); LOG3("makernoteMode %d", makernoteMode); - LOG3("shadingMode %d, lensShadingMapMode %d, size %dx%d", shadingMode, - lensShadingMapMode, lensShadingMapSize.x, lensShadingMapSize.y); + LOG3("shadingMode %d, lensShadingMapMode %d, size %dx%d", shadingMode, lensShadingMapMode, + lensShadingMapSize.x, lensShadingMapSize.y); LOG3("ldcMode %d, rscMode %d, flipMode %d", ldcMode, ldcMode, flipMode); LOG3("run3ACadence %d", run3ACadence); - LOG3("tonemap mode %d, preset curve %d, gamma %f, curve points %d", - tonemapMode, tonemapPresetCurve, tonemapGamma, tonemapCurves.gSize); + LOG3("tonemap mode %d, preset curve %d, gamma %f, curve points %d", tonemapMode, + tonemapPresetCurve, tonemapGamma, tonemapCurves.gSize); LOG3("testPatternMode %d", testPatternMode); LOG3("power mode %d", powerMode); LOG3("totalExposureTarget %ld", totalExposureTarget); diff --git a/src/3a/AiqSetting.h b/src/3a/AiqSetting.h index 6034151a..015f0791 100644 --- a/src/3a/AiqSetting.h +++ b/src/3a/AiqSetting.h @@ -28,11 +28,7 @@ namespace icamera { #define DEFAULT_TONEMAP_CURVE_POINT_NUM 2048 // HDR_FEATURE_S -typedef enum { - AEC_SCENE_NONE, - AEC_SCENE_HDR, - AEC_SCENE_ULL -} aec_scene_t; +typedef enum { AEC_SCENE_NONE, AEC_SCENE_HDR, AEC_SCENE_ULL } aec_scene_t; // HDR_FEATURE_E typedef struct { @@ -129,30 +125,29 @@ struct aiq_parameter_t { * and return some useful status of aiq results */ class AiqSetting { - -public: + public: AiqSetting(int cameraId); ~AiqSetting(); int init(void); int deinit(void); - int configure(const stream_config_t *streamList); + int configure(const stream_config_t* streamList); int setParameters(const Parameters& params); - int getAiqParameter(aiq_parameter_t ¶m); + int getAiqParameter(aiq_parameter_t& param); // HDR_FEATURE_S void 
updateTuningMode(aec_scene_t aecScene); // HDR_FEATURE_E -private: - void updateFrameUsage(const stream_config_t *streamList); + private: + void updateFrameUsage(const stream_config_t* streamList); -public: + public: int mCameraId; -private: + private: std::vector mTuningModes; aiq_parameter_t mAiqParam; diff --git a/src/3a/AiqStatistics.h b/src/3a/AiqStatistics.h index 50295597..6518e0aa 100644 --- a/src/3a/AiqStatistics.h +++ b/src/3a/AiqStatistics.h @@ -27,11 +27,11 @@ struct AiqStatistics { bool mInUse; bool mPendingDecode; - AiqStatistics() : mSequence(-1), - mTimestamp(0), - mTuningMode(TUNING_MODE_MAX), - mInUse(false), - mPendingDecode(false) {} + AiqStatistics() + : mSequence(-1), + mTimestamp(0), + mTuningMode(TUNING_MODE_MAX), + mInUse(false), + mPendingDecode(false) {} }; } /* namespace icamera */ - diff --git a/src/3a/AiqUnit.cpp b/src/3a/AiqUnit.cpp index 7180a553..26c59576 100644 --- a/src/3a/AiqUnit.cpp +++ b/src/3a/AiqUnit.cpp @@ -28,17 +28,17 @@ namespace icamera { -AiqUnit::AiqUnit(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw) : - mCameraId(cameraId), - // LOCAL_TONEMAP_S - mLtm(nullptr), - // LOCAL_TONEMAP_E - mAiqUnitState(AIQ_UNIT_NOT_INIT), - // INTEL_DVS_S - mDvs(nullptr), - // INTEL_DVS_S - mCcaInitialized(false), - mActiveStreamCount(0) { +AiqUnit::AiqUnit(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw) + : mCameraId(cameraId), + // LOCAL_TONEMAP_S + mLtm(nullptr), + // LOCAL_TONEMAP_E + mAiqUnitState(AIQ_UNIT_NOT_INIT), + // INTEL_DVS_S + mDvs(nullptr), + // INTEL_DVS_E + mCcaInitialized(false), + mActiveStreamCount(0) { mAiqSetting = new AiqSetting(cameraId); mAiqEngine = new AiqEngine(cameraId, sensorHw, lensHw, mAiqSetting); @@ -122,7 +122,7 @@ int AiqUnit::deinit() { return OK; } -int AiqUnit::configure(const stream_config_t *streamList) { +int AiqUnit::configure(const stream_config_t* streamList) { CheckAndLogError(streamList == nullptr, BAD_VALUE, "streamList is nullptr"); AutoMutex l(mAiqUnitLock); @@ -135,8 +135,7 @@ 
int AiqUnit::configure(const stream_config_t *streamList) { } std::vector configModes; - PlatformData::getConfigModesByOperationMode(mCameraId, streamList->operation_mode, - configModes); + PlatformData::getConfigModesByOperationMode(mCameraId, streamList->operation_mode, configModes); int ret = initIntelCcaHandle(configModes); CheckAndLogError(ret < 0, BAD_VALUE, "@%s failed to create intel cca handle", __func__); @@ -146,18 +145,11 @@ int AiqUnit::configure(const stream_config_t *streamList) { ret = mAiqEngine->configure(); CheckAndLogError(ret != OK, ret, "configure AIQ engine error: %d", ret); - // LOCAL_TONEMAP_S - if (mLtm) { - ret = mLtm->configure(configModes); - CheckAndLogError(ret != OK, ret, "configure LTM engine error: %d", ret); - } - // LOCAL_TONEMAP_E - mAiqUnitState = AIQ_UNIT_CONFIGURED; return OK; } -int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { +int AiqUnit::initIntelCcaHandle(const std::vector& configModes) { if (PlatformData::supportUpdateTuning() && !configModes.empty()) { std::shared_ptr graphConfig = IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(configModes[0]); @@ -178,7 +170,7 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { LOG1("@%s", mCameraId, __func__); mTuningModes.clear(); - for (auto &cfg : configModes) { + for (auto& cfg : configModes) { TuningMode tuningMode; int ret = PlatformData::getTuningModeByConfigMode(mCameraId, cfg, tuningMode); CheckAndLogError(ret != OK, ret, "%s: Failed to get tuningMode, cfg: %d", __func__, cfg); @@ -191,8 +183,8 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { ret = PlatformData::getCpf(mCameraId, tuningMode, &cpfData); if (ret == OK && cpfData.data) { CheckAndLogError(cpfData.size > cca::MAX_CPF_LEN, UNKNOWN_ERROR, - "%s, AIQB buffer is too small cpfData:%d > MAX_CPF_LEN:%d", - __func__, cpfData.size, cca::MAX_CPF_LEN); + "%s, AIQB buffer is too small cpfData:%d > MAX_CPF_LEN:%d", __func__, + cpfData.size, cca::MAX_CPF_LEN); 
MEMCPY_S(params.aiq_cpf.buf, cca::MAX_CPF_LEN, cpfData.data, cpfData.size); params.aiq_cpf.size = cpfData.size; } @@ -200,9 +192,9 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { // Initialize cca_nvm data ia_binary_data* nvmData = PlatformData::getNvm(mCameraId); if (nvmData) { - CheckAndLogError(nvmData->size > cca::MAX_NVM_LEN, UNKNOWN_ERROR, - "%s, NVM buffer is too small: nvmData:%d MAX_NVM_LEN:%d", - __func__, nvmData->size, cca::MAX_NVM_LEN); + CheckAndLogError(nvmData->size > cca::MAX_NVM_LEN, UNKNOWN_ERROR, + "%s, NVM buffer is too small: nvmData:%d MAX_NVM_LEN:%d", __func__, + nvmData->size, cca::MAX_NVM_LEN); MEMCPY_S(params.aiq_nvm.buf, cca::MAX_NVM_LEN, nvmData->data, nvmData->size); params.aiq_nvm.size = nvmData->size; } @@ -210,9 +202,9 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { // Initialize cca_aiqd data ia_binary_data* aiqdData = PlatformData::getAiqd(mCameraId, tuningMode); if (aiqdData) { - CheckAndLogError(aiqdData->size > cca::MAX_AIQD_LEN, UNKNOWN_ERROR, - "%s, AIQD buffer is too small aiqdData:%d > MAX_AIQD_LEN:%d", - __func__, aiqdData->size, cca::MAX_AIQD_LEN); + CheckAndLogError(aiqdData->size > cca::MAX_AIQD_LEN, UNKNOWN_ERROR, + "%s, AIQD buffer is too small aiqdData:%d > MAX_AIQD_LEN:%d", __func__, + aiqdData->size, cca::MAX_AIQD_LEN); MEMCPY_S(params.aiq_aiqd.buf, cca::MAX_AIQD_LEN, aiqdData->data, aiqdData->size); params.aiq_aiqd.size = aiqdData->size; } @@ -228,13 +220,15 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { params.aecFrameDelay = 0; // Initialize functions which need to be started - params.bitmap = cca::CCA_MODULE_AE | cca::CCA_MODULE_AWB | - cca::CCA_MODULE_PA | cca::CCA_MODULE_SA | cca::CCA_MODULE_GBCE | - cca::CCA_MODULE_LARD; + params.bitmap = cca::CCA_MODULE_AE | cca::CCA_MODULE_AWB | cca::CCA_MODULE_PA | + cca::CCA_MODULE_SA | cca::CCA_MODULE_GBCE | cca::CCA_MODULE_LARD; if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) { params.bitmap |= 
cca::CCA_MODULE_AF; } + std::shared_ptr graphConfig = + IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg); + // LOCAL_TONEMAP_S bool hasLtm = PlatformData::isLtmEnabled(mCameraId); // HDR_FEATURE_S @@ -245,17 +239,26 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { // HDR_FEATURE_E // DOL_FEATURE_S - hasLtm |= (PlatformData::isDolShortEnabled(mCameraId) - || PlatformData::isDolMediumEnabled(mCameraId)); + hasLtm |= (PlatformData::isDolShortEnabled(mCameraId) || + PlatformData::isDolMediumEnabled(mCameraId)); // DOL_FEATURE_E - - if (hasLtm) { + if (hasLtm && mLtm) { params.bitmap |= cca::CCA_MODULE_LTM; + ret = mLtm->configure(configModes, graphConfig, VIDEO_STREAM_ID); + CheckAndLogError(ret != OK, ret, "configure LTM engine error: %d", ret); } // LOCAL_TONEMAP_E // INTEL_DVS_S if (mDvs) { + std::vector streamIds; + if (graphConfig != nullptr) { + graphConfig->graphGetStreamIds(streamIds); + } + params.dvs_ids.count = streamIds.size(); + for (size_t i = 0; i < streamIds.size(); ++i) { + params.dvs_ids.ids[i] = streamIds[i]; + } ret = mDvs->configure(cfg, ¶ms); CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, configure DVS error", __func__); params.bitmap |= cca::CCA_MODULE_DVS; @@ -265,10 +268,8 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { // DOL_FEATURE_S // Initialize Bcomp params if (PlatformData::isDolShortEnabled(mCameraId) || - PlatformData::isDolMediumEnabled(mCameraId)) { + PlatformData::isDolMediumEnabled(mCameraId)) { // Parse the DOL mode and CG ratio from sensor mode config - std::shared_ptr graphConfig = - IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg); if (graphConfig != nullptr) { std::string dol_mode_name; graphConfig->getDolInfo(params.conversionGainRatio, dol_mode_name); @@ -281,35 +282,33 @@ int AiqUnit::initIntelCcaHandle(const std::vector &configModes) { params.dolMode = dolModeNameMap[dol_mode_name]; } } - LOG2("conversionGainRatio: %f, dolMode: %d", - 
params.conversionGainRatio, params.dolMode); + LOG2("conversionGainRatio: %f, dolMode: %d", params.conversionGainRatio, + params.dolMode); params.bitmap = params.bitmap | cca::CCA_MODULE_BCOM; } else if (PlatformData::getSensorAeEnable(mCameraId)) { params.conversionGainRatio = 1; params.dolMode = ia_bcomp_linear_hdr_mode; - LOG2("WA: conversionGainRatio: %f, dolMode: %d", - params.conversionGainRatio, params.dolMode); + LOG2("WA: conversionGainRatio: %f, dolMode: %d", params.conversionGainRatio, + params.dolMode); params.bitmap = params.bitmap | cca::CCA_MODULE_BCOM; } // DOL_FEATURE_E if (PlatformData::supportUpdateTuning()) { - std::shared_ptr graphConfig = - IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg); if (graphConfig != nullptr) { std::vector streamIds; graphConfig->graphGetStreamIds(streamIds); params.aic_stream_ids.count = streamIds.size(); CheckAndLogError(streamIds.size() > cca::MAX_STREAM_NUM, UNKNOWN_ERROR, - "%s, Too many streams: %zu in graph", __func__, streamIds.size()); + "%s, Too many streams: %zu in graph", __func__, streamIds.size()); for (size_t i = 0; i < streamIds.size(); ++i) { params.aic_stream_ids.ids[i] = streamIds[i]; } } } - IntelCca *intelCca = IntelCca::getInstance(mCameraId, tuningMode); - CheckAndLogError(!intelCca, UNKNOWN_ERROR, - "Failed to get cca. mode:%d cameraId:%d", tuningMode, mCameraId); + IntelCca* intelCca = IntelCca::getInstance(mCameraId, tuningMode); + CheckAndLogError(!intelCca, UNKNOWN_ERROR, "Failed to get cca. 
mode:%d cameraId:%d", + tuningMode, mCameraId); ia_err iaErr = intelCca->init(params); if (iaErr == ia_err_none) { mTuningModes.push_back(tuningMode); @@ -332,8 +331,8 @@ void AiqUnit::deinitIntelCcaHandle() { if (!mCcaInitialized) return; LOG1("@%s", mCameraId, __func__); - for (auto &mode : mTuningModes) { - IntelCca *intelCca = IntelCca::getInstance(mCameraId, mode); + for (auto& mode : mTuningModes) { + IntelCca* intelCca = IntelCca::getInstance(mCameraId, mode); CheckAndLogError(!intelCca, VOID_VALUE, "%s, Failed to get cca: mode(%d), cameraId(%d)", __func__, mode, mCameraId); @@ -431,15 +430,20 @@ std::vector AiqUnit::getStatsEventListener() { eventListenerList.push_back(mLtm); } // LOCAL_TONEMAP_E - // INTEL_DVS_S - if (mDvs) { - eventListenerList.push_back(mDvs); - } - // INTEL_DVS_E + + return eventListenerList; +} + +// INTEL_DVS_S +std::vector AiqUnit::getDVSEventListener() { + AutoMutex l(mAiqUnitLock); + std::vector eventListenerList; + if (mDvs) eventListenerList.push_back(mDvs); return eventListenerList; } +// INTEL_DVS_E -int AiqUnit::setParameters(const Parameters ¶ms) { +int AiqUnit::setParameters(const Parameters& params) { AutoMutex l(mAiqUnitLock); if (mDvs) { mDvs->setParameter(params); diff --git a/src/3a/AiqUnit.h b/src/3a/AiqUnit.h index b2b5a217..13c45d07 100644 --- a/src/3a/AiqUnit.h +++ b/src/3a/AiqUnit.h @@ -43,41 +43,42 @@ class LensHw; * This class is used for upper layer to control 3a engine. 
*/ -class AiqUnitBase{ - -public: +class AiqUnitBase { + public: AiqUnitBase() {} virtual ~AiqUnitBase() {} virtual int init() { return OK; } virtual int deinit() { return OK; } - virtual int configure(const stream_config_t * /*streamList*/) { return OK; } + virtual int configure(const stream_config_t* /*streamList*/) { return OK; } virtual int start() { return OK; } virtual int stop() { return OK; } - virtual int run3A(long request, int64_t applyingSeq, int64_t * /*effectSeq*/) { return OK; } + virtual int run3A(long request, int64_t applyingSeq, int64_t* /*effectSeq*/) { return OK; } - virtual std::vector getSofEventListener() - { + virtual std::vector getSofEventListener() { std::vector eventListenerList; return eventListenerList; } - virtual std::vector getStatsEventListener() - { + virtual std::vector getStatsEventListener() { std::vector eventListenerList; return eventListenerList; } + // INTEL_DVS_S + virtual std::vector getDVSEventListener() { + std::vector eventListenerList; + return eventListenerList; + } + // INTEL_DVS_E - virtual int setParameters(const Parameters & /*params*/) { return OK; } + virtual int setParameters(const Parameters& /*params*/) { return OK; } -private: + private: DISALLOW_COPY_AND_ASSIGN(AiqUnitBase); - }; class AiqUnit : public AiqUnitBase { - -public: - AiqUnit(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw); + public: + AiqUnit(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw); ~AiqUnit(); /** @@ -93,7 +94,7 @@ class AiqUnit : public AiqUnitBase { /** * \brief configure 3a engine with stream configuration */ - int configure(const stream_config_t *streamList); + int configure(const stream_config_t* streamList); /** * \brief Start 3a Engine @@ -128,25 +129,32 @@ class AiqUnit : public AiqUnitBase { */ std::vector getStatsEventListener(); + // INTEL_DVS_S + /** + * \brief Get DVS EventListener + */ + std::vector getDVSEventListener(); + // INTEL_DVS_E + /** * \brief Set 3A Parameters * * \param params: the Parameters 
update to 3A */ - int setParameters(const Parameters ¶ms); + int setParameters(const Parameters& params); -private: + private: DISALLOW_COPY_AND_ASSIGN(AiqUnit); -private: - int initIntelCcaHandle(const std::vector &configModes); + private: + int initIntelCcaHandle(const std::vector& configModes); void deinitIntelCcaHandle(); void dumpCcaInitParam(const cca::cca_init_params params); -private: + private: int mCameraId; // LOCAL_TONEMAP_S - Ltm *mLtm; + Ltm* mLtm; // LOCAL_TONEMAP_E enum AiqUnitState { AIQ_UNIT_NOT_INIT = 0, @@ -158,10 +166,10 @@ class AiqUnit : public AiqUnitBase { } mAiqUnitState; // INTEL_DVS_S - Dvs *mDvs; + Dvs* mDvs; // INTEL_DVS_E - AiqEngine *mAiqEngine; - AiqSetting *mAiqSetting; + AiqEngine* mAiqEngine; + AiqSetting* mAiqSetting; // Guard for AiqUnit public API. Mutex mAiqUnitLock; @@ -172,4 +180,3 @@ class AiqUnit : public AiqUnitBase { }; } /* namespace icamera */ - diff --git a/src/3a/AiqUtils.cpp b/src/3a/AiqUtils.cpp index ca369f28..e0f3dbc0 100644 --- a/src/3a/AiqUtils.cpp +++ b/src/3a/AiqUtils.cpp @@ -46,7 +46,7 @@ void AiqUtils::dumpAeResults(const cca::cca_ae_results& aeResult) { aeResult.exposures[i].exposure[0].aperture_fn, aeResult.exposures[i].exposure[0].exposure_time_us, aeResult.exposures[i].exposure[0].total_target_exposure, - aeResult.exposures[i].exposure[0].nd_filter_enabled? "YES": "NO", + aeResult.exposures[i].exposure[0].nd_filter_enabled ? 
"YES" : "NO", aeResult.exposures[i].exposure[0].iso, aeResult.exposures[i].exposure[0].low_limit_total_exposure, aeResult.exposures[i].exposure[0].up_limit_total_exposure); @@ -62,13 +62,13 @@ void AiqUtils::dumpAeResults(const cca::cca_ae_results& aeResult) { if (wg.width != 0 && wg.height != 0) { LOG3("AE weight grid [%dx%d]", wg.width, wg.height); for (int i = 0; i < 5 && i < wg.height; i++) { - LOG3("AE weight_grid[%d] = %d ", wg.width/2, wg.weights[wg.width/2]); + LOG3("AE weight_grid[%d] = %d ", wg.width / 2, wg.weights[wg.width / 2]); } } const ia_aiq_aperture_control& ac = aeResult.aperture_control; - LOG3("AE aperture fn = %f, iris command = %d, code = %d", - ac.aperture_fn, ac.dc_iris_command, ac.code); + LOG3("AE aperture fn = %f, iris command = %d, code = %d", ac.aperture_fn, ac.dc_iris_command, + ac.code); } void AiqUtils::dumpAfResults(const cca::cca_af_results& afResult) { @@ -76,24 +76,24 @@ void AiqUtils::dumpAfResults(const cca::cca_af_results& afResult) { LOG3("AF results: current/next dis %d/%d, next pos %d, final_position_reached %s, status %d", afResult.current_focus_distance, afResult.next_focus_distance, afResult.next_lens_position, - afResult.final_lens_position_reached ? "TRUE":"FALSE", afResult.status); + afResult.final_lens_position_reached ? 
"TRUE" : "FALSE", afResult.status); switch (afResult.status) { - case ia_aiq_af_status_local_search: - LOG3("AF result state _local_search"); - break; - case ia_aiq_af_status_extended_search: - LOG3("AF result state extended_search"); - break; - case ia_aiq_af_status_success: - LOG3("AF state success"); - break; - case ia_aiq_af_status_fail: - LOG3("AF state fail"); - break; - case ia_aiq_af_status_idle: - default: - LOG3("AF state idle"); + case ia_aiq_af_status_local_search: + LOG3("AF result state _local_search"); + break; + case ia_aiq_af_status_extended_search: + LOG3("AF result state extended_search"); + break; + case ia_aiq_af_status_success: + LOG3("AF state success"); + break; + case ia_aiq_af_status_fail: + LOG3("AF state fail"); + break; + case ia_aiq_af_status_idle: + default: + LOG3("AF state idle"); } } @@ -108,22 +108,22 @@ void AiqUtils::dumpAwbResults(const cca::cca_awb_results& awbResult) { void AiqUtils::dumpGbceResults(const cca::cca_gbce_params& gbceResult) { if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return; - LOG3("gamma_lut_size: %u, tone_map_lut_size: %u", - gbceResult.gamma_lut_size, gbceResult.tone_map_lut_size); + LOG3("gamma_lut_size: %u, tone_map_lut_size: %u", gbceResult.gamma_lut_size, + gbceResult.tone_map_lut_size); if (gbceResult.gamma_lut_size <= 0 || gbceResult.tone_map_lut_size <= 0) return; LOG3("gamma table: R: 0(%f), %u(%f), %u(%f)", gbceResult.r_gamma_lut[0], (gbceResult.gamma_lut_size / 2), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size / 2], - (gbceResult.gamma_lut_size - 1), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size - 1]); + (gbceResult.gamma_lut_size - 1), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size - 1]); LOG3("gamma table: G: 0(%f), %u(%f), %u(%f)", gbceResult.g_gamma_lut[0], (gbceResult.gamma_lut_size / 2), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size / 2], - (gbceResult.gamma_lut_size - 1), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size - 1]); + 
(gbceResult.gamma_lut_size - 1), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size - 1]); LOG3("gamma table: B: 0(%f), %u(%f), %u(%f)", gbceResult.b_gamma_lut[0], (gbceResult.gamma_lut_size / 2), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size / 2], - (gbceResult.gamma_lut_size - 1), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size - 1]); + (gbceResult.gamma_lut_size - 1), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size - 1]); LOG3("tonemap table: 0(%f), %u(%f), %u(%f)", gbceResult.tone_map_lut[0], (gbceResult.tone_map_lut_size / 2), @@ -136,47 +136,44 @@ void AiqUtils::dumpPaResults(const cca::cca_pa_params& paResult) { if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return; for (int i = 0; i < 3; i++) { - LOG3("color_conversion_matrix [%.4f %.4f %.4f] ", - paResult.color_conversion_matrix[i][0], - paResult.color_conversion_matrix[i][1], - paResult.color_conversion_matrix[i][2]); + LOG3("color_conversion_matrix [%.4f %.4f %.4f] ", paResult.color_conversion_matrix[i][0], + paResult.color_conversion_matrix[i][1], paResult.color_conversion_matrix[i][2]); } - LOG3("color_gains, gr:%f, r:%f, b:%f, gb:%f", - paResult.color_gains.gr, paResult.color_gains.r, + LOG3("color_gains, gr:%f, r:%f, b:%f, gb:%f", paResult.color_gains.gr, paResult.color_gains.r, paResult.color_gains.b, paResult.color_gains.gb); } void AiqUtils::dumpSaResults(const cca::cca_sa_results& saResult) { if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return; - LOG3("SA results color_order %d size %dx%d", - saResult.color_order, saResult.width, saResult.height); + LOG3("SA results color_order %d size %dx%d", saResult.color_order, saResult.width, + saResult.height); } int AiqUtils::convertError(ia_err iaErr) { switch (iaErr) { - case ia_err_none: - return OK; - case ia_err_general: - return UNKNOWN_ERROR; - case ia_err_nomemory: - return NO_MEMORY; - case ia_err_data: - return BAD_VALUE; - case ia_err_internal: - return INVALID_OPERATION; - case 
ia_err_argument: - return BAD_VALUE; - default: - return UNKNOWN_ERROR; + case ia_err_none: + return OK; + case ia_err_general: + return UNKNOWN_ERROR; + case ia_err_nomemory: + return NO_MEMORY; + case ia_err_data: + return BAD_VALUE; + case ia_err_internal: + return INVALID_OPERATION; + case ia_err_argument: + return BAD_VALUE; + default: + return UNKNOWN_ERROR; } } /** * Convert SensorFrameParams defined in PlatformData to ia_aiq_frame_params in aiq */ -void AiqUtils::convertToAiqFrameParam(const SensorFrameParams &sensor, ia_aiq_frame_params &aiq) { +void AiqUtils::convertToAiqFrameParam(const SensorFrameParams& sensor, ia_aiq_frame_params& aiq) { aiq.cropped_image_height = sensor.cropped_image_height; aiq.cropped_image_width = sensor.cropped_image_width; aiq.horizontal_crop_offset = sensor.horizontal_crop_offset; @@ -214,17 +211,17 @@ camera_window_t AiqUtils::convertToIaWindow(const camera_coordinate_system_t& sr const camera_window_t& srcWindow) { camera_coordinate_t leftTop; camera_coordinate_t rightBottom; - leftTop.x = srcWindow.left; - leftTop.y = srcWindow.top; + leftTop.x = srcWindow.left; + leftTop.y = srcWindow.top; rightBottom.x = srcWindow.right; rightBottom.y = srcWindow.bottom; - leftTop = convertToIaCoordinate(srcSystem, leftTop); - rightBottom = convertToIaCoordinate(srcSystem, rightBottom); + leftTop = convertToIaCoordinate(srcSystem, leftTop); + rightBottom = convertToIaCoordinate(srcSystem, rightBottom); camera_window_t result; - result.left = leftTop.x; - result.top = leftTop.y; - result.right = rightBottom.x; + result.left = leftTop.x; + result.top = leftTop.y; + result.right = rightBottom.x; result.bottom = rightBottom.y; result.weight = srcWindow.weight; return result; @@ -235,14 +232,14 @@ camera_window_t AiqUtils::convertToIaWindow(const camera_coordinate_system_t& sr */ float AiqUtils::normalizeAwbGain(int gain) { gain = CLIP(gain, AWB_GAIN_MAX, AWB_GAIN_MIN); - return AWB_GAIN_NORMALIZED_START + (float)(gain - AWB_GAIN_MIN) * \ - 
AWB_GAIN_RANGE_NORMALIZED / AWB_GAIN_RANGE_USER; + return AWB_GAIN_NORMALIZED_START + static_cast(gain - AWB_GAIN_MIN) * + AWB_GAIN_RANGE_NORMALIZED / AWB_GAIN_RANGE_USER; } int AiqUtils::convertToUserAwbGain(float normalizedGain) { normalizedGain = CLIP(normalizedGain, AWB_GAIN_NORMALIZED_START, AWB_GAIN_NORMALIZED_END); - return AWB_GAIN_MIN + (normalizedGain - AWB_GAIN_NORMALIZED_START) * \ - AWB_GAIN_RANGE_USER / AWB_GAIN_RANGE_NORMALIZED; + return AWB_GAIN_MIN + (normalizedGain - AWB_GAIN_NORMALIZED_START) * AWB_GAIN_RANGE_USER / + AWB_GAIN_RANGE_NORMALIZED; } float AiqUtils::convertSpeedModeToTime(camera_converge_speed_t mode) { @@ -317,10 +314,10 @@ void AiqUtils::applyTonemapGamma(float gamma, cca::cca_gbce_params* results) { results->g_gamma_lut[i] = pow(i / static_cast(lutSize), 1 / gamma); } - MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), - results->g_gamma_lut, lutSize * sizeof(float)); - MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), - results->g_gamma_lut, lutSize * sizeof(float)); + MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); + MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); } void AiqUtils::applyTonemapSRGB(cca::cca_gbce_params* results) { @@ -330,17 +327,17 @@ void AiqUtils::applyTonemapSRGB(cca::cca_gbce_params* results) { CheckAndLogError(lutSize < MIN_TONEMAP_POINTS, VOID_VALUE, "Bad gamma lut size (%d) in gbce results", lutSize); for (int i = 0; i < lutSize; i++) { - if (i / (lutSize - 1) < 0.0031308) + if (i / (lutSize - 1) < 0.0031308) results->g_gamma_lut[i] = 12.92 * (i / (lutSize - 1)); else results->g_gamma_lut[i] = - 1.055 * pow(i / static_cast(lutSize - 1), 1 / 2.4) - 0.055; + 1.055 * pow(i / static_cast(lutSize - 1), 1 / 2.4) - 0.055; } - MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), - results->g_gamma_lut, lutSize * sizeof(float)); - MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), 
- results->g_gamma_lut, lutSize * sizeof(float)); + MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); + MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); } void AiqUtils::applyTonemapREC709(cca::cca_gbce_params* results) { @@ -354,13 +351,13 @@ void AiqUtils::applyTonemapREC709(cca::cca_gbce_params* results) { results->g_gamma_lut[i] = 4.5 * (i / (lutSize - 1)); else results->g_gamma_lut[i] = - 1.099 * pow(i / static_cast(lutSize - 1), 0.45) - 0.099; + 1.099 * pow(i / static_cast(lutSize - 1), 0.45) - 0.099; } - MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), - results->g_gamma_lut, lutSize * sizeof(float)); - MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), - results->g_gamma_lut, lutSize * sizeof(float)); + MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); + MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut, + lutSize * sizeof(float)); } void AiqUtils::applyTonemapCurve(const camera_tonemap_curves_t& curves, @@ -383,12 +380,12 @@ void AiqUtils::applyTonemapCurve(const camera_tonemap_curves_t& curves, results->g_gamma_lut[i] = curves.gCurve[left]; results->b_gamma_lut[i] = curves.bCurve[left]; } else { - results->r_gamma_lut[i] = curves.rCurve[left] - + ratio * (curves.rCurve[right] - curves.rCurve[left]); - results->g_gamma_lut[i] = curves.gCurve[left] - + ratio * (curves.gCurve[right] - curves.gCurve[left]); - results->b_gamma_lut[i] = curves.bCurve[left] - + ratio * (curves.bCurve[right] - curves.bCurve[left]); + results->r_gamma_lut[i] = + curves.rCurve[left] + ratio * (curves.rCurve[right] - curves.rCurve[left]); + results->g_gamma_lut[i] = + curves.gCurve[left] + ratio * (curves.gCurve[right] - curves.gCurve[left]); + results->b_gamma_lut[i] = + curves.bCurve[left] + ratio * (curves.bCurve[right] - curves.bCurve[left]); } } } @@ -419,16 +416,16 @@ void 
AiqUtils::applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves float maxAverage = std::max(averageR, averageG); maxAverage = std::max(maxAverage, averageB); if (maxAverage - minAverage > EPSILON) { - averageR = AWB_GAIN_NORMALIZED_START + (averageR - minAverage) * \ - AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); - averageG = AWB_GAIN_NORMALIZED_START + (averageG - minAverage) * \ - AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); - averageB = AWB_GAIN_NORMALIZED_START + (averageB - minAverage) * \ - AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); + averageR = AWB_GAIN_NORMALIZED_START + + (averageR - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); + averageG = AWB_GAIN_NORMALIZED_START + + (averageG - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); + averageB = AWB_GAIN_NORMALIZED_START + + (averageB - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage); results->accurate_r_per_g = averageR / averageG; results->accurate_b_per_g = averageB / averageG; - LOG2("%s: overwrite awb gain %f %f", __func__, - results->accurate_r_per_g, results->accurate_b_per_g); + LOG2("%s: overwrite awb gain %f %f", __func__, results->accurate_r_per_g, + results->accurate_b_per_g); } } @@ -454,7 +451,7 @@ void AiqUtils::applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves * avoid division by 0. 
If any of the required CMC items is missing * it will return the default value 5m */ -float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc &cmc) { +float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc& cmc) { const float DEFAULT_HYPERFOCAL_DISTANCE = 5000.0f; // Pixel size is stored in CMC in hundreds of micrometers @@ -473,8 +470,8 @@ float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc &cmc) { // assuming square pixel const int CIRCLE_OF_CONFUSION_IN_PIXELS = 2; float cocMicros = pixelSizeMicro * CIRCLE_OF_CONFUSION_IN_PIXELS; - float hyperfocalDistanceMillis = 1000 * (focalLengthMillis * focalLengthMillis) / - (fNumber * cocMicros); + float hyperfocalDistanceMillis = + 1000 * (focalLengthMillis * focalLengthMillis) / (fNumber * cocMicros); return (hyperfocalDistanceMillis == 0.0f) ? DEFAULT_HYPERFOCAL_DISTANCE : hyperfocalDistanceMillis; diff --git a/src/3a/AiqUtils.h b/src/3a/AiqUtils.h index ffb57105..44810048 100644 --- a/src/3a/AiqUtils.h +++ b/src/3a/AiqUtils.h @@ -121,13 +121,12 @@ void applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves, * in a_dst_w width of the output array * in a_dst_h height of the output array */ -template int resize2dArray( - const T* a_src, int a_src_w, int a_src_h, - T* a_dst, int a_dst_w, int a_dst_h) { +template +int resize2dArray(const T* a_src, int a_src_w, int a_src_h, T* a_dst, int a_dst_w, int a_dst_h) { int i, j, step_size_w, step_size_h, rounding_term; if (a_src_w < 2 || a_dst_w < 2 || a_src_h < 2 || a_dst_h < 2) { - return -1; + return -1; } nsecs_t startTime = CameraUtils::systemTime(); step_size_w = ((a_src_w - 1) << FRAC_BITS_CURR_LOC) / (a_dst_w - 1); @@ -145,19 +144,20 @@ template int resize2dArray( curr_loc_lower_w = (curr_loc_w > 0) ? 
(curr_loc_w - 1) >> FRAC_BITS_CURR_LOC : 0; a_dst[a_dst_w * j + i] = - (a_src[curr_loc_lower_w + curr_loc_lower_h * a_src_w] * - (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) * - (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) + - a_src[curr_loc_lower_w + 1 + curr_loc_lower_h * a_src_w] * - (curr_loc_w-((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) * - (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) + - a_src[curr_loc_lower_w + (curr_loc_lower_h + 1) * a_src_w] * - (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) * - (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) + - a_src[curr_loc_lower_w + 1 + (curr_loc_lower_h + 1) * a_src_w] * - (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) * - (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) - + rounding_term) / (FRAC_BASE * FRAC_BASE); + (a_src[curr_loc_lower_w + curr_loc_lower_h * a_src_w] * + (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) * + (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) + + a_src[curr_loc_lower_w + 1 + curr_loc_lower_h * a_src_w] * + (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) * + (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) + + a_src[curr_loc_lower_w + (curr_loc_lower_h + 1) * a_src_w] * + (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) * + (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) + + a_src[curr_loc_lower_w + 1 + (curr_loc_lower_h + 1) * a_src_w] * + (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) * + (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) + + rounding_term) / + (FRAC_BASE * FRAC_BASE); } } LOG2("resize the 2D array cost %dus", @@ -166,16 +166,13 @@ template int resize2dArray( return 0; } -template int resize2dArray( - const float* a_src, int a_src_w, int a_src_h, - float* a_dst, int a_dst_w, int a_dst_h); -template int resize2dArray( - const unsigned short* a_src, int a_src_w, int a_src_h, - unsigned 
short* a_dst, int a_dst_w, int a_dst_h); -template int resize2dArray( - const int* a_src, int a_src_w, int a_src_h, - int* a_dst, int a_dst_w, int a_dst_h); - -float calculateHyperfocalDistance(const cca::cca_cmc &cmc); +template int resize2dArray(const float* a_src, int a_src_w, int a_src_h, float* a_dst, + int a_dst_w, int a_dst_h); +template int resize2dArray(const unsigned short* a_src, int a_src_w, int a_src_h, + unsigned short* a_dst, int a_dst_w, int a_dst_h); +template int resize2dArray(const int* a_src, int a_src_w, int a_src_h, int* a_dst, int a_dst_w, + int a_dst_h); + +float calculateHyperfocalDistance(const cca::cca_cmc& cmc); } // namespace AiqUtils } // namespace icamera diff --git a/src/3a/Dvs.cpp b/src/3a/Dvs.cpp index 74680dfe..1ac14ecb 100644 --- a/src/3a/Dvs.cpp +++ b/src/3a/Dvs.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2021 Intel Corporation. + * Copyright (C) 2017-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -39,17 +39,14 @@ const int DVS_OXDIM_UV = 64; const int DVS_OYDIM_UV = 16; const int DVS_MIN_ENVELOPE = 12; -Dvs::Dvs(int cameraId) - : mCameraId(cameraId), - mTuningMode(TUNING_MODE_VIDEO) { +Dvs::Dvs(int cameraId) : mCameraId(cameraId), mTuningMode(TUNING_MODE_VIDEO) { CLEAR(mPtzRegion); CLEAR(mGDCRegion); } -Dvs::~Dvs() { -} +Dvs::~Dvs() {} -int Dvs::configure(const ConfigMode configMode, cca::cca_init_params *params) { +int Dvs::configure(const ConfigMode configMode, cca::cca_init_params* params) { LOG2("@%s", __func__); int ret = configCcaDvsData(configMode, params); @@ -64,11 +61,11 @@ int Dvs::configure(const ConfigMode configMode, cca::cca_init_params *params) { return OK; } -int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *params) { +int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params* params) { // update GC std::shared_ptr gc = nullptr; if (PlatformData::getGraphConfigNodes(mCameraId)) { - IGraphConfigManager *GCM = IGraphConfigManager::getInstance(mCameraId); + IGraphConfigManager* GCM = IGraphConfigManager::getInstance(mCameraId); if (GCM) { gc = GCM->getGraphConfig(configMode); } @@ -80,11 +77,11 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par int status = gc->getGdcKernelSetting(&gdcKernelId, &resolution); CheckWarning(status != OK, UNKNOWN_ERROR, "Failed to get GDC kernel setting, DVS disabled"); - LOG2("%s, GDC kernel setting: id: %u, resolution:src: %dx%d, dst: %dx%d", __func__, - gdcKernelId, resolution.input_width, resolution.input_height, resolution.output_width, + LOG2("%s, GDC kernel setting: id: %u, resolution:src: %dx%d, dst: %dx%d", __func__, gdcKernelId, + resolution.input_width, resolution.input_height, resolution.output_width, resolution.output_height); - cca::cca_gdc_configuration *gdcConfig = ¶ms->gdcConfig; + cca::cca_gdc_configuration* gdcConfig = ¶ms->gdcConfig; CLEAR(*gdcConfig); gdcConfig->gdc_filter_width = DVS_MIN_ENVELOPE / 2; 
gdcConfig->gdc_filter_height = DVS_MIN_ENVELOPE / 2; @@ -102,7 +99,7 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par gdcConfig->splitMetadata[0] = DVS_OYDIM_UV; gdcConfig->splitMetadata[1] = DVS_OXDIM_UV; gdcConfig->splitMetadata[2] = DVS_OYDIM_Y; - gdcConfig->splitMetadata[3] = DVS_OXDIM_Y/2; + gdcConfig->splitMetadata[3] = DVS_OXDIM_Y / 2; } camera_resolution_t envelopeResolution; @@ -117,17 +114,16 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par const float Max_Ratio = 1.45f; int bq_max_width = - static_cast(Max_Ratio * static_cast(resolution.output_width / 2)); + static_cast(Max_Ratio * static_cast(resolution.output_width / 2)); int bq_max_height = - static_cast(Max_Ratio * static_cast(resolution.output_height / 2)); - if (resolution.input_width / 2 - envelope_bq.width - - gdcConfig->gdc_filter_width > bq_max_width) + static_cast(Max_Ratio * static_cast(resolution.output_height / 2)); + if (resolution.input_width / 2 - envelope_bq.width - gdcConfig->gdc_filter_width > bq_max_width) envelope_bq.width = resolution.input_width / 2 - gdcConfig->gdc_filter_width - bq_max_width; - if (resolution.input_height / 2 - envelope_bq.height - - gdcConfig->gdc_filter_height > bq_max_height) - envelope_bq.height = resolution.input_height / 2 - - gdcConfig->gdc_filter_height - bq_max_height; + if (resolution.input_height / 2 - envelope_bq.height - gdcConfig->gdc_filter_height > + bq_max_height) + envelope_bq.height = + resolution.input_height / 2 - gdcConfig->gdc_filter_height - bq_max_height; float zoomHRatio = resolution.input_width / (resolution.input_width - envelope_bq.width * 2); float zoomVRatio = resolution.input_height / (resolution.input_height - envelope_bq.height * 2); @@ -144,7 +140,7 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par mGDCRegion.left = 0; mGDCRegion.top = 0; - mGDCRegion.right = resolution.input_width / 2;; + mGDCRegion.right = 
resolution.input_width / 2; mGDCRegion.bottom = resolution.input_height / 2; dumpDvsConfiguration(*params); @@ -158,7 +154,8 @@ void Dvs::setParameter(const Parameters& p) { void Dvs::handleEvent(EventData eventData) { LOG2("@%s: eventData.type:%d", __func__, eventData.type); - if (eventData.type != EVENT_PSYS_STATS_BUF_READY) return; + if (eventData.type != EVENT_DVS_READY) return; + int streamId = eventData.data.dvsRunReady.streamId; IntelCca* intelCcaHandle = IntelCca::getInstance(mCameraId, mTuningMode); CheckAndLogError(!intelCcaHandle, VOID_VALUE, "@%s, Failed to get IntelCca instance", __func__); @@ -171,19 +168,49 @@ void Dvs::handleEvent(EventData eventData) { zp.digital_zoom_ratio = 1.0f; zp.digital_zoom_factor = 1.0f; zp.zoom_mode = ia_dvs_zoom_mode_region; - if (!mPtzRegion.left && !mPtzRegion.top && !mPtzRegion.right && !mPtzRegion.bottom) + if (!mPtzRegion.left && !mPtzRegion.top && !mPtzRegion.right && !mPtzRegion.bottom) { zp.zoom_region = {mGDCRegion.left, mGDCRegion.top, mGDCRegion.right, mGDCRegion.bottom}; - else - zp.zoom_region = { mPtzRegion.left, mPtzRegion.top, mPtzRegion.right, mPtzRegion.bottom }; - intelCcaHandle->updateZoom(zp); + } else { + /* + SCALER_CROP_REGION can adjust to a small crop region if the aspect of active + pixel array is not same as the crop region aspect. Crop can only on either + horizontally or veritacl but never both. + If active pixel array's aspect ratio is wider than the crop region, the region + should be further cropped vertically. 
+ */ + auto coord = PlatformData::getActivePixelArray(mCameraId); + int wpa = coord.right - coord.left; + int hpa = coord.bottom - coord.top; + + int width = mPtzRegion.right - mPtzRegion.left; + int height = mPtzRegion.bottom - mPtzRegion.top; + + float aspect0 = static_cast(wpa) / hpa; + float aspect1 = static_cast(width) / height; + + if (std::fabs(aspect0 - aspect1) < 0.00001) { + zp.zoom_region = {mPtzRegion.left, mPtzRegion.top, mPtzRegion.right, mPtzRegion.bottom}; + } else if (aspect0 > aspect1) { + auto croppedHeight = width / aspect0; + int diff = std::abs(height - croppedHeight) / 2; + zp.zoom_region = {mPtzRegion.left, mPtzRegion.top + diff, mPtzRegion.right, + mPtzRegion.bottom - diff}; + } else { + auto croppedWidth = height * aspect0; + int diff = std::abs(width - croppedWidth) / 2; + zp.zoom_region = {mPtzRegion.left + diff, mPtzRegion.top, mPtzRegion.right - diff, + mPtzRegion.bottom}; + } + } + intelCcaHandle->updateZoom(streamId, zp); - ia_err iaErr = intelCcaHandle->runDVS(eventData.data.statsReady.sequence); + ia_err iaErr = intelCcaHandle->runDVS(streamId, eventData.data.statsReady.sequence); int ret = AiqUtils::convertError(iaErr); CheckAndLogError(ret != OK, VOID_VALUE, "Error running DVS: %d", ret); return; } -void Dvs::dumpDvsConfiguration(const cca::cca_init_params &config) { +void Dvs::dumpDvsConfiguration(const cca::cca_init_params& config) { if (!Log::isLogTagEnabled(GET_FILE_SHIFT(Dvs), CAMERA_DEBUG_LOG_LEVEL3)) return; LOG3("config.dvsOutputType %d", config.dvsOutputType); diff --git a/src/3a/Dvs.h b/src/3a/Dvs.h index 0e9fdabc..a509e50c 100644 --- a/src/3a/Dvs.h +++ b/src/3a/Dvs.h @@ -41,13 +41,13 @@ class Dvs : public EventListener { explicit Dvs(int cameraId); ~Dvs(); - int configure(const ConfigMode configMode, cca::cca_init_params *params); + int configure(const ConfigMode configMode, cca::cca_init_params* params); void handleEvent(EventData eventData); void setParameter(const Parameters& p); private: - int 
configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *params); - void dumpDvsConfiguration(const cca::cca_init_params &config); + int configCcaDvsData(const ConfigMode configMode, cca::cca_init_params* params); + void dumpDvsConfiguration(const cca::cca_init_params& config); private: int mCameraId; diff --git a/src/3a/I3AControlFactory.cpp b/src/3a/I3AControlFactory.cpp index 120d2ca2..f2c4b8a9 100644 --- a/src/3a/I3AControlFactory.cpp +++ b/src/3a/I3AControlFactory.cpp @@ -23,9 +23,8 @@ namespace icamera { -AiqUnitBase *I3AControlFactory::createI3AControl(int cameraId, SensorHwCtrl *sensorHw, - LensHw *lensHw) -{ +AiqUnitBase* I3AControlFactory::createI3AControl(int cameraId, SensorHwCtrl* sensorHw, + LensHw* lensHw) { LOG1("@%s", cameraId, __func__); if (PlatformData::isEnableAIQ(cameraId)) { return new AiqUnit(cameraId, sensorHw, lensHw); diff --git a/src/3a/I3AControlFactory.h b/src/3a/I3AControlFactory.h index 940e2710..9e0e9ca9 100644 --- a/src/3a/I3AControlFactory.h +++ b/src/3a/I3AControlFactory.h @@ -28,7 +28,7 @@ namespace icamera { * automatically based on 3a enabled status */ class I3AControlFactory { -public: + public: /** * \brief Select the AIQ unit according to config file and compiling option * @@ -39,7 +39,7 @@ class I3AControlFactory { * * \return the AIQ unit base class */ - static AiqUnitBase *createI3AControl(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw); + static AiqUnitBase* createI3AControl(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw); }; } /* namespace icamera */ diff --git a/src/3a/LensManager.cpp b/src/3a/LensManager.cpp index 6f22be2c..c2247688 100644 --- a/src/3a/LensManager.cpp +++ b/src/3a/LensManager.cpp @@ -24,16 +24,14 @@ namespace icamera { -LensManager::LensManager(int cameraId, LensHw *lensHw) : - mCameraId(cameraId), - mLensHw(lensHw), - mDcIrisCommand(ia_aiq_aperture_control_dc_iris_close), - mFocusPosition(-1), - mLastSofSequence(-1) { -} +LensManager::LensManager(int cameraId, LensHw* lensHw) 
+ : mCameraId(cameraId), + mLensHw(lensHw), + mDcIrisCommand(ia_aiq_aperture_control_dc_iris_close), + mFocusPosition(-1), + mLastSofSequence(-1) {} -LensManager::~LensManager() { -} +LensManager::~LensManager() {} int LensManager::start() { AutoMutex l(mLock); @@ -76,8 +74,8 @@ void LensManager::handleSofEvent(EventData eventData) { } } -int LensManager::setLensResult(const cca::cca_af_results &afResults, - int64_t sequence, const aiq_parameter_t &aiqParam) { +int LensManager::setLensResult(const cca::cca_af_results& afResults, int64_t sequence, + const aiq_parameter_t& aiqParam) { AutoMutex l(mLock); if (!mLensHw->isLensSubdevAvailable() || afResults.next_lens_position == 0) { @@ -87,7 +85,7 @@ int LensManager::setLensResult(const cca::cca_af_results &afResults, int ret = OK; int lensHwType = PlatformData::getLensHwType(mCameraId); - switch(lensHwType) { + switch (lensHwType) { case LENS_VCM_HW: if (aiqParam.afMode == AF_MODE_OFF && aiqParam.focusDistance > 0.0f) { // The manual focus setting requires perframe control @@ -117,7 +115,7 @@ void LensManager::setFocusPosition(int focusPosition) { } } -void LensManager::getLensInfo(aiq_parameter_t &aiqParam) { +void LensManager::getLensInfo(aiq_parameter_t& aiqParam) { if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) { mLensHw->getLatestPosition(aiqParam.lensPosition, aiqParam.lensMovementStartTimestamp); } diff --git a/src/3a/LensManager.h b/src/3a/LensManager.h index aa6a48e0..4c431dc6 100644 --- a/src/3a/LensManager.h +++ b/src/3a/LensManager.h @@ -31,9 +31,8 @@ namespace icamera { * This class is used to control focus and aperture related controls. */ class LensManager { - -public: - LensManager(int cameraId, LensHw *lensHw); + public: + LensManager(int cameraId, LensHw* lensHw); ~LensManager(); /** @@ -60,24 +59,24 @@ class LensManager { * * \return OK if set successfully. 
*/ - int setLensResult(const cca::cca_af_results &afResults, - int64_t sequence, const aiq_parameter_t &aiqParam); + int setLensResult(const cca::cca_af_results& afResults, int64_t sequence, + const aiq_parameter_t& aiqParam); /** * \brief Get Lens info * * \param[out] aiqParam: updating lens related parameters. * */ - void getLensInfo(aiq_parameter_t &aiqParam); + void getLensInfo(aiq_parameter_t& aiqParam); -private: + private: DISALLOW_COPY_AND_ASSIGN(LensManager); void setFocusPosition(int focusPostion); -private: + private: int mCameraId; - LensHw *mLensHw; + LensHw* mLensHw; ia_aiq_aperture_control_dc_iris_command mDcIrisCommand; int mFocusPosition; diff --git a/src/3a/Ltm.cpp b/src/3a/Ltm.cpp index 2c7266f4..ad1c61fb 100644 --- a/src/3a/Ltm.cpp +++ b/src/3a/Ltm.cpp @@ -29,6 +29,9 @@ #include "iutils/Errors.h" #include "iutils/Utils.h" +#include "ia_pal_types_isp_ids_autogen.h" +#include "ia_pal_types_isp.h" + namespace icamera { Ltm::Ltm(int cameraId) @@ -38,6 +41,7 @@ Ltm::Ltm(int cameraId) mThreadRunning(false), mInputParamIndex(-1) { CLEAR(mLtmParams); + CLEAR(mFrameResolution); if (PlatformData::isEnableLtmThread(mCameraId)) { mLtmThread = new LtmThread(this); @@ -81,7 +85,28 @@ int Ltm::deinit() { return OK; } -int Ltm::configure(const std::vector& configModes) { +int Ltm::getPixelCropperResolution(std::shared_ptr graphConfig, int32_t streamId, + camera_resolution_t* resolution) { + ia_isp_bxt_program_group* pgPtr = graphConfig->getProgramGroup(streamId); + for (unsigned int i = 0; i < pgPtr->kernel_count; i++) { + // The kernel value is for cca_ltm_input_params::frame_width and frame_height. 
+ if (pgPtr->run_kernels[i].kernel_uuid == ia_pal_uuid_isp_pxl_crop_yuv_a) { + if (pgPtr->run_kernels[i].resolution_info) { + resolution->width = pgPtr->run_kernels[i].resolution_info->output_width; + resolution->height = pgPtr->run_kernels[i].resolution_info->output_height; + return OK; + } else { + resolution->width = pgPtr->run_kernels[i].resolution_history->output_width; + resolution->height = pgPtr->run_kernels[i].resolution_history->output_height; + } + } + } + + return UNKNOWN_ERROR; +} + +int Ltm::configure(const std::vector& configModes, + std::shared_ptr graphConfig, int32_t streamId) { TuningMode tMode = TUNING_MODE_MAX; for (auto cfg : configModes) { // Only support the 1st tuning mode if multiple config mode is configured. @@ -102,6 +127,11 @@ int Ltm::configure(const std::vector& configModes) { } } + if (graphConfig) { + int ret = getPixelCropperResolution(graphConfig, streamId, &mFrameResolution); + CheckAndLogError(ret != OK, ret, "failed to get sis output resolution"); + } + if (tMode == TUNING_MODE_MAX) { return OK; } @@ -144,8 +174,8 @@ void Ltm::stop() { } void Ltm::handleEvent(EventData eventData) { - if ((eventData.type != EVENT_PSYS_STATS_SIS_BUF_READY) && - (eventData.pipeType != STILL_STREAM_ID)) + if ((eventData.type != EVENT_PSYS_STATS_SIS_BUF_READY) || + (eventData.pipeType != VIDEO_STREAM_ID)) return; LOG2("%s: handle EVENT_PSYS_STATS_SIS_BUF_READY", __func__); @@ -188,8 +218,8 @@ int Ltm::handleSisLtm(const std::shared_ptr& cameraBuffer) { AiqResult* feedback = getAiqResult(sequence); mLtmParams[mInputParamIndex]->ltmParams.ev_shift = feedback->mAiqParam.evShift; mLtmParams[mInputParamIndex]->ltmParams.ltm_strength_manual = feedback->mAiqParam.ltmStrength; - mLtmParams[mInputParamIndex]->ltmParams.frame_width = feedback->mAiqParam.resolution.width; - mLtmParams[mInputParamIndex]->ltmParams.frame_height = feedback->mAiqParam.resolution.height; + mLtmParams[mInputParamIndex]->ltmParams.frame_width = mFrameResolution.width; + 
mLtmParams[mInputParamIndex]->ltmParams.frame_height = mFrameResolution.height; ia_image_full_info* imageInfo = &mLtmParams[mInputParamIndex]->ltmParams.sis.image_info; CLEAR(*imageInfo); @@ -207,6 +237,14 @@ int Ltm::handleSisLtm(const std::shared_ptr& cameraBuffer) { cca::cca_ltm_statistics* sis = &mLtmParams[mInputParamIndex]->ltmParams.sis; MEMCPY_S(sis->data, sizeof(sis->data), data, size); sis->size = sizeof(sis->data) > size ? size : sizeof(sis->data); + LOG3( + "LTM data_format %d, bayer_order %d, data_format_bpp %d, data_bpp %d, frame_width and " + "height(%d, %d), SIS_image_width & height and right padder(%d, %d, %d), image data size %d", + imageInfo->raw_image.data_format, imageInfo->raw_image.bayer_order, + imageInfo->raw_image.data_format_bpp, imageInfo->raw_image.data_bpp, + mLtmParams[mInputParamIndex]->ltmParams.frame_width, + mLtmParams[mInputParamIndex]->ltmParams.frame_height, imageInfo->raw_image.width_cols, + imageInfo->raw_image.height_lines, imageInfo->extra_cols_right, sis->size); if ((!PlatformData::isEnableLtmThread(mCameraId)) || sequence == 0) { runLtm(*mLtmParams[mInputParamIndex]); diff --git a/src/3a/Ltm.h b/src/3a/Ltm.h index 29e6f8c5..55f584b8 100644 --- a/src/3a/Ltm.h +++ b/src/3a/Ltm.h @@ -57,7 +57,8 @@ class Ltm : public EventListener { int start(); void stop(); - int configure(const std::vector& configModes); + int configure(const std::vector& configModes, + std::shared_ptr graphConfig, int32_t streamId); /** * \brief handle statistics event @@ -68,6 +69,9 @@ class Ltm : public EventListener { private: DISALLOW_COPY_AND_ASSIGN(Ltm); + int getPixelCropperResolution(std::shared_ptr graphConfig, int32_t streamId, + camera_resolution_t* resolution); + int runLtmAsync(); int runLtm(const LtmInputParams& ltmInputParams); @@ -100,11 +104,13 @@ class Ltm : public EventListener { LtmThread* mLtmThread; bool mThreadRunning; Condition mParamAvailableSignal; - static const int kMaxLtmParamsNum = 2; // 2 ltm input params + static const int 
kMaxLtmParamsNum = 2; // 2 ltm input params int mInputParamIndex; LtmInputParams* mLtmParams[kMaxLtmParamsNum]; std::queue mLtmParamsQ; + + camera_resolution_t mFrameResolution; }; } /* namespace icamera */ diff --git a/src/3a/MakerNote.cpp b/src/3a/MakerNote.cpp index c318b189..0547efe0 100644 --- a/src/3a/MakerNote.cpp +++ b/src/3a/MakerNote.cpp @@ -27,12 +27,9 @@ namespace icamera { -MakerNote::MakerNote() : - mMknState(UNINIT) { -} +MakerNote::MakerNote() : mMknState(UNINIT) {} -MakerNote::~MakerNote() { -} +MakerNote::~MakerNote() {} int MakerNote::init(int cameraId, TuningMode tuningMode) { LOG1("@%s, tuningMode:%d", cameraId, __func__, tuningMode); @@ -89,8 +86,8 @@ int MakerNote::saveMakernoteData(int cameraId, camera_makernote_mode_t makernote AutoMutex lock(mMknLock); CheckAndLogError(mMknState != INIT, NO_INIT, "@%s, mkn isn't initialized", __func__); - ia_mkn_trg mknTrg = ((makernoteMode == MAKERNOTE_MODE_JPEG) || dump - ? ia_mkn_trg_section_1 : ia_mkn_trg_section_2); + ia_mkn_trg mknTrg = ((makernoteMode == MAKERNOTE_MODE_JPEG) || dump ? 
ia_mkn_trg_section_1 : + ia_mkn_trg_section_2); MakernoteData data = mMakernoteDataList.front(); IntelCca* intelCca = IntelCca::getInstance(cameraId, tuningMode); diff --git a/src/3a/SensorManager.cpp b/src/3a/SensorManager.cpp index 00fb6844..be82ee6f 100644 --- a/src/3a/SensorManager.cpp +++ b/src/3a/SensorManager.cpp @@ -27,33 +27,32 @@ using std::vector; namespace icamera { -SensorManager::SensorManager(int cameraId, SensorHwCtrl *sensorHw) : - mCameraId(cameraId), - mSensorHwCtrl(sensorHw), - // HDR_FEATURE_S - mModeSwitched(false), - // HDR_FEATURE_E - mLastSofSequence(-1), - mAnalogGainDelay(0), - mDigitalGainDelay(0) { +SensorManager::SensorManager(int cameraId, SensorHwCtrl* sensorHw) + : mCameraId(cameraId), + mSensorHwCtrl(sensorHw), + // HDR_FEATURE_S + mModeSwitched(false), + // HDR_FEATURE_E + mLastSofSequence(-1), + mAnalogGainDelay(0), + mDigitalGainDelay(0) { // HDR_FEATURE_S CLEAR(mWdrModeSetting); // HDR_FEATURE_E if (PlatformData::getAnalogGainLag(mCameraId) > 0) { - mAnalogGainDelay = PlatformData::getExposureLag(mCameraId) - - PlatformData::getAnalogGainLag(mCameraId); + mAnalogGainDelay = + PlatformData::getExposureLag(mCameraId) - PlatformData::getAnalogGainLag(mCameraId); mDigitalGainDelay = mAnalogGainDelay; } if (PlatformData::getDigitalGainLag(mCameraId) >= 0) { - mDigitalGainDelay = PlatformData::getExposureLag(mCameraId) - - PlatformData::getDigitalGainLag(mCameraId); + mDigitalGainDelay = + PlatformData::getExposureLag(mCameraId) - PlatformData::getDigitalGainLag(mCameraId); } } -SensorManager::~SensorManager() { -} +SensorManager::~SensorManager() {} void SensorManager::reset() { LOG1("@%s", mCameraId, __func__); @@ -87,8 +86,8 @@ void SensorManager::handleSofEvent(EventData eventData) { SofEventInfo info; info.sequence = eventData.data.sync.sequence; - info.timestamp = ((long)eventData.data.sync.timestamp.tv_sec) * 1000000 - + eventData.data.sync.timestamp.tv_usec; + info.timestamp = ((long)eventData.data.sync.timestamp.tv_sec) * 
1000000 + + eventData.data.sync.timestamp.tv_usec; if (mSofEventInfo.size() >= kMaxSofEventInfo) { mSofEventInfo.erase(mSofEventInfo.begin()); } @@ -109,7 +108,8 @@ uint64_t SensorManager::getSofTimestamp(int64_t sequence) { // HDR_FEATURE_S int SensorManager::convertTuningModeToWdrMode(TuningMode tuningMode) { - return ((tuningMode == TUNING_MODE_VIDEO_HDR) || (tuningMode == TUNING_MODE_VIDEO_HDR2)) ? 1 : 0; + return ((tuningMode == TUNING_MODE_VIDEO_HDR) || (tuningMode == TUNING_MODE_VIDEO_HDR2)) ? 1 : + 0; } void SensorManager::handleSensorModeSwitch(int64_t sequence) { @@ -147,7 +147,7 @@ int SensorManager::setWdrMode(TuningMode tuningMode, int64_t sequence) { if (mWdrModeSetting.tuningMode != tuningMode) { // Save WDR mode and update this mode to driver in SOF event handler. - //So we know which frame is corrupted and we can skip the corrupted frames. + // So we know which frame is corrupted and we can skip the corrupted frames. LOG2("@%s, tuningMode %d", sequence, __func__, tuningMode); mWdrModeSetting.tuningMode = tuningMode; mWdrModeSetting.sequence = sequence; @@ -195,8 +195,8 @@ int SensorManager::getCurrentExposureAppliedDelay() { uint32_t SensorManager::updateSensorExposure(SensorExpGroup sensorExposures, int64_t applyingSeq) { AutoMutex l(mLock); - int64_t effectSeq = mLastSofSequence < 0 ? 0 : \ - mLastSofSequence + PlatformData::getExposureLag(mCameraId); + int64_t effectSeq = + mLastSofSequence < 0 ? 
0 : mLastSofSequence + PlatformData::getExposureLag(mCameraId); if (sensorExposures.empty()) { LOGW("%s: No exposure parameter", __func__); @@ -250,19 +250,18 @@ uint32_t SensorManager::updateSensorExposure(SensorExpGroup sensorExposures, int mSensorHwCtrl->setDigitalGains(digitalGains); } - LOG2("@%s: effectSeq %ld, applyingSeq %ld", mLastSofSequence, __func__, - effectSeq, applyingSeq); + LOG2("@%s: effectSeq %ld, applyingSeq %ld", mLastSofSequence, __func__, effectSeq, + applyingSeq); return ((uint32_t)effectSeq); } // CRL_MODULE_S -int SensorManager::setFrameRate(float fps) -{ +int SensorManager::setFrameRate(float fps) { return mSensorHwCtrl->setFrameRate(fps); } // CRL_MODULE_E -int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams, - ia_aiq_exposure_sensor_descriptor &sensorDescriptor) { +int SensorManager::getSensorInfo(ia_aiq_frame_params& frameParams, + ia_aiq_exposure_sensor_descriptor& sensorDescriptor) { SensorFrameParams sensorFrameParams; CLEAR(sensorFrameParams); @@ -272,37 +271,36 @@ int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams, } if (!PlatformData::isIsysEnabled(mCameraId)) { - vector res; + vector res; PlatformData::getSupportedISysSizes(mCameraId, res); CheckAndLogError(res.empty(), BAD_VALUE, "Supported ISYS resolutions are not configured."); // In none-ISYS cases, only take 30 fps into account. 
int fps = 30; float freq = res[0].width * res[0].height * fps / 1000000; - sensorDescriptor = {freq, static_cast(res[0].width), - static_cast(res[0].height), 24, 0, - static_cast(res[0].width), 6, 0}; + sensorDescriptor = {freq, + static_cast(res[0].width), + static_cast(res[0].height), + 24, + 0, + static_cast(res[0].width), + 6, + 0}; LOG2("freq %f, width %d, height %d", freq, res[0].width, res[0].height); return OK; } ret |= getSensorModeData(sensorDescriptor); - LOG3("ia_aiq_frame_params=[%d, %d, %d, %d, %d, %d, %d, %d]", - frameParams.horizontal_crop_offset, - frameParams.vertical_crop_offset, - frameParams.cropped_image_height, - frameParams.cropped_image_width, - frameParams.horizontal_scaling_numerator, - frameParams.horizontal_scaling_denominator, - frameParams.vertical_scaling_numerator, + LOG3("ia_aiq_frame_params=[%d, %d, %d, %d, %d, %d, %d, %d]", frameParams.horizontal_crop_offset, + frameParams.vertical_crop_offset, frameParams.cropped_image_height, + frameParams.cropped_image_width, frameParams.horizontal_scaling_numerator, + frameParams.horizontal_scaling_denominator, frameParams.vertical_scaling_numerator, frameParams.vertical_scaling_denominator); LOG3("ia_aiq_exposure_sensor_descriptor=[%f, %d, %d, %d, %d, %d, %d, %d]", - sensorDescriptor.pixel_clock_freq_mhz, - sensorDescriptor.pixel_periods_per_line, - sensorDescriptor.line_periods_per_field, - sensorDescriptor.line_periods_vertical_blanking, + sensorDescriptor.pixel_clock_freq_mhz, sensorDescriptor.pixel_periods_per_line, + sensorDescriptor.line_periods_per_field, sensorDescriptor.line_periods_vertical_blanking, sensorDescriptor.coarse_integration_time_min, sensorDescriptor.coarse_integration_time_max_margin, sensorDescriptor.fine_integration_time_min, @@ -318,7 +316,7 @@ int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams, */ int SensorManager::getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorData) { int pixel = 0; - int status = mSensorHwCtrl->getPixelRate(pixel); + 
int status = mSensorHwCtrl->getPixelRate(pixel); CheckAndLogError(status != OK, status, "Failed to get pixel clock ret:%d", status); sensorData.pixel_clock_freq_mhz = (float)pixel / 1000000; @@ -334,7 +332,8 @@ int SensorManager::getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorDa sensorData.line_periods_per_field = CLIP(line_periods_per_field, USHRT_MAX, 0); int coarse_int_time_min, integration_step = 0, integration_max = 0; - status = mSensorHwCtrl->getExposureRange(coarse_int_time_min, integration_max, integration_step); + status = + mSensorHwCtrl->getExposureRange(coarse_int_time_min, integration_max, integration_step); CheckAndLogError(status != OK, status, "Failed to get Exposure Range ret:%d", status); sensorData.coarse_integration_time_min = CLIP(coarse_int_time_min, USHRT_MAX, 0); diff --git a/src/3a/SensorManager.h b/src/3a/SensorManager.h index 01d9b984..8f9d55b2 100644 --- a/src/3a/SensorManager.h +++ b/src/3a/SensorManager.h @@ -46,7 +46,7 @@ typedef struct { uint64_t timestamp; } SofEventInfo; -typedef std::vector SensorExpGroup; +typedef std::vector SensorExpGroup; /* * \class SensorManager * @@ -54,17 +54,16 @@ typedef std::vector SensorExpGroup; * and get some sensor info. 
*/ class SensorManager { - -public: - SensorManager(int cameraId, SensorHwCtrl *sensorHw); + public: + SensorManager(int cameraId, SensorHwCtrl* sensorHw); ~SensorManager(); void reset(); void handleSofEvent(EventData eventData); /* sensorExposures are exposure results, applyingSeq is the sequence to apply results */ uint32_t updateSensorExposure(SensorExpGroup sensorExposures, int64_t applyingSeq); - int getSensorInfo(ia_aiq_frame_params &frameParams, - ia_aiq_exposure_sensor_descriptor &sensorDescriptor); + int getSensorInfo(ia_aiq_frame_params& frameParams, + ia_aiq_exposure_sensor_descriptor& sensorDescriptor); // HDR_FEATURE_S int setWdrMode(TuningMode tuningMode, int64_t sequence); @@ -76,7 +75,8 @@ class SensorManager { // CRL_MODULE_E int getCurrentExposureAppliedDelay(); uint64_t getSofTimestamp(int64_t sequence); -private: + + private: DISALLOW_COPY_AND_ASSIGN(SensorManager); void handleSensorExposure(); @@ -86,15 +86,15 @@ class SensorManager { // HDR_FEATURE_E int getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorData); -private: + private: static const int kMaxSensorExposures = 10; static const int kMaxSofEventInfo = 10; int mCameraId; - SensorHwCtrl *mSensorHwCtrl; + SensorHwCtrl* mSensorHwCtrl; // HDR_FEATURE_S - bool mModeSwitched; // Whether the TuningMode get updated + bool mModeSwitched; // Whether the TuningMode get updated WdrModeSetting mWdrModeSetting; // HDR_FEATURE_E @@ -103,7 +103,7 @@ class SensorManager { // Guard for SensorManager public API. 
Mutex mLock; - int mAnalogGainDelay; // Analog gain delay comparing exposure + int mAnalogGainDelay; // Analog gain delay comparing exposure int mDigitalGainDelay; // Digital gain delay comparing exposure // fisrt: sequence id, second: analog gain vector std::map> mAnalogGainMap; diff --git a/src/3a/intel3a/Intel3AParameter.cpp b/src/3a/intel3a/Intel3AParameter.cpp index 7f8ffa7d..e370b554 100644 --- a/src/3a/intel3a/Intel3AParameter.cpp +++ b/src/3a/intel3a/Intel3AParameter.cpp @@ -429,7 +429,7 @@ void Intel3AParameter::updateAeParameter(const aiq_parameter_t& param) { CLEAR(mAeParams.manual_total_target_exposure); // Ignore TET in manual exposure case if (param.totalExposureTarget > 0 && param.manualExpTimeUs <= 0 && param.manualIso <= 0) { - camera_range_t range = { -1, -1 }; + camera_range_t range = {-1, -1}; int ret = PlatformData::getSupportAeExposureTimeRange(mCameraId, param.sceneMode, range); int64_t tet = param.totalExposureTarget; if (ret == OK && mCMC.base_iso > 0) { @@ -451,10 +451,10 @@ void Intel3AParameter::updatePaResult(cca::cca_pa_params* paResult) { if (!mUseManualColorMatrix) return; if (VALID_COLOR_GAINS(mColorGains.color_gains_rggb)) { - paResult->color_gains.r = mColorGains.color_gains_rggb[0]; + paResult->color_gains.r = mColorGains.color_gains_rggb[0]; paResult->color_gains.gr = mColorGains.color_gains_rggb[1]; paResult->color_gains.gb = mColorGains.color_gains_rggb[2]; - paResult->color_gains.b = mColorGains.color_gains_rggb[3]; + paResult->color_gains.b = mColorGains.color_gains_rggb[3]; } // Override color_conversion_matrix and color_gains @@ -636,6 +636,7 @@ void Intel3AParameter::updateAfParameter(const aiq_parameter_t& param) { // Region mAfParams.focus_rect = {}; + mAfParams.focus_metering_mode = ia_aiq_af_metering_mode_auto; if (!param.afRegions.empty()) { // Current only one AF metering window is supported, so use the latest one camera_window_t window = param.afRegions.back(); @@ -643,6 +644,7 @@ void 
Intel3AParameter::updateAfParameter(const aiq_parameter_t& param) { camera_coordinate_system_t frameCoord = {0, 0, param.resolution.width, param.resolution.height}; window = AiqUtils::convertToIaWindow(frameCoord, window); + mAfParams.focus_metering_mode = ia_aiq_af_metering_mode_touch; mAfParams.focus_rect = {window.left, window.top, window.right, window.bottom}; } } diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 7969db98..ae13e2ee 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -21,6 +21,7 @@ add_subdirectory(hal) add_subdirectory(isp_control) # ISP_CONTROL_E add_subdirectory(iutils) +add_subdirectory(scheduler) add_subdirectory(metadata) add_subdirectory(platformdata) add_subdirectory(v4l2) diff --git a/src/core/BufferQueue.cpp b/src/core/BufferQueue.cpp index 0951ded1..5d514df3 100644 --- a/src/core/BufferQueue.cpp +++ b/src/core/BufferQueue.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -151,6 +151,24 @@ void BufferQueue::getFrameInfo(std::map& inputInfo, outputInfo = mOutputFrameInfo; } +bool BufferQueue::waitBufferQueue(ConditionLock& lock, std::map& queue, + int64_t timeout) { + LOG2("@%s waiting buffers", __func__); + for (auto& bufQ : queue) { + if (bufQ.second.empty() && timeout > 0) { + // Thread was stopped during wait + if (!mThreadRunning) { + LOG1("@%s: inactive while waiting for buffers", __func__); + return false; + } + mFrameAvailableSignal.waitRelative(lock, timeout * SLOWLY_MULTIPLIER); + } + if (bufQ.second.empty()) return false; + } + + return true; +} + int BufferQueue::waitFreeBuffersInQueue(ConditionLock& lock, std::map >& cInBuffer, std::map >& cOutBuffer, diff --git a/src/core/BufferQueue.h b/src/core/BufferQueue.h index 4d499138..66dd492b 100644 --- a/src/core/BufferQueue.h +++ b/src/core/BufferQueue.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -144,6 +144,14 @@ class BufferQueue : public BufferConsumer, public BufferProducer, public EventLi * \brief Clear and initialize input and output buffer queues. */ void clearBufferQueues(); + /** + * \brief Wait and check if queue is not empty until time out. + * + * No waiting if timeout value is zero + */ + bool waitBufferQueue(ConditionLock& lock, + std::map& queue, + int64_t timeout); /** * \brief Wait for available input and output buffers. 
* diff --git a/src/core/CameraDevice.cpp b/src/core/CameraDevice.cpp index 1f693a2a..d5496d92 100644 --- a/src/core/CameraDevice.cpp +++ b/src/core/CameraDevice.cpp @@ -68,8 +68,7 @@ CameraDevice::CameraDevice(int cameraId) mLensCtrl = new LensHw(mCameraId); mSensorCtrl = SensorHwCtrl::createSensorCtrl(mCameraId); - m3AControl = - I3AControlFactory::createI3AControl(mCameraId, mSensorCtrl, mLensCtrl); + m3AControl = I3AControlFactory::createI3AControl(mCameraId, mSensorCtrl, mLensCtrl); mRequestThread = new RequestThread(mCameraId, m3AControl, mParamGenerator); mRequestThread->registerListener(EVENT_PROCESS_REQUEST, this); @@ -250,6 +249,7 @@ void CameraDevice::bindListeners() { } } + mProducer->registerListener(EVENT_ISYS_ERROR, this); if (mPerframeControlSupport || !PlatformData::isIsysEnabled(mCameraId)) { mProcessors.back()->registerListener(EVENT_PSYS_FRAME, mRequestThread); } else { @@ -268,6 +268,12 @@ void CameraDevice::bindListeners() { mProducer->registerListener(EVENT_ISYS_SOF, mRequestThread); } // FILE_SOURCE_E + + // INTEL_DVS_S + auto dvsListener = m3AControl->getDVSEventListener(); + for (auto lis : dvsListener) + for (auto& item : mProcessors) item->registerListener(EVENT_DVS_READY, lis); + // INTEL_DVS_E } void CameraDevice::unbindListeners() { @@ -316,6 +322,7 @@ void CameraDevice::unbindListeners() { mProcessors.front()->removeListener(EVENT_REQUEST_METADATA_READY, this); } + mProducer->removeListener(EVENT_ISYS_ERROR, this); if (mPerframeControlSupport || !PlatformData::isIsysEnabled(mCameraId)) { mProcessors.back()->removeListener(EVENT_PSYS_FRAME, mRequestThread); } else { @@ -329,6 +336,12 @@ void CameraDevice::unbindListeners() { mProducer->removeListener(EVENT_ISYS_SOF, mRequestThread); } // FILE_SOURCE_E + + // INTEL_DVS_S + auto dvsListener = m3AControl->getDVSEventListener(); + for (auto lis : dvsListener) + for (auto& item : mProcessors) item->removeListener(EVENT_DVS_READY, lis); + // INTEL_DVS_E } int 
CameraDevice::configureInput(const stream_t* inputConfig) { @@ -877,12 +890,22 @@ int CameraDevice::getParameters(Parameters& param, int64_t sequence) { LOG2("@%s", mCameraId, sequence, __func__); AutoMutex m(mDeviceLock); +#ifdef CAL_BUILD if (sequence >= 0 && mState != DEVICE_STOP) { // fetch target parameter and results return mParamGenerator->getParameters(sequence, ¶m); } param = mParameter; +#else + param = mParameter; + Parameters nParam; + if (mState != DEVICE_STOP) { + // fetch target parameter and results + mParamGenerator->getParameters(sequence, &nParam, false); + } + param.merge(nParam); +#endif for (auto& item : mProcessors) { item->getParameters(param); @@ -1019,14 +1042,9 @@ void CameraDevice::handleEvent(EventData eventData) { case EVENT_PSYS_REQUEST_BUF_READY: { if (mCallback) { camera_msg_data_t data = {CAMERA_ISP_BUF_READY, {}}; - int32_t userRequestId = 0; - int ret = mParamGenerator->getUserRequestId(eventData.data.requestReady.sequence, - userRequestId); - CheckAndLogError(ret != OK, VOID_VALUE, "failed to find request id, seq %ld", - eventData.data.requestReady.sequence); data.data.buffer_ready.timestamp = eventData.data.requestReady.timestamp; - data.data.buffer_ready.frameNumber = static_cast(userRequestId); + data.data.buffer_ready.frameNumber = eventData.data.requestReady.requestId; mCallback->notify(mCallback, data); PlatformData::updateMakernoteTimeStamp(mCameraId, eventData.data.requestReady.sequence, @@ -1038,14 +1056,16 @@ void CameraDevice::handleEvent(EventData eventData) { case EVENT_REQUEST_METADATA_READY: { if (mCallback) { camera_msg_data_t data = {CAMERA_METADATA_READY, {}}; - int32_t userRequestId = 0; - int ret = mParamGenerator->getUserRequestId(eventData.data.requestReady.sequence, - userRequestId); - CheckAndLogError(ret != OK, VOID_VALUE, "failed to find request id, seq %ld", - eventData.data.requestReady.sequence); data.data.metadata_ready.sequence = eventData.data.requestReady.sequence; - 
data.data.metadata_ready.frameNumber = static_cast(userRequestId); + data.data.metadata_ready.frameNumber = eventData.data.requestReady.requestId; + mCallback->notify(mCallback, data); + } + break; + } + case EVENT_ISYS_ERROR: { + if (mCallback) { + camera_msg_data_t data = {CAMERA_DEVICE_ERROR, {}}; mCallback->notify(mCallback, data); } break; diff --git a/src/core/CameraEventType.h b/src/core/CameraEventType.h index a803df9f..240cb530 100644 --- a/src/core/CameraEventType.h +++ b/src/core/CameraEventType.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -38,6 +38,10 @@ enum EventType { EVENT_FRAME_AVAILABLE, EVENT_PSYS_REQUEST_BUF_READY, EVENT_REQUEST_METADATA_READY, + // INTEL_DVS_S + EVENT_DVS_READY, + // INTEL_DVS_E + EVENT_ISYS_ERROR, }; struct EventDataStatsReady { @@ -82,8 +86,15 @@ struct EventFrameAvailable { struct EventRequestReady { int64_t timestamp; int64_t sequence; + uint32_t requestId; }; +// INTEL_DVS_S +struct EventDVSRunReady { + int streamId; +}; +// INTEL_DVS_E + struct EventData { EventData() : type(EVENT_ISYS_SOF), pipeType(-1) { CLEAR(data); } @@ -98,7 +109,11 @@ struct EventData { EventRequestData request; EventConfigData config; EventFrameAvailable frameDone; - EventRequestReady requestReady; // use for returning metadata and shutter event + // use for returning metadata and shutter event + EventRequestReady requestReady; + // INTEL_DVS_S + EventDVSRunReady dvsRunReady; + // INTEL_DVS_E } data; }; diff --git a/src/core/CameraStream.cpp b/src/core/CameraStream.cpp index 74ee71ca..8e982db4 100644 --- a/src/core/CameraStream.cpp +++ b/src/core/CameraStream.cpp @@ -118,8 +118,8 @@ int CameraStream::qbuf(camera_buffer_t* ubuffer, int64_t sequence) { shared_ptr camBuffer = userBufferToCameraBuffer(ubuffer); if (camBuffer) { 
camBuffer->setSettingSequence(sequence); - LOG2("@%s, mStreamId:%d, CameraBuffer:%p for port:%d, ubuffer:%p, addr:%p", - mCameraId, __func__, mStreamId, camBuffer.get(), mPort, ubuffer, ubuffer->addr); + LOG2("@%s, mStreamId:%d, CameraBuffer:%p for port:%d, ubuffer:%p, addr:%p", mCameraId, + __func__, mStreamId, camBuffer.get(), mPort, ubuffer, ubuffer->addr); } int ret = BAD_VALUE; diff --git a/src/core/CaptureUnit.cpp b/src/core/CaptureUnit.cpp index fdef76cc..5240977d 100644 --- a/src/core/CaptureUnit.cpp +++ b/src/core/CaptureUnit.cpp @@ -379,13 +379,15 @@ int CaptureUnit::poll() { PERF_CAMERA_ATRACE(); int ret = 0; const int poll_timeout_count = 10; - const int poll_timeout = gSlowlyRunRatio ? (gSlowlyRunRatio * 1000000) : 1000; + // Normally set the timeout threshold to 1s + const int poll_timeout = gSlowlyRunRatio ? (gSlowlyRunRatio * 100000) : 1000; LOG2("%s", mCameraId, __func__); CheckAndLogError((mState != CAPTURE_CONFIGURE && mState != CAPTURE_START), INVALID_OPERATION, "@%s: poll buffer in wrong state %d", __func__, mState); - int timeOutCount = poll_timeout_count; + int timeOutCount = (PlatformData::getMaxIsysTimeout() > 0) ? PlatformData::getMaxIsysTimeout() : + poll_timeout_count; std::vector pollDevs, readyDevices; for (const auto& device : mDevices) { pollDevs.push_back(device->getV4l2Device()); @@ -403,8 +405,6 @@ int CaptureUnit::poll() { V4L2DevicePoller poller{pollDevs, mFlushFd[0]}; ret = poller.Poll(poll_timeout, POLLPRI | POLLIN | POLLOUT | POLLERR, &readyDevices); - - LOG2("@%s: automation checkpoint: flag: poll_buffer, ret:%d", __func__, ret); } // In case poll error after stream off @@ -415,7 +415,20 @@ int CaptureUnit::poll() { } CheckAndLogError(ret < 0, UNKNOWN_ERROR, "%s: Poll error, ret:%d", __func__, ret); if (ret == 0) { - LOG1("%s, timeout happens, wait recovery", mCameraId, __func__); +#ifdef CAL_BUILD + LOGI("%s, timeout happens, buffer in device: %d. 
wait recovery", mCameraId, __func__, + mDevices.front()->getBufferNumInDevice()); +#else + LOG1("%s, timeout happens, buffer in device: %d. wait recovery", mCameraId, __func__, + mDevices.front()->getBufferNumInDevice()); +#endif + if (PlatformData::getMaxIsysTimeout() > 0 && mDevices.front()->getBufferNumInDevice() > 0) { + EventData errorData; + errorData.type = EVENT_ISYS_ERROR; + errorData.buffer = nullptr; + notifyListeners(errorData); + } + return OK; } @@ -461,11 +474,13 @@ void CaptureUnit::registerListener(EventType eventType, EventListener* eventList for (auto device : mDevices) { device->registerListener(eventType, eventListener); } + if (eventType == EVENT_ISYS_ERROR) EventSource::registerListener(eventType, eventListener); } void CaptureUnit::removeListener(EventType eventType, EventListener* eventListener) { for (auto device : mDevices) { device->removeListener(eventType, eventListener); } + if (eventType == EVENT_ISYS_ERROR) EventSource::removeListener(eventType, eventListener); } } // namespace icamera diff --git a/src/core/DeviceBase.cpp b/src/core/DeviceBase.cpp index d0c88b2e..c7808757 100644 --- a/src/core/DeviceBase.cpp +++ b/src/core/DeviceBase.cpp @@ -389,9 +389,8 @@ int MainDevice::onDequeueBuffer(shared_ptr buffer) { if (mNeedSkipFrame) return OK; - LOG2("@%s, field:%d, timestamp: sec=%ld, usec=%ld", buffer->getSequence(), - __func__, buffer->getField(), buffer->getTimestamp().tv_sec, - buffer->getTimestamp().tv_usec); + LOG2("@%s, field:%d, timestamp: sec=%ld, usec=%ld", buffer->getSequence(), __func__, + buffer->getField(), buffer->getTimestamp().tv_sec, buffer->getTimestamp().tv_usec); for (auto& consumer : mConsumers) { consumer->onFrameAvailable(mPort, buffer); diff --git a/src/core/DeviceBase.h b/src/core/DeviceBase.h index 052ce3fe..e1aae848 100644 --- a/src/core/DeviceBase.h +++ b/src/core/DeviceBase.h @@ -93,8 +93,9 @@ class DeviceBase : public EventSource { /** * Pre-process the buffer which to be queued to the device. 
*/ - virtual int onQueueBuffer(int64_t sequence, - std::shared_ptr& buffer) { return OK; } + virtual int onQueueBuffer(int64_t sequence, std::shared_ptr& buffer) { + return OK; + } /** * Post-process the buffer after it's dequeued from the device. @@ -127,10 +128,10 @@ class DeviceBase : public EventSource { VideoNodeType mNodeType; VideoNodeDirection mNodeDirection; const char* mName; - V4L2VideoNode* mDevice; // The device used to queue/dequeue buffers. - int64_t mLatestSequence; // Track the latest bufffer sequence from driver. - bool mNeedSkipFrame; // True if the frame/buffer needs to be skipped. - int mFrameSkipNum; // How many frames need to be skipped after stream on. + V4L2VideoNode* mDevice; // The device used to queue/dequeue buffers. + int64_t mLatestSequence; // Track the latest bufffer sequence from driver. + bool mNeedSkipFrame; // True if the frame/buffer needs to be skipped. + int mFrameSkipNum; // How many frames need to be skipped after stream on. DeviceCallback* mDeviceCB; std::set mConsumers; diff --git a/src/core/FileSource.cpp b/src/core/FileSource.cpp index 3b6b10a5..4b58d7b0 100644 --- a/src/core/FileSource.cpp +++ b/src/core/FileSource.cpp @@ -263,7 +263,6 @@ void FileSource::fillFrameBuffer(string fileName, shared_ptr& buff void FileSource::fillFrameBuffer(shared_ptr& buffer) { string fileName; - if (mInjectionWay == USING_CONFIG_FILE) { FileSourceProfile profile(mInjectedFile); fileName = profile.getFrameFile(mCameraId, mSequence); diff --git a/src/core/IspParamAdaptor.cpp b/src/core/IspParamAdaptor.cpp index d4163ac4..7008a004 100644 --- a/src/core/IspParamAdaptor.cpp +++ b/src/core/IspParamAdaptor.cpp @@ -40,32 +40,28 @@ #include "ia_pal_types_isp_parameters_autogen.h" #include "ia_pal_types_isp.h" - namespace icamera { -IspParamAdaptor::IspParamAdaptor(int cameraId) : - mIspAdaptorState(ISP_ADAPTOR_NOT_INIT), - mCameraId(cameraId), - mTuningMode(TUNING_MODE_VIDEO), - mIpuOutputFormat(V4L2_PIX_FMT_NV12), - mGraphConfig(nullptr), - 
mIntelCca(nullptr), - mGammaTmOffset(-1) { +IspParamAdaptor::IspParamAdaptor(int cameraId) + : mIspAdaptorState(ISP_ADAPTOR_NOT_INIT), + mCameraId(cameraId), + mTuningMode(TUNING_MODE_VIDEO), + mIpuOutputFormat(V4L2_PIX_FMT_NV12), + mGraphConfig(nullptr), + mIntelCca(nullptr), + mGammaTmOffset(-1) { LOG1("@%s", mCameraId, __func__); CLEAR(mLastPalDataForVideoPipe); - PalRecord palRecordArray[] = { - { ia_pal_uuid_isp_call_info, -1 }, - { ia_pal_uuid_isp_bnlm_3_2, -1 }, - { ia_pal_uuid_isp_lsc_1_1, -1 } - }; + PalRecord palRecordArray[] = {{ia_pal_uuid_isp_call_info, -1}, + {ia_pal_uuid_isp_bnlm_3_2, -1}, + {ia_pal_uuid_isp_lsc_1_1, -1}}; for (uint32_t i = 0; i < sizeof(palRecordArray) / sizeof(PalRecord); i++) { mPalRecords.push_back(palRecordArray[i]); } } -IspParamAdaptor::~IspParamAdaptor() { -} +IspParamAdaptor::~IspParamAdaptor() {} int IspParamAdaptor::init() { PERF_CAMERA_ATRACE(); @@ -95,15 +91,15 @@ int IspParamAdaptor::deinit() { return OK; } -int IspParamAdaptor::deepCopyProgramGroup(const ia_isp_bxt_program_group *pgPtr, - cca::cca_program_group *programGroup) { +int IspParamAdaptor::deepCopyProgramGroup(const ia_isp_bxt_program_group* pgPtr, + cca::cca_program_group* programGroup) { CheckAndLogError(!programGroup, UNKNOWN_ERROR, "%s, the programGroup is nullptr", __func__); CheckAndLogError(pgPtr->kernel_count > cca::MAX_KERNEL_NUMBERS_IN_PIPE, NO_MEMORY, "%s, memory for program group is too small, kernel count: %d", __func__, pgPtr->kernel_count); programGroup->base = *pgPtr; - uint32_t &kernelCnt = programGroup->base.kernel_count; + uint32_t& kernelCnt = programGroup->base.kernel_count; kernelCnt = 0; for (unsigned int i = 0; i < pgPtr->kernel_count; ++i) { @@ -159,18 +155,18 @@ int IspParamAdaptor::getDataFromProgramGroup() { CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the PG streamIds"); for (auto id : streamIds) { - ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(id); - CheckAndLogError(!pgPtr, UNKNOWN_ERROR, "%s, 
Failed to get the programGroup for streamId: %d", - __func__, id); + ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(id); + CheckAndLogError(!pgPtr, UNKNOWN_ERROR, + "%s, Failed to get the programGroup for streamId: %d", __func__, id); cca::cca_program_group programGroup = {}; ret = deepCopyProgramGroup(pgPtr, &programGroup); - CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to convert cca programGroup. streamId %d", - __func__, id); + CheckAndLogError(ret != OK, UNKNOWN_ERROR, + "%s, Failed to convert cca programGroup. streamId %d", __func__, id); mStreamIdToPGOutSizeMap[id] = mIntelCca->getPalDataSize(programGroup); ia_isp_bxt_gdc_limits mbrData; - ret = mGraphConfig->getMBRData(id, &mbrData); + ret = mGraphConfig->getMBRData(id, &mbrData); if (ret == OK) { mStreamIdToMbrDataMap[id] = mbrData; LOG2("get mbr data for stream:%d:%f,%f,%f,%f", id, mbrData.rectilinear.zoom, @@ -181,7 +177,7 @@ int IspParamAdaptor::getDataFromProgramGroup() { return OK; } -void IspParamAdaptor::initInputParams(cca::cca_pal_input_params *params) { +void IspParamAdaptor::initInputParams(cca::cca_pal_input_params* params) { CheckAndLogError(params == nullptr, VOID_VALUE, "NULL input parameter"); params->ee_setting.feature_level = ia_isp_feature_level_low; @@ -202,8 +198,8 @@ void IspParamAdaptor::initInputParams(cca::cca_pal_input_params *params) { * \return OK: everything went ok. * \return UNKNOWN_ERROR: First run of ISP adaptation failed. 
*/ -int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode, - TuningMode tuningMode, int ipuOutputFormat) { +int IspParamAdaptor::configure(const stream_t& stream, ConfigMode configMode, TuningMode tuningMode, + int ipuOutputFormat) { HAL_TRACE_CALL(CAMERA_DEBUG_LOG_LEVEL1); int ret = OK; @@ -223,10 +219,10 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode, mGammaTmOffset = -1; mIntelCca = IntelCca::getInstance(mCameraId, tuningMode); - CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr, tuningMode:%d", - __func__, mTuningMode); + CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr, tuningMode:%d", __func__, + mTuningMode); - IGraphConfigManager *gcm = IGraphConfigManager::getInstance(mCameraId); + IGraphConfigManager* gcm = IGraphConfigManager::getInstance(mCameraId); CheckAndLogError(!gcm, UNKNOWN_ERROR, "%s, Failed to get graph config manager for cameraId: %d", __func__, mCameraId); CheckAndLogError(!gcm->isGcConfigured(), UNKNOWN_ERROR, "%s, graph isn't configured", __func__); @@ -258,13 +254,13 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode, lardParam.isp_mode_index = ispTuningIndex; cca::cca_nvm tmpNvm = {}; - ia_err iaErr = mIntelCca->updateTuning(lardTag, lardParam, tmpNvm, - ispParamIt.first); + ia_err iaErr = + mIntelCca->updateTuning(lardTag, lardParam, tmpNvm, ispParamIt.first); CheckAndLogError(iaErr != ia_err_none, UNKNOWN_ERROR, - "%s, Failed to update isp tuning data. tuning_mode %d", - __func__, ispTuningIndex); - LOG2("%s, Update isp tuning data. tuning_mode:%d, streamId: %d,", - __func__, ispTuningIndex, ispParamIt.first); + "%s, Failed to update isp tuning data. tuning_mode %d", __func__, + ispTuningIndex); + LOG2("%s, Update isp tuning data. 
tuning_mode:%d, streamId: %d,", __func__, + ispTuningIndex, ispParamIt.first); } } } @@ -279,14 +275,15 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode, initInputParams(inputParams); inputParams->stream_id = ispParamIt.first; - ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(ispParamIt.first); + ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(ispParamIt.first); CheckAndLogError(!pgPtr, UNKNOWN_ERROR, - "%s, Failed to get the programGroup for streamId: %d", - __func__, ispParamIt.first); + "%s, Failed to get the programGroup for streamId: %d", __func__, + ispParamIt.first); ret = deepCopyProgramGroup(pgPtr, &(inputParams->program_group)); - CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to convert cca programGroup. streamId %d", - __func__, ispParamIt.first); + CheckAndLogError(ret != OK, UNKNOWN_ERROR, + "%s, Failed to convert cca programGroup. streamId %d", __func__, + ispParamIt.first); dumpProgramGroup(&inputParams->program_group.base); { @@ -314,8 +311,8 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode, int IspParamAdaptor::decodeStatsData(TuningMode tuningMode, std::shared_ptr statsBuffer, std::shared_ptr graphConfig) { - CheckAndLogError(mIspAdaptorState != ISP_ADAPTOR_CONFIGURED, - INVALID_OPERATION, "%s, wrong state %d", __func__, mIspAdaptorState); + CheckAndLogError(mIspAdaptorState != ISP_ADAPTOR_CONFIGURED, INVALID_OPERATION, + "%s, wrong state %d", __func__, mIspAdaptorState); CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__); int64_t sequence = statsBuffer->getSequence(); @@ -330,8 +327,8 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode, outStats->get_rgbs_stats = true; } - AiqResultStorage *aiqResultStorage = AiqResultStorage::getInstance(mCameraId); - AiqStatistics *aiqStatistics = aiqResultStorage->acquireAiqStatistics(); + AiqResultStorage* aiqResultStorage = AiqResultStorage::getInstance(mCameraId); + 
AiqStatistics* aiqStatistics = aiqResultStorage->acquireAiqStatistics(); aiqStatistics->mSequence = sequence; aiqStatistics->mTimestamp = TIMEVAL2USECS(statsBuffer->getTimestamp()); aiqStatistics->mTuningMode = tuningMode; @@ -344,7 +341,7 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode, // Pend stats decoding to running 3A if (aiqStatistics->mPendingDecode) return OK; - ia_binary_data *hwStatsData = (ia_binary_data *)(statsBuffer->getBufferAddr()); + ia_binary_data* hwStatsData = (ia_binary_data*)(statsBuffer->getBufferAddr()); if (CameraDump::isDumpTypeEnable(DUMP_PSYS_DECODED_STAT) && hwStatsData != nullptr) { BinParam_t bParam; bParam.bType = BIN_TYPE_GENERAL; @@ -369,29 +366,29 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode, return OK; } -void IspParamAdaptor::updateKernelToggles(cca::cca_program_group *programGroup) { - +void IspParamAdaptor::updateKernelToggles(cca::cca_program_group* programGroup) { if (!Log::isDebugLevelEnable(CAMERA_DEBUG_LOG_KERNEL_TOGGLE)) return; const char* ENABLED_KERNELS = "/tmp/enabledKernels"; const char* DISABLED_KERNELS = "/tmp/disabledKernels"; const int FLIE_CONT_MAX_LENGTH = 1024; - char enabledKernels[FLIE_CONT_MAX_LENGTH] = { 0 }; - char disabledKernels[FLIE_CONT_MAX_LENGTH] = { 0 }; + char enabledKernels[FLIE_CONT_MAX_LENGTH] = {0}; + char disabledKernels[FLIE_CONT_MAX_LENGTH] = {0}; - int enLen = CameraUtils::getFileContent(ENABLED_KERNELS, enabledKernels, FLIE_CONT_MAX_LENGTH - 1); - int disLen = CameraUtils::getFileContent(DISABLED_KERNELS, disabledKernels, FLIE_CONT_MAX_LENGTH - 1); + int enLen = + CameraUtils::getFileContent(ENABLED_KERNELS, enabledKernels, FLIE_CONT_MAX_LENGTH - 1); + int disLen = + CameraUtils::getFileContent(DISABLED_KERNELS, disabledKernels, FLIE_CONT_MAX_LENGTH - 1); if (enLen == 0 && disLen == 0) { LOG2("%s: no explicit kernel toggle.", __func__); return; } - LOG2("%s: enabled kernels: %s, disabled kernels %s", __func__, - enabledKernels, disabledKernels); + 
LOG2("%s: enabled kernels: %s, disabled kernels %s", __func__, enabledKernels, disabledKernels); for (unsigned int i = 0; i < programGroup->base.kernel_count; i++) { - ia_isp_bxt_run_kernels_t *curKernel = &(programGroup->base.run_kernels[i]); + ia_isp_bxt_run_kernels_t* curKernel = &(programGroup->base.run_kernels[i]); std::string curKernelUUID = std::to_string(curKernel->kernel_uuid); if (strstr(enabledKernels, curKernelUUID.c_str()) != nullptr) { @@ -409,18 +406,17 @@ void IspParamAdaptor::updateKernelToggles(cca::cca_program_group *programGroup) * So temporarily copy latest PAL data into PAL output buffer. */ void IspParamAdaptor::updatePalDataForVideoPipe(ia_binary_data dest) { - if (mLastPalDataForVideoPipe.data == nullptr || mLastPalDataForVideoPipe.size == 0) - return; + if (mLastPalDataForVideoPipe.data == nullptr || mLastPalDataForVideoPipe.size == 0) return; if (mPalRecords.empty()) return; - ia_pal_record_header *header = nullptr; + ia_pal_record_header* header = nullptr; char* src = static_cast(mLastPalDataForVideoPipe.data); // find uuid offset in saved PAL buffer if (mPalRecords[0].offset < 0) { uint32_t offset = 0; while (offset < mLastPalDataForVideoPipe.size) { - ia_pal_record_header *header = reinterpret_cast(src + offset); + ia_pal_record_header* header = reinterpret_cast(src + offset); // check if header is valid or not CheckWarning(header->uuid == 0 || header->size == 0, VOID_VALUE, "%s, source header info isn't correct", __func__); @@ -436,7 +432,7 @@ void IspParamAdaptor::updatePalDataForVideoPipe(ia_binary_data dest) { } char* destData = static_cast(dest.data); - ia_pal_record_header *headerSrc = nullptr; + ia_pal_record_header* headerSrc = nullptr; for (uint32_t i = 0; i < mPalRecords.size(); i++) { if (mPalRecords[i].offset >= 0) { // find source record header @@ -490,7 +486,7 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting if (streamId != -1 && it.first != streamId) continue; ia_binary_data binaryData 
= {}; - IspParameter *ispParam = &(it.second); + IspParameter* ispParam = &(it.second); auto dataIt = ispParam->mSequenceToDataMap.end(); { @@ -507,8 +503,8 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting "No PAL buf!"); binaryData = dataIt->second; - LOG2("@%s, Pal data buffer seq: %ld", settingSequence, - it.first, __func__, dataIt->first); + LOG2("@%s, Pal data buffer seq: %ld", settingSequence, it.first, + __func__, dataIt->first); } ia_isp_bxt_gdc_limits* mbrData = nullptr; @@ -521,7 +517,7 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting updatePalDataForVideoPipe(binaryData); } - ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(it.first); + ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(it.first); CheckAndLogError(!pgPtr, UNKNOWN_ERROR, "%s, Failed to get the programGroup for streamId: %d", __func__, it.first); @@ -575,11 +571,10 @@ ia_binary_data* IspParamAdaptor::getIpuParameter(int64_t sequence, int streamId) } } } else { - auto seqIt =ispParam.mSequenceToDataId.find(sequence); + auto seqIt = ispParam.mSequenceToDataId.find(sequence); if (seqIt != ispParam.mSequenceToDataId.end()) { auto dataIt = ispParam.mSequenceToDataMap.find(seqIt->second); - if (dataIt != ispParam.mSequenceToDataMap.end()) - binaryData = &(dataIt->second); + if (dataIt != ispParam.mSequenceToDataMap.end()) binaryData = &(dataIt->second); } } @@ -590,7 +585,7 @@ ia_binary_data* IspParamAdaptor::getIpuParameter(int64_t sequence, int streamId) return binaryData; } -int IspParamAdaptor::getPalOutputDataSize(const ia_isp_bxt_program_group *programGroup) { +int IspParamAdaptor::getPalOutputDataSize(const ia_isp_bxt_program_group* programGroup) { CheckAndLogError(programGroup == nullptr, 0, "Request programGroup is nullptr"); CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__); @@ -607,22 +602,21 @@ int IspParamAdaptor::allocateIspParamBuffers() { 
releaseIspParamBuffers(); for (int i = 0; i < ISP_PARAM_QUEUE_SIZE; i++) { - for (auto & pgMap : mStreamIdToPGOutSizeMap) { + for (auto& pgMap : mStreamIdToPGOutSizeMap) { ia_binary_data binaryData = {}; int size = pgMap.second; binaryData.size = size; binaryData.data = mIntelCca->allocMem(pgMap.first, "palData", i, size); CheckAndLogError(binaryData.data == nullptr, NO_MEMORY, "Faile to calloc PAL data"); - int64_t index = i * (-1) - 2; // default index list: -2, -3, -4, ... + int64_t index = i * (-1) - 2; // default index list: -2, -3, -4, ... std::pair p(index, binaryData); mStreamIdToIspParameterMap[pgMap.first].mSequenceToDataMap.insert(p); } } for (auto& pgMap : mStreamIdToPGOutSizeMap) { - cca::cca_pal_input_params* p = static_cast( - mIntelCca->allocMem(pgMap.first, "palData", ISP_PARAM_QUEUE_SIZE, - sizeof(cca::cca_pal_input_params))); + cca::cca_pal_input_params* p = static_cast(mIntelCca->allocMem( + pgMap.first, "palData", ISP_PARAM_QUEUE_SIZE, sizeof(cca::cca_pal_input_params))); CheckAndLogError(p == nullptr, NO_MEMORY, "Cannot alloc memory for cca_pal_input_params!"); CLEAR(*p); mStreamIdToPalInputParamsMap[pgMap.first] = p; @@ -649,8 +643,8 @@ void IspParamAdaptor::releaseIspParamBuffers() { mStreamIdToPalInputParamsMap.clear(); } -void IspParamAdaptor::applyMediaFormat(const AiqResult* aiqResult, - ia_media_format* mediaFormat, bool* useLinearGamma) { +void IspParamAdaptor::applyMediaFormat(const AiqResult* aiqResult, ia_media_format* mediaFormat, + bool* useLinearGamma) { CheckAndLogError(!mediaFormat || !aiqResult, VOID_VALUE, "mediaFormat or aiqResult is nullptr"); *mediaFormat = media_format_legacy; @@ -701,16 +695,18 @@ void IspParamAdaptor::applyCscMatrix(ia_isp_bxt_csc* cscMatrix) { } } -int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gdc_limits *mbrData, +int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group* pgPtr, ia_isp_bxt_gdc_limits* mbrData, const IspSettings* ispSettings, int64_t settingSequence, - 
ia_binary_data *binaryData, int streamId) { + ia_binary_data* binaryData, int streamId) { PERF_CAMERA_ATRACE(); CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__); - AiqResult* aiqResults = const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult(settingSequence)); + AiqResult* aiqResults = const_cast( + AiqResultStorage::getInstance(mCameraId)->getAiqResult(settingSequence)); if (aiqResults == nullptr) { LOGW("@%s: no result! use the latest instead", settingSequence, __func__); - aiqResults = const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult()); + aiqResults = + const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult()); CheckAndLogError((aiqResults == nullptr), INVALID_OPERATION, "Cannot find available aiq result."); } @@ -722,8 +718,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd bool useLinearGamma = false; applyMediaFormat(aiqResults, &inputParams->media_format, &useLinearGamma); - LOG2("%s, media format: 0x%x, gamma lut size: %d", __func__, - inputParams->media_format, aiqResults->mGbceResults.gamma_lut_size); + LOG2("%s, media format: 0x%x, gamma lut size: %d", __func__, inputParams->media_format, + aiqResults->mGbceResults.gamma_lut_size); if (inputParams->media_format == media_format_custom) { applyCscMatrix(&inputParams->csc_matrix); @@ -748,8 +744,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd case ia_pal_uuid_isp_tnr5_22: case ia_pal_uuid_isp_tnr5_25: inputParams->program_group.base.run_kernels[i].metadata[0] = aiqResults->mSequence; - LOG2("%s, ia_pal_uuid_isp_tnr5_2x frame count = %d", - __func__, inputParams->program_group.base.run_kernels[i].metadata[0]); + LOG2("%s, ia_pal_uuid_isp_tnr5_2x frame count = %d", __func__, + inputParams->program_group.base.run_kernels[i].metadata[0]); break; case ia_pal_uuid_isp_ofa_2_mp: case ia_pal_uuid_isp_ofa_2_dp: @@ -778,6 +774,7 @@ int 
IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd break; case ia_pal_uuid_isp_bxt_blc: case ia_pal_uuid_isp_b2i_sie_1_1: + case ia_pal_uuid_isp_gammatm_v3: if (aiqResults->mAiqParam.testPatternMode != TEST_PATTERN_OFF) { LOG2("%s: disable kernel(%d) in test pattern mode", __func__, inputParams->program_group.base.run_kernels[i].kernel_uuid); @@ -828,9 +825,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd inputParams->custom_controls.count = aiqResults->mCustomControls.count; uint32_t cnt = static_cast(inputParams->custom_controls.count); if (cnt > 0) { - CheckAndLogError(cnt > cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE, - UNKNOWN_ERROR, "%s, buffer for custom control[%d] is too small", - __func__, cnt); + CheckAndLogError(cnt > cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE, UNKNOWN_ERROR, + "%s, buffer for custom control[%d] is too small", __func__, cnt); MEMCPY_S(inputParams->custom_controls.parameters, cnt, aiqResults->mCustomControls.parameters, cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE); @@ -853,8 +849,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd // Fine-tune DG passed to ISP if partial ISP DG is needed. 
if (PlatformData::isUsingIspDigitalGain(mCameraId)) { - inputParams->manual_digital_gain = PlatformData::getIspDigitalGain(mCameraId, - aiqResults->mAeResults.exposures[0].exposure[0].digital_gain); + inputParams->manual_digital_gain = PlatformData::getIspDigitalGain( + mCameraId, aiqResults->mAeResults.exposures[0].exposure[0].digital_gain); } LOG2("%s: set digital gain for ULL pipe: %f", __func__, inputParams->manual_digital_gain); @@ -863,8 +859,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd inputParams->manual_digital_gain = aiqResults->mAeResults.exposures[0].exposure[0].digital_gain; - LOG2("%s: all digital gain is passed to ISP, DG(%ld): %f", __func__, - aiqResults->mSequence, aiqResults->mAeResults.exposures[0].exposure[0].digital_gain); + LOG2("%s: all digital gain is passed to ISP, DG(%ld): %f", __func__, aiqResults->mSequence, + aiqResults->mAeResults.exposures[0].exposure[0].digital_gain); } ia_err iaErr = ia_err_none; @@ -900,14 +896,13 @@ void IspParamAdaptor::updateResultFromAlgo(ia_binary_data* binaryData, int64_t s // update tone map result from pal algo if (aiqResults->mAiqParam.callbackTmCurve && aiqResults->mGbceResults.have_manual_settings == false) { - char* src = static_cast(binaryData->data); if (mGammaTmOffset < 0) { uint32_t offset = 0; bool foundRes = false; while (offset < binaryData->size) { - ia_pal_record_header *header = + ia_pal_record_header* header = reinterpret_cast(src + offset); if (header->uuid == ia_pal_uuid_isp_gammatm_v3) { LOG2("src uuid %d, offset %d, size %d", header->uuid, offset, header->size); @@ -920,27 +915,27 @@ void IspParamAdaptor::updateResultFromAlgo(ia_binary_data* binaryData, int64_t s mGammaTmOffset = offset; } - ia_pal_isp_gammatm_v3_t *TM = - reinterpret_cast(src + mGammaTmOffset + - ALIGN_8(sizeof(ia_pal_record_header))); - uint32_t tmSize = (reinterpret_cast(&(TM->prog_shift)) - - reinterpret_cast(TM->tm_lut_gen_lut)) / sizeof(int32_t); + ia_pal_isp_gammatm_v3_t* TM 
= reinterpret_cast( + src + mGammaTmOffset + ALIGN_8(sizeof(ia_pal_record_header))); + uint32_t tmSize = (reinterpret_cast(&(TM->prog_shift)) - + reinterpret_cast(TM->tm_lut_gen_lut)) / + sizeof(int32_t); if (aiqResults->mGbceResults.tone_map_lut_size == 0) { LOG2("%s, gbce running in bypass mode, reset to max value", __func__); aiqResults->mGbceResults.tone_map_lut_size = cca::MAX_TONE_MAP_LUT_SIZE; } - CheckAndLogError(tmSize < aiqResults->mGbceResults.tone_map_lut_size, - VOID_VALUE, "memory is mismatch to store tone map from algo"); + CheckAndLogError(tmSize < aiqResults->mGbceResults.tone_map_lut_size, VOID_VALUE, + "memory is mismatch to store tone map from algo"); - LOG2("%s, Tonemap Curve. enable: %d, prog_shift: %d, table size: %zu", - __func__, TM->enable, TM->prog_shift, tmSize); + LOG2("%s, Tonemap Curve. enable: %d, prog_shift: %d, table size: %zu", __func__, TM->enable, + TM->prog_shift, tmSize); const int shiftBase = 1 << TM->prog_shift; for (uint32_t i = 0; i < aiqResults->mGbceResults.tone_map_lut_size; i++) { - aiqResults->mGbceResults.tone_map_lut[i] = static_cast(TM->tm_lut_gen_lut[i]) / - shiftBase; + aiqResults->mGbceResults.tone_map_lut[i] = + static_cast(TM->tm_lut_gen_lut[i]) / shiftBase; } } } @@ -949,14 +944,14 @@ void IspParamAdaptor::dumpIspParameter(int streamId, int64_t sequence, ia_binary if (!CameraDump::isDumpTypeEnable(DUMP_PSYS_PAL)) return; BinParam_t bParam; - bParam.bType = BIN_TYPE_GENERAL; - bParam.mType = M_PSYS; + bParam.bType = BIN_TYPE_GENERAL; + bParam.mType = M_PSYS; bParam.sequence = sequence; - bParam.gParam.appendix = ("pal_" + std::to_string(streamId)).c_str();; + bParam.gParam.appendix = ("pal_" + std::to_string(streamId)).c_str(); CameraDump::dumpBinary(mCameraId, binaryData.data, binaryData.size, &bParam); } -void IspParamAdaptor::dumpProgramGroup(ia_isp_bxt_program_group *pgPtr) { +void IspParamAdaptor::dumpProgramGroup(ia_isp_bxt_program_group* pgPtr) { if 
(!Log::isLogTagEnabled(GET_FILE_SHIFT(IspParamAdaptor), CAMERA_DEBUG_LOG_LEVEL3)) return; LOG3("the kernel count: %d, run_kernels: %p", pgPtr->kernel_count, pgPtr->run_kernels); @@ -1000,37 +995,36 @@ void IspParamAdaptor::dumpCscMatrix(const ia_isp_bxt_csc* cscMatrix) { * 1, copy pal.bin file to local directory; * 2, define pal uuid in palRecordArray which are expected to be replaced. */ -void IspParamAdaptor::loadPalBinFile(ia_binary_data *binaryData) { +void IspParamAdaptor::loadPalBinFile(ia_binary_data* binaryData) { // Get file size struct stat fileStat; CLEAR(fileStat); - const char *fileName = "./pal.bin"; + const char* fileName = "./pal.bin"; int ret = stat(fileName, &fileStat); CheckWarning(ret != 0, VOID_VALUE, "no pal bin %s", fileName); FILE* fp = fopen(fileName, "rb"); - CheckWarning(fp == nullptr, VOID_VALUE, "Failed to open %s, err %s", fileName, - strerror(errno)); + CheckWarning(fp == nullptr, VOID_VALUE, "Failed to open %s, err %s", fileName, strerror(errno)); std::unique_ptr dataPtr(new char[fileStat.st_size]); size_t readSize = fread(dataPtr.get(), sizeof(char), fileStat.st_size, fp); fclose(fp); - CheckWarning(readSize != (size_t)fileStat.st_size, VOID_VALUE, - "Failed to read %s, err %s", fileName, strerror(errno)); + CheckWarning(readSize != (size_t)fileStat.st_size, VOID_VALUE, "Failed to read %s, err %s", + fileName, strerror(errno)); static PalRecord palRecordArray[] = { - { ia_pal_uuid_isp_bnlm_3_2, -1 }, - { ia_pal_uuid_isp_tnr_6_0, -1 }, + {ia_pal_uuid_isp_bnlm_3_2, -1}, + {ia_pal_uuid_isp_tnr_6_0, -1}, }; - ia_pal_record_header *header = nullptr; + ia_pal_record_header* header = nullptr; char* src = static_cast(dataPtr.get()); // find uuid offset in PAL bin if (palRecordArray[0].offset < 0) { uint32_t offset = 0; while (offset < readSize) { - ia_pal_record_header *header = reinterpret_cast(src + offset); + ia_pal_record_header* header = reinterpret_cast(src + offset); for (uint32_t i = 0; i < sizeof(palRecordArray) / 
sizeof(PalRecord); i++) { if (palRecordArray[i].offset < 0 && palRecordArray[i].uuid == header->uuid) { palRecordArray[i].offset = offset; @@ -1043,7 +1037,7 @@ void IspParamAdaptor::loadPalBinFile(ia_binary_data *binaryData) { } char* dest = static_cast(binaryData->data); - ia_pal_record_header *headerSrc = nullptr; + ia_pal_record_header* headerSrc = nullptr; for (uint32_t i = 0; i < sizeof(palRecordArray) / sizeof(PalRecord); i++) { if (palRecordArray[i].offset >= 0) { // find source record header @@ -1085,4 +1079,4 @@ uint32_t IspParamAdaptor::getRequestedStats() { return bitmap; } -} // namespace icamera +} // namespace icamera diff --git a/src/core/IspParamAdaptor.h b/src/core/IspParamAdaptor.h index f155fea9..22fde02f 100644 --- a/src/core/IspParamAdaptor.h +++ b/src/core/IspParamAdaptor.h @@ -65,32 +65,32 @@ namespace icamera { * 2. Run isp config */ class IspParamAdaptor { -public: + public: explicit IspParamAdaptor(int cameraId); virtual ~IspParamAdaptor(); int init(); int deinit(); - int configure(const stream_t &stream, ConfigMode configMode, TuningMode tuningMode, + int configure(const stream_t& stream, ConfigMode configMode, TuningMode tuningMode, int ipuOutputFormat = -1); - int decodeStatsData(TuningMode tuningMode, - std::shared_ptr statsBuffer, + int decodeStatsData(TuningMode tuningMode, std::shared_ptr statsBuffer, std::shared_ptr graphConfig = nullptr); - int runIspAdapt(const IspSettings* ispSettings, int64_t settingSequence = -1, int32_t streamId = -1); - //Get ISP param from mult-stream ISP param adaptation + int runIspAdapt(const IspSettings* ispSettings, int64_t settingSequence = -1, + int32_t streamId = -1); + // Get ISP param from mult-stream ISP param adaptation ia_binary_data* getIpuParameter(int64_t sequence = -1, int streamId = -1); int getPalOutputDataSize(const ia_isp_bxt_program_group* programGroup); -private: + private: DISALLOW_COPY_AND_ASSIGN(IspParamAdaptor); - int deepCopyProgramGroup(const ia_isp_bxt_program_group *pgPtr, - 
cca::cca_program_group *programGroup); + int deepCopyProgramGroup(const ia_isp_bxt_program_group* pgPtr, + cca::cca_program_group* programGroup); int getDataFromProgramGroup(); int initProgramGroupForAllStreams(ConfigMode configMode); - void initInputParams(cca::cca_pal_input_params *params); + void initInputParams(cca::cca_pal_input_params* params); void updatePalDataForVideoPipe(ia_binary_data dest); @@ -104,27 +104,27 @@ class IspParamAdaptor { // map from sequence to ia_binary_data std::multimap mSequenceToDataMap; }; - void updateIspParameterMap(IspParameter* ispParam, int64_t dataSeq, - int64_t settingSeq, ia_binary_data curIpuParam); - int runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gdc_limits* mbrData, + void updateIspParameterMap(IspParameter* ispParam, int64_t dataSeq, int64_t settingSeq, + ia_binary_data curIpuParam); + int runIspAdaptL(ia_isp_bxt_program_group* pgPtr, ia_isp_bxt_gdc_limits* mbrData, const IspSettings* ispSettings, int64_t settingSequence, - ia_binary_data *binaryData, int32_t streamId = -1); + ia_binary_data* binaryData, int32_t streamId = -1); - //Allocate memory for mIspParameters + // Allocate memory for mIspParameters int allocateIspParamBuffers(); - //Release memory for mIspParameters + // Release memory for mIspParameters void releaseIspParamBuffers(); // Dumping methods for debugging purposes. void dumpIspParameter(int streamId, int64_t sequence, ia_binary_data binaryData); // Enable or disable kernels according to environment variables for debug purpose. 
- void updateKernelToggles(cca::cca_program_group *programGroup); - void dumpProgramGroup(ia_isp_bxt_program_group *pgPtr); - void applyMediaFormat(const AiqResult* aiqResult, - ia_media_format* mediaFormat, bool* useLinearGamma); + void updateKernelToggles(cca::cca_program_group* programGroup); + void dumpProgramGroup(ia_isp_bxt_program_group* pgPtr); + void applyMediaFormat(const AiqResult* aiqResult, ia_media_format* mediaFormat, + bool* useLinearGamma); void dumpCscMatrix(const ia_isp_bxt_csc* cscMatrix); #ifdef PAL_DEBUG - void loadPalBinFile(ia_binary_data *binaryData); + void loadPalBinFile(ia_binary_data* binaryData); #endif void applyCscMatrix(ia_isp_bxt_csc* cscMatrix); void updateResultFromAlgo(ia_binary_data* binaryData, int64_t sequence); @@ -141,18 +141,18 @@ class IspParamAdaptor { TuningMode mTuningMode; int mIpuOutputFormat; - //Guard for IspParamAdaptor public API + // Guard for IspParamAdaptor public API Mutex mIspAdaptorLock; std::map mStreamIdToPGOutSizeMap; std::map mStreamIdToMbrDataMap; static const int ISP_PARAM_QUEUE_SIZE = MAX_SETTING_COUNT; - std::map mStreamIdToIspParameterMap; // map from stream id to IspParameter + std::map mStreamIdToIspParameterMap; // map from stream id to IspParameter ia_binary_data mLastPalDataForVideoPipe; - //Guard lock for ipu parameter + // Guard lock for ipu parameter Mutex mIpuParamLock; std::unordered_map mStreamIdToPalInputParamsMap; std::shared_ptr mGraphConfig; - IntelCca *mIntelCca; + IntelCca* mIntelCca; int mGammaTmOffset; struct PalRecord { @@ -161,4 +161,4 @@ class IspParamAdaptor { }; std::vector mPalRecords; // Save PAL offset info for overwriting PAL }; -} // namespace icamera +} // namespace icamera diff --git a/src/core/IspSettings.h b/src/core/IspSettings.h index 8049243b..f9ebc649 100644 --- a/src/core/IspSettings.h +++ b/src/core/IspSettings.h @@ -22,7 +22,7 @@ namespace icamera { -struct IspImageEnhancement{ +struct IspImageEnhancement { char manualSharpness; char manualBrightness; char 
manualContrast; @@ -38,12 +38,14 @@ struct IspSettings { IspImageEnhancement manualSettings; ia_binary_data* palOverride; // DOL_FEATURE_S - short vbp; // Used for DOL camera + short vbp; // Used for DOL camera // DOL_FEATURE_E float zoom; camera_mount_type_t sensorMountType; - IspSettings() { CLEAR(*this); zoom = 1.0f; } + IspSettings() { + CLEAR(*this); + zoom = 1.0f; + } }; -} // namespace icamera - +} // namespace icamera diff --git a/src/core/LensHw.cpp b/src/core/LensHw.cpp index 47a33a21..7d21bedc 100644 --- a/src/core/LensHw.cpp +++ b/src/core/LensHw.cpp @@ -23,15 +23,13 @@ namespace icamera { -LensHw::LensHw(int cameraId): - mCameraId(cameraId), - mLensSubdev(nullptr), - mLastLensPosition(0), - mLensMovementStartTime(0) { -} +LensHw::LensHw(int cameraId) + : mCameraId(cameraId), + mLensSubdev(nullptr), + mLastLensPosition(0), + mLensMovementStartTime(0) {} -LensHw::~LensHw() { -} +LensHw::~LensHw() {} int LensHw::init() { std::string lensName = PlatformData::getLensName(mCameraId); @@ -45,7 +43,7 @@ int LensHw::init() { CameraUtils::getSubDeviceName(lensName.c_str(), subDevName); if (!subDevName.empty()) { mLensSubdev = V4l2DeviceFactory::getSubDev(mCameraId, subDevName); - mLensName=lensName; + mLensName = lensName; return OK; } @@ -77,7 +75,7 @@ int LensHw::setFocusStep(int steps) { return mLensSubdev->SetControl(V4L2_CID_FOCUS_RELATIVE, steps); } -int LensHw::getFocusPosition(int &position) { +int LensHw::getFocusPosition(int& position) { CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__); return mLensSubdev->GetControl(V4L2_CID_FOCUS_ABSOLUTE, &position); } @@ -92,10 +90,9 @@ int LensHw::stopAutoFocus(void) { return mLensSubdev->SetControl(V4L2_CID_AUTO_FOCUS_STOP, 0); } -int LensHw::getAutoFocusStatus(int &status) { +int LensHw::getAutoFocusStatus(int& status) { CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__); - return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_STATUS, - 
reinterpret_cast(&status)); + return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_STATUS, reinterpret_cast(&status)); } int LensHw::setAutoFocusRange(int value) { @@ -103,7 +100,7 @@ int LensHw::setAutoFocusRange(int value) { return mLensSubdev->SetControl(V4L2_CID_AUTO_FOCUS_RANGE, value); } -int LensHw::getAutoFocusRange(int &value) { +int LensHw::getAutoFocusRange(int& value) { CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__); return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_RANGE, &value); } @@ -127,4 +124,4 @@ int LensHw::getLatestPosition(int& lensPosition, unsigned long long& time) { time = mLensMovementStartTime; return OK; } -} // namespace icamera +} // namespace icamera diff --git a/src/core/LensHw.h b/src/core/LensHw.h index b327f23f..eab0c166 100644 --- a/src/core/LensHw.h +++ b/src/core/LensHw.h @@ -35,8 +35,7 @@ typedef ::cros::V4L2Subdevice V4L2Subdevice; * */ class LensHw { - -public: + public: LensHw(int cameraId); ~LensHw(); @@ -46,21 +45,22 @@ class LensHw { int setFocusPosition(int position); int setFocusStep(int steps); - int getFocusPosition(int &position); + int getFocusPosition(int& position); int startAutoFocus(void); int stopAutoFocus(void); - int getAutoFocusStatus(int &status); + int getAutoFocusStatus(int& status); int setAutoFocusRange(int value); - int getAutoFocusRange(int &value); + int getAutoFocusRange(int& value); int getLatestPosition(int& lensPosition, unsigned long long& time); bool isLensSubdevAvailable() { return (mLensSubdev != nullptr); } -private: + private: int mCameraId; V4L2Subdevice* mLensSubdev; std::string mLensName; int mLastLensPosition; - unsigned long long mLensMovementStartTime; /*!< In microseconds */ + // In microseconds + unsigned long long mLensMovementStartTime; }; // class LensHW -} // namespace icamera +} // namespace icamera diff --git a/src/core/PSysProcessor.cpp b/src/core/PSysProcessor.cpp index 2a6e69b9..f2520b2a 100644 --- a/src/core/PSysProcessor.cpp +++ 
b/src/core/PSysProcessor.cpp @@ -40,7 +40,7 @@ * it's based on sensor vblank, psys iterating time * and thread scheduling */ -#define SOF_EVENT_MARGIN (5000000) // 5ms +#define SOF_EVENT_MARGIN (5000000) // 5ms #define SOF_EVENT_MAX_MARGIN (60000000) // 60ms #define EXTREME_STRENGTH_LEVEL4 (-120) @@ -52,42 +52,50 @@ using std::shared_ptr; using std::unique_ptr; namespace icamera { -PSysProcessor::PSysProcessor(int cameraId, ParameterGenerator *pGenerator) : - mCameraId(cameraId), - mParameterGenerator(pGenerator), - // ISP_CONTROL_S - mUpdatedIspIndex(-1), - mUsedIspIndex(-1), - // ISP_CONTROL_E - mCurConfigMode(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL), - mTuningMode(TUNING_MODE_MAX), - mRawPort(INVALID_PORT), - mSofSequence(-1), - mOpaqueRawPort(INVALID_PORT), - mHoldRawBuffers(false), - mLastStillTnrSequence(-1), - mStatus(PIPELINE_UNCREATED) { +PSysProcessor::PSysProcessor(int cameraId, ParameterGenerator* pGenerator) + : mCameraId(cameraId), + mParameterGenerator(pGenerator), + mScheduler(nullptr), + // ISP_CONTROL_S + mUpdatedIspIndex(-1), + mUsedIspIndex(-1), + // ISP_CONTROL_E + mCurConfigMode(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL), + mTuningMode(TUNING_MODE_MAX), + mRawPort(INVALID_PORT), + mSofSequence(-1), + mOpaqueRawPort(INVALID_PORT), + mHoldRawBuffers(false), + mLastStillTnrSequence(-1), + mStatus(PIPELINE_UNCREATED) { mProcessThread = new ProcessThread(this); // ISP_CONTROL_S allocPalControlBuffers(); // ISP_CONTROL_E CLEAR(mSofTimestamp); + + if (PlatformData::isSchedulerEnabled(mCameraId)) + mScheduler = new CameraScheduler(); } PSysProcessor::~PSysProcessor() { // ISP_CONTROL_S - for (int i = 0; i < IA_PAL_CONTROL_BUFFER_SIZE; i++) - free(mPalCtrlBuffers[i].data); + for (int i = 0; i < IA_PAL_CONTROL_BUFFER_SIZE; i++) free(mPalCtrlBuffers[i].data); mUpdatedIspIndex = -1; mUsedIspIndex = -1; + // ISP_CONTROL_E mProcessThread->join(); delete mProcessThread; + + // Delete PSysDAG before Scheduler because ~PSysDAG() needs Scheduler + 
mPSysDAGs.clear(); + if (mScheduler) delete mScheduler; } int PSysProcessor::configure(const std::vector& configModes) { - //Create PSysDAGs actually + // Create PSysDAGs actually CheckAndLogError(mStatus == PIPELINE_CREATED, -1, "@%s mStatus is in wrong status: PIPELINE_CREATED", __func__); @@ -100,7 +108,7 @@ int PSysProcessor::configure(const std::vector& configModes) { std::map outputFrameInfo; stream_t stillStream = {}, videoStream = {}; - for (auto &outFrameInfo : mOutputFrameInfo) { + for (auto& outFrameInfo : mOutputFrameInfo) { // Check if it's required to output raw image from ISYS if (outFrameInfo.second.format == V4L2_PIX_FMT_SGRBG12) { mRawPort = outFrameInfo.first; @@ -123,8 +131,8 @@ int PSysProcessor::configure(const std::vector& configModes) { } int ret = OK; - //Create PSysDAG according to real configure mode - for (auto &cfg : mConfigModes) { + // Create PSysDAG according to real configure mode + for (auto& cfg : mConfigModes) { if (mPSysDAGs.find(cfg) != mPSysDAGs.end()) { continue; } @@ -134,32 +142,30 @@ int PSysProcessor::configure(const std::vector& configModes) { CheckAndLogError(ret != OK, ret, "%s: can't get config for mode %d", __func__, cfg); LOG1("%s, Create PSysDAG for ConfigMode %d", __func__, cfg); - unique_ptr pSysDAG = unique_ptr(new PSysDAG(mCameraId, this)); + unique_ptr pSysDAG = unique_ptr(new PSysDAG(mCameraId, mScheduler, this)); pSysDAG->setFrameInfo(mInputFrameInfo, outputFrameInfo); bool useTnrOutBuffer = mOpaqueRawPort != INVALID_PORT; - ret = pSysDAG->configure(tuningConfig.configMode, tuningConfig.tuningMode, - useTnrOutBuffer); + ret = pSysDAG->configure(tuningConfig.configMode, tuningConfig.tuningMode, useTnrOutBuffer); CheckAndLogError(ret != OK, ret, "@%s configure psys dag failed:%d", __func__, ret); mPSysDAGs[tuningConfig.configMode] = std::move(pSysDAG); - //Update default active config mode + // Update default active config mode mCurConfigMode = tuningConfig.configMode; mTuningMode = tuningConfig.tuningMode; } 
if (ret == OK) mStatus = PIPELINE_CREATED; return ret; - } -int PSysProcessor::registerUserOutputBufs(Port port, const shared_ptr &camBuffer) { - for (auto &psysDAGPair : mPSysDAGs) { +int PSysProcessor::registerUserOutputBufs(Port port, const shared_ptr& camBuffer) { + for (auto& psysDAGPair : mPSysDAGs) { if (!psysDAGPair.second) continue; int ret = psysDAGPair.second->registerUserOutputBufs(port, camBuffer); - CheckAndLogError(ret != OK, BAD_VALUE, "%s, register user buffer failed, ret: %d", - __func__, ret); + CheckAndLogError(ret != OK, BAD_VALUE, "%s, register user buffer failed, ret: %d", __func__, + ret); } return OK; @@ -167,9 +173,9 @@ int PSysProcessor::registerUserOutputBufs(Port port, const shared_ptrrun("PsysProcessor", PRIORITY_NORMAL); - for (auto &psysDAGPair : mPSysDAGs) { + for (auto& psysDAGPair : mPSysDAGs) { if (!psysDAGPair.second) continue; psysDAGPair.second->start(); if (needProducerBuffer && PlatformData::isNeedToPreRegisterBuffer(mCameraId)) { @@ -202,7 +208,7 @@ int PSysProcessor::start() { void PSysProcessor::stop() { PERF_CAMERA_ATRACE(); - for (auto &psysDAGPair : mPSysDAGs) { + for (auto& psysDAGPair : mPSysDAGs) { if (!psysDAGPair.second) continue; psysDAGPair.second->stop(); } @@ -211,7 +217,7 @@ void PSysProcessor::stop() { { AutoMutex l(mBufferQueueLock); mThreadRunning = false; - //Wakeup the thread to exit + // Wakeup the thread to exit mFrameAvailableSignal.signal(); mOutputAvailableSignal.signal(); mFrameDoneSignal.signal(); @@ -268,7 +274,7 @@ int PSysProcessor::setParameters(const Parameters& param) { } } - LOG2("%s: ISP EE setting, level: %d, strength: %d", __func__, + LOG2("%s: ISP EE setting, level: %d, strength: %d", __func__, static_cast(mIspSettings.eeSetting.feature_level), static_cast(mIspSettings.eeSetting.strength)); @@ -310,9 +316,9 @@ int PSysProcessor::setParameters(const Parameters& param) { camera_video_stabilization_mode_t stabilizationMode; ret = param.getVideoStabilizationMode(stabilizationMode); if (ret 
== OK) { - mIspSettings.videoStabilization = (stabilizationMode == VIDEO_STABILIZATION_MODE_ON); + mIspSettings.videoStabilization = (stabilizationMode == VIDEO_STABILIZATION_MODE_ON); } else { - mIspSettings.videoStabilization = false; + mIspSettings.videoStabilization = false; } LOG2("%s: Video stablilization enabled:%d", __func__, mIspSettings.videoStabilization); @@ -325,24 +331,23 @@ int PSysProcessor::setParameters(const Parameters& param) { int PSysProcessor::getParameters(Parameters& param) { AutoRMutex rl(mIspSettingsLock); - camera_image_enhancement_t enhancement = { mIspSettings.manualSettings.manualSharpness, - mIspSettings.manualSettings.manualBrightness, - mIspSettings.manualSettings.manualContrast, - mIspSettings.manualSettings.manualHue, - mIspSettings.manualSettings.manualSaturation }; + camera_image_enhancement_t enhancement = { + mIspSettings.manualSettings.manualSharpness, mIspSettings.manualSettings.manualBrightness, + mIspSettings.manualSettings.manualContrast, mIspSettings.manualSettings.manualHue, + mIspSettings.manualSettings.manualSaturation}; int ret = param.setImageEnhancement(enhancement); // ISP_CONTROL_S // Override the data with what user has enabled before, since the data get from // IspParamAdaptor might be old, and it causes inconsistent between what user sets and gets. 
if (mUpdatedIspIndex != -1) { - ia_binary_data *palOverride = &mPalCtrlBuffers[mUpdatedIspIndex]; + ia_binary_data* palOverride = &mPalCtrlBuffers[mUpdatedIspIndex]; std::set enabledControls; param.getEnabledIspControls(enabledControls); for (auto ctrlId : enabledControls) { - void* data = IspControlUtils::findDataById(ctrlId, palOverride->data, - palOverride->size); + void* data = + IspControlUtils::findDataById(ctrlId, palOverride->data, palOverride->size); if (data == nullptr) continue; param.setIspControl(ctrlId, data); @@ -382,7 +387,7 @@ int PSysProcessor::fillPalOverrideData(const Parameters& param) { } // Use mPalCtrlBuffers[mUpdatedIspIndex] to store the override data - ia_binary_data *palOverride = &mPalCtrlBuffers[mUpdatedIspIndex]; + ia_binary_data* palOverride = &mPalCtrlBuffers[mUpdatedIspIndex]; palOverride->size = getRequiredPalBufferSize(); const size_t kHeaderSize = sizeof(ia_record_header); @@ -434,8 +439,8 @@ int PSysProcessor::fillPalOverrideData(const Parameters& param) { int PSysProcessor::fillDefaultAcmData(uint8_t* overrideData) { // Don't fill ACM if it's not supported. 
- if (!PlatformData::isIspControlFeatureSupported(mCameraId, - camera_control_isp_ctrl_id_advanced_color_correction_matrix)) { + if (!PlatformData::isIspControlFeatureSupported( + mCameraId, camera_control_isp_ctrl_id_advanced_color_correction_matrix)) { return 0; } @@ -451,8 +456,8 @@ int PSysProcessor::fillDefaultAcmData(uint8_t* overrideData) { acm->number_of_sectors = 24; const float kIdentityMatrix[] = {1, 0, 0, 0, 1, 0, 0, 0, 1}; for (int i = 0; i < acm->number_of_sectors; i++) { - MEMCPY_S(acm->ccm_matrices + i * 9, sizeof(kIdentityMatrix), - kIdentityMatrix, sizeof(kIdentityMatrix)); + MEMCPY_S(acm->ccm_matrices + i * 9, sizeof(kIdentityMatrix), kIdentityMatrix, + sizeof(kIdentityMatrix)); } return header->size; @@ -473,9 +478,9 @@ int PSysProcessor::allocPalControlBuffers() { /** * Get available setting sequence from outBuf */ -int64_t PSysProcessor::getSettingSequence(const CameraBufferPortMap &outBuf) { +int64_t PSysProcessor::getSettingSequence(const CameraBufferPortMap& outBuf) { int64_t settingSequence = -1; - for (auto& output: outBuf) { + for (auto& output : outBuf) { if (output.second) { settingSequence = output.second->getSettingSequence(); break; @@ -539,8 +544,8 @@ bool PSysProcessor::needHoldOnInputFrame(int64_t settingSequence, int64_t inputS bool PSysProcessor::needSwitchPipe(int64_t sequence) { const AiqResult* aiqResults = AiqResultStorage::getInstance(mCameraId)->getAiqResult(sequence); if (aiqResults == nullptr) { - LOG2("%s: not found sequence %ld in AiqResultStorage, no update for active modes", - __func__, sequence); + LOG2("%s: not found sequence %ld in AiqResultStorage, no update for active modes", __func__, + sequence); return false; } @@ -572,24 +577,22 @@ void PSysProcessor::handleEvent(EventData eventData) { if (PlatformData::needHandleVbpInMetaData(mCameraId, mCurConfigMode)) { AutoMutex l(mMetaQueueLock); mMetaQueue.push(eventData.data.meta); - LOG2("%s: received meta data, current queue size %lu", - __func__, 
mMetaQueue.size()); + LOG2("%s: received meta data, current queue size %lu", __func__, mMetaQueue.size()); mMetaAvailableSignal.signal(); } // DOL_FEATURE_E break; // CSI_META_E - case EVENT_ISYS_SOF: - { - AutoMutex l(mSofLock); - - mSofSequence = eventData.data.sync.sequence; - gettimeofday(&mSofTimestamp, nullptr); - LOG2("%s, received SOF event sequence: %ld, timestamp: %ld", - __func__, eventData.data.sync.sequence, TIMEVAL2USECS(mSofTimestamp)); - mSofCondition.signal(); - } + case EVENT_ISYS_SOF: { + AutoMutex l(mSofLock); + + mSofSequence = eventData.data.sync.sequence; + gettimeofday(&mSofTimestamp, nullptr); + LOG2("%s, received SOF event sequence: %ld, timestamp: %ld", __func__, + eventData.data.sync.sequence, TIMEVAL2USECS(mSofTimestamp)); + mSofCondition.signal(); break; + } default: LOGW("Unexpected event: %d", eventData.type); break; @@ -598,7 +601,7 @@ void PSysProcessor::handleEvent(EventData eventData) { // DOL_FEATURE_S int PSysProcessor::setVbpToIspParam(int64_t sequence, timeval timestamp) { - //Check fixed VBP firstly. + // Check fixed VBP firstly. int fixedVbp = PlatformData::getFixedVbp(mCameraId); if (fixedVbp >= 0) { AutoWMutex wl(mIspSettingsLock); @@ -607,13 +610,12 @@ int PSysProcessor::setVbpToIspParam(int64_t sequence, timeval timestamp) { return OK; } - //Check dynamic VBP. + // Check dynamic VBP. 
ConditionLock lock(mMetaQueueLock); - //Remove all older meta data + // Remove all older meta data while (!mMetaQueue.empty() && mMetaQueue.front().sequence < sequence) { - LOG2("%s: remove older meta data for sequence %ld", __func__, - mMetaQueue.front().sequence); + LOG2("%s: remove older meta data for sequence %ld", __func__, mMetaQueue.front().sequence); mMetaQueue.pop(); } @@ -636,9 +638,9 @@ int PSysProcessor::setVbpToIspParam(int64_t sequence, timeval timestamp) { return OK; } - LOGW("Missing meta data for seq %ld, timestamp %ld, Cur meta seq %ld, timestamp %ld", - sequence, TIMEVAL2USECS(timestamp), - mMetaQueue.front().sequence, TIMEVAL2USECS(mMetaQueue.front().timestamp)); + LOGW("Missing meta data for seq %ld, timestamp %ld, Cur meta seq %ld, timestamp %ld", sequence, + TIMEVAL2USECS(timestamp), mMetaQueue.front().sequence, + TIMEVAL2USECS(mMetaQueue.front().timestamp)); return UNKNOWN_ERROR; } // DOL_FEATURE_E @@ -650,7 +652,37 @@ int PSysProcessor::processNewFrame() { int ret = OK; CameraBufferPortMap srcBuffers, dstBuffers; - if (!PlatformData::psysAlignWithSof(mCameraId)) { + if (mScheduler) { + { + ConditionLock lock(mBufferQueueLock); + // Wait input buffer, use SOF_EVENT_MAX_MARGIN to ensure Scheduler is triggered in time + bool bufReady = waitBufferQueue(lock, mInputQueue, SOF_EVENT_MAX_MARGIN); + // Already stopped + if (!mThreadRunning) return -1; + + if (bufReady) { + // Fetch inputs and outputs if output buffer ready (no wait) + if (waitBufferQueue(lock, mOutputQueue, 0)) + ret = waitFreeBuffersInQueue(lock, srcBuffers, dstBuffers); + } + } + + int64_t inputSequence = -1; + if (!srcBuffers.empty()) { + inputSequence = srcBuffers.begin()->second->getSequence(); + ret = prepareTask(&srcBuffers, &dstBuffers); + CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to process frame", __func__); + } else { + // Wait frame buffer time out should not involve thread exit. 
+ LOG1("@%s, timeout happen, wait recovery", mCameraId, __func__); + } + + // Trigger when there are tasks (new or existing) + if (mScheduler && !mSequencesInflight.empty()) { + std::string source; + mScheduler->executeNode(source, inputSequence); + } + } else if (!PlatformData::psysAlignWithSof(mCameraId)) { { ConditionLock lock(mBufferQueueLock); ret = waitFreeBuffersInQueue(lock, srcBuffers, dstBuffers); @@ -693,8 +725,7 @@ int PSysProcessor::processNewFrame() { // push all the pending buffers to task int64_t waitTime = SOF_EVENT_MARGIN; // Don't need to catch sof for 1st frame or sof time out - if (TIMEVAL2NSECS(mSofTimestamp) == 0 || sofInterval >= SOF_EVENT_MAX_MARGIN) - waitTime = 0; + if (TIMEVAL2NSECS(mSofTimestamp) == 0 || sofInterval >= SOF_EVENT_MAX_MARGIN) waitTime = 0; while (true) { { ConditionLock lock(mBufferQueueLock); @@ -730,9 +761,9 @@ int PSysProcessor::processNewFrame() { return OK; } -void PSysProcessor::handleRawReprocessing(CameraBufferPortMap *srcBuffers, - CameraBufferPortMap *dstBuffers, bool *allBufDone, - bool *hasRawOutput, bool *hasRawInput) { +void PSysProcessor::handleRawReprocessing(CameraBufferPortMap* srcBuffers, + CameraBufferPortMap* dstBuffers, bool* allBufDone, + bool* hasRawOutput, bool* hasRawInput) { std::shared_ptr rawOutputBuffer = nullptr; int64_t settingSequence = -1; CameraBufferPortMap videoBuf, stillBuf; @@ -785,15 +816,14 @@ void PSysProcessor::handleRawReprocessing(CameraBufferPortMap *srcBuffers, rawOutputBuffer->updateV4l2Buffer(*mainBuf->getV4L2Buffer().Get()); - LOG2("%s, timestamp %ld, inputSequence %ld, dstBufferSize %d, addr %p", __func__, - timestamp, inputSequence, rawOutputBuffer->getBufferSize(), - rawOutputBuffer->getBufferAddr()); + LOG2("%s, timestamp %ld, inputSequence %ld, dstBufferSize %d, addr %p", __func__, timestamp, + inputSequence, rawOutputBuffer->getBufferSize(), rawOutputBuffer->getBufferAddr()); // handle Shutter first if has raw output sendPsysRequestEvent(dstBuffers, settingSequence, 
timestamp, EVENT_PSYS_REQUEST_BUF_READY); // Return opaque RAW buffer - for (auto &it : mBufferConsumerList) { + for (auto& it : mBufferConsumerList) { it->onFrameAvailable(mOpaqueRawPort, rawOutputBuffer); } *hasRawOutput = true; @@ -828,7 +858,7 @@ void PSysProcessor::handleRawReprocessing(CameraBufferPortMap *srcBuffers, AutoMutex lock(mBufferMapLock); // Find Raw buffer in mRawBufferMap if (mRawBufferMap.find(settingSequence) != mRawBufferMap.end()) { - CameraBufferPortMap &mapBuf = mRawBufferMap[settingSequence]; + CameraBufferPortMap& mapBuf = mRawBufferMap[settingSequence]; // Update source buffers for (const auto& bufPortMap : mapBuf) { (*srcBuffers)[bufPortMap.first] = bufPortMap.second; @@ -856,7 +886,7 @@ bool PSysProcessor::isBufferHoldForRawReprocess(int64_t sequence) { return true; } -void PSysProcessor::saveRawBuffer(CameraBufferPortMap *srcBuffers) { +void PSysProcessor::saveRawBuffer(CameraBufferPortMap* srcBuffers) { // Save buffer into mRawBufferMap CameraBufferPortMap mapBuf; for (const auto& src : *srcBuffers) { @@ -880,7 +910,7 @@ void PSysProcessor::returnRawBuffer() { AutoMutex lock(mBufferMapLock); // If too many buffers are holden in mRawQueue, return back to producer if (mRawBufferMap.size() > (PlatformData::getMaxRawDataNum(mCameraId) - - PlatformData::getMaxRequestsInflight(mCameraId))) { + PlatformData::getMaxRequestsInflight(mCameraId))) { auto it = mRawBufferMap.cbegin(); { AutoMutex l(mBufferQueueLock); @@ -890,8 +920,8 @@ void PSysProcessor::returnRawBuffer() { } } - const CameraBufferPortMap &bufferPortMap = it->second; - for (auto &item : bufferPortMap) { + const CameraBufferPortMap& bufferPortMap = it->second; + for (auto& item : bufferPortMap) { mBufferProducer->qbuf(item.first, item.second); } LOG2("@%s, returned sequence %ld", __func__, it->first); @@ -899,10 +929,10 @@ void PSysProcessor::returnRawBuffer() { } } -status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, - CameraBufferPortMap *dstBuffers) { - 
CheckAndLogError(srcBuffers->empty() || dstBuffers->empty(), - UNKNOWN_ERROR, "%s, the input or output buffer is empty", __func__); +status_t PSysProcessor::prepareTask(CameraBufferPortMap* srcBuffers, + CameraBufferPortMap* dstBuffers) { + CheckAndLogError(srcBuffers->empty() || dstBuffers->empty(), UNKNOWN_ERROR, + "%s, the input or output buffer is empty", __func__); if (mHoldRawBuffers && mOpaqueRawPort == INVALID_PORT) { saveRawBuffer(srcBuffers); } @@ -920,11 +950,11 @@ status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, // If all buffers are handled AutoMutex l(mBufferQueueLock); if (hasRawOutput) { - for (auto& input: mInputQueue) { + for (auto& input : mInputQueue) { input.second.pop(); } } - for (auto& output: mOutputQueue) { + for (auto& output : mOutputQueue) { output.second.pop(); } return OK; @@ -946,7 +976,7 @@ status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, // Skip input frame and return buffer if no matching vbp set to ISP params if (vbpStatus != OK) { AutoMutex l(mBufferQueueLock); - for (auto& input: mInputQueue) { + for (auto& input : mInputQueue) { input.second.pop(); } @@ -963,7 +993,7 @@ status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, shared_ptr dstBuf = nullptr; // Get output buffer and remove it from dstBuffers - for (auto &buffer : *dstBuffers) { + for (auto& buffer : *dstBuffers) { if (buffer.first == mRawPort) { dstBuf = buffer.second; CheckAndLogError(!dstBuf, UNKNOWN_ERROR, "%s, dstBuf for output raw is null", @@ -985,14 +1015,14 @@ status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, { AutoMutex l(mBufferQueueLock); if (needRunPipe && !needSkipOutputFrame(inputSequence)) { - for (auto& output: mOutputQueue) { + for (auto& output : mOutputQueue) { output.second.pop(); } } // If input buffer will be used later, don't pop it from the queue. 
if (!holdOnInput && !hasRawInput) { - for (auto& input: mInputQueue) { + for (auto& input : mInputQueue) { input.second.pop(); } } @@ -1028,11 +1058,10 @@ status_t PSysProcessor::prepareTask(CameraBufferPortMap *srcBuffers, mBufferProducer->qbuf(src.first, src.second); } } - return OK; } -void PSysProcessor::handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap *dstBuffers) { +void PSysProcessor::handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap* dstBuffers) { bool hasStill = false; for (const auto& item : *dstBuffers) { if (item.second && item.second->getStreamUsage() == CAMERA_STREAM_STILL_CAPTURE) { @@ -1056,14 +1085,14 @@ void PSysProcessor::handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap for (int i = mPSysDAGs[mCurConfigMode]->getTnrExtraFrameCount(sequence); i > 0; i--) { CameraBufferPortMap srcBuf; { - AutoMutex lock(mBufferMapLock); - if (sequence - i != mLastStillTnrSequence && - mRawBufferMap.find(sequence - i) != mRawBufferMap.end()) { - for (const auto& item : mRawBufferMap[sequence - i]) { - srcBuf[item.first] = item.second; + AutoMutex lock(mBufferMapLock); + if (sequence - i != mLastStillTnrSequence && + mRawBufferMap.find(sequence - i) != mRawBufferMap.end()) { + for (const auto& item : mRawBufferMap[sequence - i]) { + srcBuf[item.first] = item.second; + } } } - } if (!srcBuf.empty()) { dispatchTask(srcBuf, fakeTaskBuffers, true, false); } @@ -1073,7 +1102,7 @@ void PSysProcessor::handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap if (hasStill) mLastStillTnrSequence = sequence; } -void PSysProcessor::dispatchTask(CameraBufferPortMap &inBuf, CameraBufferPortMap &outBuf, +void PSysProcessor::dispatchTask(CameraBufferPortMap& inBuf, CameraBufferPortMap& outBuf, bool fakeTask, bool callbackRgbs) { int64_t currentSequence = inBuf.begin()->second->getSequence(); TRACE_LOG_POINT("PSysProcessor", "start run PSYS", MAKE_COLOR(currentSequence), @@ -1085,8 +1114,8 @@ void PSysProcessor::dispatchTask(CameraBufferPortMap 
&inBuf, CameraBufferPortMap bool needSwitch = needSwitchPipe(currentSequence); if (needSwitch) { - LOG1("Switch pipe for sequence:%ld, unprocessed buffer number:%zu", - currentSequence, mSequencesInflight.size()); + LOG1("Switch pipe for sequence:%ld, unprocessed buffer number:%zu", currentSequence, + mSequencesInflight.size()); // Deactive the PSysDag which is no longer used. mPSysDAGs[previousMode]->pause(); @@ -1150,10 +1179,8 @@ void PSysProcessor::dispatchTask(CameraBufferPortMap &inBuf, CameraBufferPortMap AutoRMutex rl(mIspSettingsLock); mIspSettings.palOverride = nullptr; // ISP_CONTROL_S - if (mUpdatedIspIndex > -1) - mUsedIspIndex = mUpdatedIspIndex; - if (mUsedIspIndex > -1 && - mPalCtrlBuffers[mUsedIspIndex].size > 0) { + if (mUpdatedIspIndex > -1) mUsedIspIndex = mUpdatedIspIndex; + if (mUsedIspIndex > -1 && mPalCtrlBuffers[mUsedIspIndex].size > 0) { mIspSettings.palOverride = &mPalCtrlBuffers[mUsedIspIndex]; } // ISP_CONTROL_E @@ -1167,12 +1194,17 @@ void PSysProcessor::dispatchTask(CameraBufferPortMap &inBuf, CameraBufferPortMap void PSysProcessor::registerListener(EventType eventType, EventListener* eventListener) { // Only delegate stats event registration to deeper layer DAG and PipeExecutor - if ((eventType != EVENT_PSYS_STATS_BUF_READY) && (eventType != EVENT_PSYS_STATS_SIS_BUF_READY)) { + if ((eventType != EVENT_PSYS_STATS_BUF_READY) && + (eventType != EVENT_PSYS_STATS_SIS_BUF_READY) + // INTEL_DVS_S + && eventType != EVENT_DVS_READY + // INTEL_DVS_E + ) { BufferQueue::registerListener(eventType, eventListener); return; } - for (auto const& realModeDAGPair: mPSysDAGs) { + for (auto const& realModeDAGPair : mPSysDAGs) { realModeDAGPair.second->registerListener(eventType, eventListener); } } @@ -1185,13 +1217,13 @@ void PSysProcessor::removeListener(EventType eventType, EventListener* eventList return; } - for (auto const& realModeDAGPair: mPSysDAGs) { + for (auto const& realModeDAGPair : mPSysDAGs) { 
realModeDAGPair.second->removeListener(eventType, eventListener); } } void PSysProcessor::onBufferDone(int64_t sequence, Port port, - const std::shared_ptr &camBuffer) { + const std::shared_ptr& camBuffer) { LOG2("@%s, port %d", mCameraId, sequence, __func__, port); if (CameraDump::isDumpTypeEnable(DUMP_PSYS_OUTPUT_BUFFER)) { @@ -1208,7 +1240,7 @@ void PSysProcessor::onBufferDone(int64_t sequence, Port port, } if (!needSkipOutputFrame(sequence)) { - for (auto &it : mBufferConsumerList) { + for (auto& it : mBufferConsumerList) { it->onFrameAvailable(port, camBuffer); } } @@ -1244,6 +1276,7 @@ void PSysProcessor::sendPsysRequestEvent(const CameraBufferPortMap* dstBuffers, event.data.requestReady.timestamp = timestamp > 0 ? timestamp : output.second->getUserBuffer()->timestamp; event.data.requestReady.sequence = sequence; + event.data.requestReady.requestId = output.second->getUserBuffer()->requestId; notifyListeners(event); break; @@ -1273,7 +1306,7 @@ void PSysProcessor::onFrameDone(const PSysTaskData& result) { mBufferProducer->qbuf(src.first, src.second); if (src.second->getStreamType() == CAMERA_STREAM_INPUT) { - for (auto &it : mBufferConsumerList) { + for (auto& it : mBufferConsumerList) { it->onFrameAvailable(src.first, src.second); } } @@ -1282,15 +1315,15 @@ void PSysProcessor::onFrameDone(const PSysTaskData& result) { } { - AutoMutex l(mBufferQueueLock); - std::multiset::iterator it = mSequencesInflight.find(sequence); - if (it != mSequencesInflight.end()) { - mSequencesInflight.erase(it); - } + AutoMutex l(mBufferQueueLock); + std::multiset::iterator it = mSequencesInflight.find(sequence); + if (it != mSequencesInflight.end()) { + mSequencesInflight.erase(it); + } - if (mSequencesInflight.empty()) { - mFrameDoneSignal.signal(); - } + if (mSequencesInflight.empty()) { + mFrameDoneSignal.signal(); + } } returnRawBuffer(); @@ -1303,8 +1336,8 @@ void PSysProcessor::onStatsDone(int64_t sequence, const CameraBufferPortMap& out sendPsysRequestEvent(&outBuf, 
sequence, 0, EVENT_REQUEST_METADATA_READY); } -void PSysProcessor::outputRawImage(shared_ptr &srcBuf, - shared_ptr &dstBuf) { +void PSysProcessor::outputRawImage(shared_ptr& srcBuf, + shared_ptr& dstBuf) { if ((srcBuf == nullptr) || (dstBuf == nullptr)) { return; } @@ -1312,15 +1345,15 @@ void PSysProcessor::outputRawImage(shared_ptr &srcBuf, // Copy from source buffer int srcBufferSize = srcBuf->getBufferSize(); int srcMemoryType = srcBuf->getMemory(); - void* pSrcBuf = (srcMemoryType == V4L2_MEMORY_DMABUF) - ? CameraBuffer::mapDmaBufferAddr(srcBuf->getFd(), srcBufferSize) - : srcBuf->getBufferAddr(); + void* pSrcBuf = (srcMemoryType == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(srcBuf->getFd(), srcBufferSize) : + srcBuf->getBufferAddr(); int dstBufferSize = dstBuf->getBufferSize(); int dstMemoryType = dstBuf->getMemory(); - void* pDstBuf = (dstMemoryType == V4L2_MEMORY_DMABUF) - ? CameraBuffer::mapDmaBufferAddr(dstBuf->getFd(), dstBufferSize) - : dstBuf->getBufferAddr(); + void* pDstBuf = (dstMemoryType == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(dstBuf->getFd(), dstBufferSize) : + dstBuf->getBufferAddr(); MEMCPY_S(pDstBuf, dstBufferSize, pSrcBuf, srcBufferSize); @@ -1332,9 +1365,9 @@ void PSysProcessor::outputRawImage(shared_ptr &srcBuf, } // Send output buffer to its consumer - for (auto &it : mBufferConsumerList) { + for (auto& it : mBufferConsumerList) { it->onFrameAvailable(mRawPort, dstBuf); } } -} //namespace icamera +} // namespace icamera diff --git a/src/core/PSysProcessor.h b/src/core/PSysProcessor.h index d9504be5..5f707c01 100644 --- a/src/core/PSysProcessor.h +++ b/src/core/PSysProcessor.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2020 Intel Corporation. + * Copyright (C) 2017-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -24,6 +24,7 @@ #include "IspSettings.h" #include "psysprocessor/PSysDAG.h" +#include "CameraScheduler.h" namespace icamera { @@ -34,21 +35,20 @@ typedef std::map> CameraBufferPortMap; typedef std::map> PSysDAGConfigModeMap; /** - * PSysProcessor runs the Image Process Algorithm in the PSYS. - * It implements the BufferConsumer and BufferProducer Interface - */ -class PSysProcessor: public BufferQueue, public PSysDagCallback { - -public: - PSysProcessor(int cameraId, ParameterGenerator *pGenerator); + * PSysProcessor runs the Image Process Algorithm in the PSYS. + * It implements the BufferConsumer and BufferProducer Interface + */ +class PSysProcessor : public BufferQueue, public PSysDagCallback { + public: + PSysProcessor(int cameraId, ParameterGenerator* pGenerator); virtual ~PSysProcessor(); virtual int configure(const std::vector& configModes); virtual int setParameters(const Parameters& param); virtual int getParameters(Parameters& param); - virtual int registerUserOutputBufs(Port port, const std::shared_ptr &camBuffer); + virtual int registerUserOutputBufs(Port port, const std::shared_ptr& camBuffer); - //Overwrite event source API to delegate related functions + // Overwrite event source API to delegate related functions void registerListener(EventType eventType, EventListener* eventListener); void removeListener(EventType eventType, EventListener* eventListener); @@ -57,19 +57,18 @@ class PSysProcessor: public BufferQueue, public PSysDagCallback { // Overwrite PSysDagCallback API, used for returning back buffers from PSysDAG. 
void onFrameDone(const PSysTaskData& result); - void onBufferDone(int64_t sequence, Port port, - const std::shared_ptr &camBuffer); + void onBufferDone(int64_t sequence, Port port, const std::shared_ptr& camBuffer); void onStatsDone(int64_t sequence, const CameraBufferPortMap& outBuf); -private: + private: DISALLOW_COPY_AND_ASSIGN(PSysProcessor); -private: + private: int processNewFrame(); std::shared_ptr allocStatsBuffer(int index); - status_t prepareTask(CameraBufferPortMap *srcBuffers, CameraBufferPortMap *dstBuffers); - void dispatchTask(CameraBufferPortMap &inBuf, CameraBufferPortMap &outBuf, + status_t prepareTask(CameraBufferPortMap* srcBuffers, CameraBufferPortMap* dstBuffers); + void dispatchTask(CameraBufferPortMap& inBuf, CameraBufferPortMap& outBuf, bool fakeTask = false, bool callbackRgbs = false); void handleEvent(EventData eventData); @@ -77,7 +76,7 @@ class PSysProcessor: public BufferQueue, public PSysDagCallback { int setVbpToIspParam(int64_t sequence, timeval timestamp); // DOL_FEATURE_E - int64_t getSettingSequence(const CameraBufferPortMap &outBuf); + int64_t getSettingSequence(const CameraBufferPortMap& outBuf); bool needSkipOutputFrame(int64_t sequence); bool needExecutePipe(int64_t settingSequence, int64_t inputSequence); bool needHoldOnInputFrame(int64_t settingSequence, int64_t inputSequence); @@ -89,35 +88,35 @@ class PSysProcessor: public BufferQueue, public PSysDagCallback { int fillPalOverrideData(const Parameters& param); int fillDefaultAcmData(uint8_t* overrideData); // ISP_CONTROL_E - void outputRawImage(std::shared_ptr &srcBuf, - std::shared_ptr &dstBuf); + void outputRawImage(std::shared_ptr& srcBuf, + std::shared_ptr& dstBuf); - void handleRawReprocessing(CameraBufferPortMap *srcBuffers, - CameraBufferPortMap *dstBuffers, bool *allBufDone, - bool *hasRawOutput, bool *hasRawInput); + void handleRawReprocessing(CameraBufferPortMap* srcBuffers, CameraBufferPortMap* dstBuffers, + bool* allBufDone, bool* hasRawOutput, bool* 
hasRawInput); bool isBufferHoldForRawReprocess(int64_t sequence); - void saveRawBuffer(CameraBufferPortMap *srcBuffers); + void saveRawBuffer(CameraBufferPortMap* srcBuffers); void returnRawBuffer(); - void handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap *dstBuffers); + void handleStillPipeForTnr(int64_t sequence, CameraBufferPortMap* dstBuffers); void sendPsysFrameDoneEvent(const CameraBufferPortMap* dstBuffers); - void sendPsysRequestEvent(const CameraBufferPortMap* dstBuffers, - int64_t sequence, uint64_t timestamp, EventType eventType); + void sendPsysRequestEvent(const CameraBufferPortMap* dstBuffers, int64_t sequence, + uint64_t timestamp, EventType eventType); -private: + private: int mCameraId; - static const nsecs_t kWaitDuration = 1000000000; //1000ms - ParameterGenerator *mParameterGenerator; + static const nsecs_t kWaitDuration = 1000000000; // 1000ms + ParameterGenerator* mParameterGenerator; + CameraScheduler* mScheduler; IspSettings mIspSettings; RWLock mIspSettingsLock; - //Since the isp settings may be re-used in all modes, so the buffer size of - //isp settings should be equal to frame buffer size. + // Since the isp settings may be re-used in all modes, so the buffer size of + // isp settings should be equal to frame buffer size. static const int IA_PAL_CONTROL_BUFFER_SIZE = 10; // ISP_CONTROL_S - //Use mUpdatedIspIndex to select the buffer to store the updated param - //and use mUsedIspIndex to select the buffer to set isp control. + // Use mUpdatedIspIndex to select the buffer to store the updated param + // and use mUsedIspIndex to select the buffer to set isp control. 
int mUpdatedIspIndex; int mUsedIspIndex; ia_binary_data mPalCtrlBuffers[IA_PAL_CONTROL_BUFFER_SIZE]; @@ -134,8 +133,8 @@ class PSysProcessor: public BufferQueue, public PSysDagCallback { TuningMode mTuningMode; std::queue mMetaQueue; - //Guard for the metadata queue - Mutex mMetaQueueLock; + // Guard for the metadata queue + Mutex mMetaQueueLock; Condition mMetaAvailableSignal; Port mRawPort; @@ -155,10 +154,7 @@ class PSysProcessor: public BufferQueue, public PSysDagCallback { // Indicate the latest sequence of raw buffer used in still TNR int64_t mLastStillTnrSequence; - enum { - PIPELINE_UNCREATED = 0, - PIPELINE_CREATED - } mStatus; -}; // End of class PSysProcessor + enum { PIPELINE_UNCREATED = 0, PIPELINE_CREATED } mStatus; +}; // End of class PSysProcessor -} //namespace icamera +} // namespace icamera diff --git a/src/core/ProcessorManager.cpp b/src/core/ProcessorManager.cpp index b43e8cd5..4d73b10d 100644 --- a/src/core/ProcessorManager.cpp +++ b/src/core/ProcessorManager.cpp @@ -25,20 +25,15 @@ namespace icamera { -ProcessorManager::ProcessorManager(int cameraId) : - mCameraId(cameraId), - mPsysUsage(PSYS_NOT_USED) { -} +ProcessorManager::ProcessorManager(int cameraId) : mCameraId(cameraId), mPsysUsage(PSYS_NOT_USED) {} ProcessorManager::~ProcessorManager() { deleteProcessors(); } -std::vector -ProcessorManager::createProcessors(const std::map& producerConfigs, - const std::map& streamIdToPortMap, - stream_config_t *streamList, - ParameterGenerator* paramGenerator) { +std::vector ProcessorManager::createProcessors( + const std::map& producerConfigs, const std::map& streamIdToPortMap, + stream_config_t* streamList, ParameterGenerator* paramGenerator) { LOG1("@%s", mCameraId, __func__); ProcessorConfig processorItem; @@ -52,7 +47,8 @@ ProcessorManager::createProcessors(const std::map& producerConfi mPsysUsage = PSYS_NORMAL; for (int i = 0; i < streamList->num_streams; i++) { if (streamList->streams[i].streamType == CAMERA_STREAM_INPUT || - 
streamList->streams[i].usage == CAMERA_STREAM_OPAQUE_RAW) continue; + streamList->streams[i].usage == CAMERA_STREAM_OPAQUE_RAW) + continue; if (!PlatformData::usePsys(mCameraId, streamList->streams[i].format)) { mPsysUsage = PSYS_NOT_USED; @@ -98,7 +94,7 @@ int ProcessorManager::configureProcessors(const std::vector& configM BufferProducer* producer, const Parameters& param) { LOG1("@%s", mCameraId, __func__); - BufferProducer* preProcess = nullptr; + BufferProducer* preProcess = nullptr; for (auto& item : mProcessors) { BufferQueue* processor = item.mProcessor; processor->setFrameInfo(item.mInputConfigs, item.mOutputConfigs); @@ -113,5 +109,4 @@ int ProcessorManager::configureProcessors(const std::vector& configM return OK; } -} // end of namespace icamera - +} // end of namespace icamera diff --git a/src/core/ProcessorManager.h b/src/core/ProcessorManager.h index a0c5a1c0..13e6669f 100644 --- a/src/core/ProcessorManager.h +++ b/src/core/ProcessorManager.h @@ -28,19 +28,19 @@ class ParameterGenerator; * \brief ProcessorManager helps to create and maintain the post processors. 
*/ class ProcessorManager { -public: + public: ProcessorManager(int cameraId); ~ProcessorManager(); std::vector createProcessors(const std::map& producerConfigs, const std::map& streamIdToPortMap, - stream_config_t *streamList, + stream_config_t* streamList, ParameterGenerator* paramGenerator); int configureProcessors(const std::vector& configModes, BufferProducer* producer, const Parameters& param); int deleteProcessors(); -private: + private: DISALLOW_COPY_AND_ASSIGN(ProcessorManager); int mCameraId; @@ -59,4 +59,4 @@ class ProcessorManager { std::vector mProcessors; }; -} // end of namespace icamera +} // end of namespace icamera diff --git a/src/core/RequestThread.cpp b/src/core/RequestThread.cpp index aa594bea..d82da494 100644 --- a/src/core/RequestThread.cpp +++ b/src/core/RequestThread.cpp @@ -21,27 +21,27 @@ #include "RequestThread.h" -using std::vector; using std::shared_ptr; +using std::vector; namespace icamera { -RequestThread::RequestThread(int cameraId, AiqUnitBase *a3AControl, ParameterGenerator* aParamGen) : - mCameraId(cameraId), - m3AControl(a3AControl), - mParamGenerator(aParamGen), - mPerframeControlSupport(false), - mGet3AStatWithFakeRequest(false), - mRequestsInProcessing(0), - mFirstRequest(true), - mActive(false), - mRequestTriggerEvent(NONE_EVENT), - mLastRequestId(-1), - mLastEffectSeq(-1), - mLastAppliedSeq(-1), - mLastSofSeq(-1), - mBlockRequest(true), - mSofEnabled(false) { +RequestThread::RequestThread(int cameraId, AiqUnitBase* a3AControl, ParameterGenerator* aParamGen) + : mCameraId(cameraId), + m3AControl(a3AControl), + mParamGenerator(aParamGen), + mPerframeControlSupport(false), + mGet3AStatWithFakeRequest(false), + mRequestsInProcessing(0), + mFirstRequest(true), + mActive(false), + mRequestTriggerEvent(NONE_EVENT), + mLastRequestId(-1), + mLastEffectSeq(-1), + mLastAppliedSeq(-1), + mLastSofSeq(-1), + mBlockRequest(true), + mSofEnabled(false) { CLEAR(mFakeReqBuf); mPerframeControlSupport = 
PlatformData::isFeatureSupported(mCameraId, PER_FRAME_CONTROL); @@ -93,7 +93,7 @@ void RequestThread::clearRequests() { mBlockRequest = true; } -int RequestThread::configure(const stream_config_t *streamList) { +int RequestThread::configure(const stream_config_t* streamList) { int previewIndex = -1, videoIndex = -1, stillIndex = -1; for (int i = 0; i < streamList->num_streams; i++) { if (streamList->streams[i].usage == CAMERA_STREAM_PREVIEW) { @@ -113,8 +113,8 @@ int RequestThread::configure(const stream_config_t *streamList) { mGet3AStatWithFakeRequest = mPerframeControlSupport ? PlatformData::isPsysContinueStats(mCameraId) : false; if (mGet3AStatWithFakeRequest) { - int fakeStreamIndex = (previewIndex >= 0) ? previewIndex : - ((videoIndex >= 0) ? videoIndex : stillIndex); + int fakeStreamIndex = + (previewIndex >= 0) ? previewIndex : ((videoIndex >= 0) ? videoIndex : stillIndex); if (fakeStreamIndex < 0) { LOGW("There isn't valid stream to trigger stats event"); mGet3AStatWithFakeRequest = false; @@ -122,12 +122,11 @@ int RequestThread::configure(const stream_config_t *streamList) { } CLEAR(mFakeReqBuf); - stream_t &fakeStream = streamList->streams[fakeStreamIndex]; + stream_t& fakeStream = streamList->streams[fakeStreamIndex]; LOG2("%s: create fake request with stream index %d", __func__, fakeStreamIndex); mFakeBuffer = CameraBuffer::create(mCameraId, BUFFER_USAGE_PSYS_INTERNAL, V4L2_MEMORY_USERPTR, fakeStream.size, 0, - fakeStream.format, fakeStream.width, - fakeStream.height); + fakeStream.format, fakeStream.width, fakeStream.height); mFakeReqBuf.s = fakeStream; mFakeReqBuf.s.memType = V4L2_MEMORY_USERPTR; @@ -147,11 +146,11 @@ bool RequestThread::blockRequest() { * 3. if no trigger event is available. 
*/ return ((mBlockRequest && (mLastRequestId >= 0)) || - (mRequestsInProcessing >= PlatformData::getMaxRequestsInflight(mCameraId)) || - (mPerframeControlSupport && (mRequestTriggerEvent == NONE_EVENT))); + (mRequestsInProcessing >= PlatformData::getMaxRequestsInflight(mCameraId)) || + (mPerframeControlSupport && (mRequestTriggerEvent == NONE_EVENT))); } -int RequestThread::processRequest(int bufferNum, camera_buffer_t **ubuffer, +int RequestThread::processRequest(int bufferNum, camera_buffer_t** ubuffer, const Parameters* params) { AutoMutex l(mPendingReqLock); CameraRequest request; @@ -183,10 +182,8 @@ int RequestThread::processRequest(int bufferNum, camera_buffer_t **ubuffer, return OK; } -shared_ptr -RequestThread::copyRequestParams(const Parameters *srcParams) { - if (srcParams == nullptr) - return nullptr; +shared_ptr RequestThread::copyRequestParams(const Parameters* srcParams) { + if (srcParams == nullptr) return nullptr; if (mReqParamsPool.empty()) { shared_ptr sParams = std::make_shared(); @@ -200,19 +197,18 @@ RequestThread::copyRequestParams(const Parameters *srcParams) { return sParams; } -int RequestThread::waitFrame(int streamId, camera_buffer_t **ubuffer) { +int RequestThread::waitFrame(int streamId, camera_buffer_t** ubuffer) { FrameQueue& frameQueue = mOutputFrames[streamId]; ConditionLock lock(frameQueue.mFrameMutex); if (!mActive) return NO_INIT; while (frameQueue.mFrameQueue.empty()) { int ret = frameQueue.mFrameAvailableSignal.waitRelative( - lock, - kWaitFrameDuration * SLOWLY_MULTIPLIER); + lock, kWaitFrameDuration * SLOWLY_MULTIPLIER); if (!mActive) return NO_INIT; - CheckWarning(ret == TIMED_OUT, ret, "@%s, time out happens, wait recovery", - mCameraId, __func__); + CheckWarning(ret == TIMED_OUT, ret, "@%s, time out happens, wait recovery", mCameraId, + __func__); } shared_ptr camBuffer = frameQueue.mFrameQueue.front(); @@ -229,11 +225,9 @@ int RequestThread::wait1stRequestDone() { ConditionLock lock(mFirstRequestLock); if (mFirstRequest) 
{ LOG2("%s, waiting the first request done", __func__); - ret = mFirstRequestSignal.waitRelative( - lock, - kWaitFirstRequestDoneDuration * SLOWLY_MULTIPLIER); - if (ret == TIMED_OUT) - LOGE("@%s: Wait 1st request timed out", __func__); + ret = mFirstRequestSignal.waitRelative(lock, + kWaitFirstRequestDoneDuration * SLOWLY_MULTIPLIER); + if (ret == TIMED_OUT) LOGE("@%s: Wait 1st request timed out", __func__); } return ret; @@ -243,83 +237,72 @@ void RequestThread::handleEvent(EventData eventData) { if (!mActive) return; /* Notes: - * There should be only one of EVENT_ISYS_FRAME - * and EVENT_PSYS_FRAME registered. - * There should be only one of EVENT_xx_STATS_BUF_READY - * registered. - */ + * There should be only one of EVENT_ISYS_FRAME + * and EVENT_PSYS_FRAME registered. + * There should be only one of EVENT_xx_STATS_BUF_READY + * registered. + */ switch (eventData.type) { case EVENT_ISYS_FRAME: - case EVENT_PSYS_FRAME: - { - AutoMutex l(mPendingReqLock); - if (mRequestsInProcessing > 0) { - mRequestsInProcessing--; - } - // Just in case too many requests are pending in mPendingRequests. - if (!mPendingRequests.empty()) { - mRequestTriggerEvent |= NEW_FRAME; - mRequestSignal.signal(); - } + case EVENT_PSYS_FRAME: { + AutoMutex l(mPendingReqLock); + if (mRequestsInProcessing > 0) { + mRequestsInProcessing--; } - break; - case EVENT_PSYS_STATS_BUF_READY: - { - TRACE_LOG_POINT("RequestThread", "receive the stat event"); - AutoMutex l(mPendingReqLock); - if (mBlockRequest) { - mBlockRequest = false; - } - mRequestTriggerEvent |= NEW_STATS; + // Just in case too many requests are pending in mPendingRequests. 
+ if (!mPendingRequests.empty()) { + mRequestTriggerEvent |= NEW_FRAME; mRequestSignal.signal(); } - break; - case EVENT_ISYS_SOF: - { - AutoMutex l(mPendingReqLock); - mLastSofSeq = eventData.data.sync.sequence; - mRequestTriggerEvent |= NEW_SOF; - mRequestSignal.signal(); + } break; + case EVENT_PSYS_STATS_BUF_READY: { + TRACE_LOG_POINT("RequestThread", "receive the stat event"); + AutoMutex l(mPendingReqLock); + if (mBlockRequest) { + mBlockRequest = false; } - break; - case EVENT_FRAME_AVAILABLE: - { - if (eventData.buffer->getUserBuffer() != &mFakeReqBuf) { - int streamId = eventData.data.frameDone.streamId; - FrameQueue& frameQueue = mOutputFrames[streamId]; - - AutoMutex lock(frameQueue.mFrameMutex); - bool needSignal = frameQueue.mFrameQueue.empty(); - frameQueue.mFrameQueue.push(eventData.buffer); - if (needSignal) { - frameQueue.mFrameAvailableSignal.signal(); - } - } else { - LOG2("%s: fake request return %u", __func__, eventData.buffer->getSequence()); - } - - AutoMutex l(mPendingReqLock); - // Insert fake request if no any request in the HAL to keep 3A running - if (mGet3AStatWithFakeRequest && - eventData.buffer->getSequence() >= mLastEffectSeq && - mPendingRequests.empty()) { - LOGW("No request, insert fake req after req %ld to keep 3A stats update", - mLastRequestId); - CameraRequest fakeRequest; - fakeRequest.mBufferNum = 1; - fakeRequest.mBuffer[0] = &mFakeReqBuf; - mFakeReqBuf.sequence = -1; - mPendingRequests.push_back(fakeRequest); - mRequestTriggerEvent |= NEW_REQUEST; - mRequestSignal.signal(); + mRequestTriggerEvent |= NEW_STATS; + mRequestSignal.signal(); + } break; + case EVENT_ISYS_SOF: { + AutoMutex l(mPendingReqLock); + mLastSofSeq = eventData.data.sync.sequence; + mRequestTriggerEvent |= NEW_SOF; + mRequestSignal.signal(); + } break; + case EVENT_FRAME_AVAILABLE: { + if (eventData.buffer->getUserBuffer() != &mFakeReqBuf) { + int streamId = eventData.data.frameDone.streamId; + FrameQueue& frameQueue = mOutputFrames[streamId]; + + 
AutoMutex lock(frameQueue.mFrameMutex); + bool needSignal = frameQueue.mFrameQueue.empty(); + frameQueue.mFrameQueue.push(eventData.buffer); + if (needSignal) { + frameQueue.mFrameAvailableSignal.signal(); } + } else { + LOG2("%s: fake request return %u", __func__, eventData.buffer->getSequence()); } - break; - default: - { - LOGW("Unknown event type %d", eventData.type); + + AutoMutex l(mPendingReqLock); + // Insert fake request if no any request in the HAL to keep 3A running + if (mGet3AStatWithFakeRequest && eventData.buffer->getSequence() >= mLastEffectSeq && + mPendingRequests.empty()) { + LOGW("No request, insert fake req after req %ld to keep 3A stats update", + mLastRequestId); + CameraRequest fakeRequest; + fakeRequest.mBufferNum = 1; + fakeRequest.mBuffer[0] = &mFakeReqBuf; + mFakeReqBuf.sequence = -1; + mPendingRequests.push_back(fakeRequest); + mRequestTriggerEvent |= NEW_REQUEST; + mRequestSignal.signal(); } - break; + } break; + default: { + LOGW("Unknown event type %d", eventData.type); + } break; } } @@ -343,9 +326,9 @@ bool RequestThread::fetchNextRequest(CameraRequest& request) { bool RequestThread::threadLoop() { int64_t applyingSeq = -1; { - ConditionLock lock(mPendingReqLock); + ConditionLock lock(mPendingReqLock); - if (blockRequest()) { + if (blockRequest()) { int ret = mRequestSignal.waitRelative(lock, kWaitDuration * SLOWLY_MULTIPLIER); CheckWarning(ret == TIMED_OUT, true, "wait event time out, %d requests processing, %zu requests in HAL", @@ -412,8 +395,8 @@ void RequestThread::handleRequest(CameraRequest& request, int64_t applyingSeq) { if (request.mParams.get()) { mParamGenerator->updateParameters(effectSeq, request.mParams.get()); } - LOG2("%s: Reprocess request: seq %ld, out buffer %d", __func__, - effectSeq, request.mBufferNum); + LOG2("%s: Reprocess request: seq %ld, out buffer %d", __func__, effectSeq, + request.mBufferNum); } else { long requestId = -1; { @@ -448,9 +431,8 @@ void RequestThread::handleRequest(CameraRequest& request, 
int64_t applyingSeq) { mParamGenerator->saveParameters(effectSeq, mLastRequestId, request.mParams.get()); mLastEffectSeq = effectSeq; - LOG2("%s: Process request: %ld:%ld, out buffer %d, param? %s", __func__, - mLastRequestId, effectSeq, request.mBufferNum, - request.mParams.get() ? "true" : "false"); + LOG2("%s: Process request: %ld:%ld, out buffer %d, param? %s", __func__, mLastRequestId, + effectSeq, request.mBufferNum, request.mParams.get() ? "true" : "false"); } } @@ -481,4 +463,4 @@ void RequestThread::handleRequest(CameraRequest& request, int64_t applyingSeq) { } } -} //namespace icamera +} // namespace icamera diff --git a/src/core/RequestThread.h b/src/core/RequestThread.h index 116dccb8..ca27780a 100644 --- a/src/core/RequestThread.h +++ b/src/core/RequestThread.h @@ -32,8 +32,8 @@ namespace icamera { * The RequestThread is used to assist CameraDevice to handle request(qbuf/dqbuf). */ class RequestThread : public Thread, public EventSource, public EventListener { -public: - RequestThread(int cameraId, AiqUnitBase *a3AControl, ParameterGenerator* aParamGen); + public: + RequestThread(int cameraId, AiqUnitBase* a3AControl, ParameterGenerator* aParamGen); ~RequestThread(); bool threadLoop(); @@ -49,9 +49,9 @@ class RequestThread : public Thread, public EventSource, public EventListener { /** * \Accept requests from user. */ - int processRequest(int bufferNum, camera_buffer_t **ubuffer, const Parameters * params); + int processRequest(int bufferNum, camera_buffer_t** ubuffer, const Parameters* params); - int waitFrame(int streamId, camera_buffer_t **ubuffer); + int waitFrame(int streamId, camera_buffer_t** ubuffer); /** * \Block the caller until the first request is processed. 
@@ -65,28 +65,26 @@ class RequestThread : public Thread, public EventSource, public EventListener { * * \return OK if succeed and BAD_VALUE if failed */ - int configure(const stream_config_t *streamList); + int configure(const stream_config_t* streamList); -private: + private: int mCameraId; - AiqUnitBase *m3AControl; - ParameterGenerator *mParamGenerator; + AiqUnitBase* m3AControl; + ParameterGenerator* mParamGenerator; bool mPerframeControlSupport; bool mGet3AStatWithFakeRequest; camera_buffer_t mFakeReqBuf; std::shared_ptr mFakeBuffer; struct CameraRequest { - CameraRequest() : mBufferNum(0), mParams(nullptr) { - CLEAR(mBuffer); - } + CameraRequest() : mBufferNum(0), mParams(nullptr) { CLEAR(mBuffer); } int mBufferNum; - camera_buffer_t *mBuffer[MAX_STREAM_NUMBER]; + camera_buffer_t* mBuffer[MAX_STREAM_NUMBER]; std::shared_ptr mParams; }; - std::shared_ptr copyRequestParams(const Parameters *params); + std::shared_ptr copyRequestParams(const Parameters* params); /** * \Fetch one request from pending request Q for processing. @@ -98,15 +96,15 @@ class RequestThread : public Thread, public EventSource, public EventListener { bool blockRequest(); static const int kMaxRequests = MAX_BUFFER_COUNT; - static const nsecs_t kWaitFrameDuration = 5000000000; // 5s - static const nsecs_t kWaitDuration = 2000000000; // 2s - static const nsecs_t kWaitFirstRequestDoneDuration = 1000000000; // 1s + static const nsecs_t kWaitFrameDuration = 5000000000; // 5s + static const nsecs_t kWaitDuration = 2000000000; // 2s + static const nsecs_t kWaitFirstRequestDoneDuration = 1000000000; // 1s - //Guard for all the pending requests + // Guard for all the pending requests Mutex mPendingReqLock; Condition mRequestSignal; - std::deque mPendingRequests; - std::queue > mReqParamsPool; + std::deque mPendingRequests; + std::queue > mReqParamsPool; int mRequestsInProcessing; // Guard for the first request. 
@@ -123,21 +121,21 @@ class RequestThread : public Thread, public EventSource, public EventListener { std::atomic mActive; enum RequestTriggerEvent { - NONE_EVENT = 0, + NONE_EVENT = 0, NEW_REQUEST = 1, - NEW_FRAME = 1 << 1, - NEW_STATS = 1 << 2, - NEW_SOF = 1 << 3, + NEW_FRAME = 1 << 1, + NEW_STATS = 1 << 2, + NEW_SOF = 1 << 3, }; int mRequestTriggerEvent; long mLastRequestId; - int64_t mLastEffectSeq; // Last sequence is which last results had been taken effect on - int64_t mLastAppliedSeq; // Last sequence id which last results had been set on + int64_t mLastEffectSeq; // Last sequence is which last results had been taken effect on + int64_t mLastAppliedSeq; // Last sequence id which last results had been set on int64_t mLastSofSeq; bool mBlockRequest; // Process the 2nd or 3th request after the 1st 3A event // to avoid unstable AWB at the beginning of stream on bool mSofEnabled; }; -} //namespace icamera +} // namespace icamera diff --git a/src/core/SensorHwCtrl.cpp b/src/core/SensorHwCtrl.cpp index fbca2399..2ebf9cf4 100644 --- a/src/core/SensorHwCtrl.cpp +++ b/src/core/SensorHwCtrl.cpp @@ -357,8 +357,8 @@ int SensorHwCtrl::setAnalogGains(const vector& analogGains) { int low, high; if (PlatformData::getDisableBLCByAGain(mCameraId, low, high)) { // Set V4L2_CID_BLC to 0(disable) if analog gain falls into the given range. - status = mPixelArraySubdev->SetControl(V4L2_CID_BLC, - (analogGains[0] >= low && analogGains[0] <= high) ? 0 : 1); + status = mPixelArraySubdev->SetControl( + V4L2_CID_BLC, (analogGains[0] >= low && analogGains[0] <= high) ? 
0 : 1); } #endif return status; diff --git a/src/core/SwImageProcessor.cpp b/src/core/SwImageProcessor.cpp index 74f86856..ab6ed81b 100644 --- a/src/core/SwImageProcessor.cpp +++ b/src/core/SwImageProcessor.cpp @@ -42,7 +42,7 @@ SwImageProcessor::~SwImageProcessor() { int SwImageProcessor::start() { PERF_CAMERA_ATRACE(); LOG1("@%s", mCameraId, __func__); - AutoMutex l(mBufferQueueLock); + AutoMutex l(mBufferQueueLock); int memType = mOutputFrameInfo.begin()->second.memType; CheckAndLogError(memType == V4L2_MEMORY_DMABUF, BAD_VALUE, @@ -64,7 +64,7 @@ void SwImageProcessor::stop() { { AutoMutex l(mBufferQueueLock); mThreadRunning = false; - //Wakeup the thread to exit + // Wakeup the thread to exit mFrameAvailableSignal.signal(); mOutputAvailableSignal.signal(); } @@ -84,25 +84,25 @@ int SwImageProcessor::processNewFrame() { Port inputPort = INVALID_PORT; LOG1("@%s", mCameraId, __func__); - { // Auto lock mBufferQueueLock scope + { // Auto lock mBufferQueueLock scope ConditionLock lock(mBufferQueueLock); ret = waitFreeBuffersInQueue(lock, srcBuffers, dstBuffers); if (!mThreadRunning) return -1; - CheckAndLogError((ret < 0), -1, - "@%s: wake up from the wait abnomal such as stop", __func__); + CheckAndLogError((ret < 0), -1, "@%s: wake up from the wait abnomal such as stop", + __func__); inputPort = srcBuffers.begin()->first; cInBuffer = srcBuffers[inputPort]; - for (auto& output: mOutputQueue) { + for (auto& output : mOutputQueue) { output.second.pop(); } - for (auto& input: mInputQueue) { + for (auto& input : mInputQueue) { input.second.pop(); } - } // End of auto lock mBufferQueueLock scope + } // End of auto lock mBufferQueueLock scope CheckAndLogError(!cInBuffer, BAD_VALUE, "Invalid input buffer."); for (auto& dst : dstBuffers) { @@ -114,24 +114,23 @@ int SwImageProcessor::processNewFrame() { continue; } - //No Lock for this function make sure buffers are not freed before the stop + // No Lock for this function make sure buffers are not freed before the stop ret 
= SwImageConverter::convertFormat( - cInBuffer->getWidth(), cInBuffer->getHeight(), - static_cast(cInBuffer->getBufferAddr()), - cInBuffer->getBufferSize(), cInBuffer->getFormat(), - static_cast(cOutBuffer->getBufferAddr()), - cOutBuffer->getBufferSize(), cOutBuffer->getFormat()); + cInBuffer->getWidth(), cInBuffer->getHeight(), + static_cast(cInBuffer->getBufferAddr()), cInBuffer->getBufferSize(), + cInBuffer->getFormat(), static_cast(cOutBuffer->getBufferAddr()), + cOutBuffer->getBufferSize(), cOutBuffer->getFormat()); CheckAndLogError((ret < 0), ret, "format convertion failed with %d", ret); if (CameraDump::isDumpTypeEnable(DUMP_SW_IMG_PROC_OUTPUT)) { CameraDump::dumpImage(mCameraId, cOutBuffer, M_SWIPOP); } - //update the interlaced field, sequence, and timestamp from the src buf to dst buf + // update the interlaced field, sequence, and timestamp from the src buf to dst buf cOutBuffer->updateV4l2Buffer(*cInBuffer->getV4L2Buffer().Get()); - //Notify listener: No lock here: mBufferConsumerList will not updated in this state - for (auto &it : mBufferConsumerList) { + // Notify listener: No lock here: mBufferConsumerList will not updated in this state + for (auto& it : mBufferConsumerList) { it->onFrameAvailable(port, cOutBuffer); } } @@ -144,4 +143,4 @@ int SwImageProcessor::processNewFrame() { return OK; } -} //namespace icamera +} // namespace icamera diff --git a/src/core/SwImageProcessor.h b/src/core/SwImageProcessor.h index caf83260..f33f8f1f 100644 --- a/src/core/SwImageProcessor.h +++ b/src/core/SwImageProcessor.h @@ -21,26 +21,26 @@ namespace icamera { /** - * SwImageProcessor runs the Image Process Alogirhtm in the CPU. - * It implements the BufferConsumer and BufferProducer Interface - * This class is for debug purpose when the PsysProcess is not ready. - */ -class SwImageProcessor: public BufferQueue { -public: + * SwImageProcessor runs the Image Process Alogirhtm in the CPU. 
+ * It implements the BufferConsumer and BufferProducer Interface + * This class is for debug purpose when the PsysProcess is not ready. + */ +class SwImageProcessor : public BufferQueue { + public: SwImageProcessor(int cameraId); virtual ~SwImageProcessor(); /** * \brief Buffer producer Interface */ - virtual int start(); - virtual void stop(); + virtual int start(); + virtual void stop(); -private: + private: int processNewFrame(); -private: + private: int mCameraId; }; -} //namespace icamera +} // namespace icamera diff --git a/src/core/psysprocessor/GPUExecutor.cpp b/src/core/psysprocessor/GPUExecutor.cpp index e70ff293..f3ecb042 100644 --- a/src/core/psysprocessor/GPUExecutor.cpp +++ b/src/core/psysprocessor/GPUExecutor.cpp @@ -223,9 +223,9 @@ bool GPUExecutor::fetchTnrOutBuffer(int64_t seq, std::shared_ptr b std::unique_lock lock(mTnrOutBufMapLock); if (mTnrOutBufMap.find(seq) != mTnrOutBufMap.end()) { - void* pSrcBuf = (buf->getMemory() == V4L2_MEMORY_DMABUF) - ? CameraBuffer::mapDmaBufferAddr(buf->getFd(), buf->getBufferSize()) - : buf->getBufferAddr(); + void* pSrcBuf = (buf->getMemory() == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(buf->getFd(), buf->getBufferSize()) : + buf->getBufferAddr(); CheckAndLogError(!pSrcBuf, false, "pSrcBuf is nullptr"); LOG2("Sequence %ld is used for output", seq); MEMCPY_S(pSrcBuf, buf->getBufferSize(), mTnrOutBufMap[seq], mOutBufferSize); @@ -301,9 +301,9 @@ int GPUExecutor::allocTnrOutBufs(uint32_t bufSize) { /* for yuv still stream, we use maxRaw buffer to do reprocessing, and for real still stream, 2 * tnr buffers are enough */ - int maxTnrOutBufCount = (mStreamId == VIDEO_STREAM_ID && mUseInternalTnrBuffer) - ? PlatformData::getMaxRawDataNum(mCameraId) - : DEFAULT_TNR7US_BUFFER_COUNT; + int maxTnrOutBufCount = (mStreamId == VIDEO_STREAM_ID && mUseInternalTnrBuffer) ? 
+ PlatformData::getMaxRawDataNum(mCameraId) : + DEFAULT_TNR7US_BUFFER_COUNT; std::unique_lock lock(mTnrOutBufMapLock); for (int i = 0; i < maxTnrOutBufCount; i++) { @@ -566,9 +566,9 @@ int GPUExecutor::runTnrFrame(const std::shared_ptr& inBuf, int fd = outBuf->getFd(); int memoryType = outBuf->getMemory(); int bufferSize = outBuf->getBufferSize(); - void* outPtr = (memoryType == V4L2_MEMORY_DMABUF) - ? CameraBuffer::mapDmaBufferAddr(fd, bufferSize) - : outBuf->getBufferAddr(); + void* outPtr = (memoryType == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(fd, bufferSize) : + outBuf->getBufferAddr(); if (!outPtr) return UNKNOWN_ERROR; outBuf->setSequence(sequence); diff --git a/src/core/psysprocessor/PGCommon.cpp b/src/core/psysprocessor/PGCommon.cpp index bfecedf8..c0648a6f 100644 --- a/src/core/psysprocessor/PGCommon.cpp +++ b/src/core/psysprocessor/PGCommon.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2019-2021 Intel Corporation. + * Copyright (C) 2019-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,6 +21,7 @@ #include #include +#include #include #include "iutils/CameraDump.h" @@ -38,10 +39,7 @@ int PGCommon::getFrameSize(int format, int width, int height, bool needAlignedHe int stride = PGUtils::getCssStride(format, width); switch (cssFormat) { case IA_CSS_DATA_FORMAT_BAYER_LINE_INTERLEAVED: // CSL6 - if (needAlignedHeight) { - height = ALIGN_64(height); - } - size = stride * height * 3 / 2; + size = stride * height; break; default: break; @@ -619,14 +617,15 @@ int PGCommon::setTerminalParams(const ia_css_frame_format_type* frameFormatTypes terminalParam->index[IA_CSS_ROW_DIMENSION] = 0; LOG2("%s: %s: index=%d, format=%d, w=%d, h=%d, fw=%d, fh=%d, bpp=%d, bpe=%d, " - "stride=%d, offset=%d, col=%d, row=%d", - __func__, getName(), i, terminalParam->frame_format_type, - terminalParam->dimensions[IA_CSS_COL_DIMENSION], - terminalParam->dimensions[IA_CSS_ROW_DIMENSION], - terminalParam->fragment_dimensions[IA_CSS_COL_DIMENSION], - terminalParam->fragment_dimensions[IA_CSS_ROW_DIMENSION], terminalParam->bpp, - terminalParam->bpe, terminalParam->stride, terminalParam->offset, - terminalParam->index[IA_CSS_COL_DIMENSION], terminalParam->index[IA_CSS_ROW_DIMENSION]); + "stride=%d, offset=%d, col=%d, row=%d", + __func__, getName(), i, terminalParam->frame_format_type, + terminalParam->dimensions[IA_CSS_COL_DIMENSION], + terminalParam->dimensions[IA_CSS_ROW_DIMENSION], + terminalParam->fragment_dimensions[IA_CSS_COL_DIMENSION], + terminalParam->fragment_dimensions[IA_CSS_ROW_DIMENSION], terminalParam->bpp, + terminalParam->bpe, terminalParam->stride, terminalParam->offset, + terminalParam->index[IA_CSS_COL_DIMENSION], + terminalParam->index[IA_CSS_ROW_DIMENSION]); } return OK; @@ -879,12 +878,12 @@ int PGCommon::configureFrameDesc() { int PGCommon::iterate(CameraBufferMap& inBufs, CameraBufferMap& outBufs, ia_binary_data* statistics, const ia_binary_data* ipuParameters) { PERF_CAMERA_ATRACE(); - LOG2("%s:%s ++", getName(), __func__); int64_t sequence = 0; if 
(!inBufs.empty()) { sequence = inBufs.begin()->second->getSequence(); } + LOG2("%s:%s ++", sequence, getName(), __func__); int ret = prepareTerminalBuffers(ipuParameters, inBufs, outBufs, sequence); CheckAndLogError((ret != OK), ret, "%s, prepareTerminalBuffers fail with %d", getName(), ret); @@ -932,7 +931,7 @@ int PGCommon::iterate(CameraBufferMap& inBufs, CameraBufferMap& outBufs, ia_bina } postTerminalBuffersDone(sequence); - LOG2("%s:%s -- ", getName(), __func__); + LOG2("%s:%s -- ", sequence, getName(), __func__); return ret; } @@ -990,14 +989,14 @@ int PGCommon::allocateTnrDataBuffers() { bool isCompression = PlatformData::getPSACompression(mCameraId) && PGUtils::isCompressionTerminal(termIndex + mTerminalBaseUid); - int size = isCompression - ? CameraUtils::getFrameSize(mTerminalFrameInfos[termIndex].mFormat, - mTerminalFrameInfos[mInputMainTerminal].mWidth, - mTerminalFrameInfos[mInputMainTerminal].mHeight, - false, true, true) - : CameraUtils::getFrameSize(mTerminalFrameInfos[termIndex].mFormat, - mTerminalFrameInfos[mInputMainTerminal].mWidth, - mTerminalFrameInfos[mInputMainTerminal].mHeight); + int size = isCompression ? + CameraUtils::getFrameSize(mTerminalFrameInfos[termIndex].mFormat, + mTerminalFrameInfos[mInputMainTerminal].mWidth, + mTerminalFrameInfos[mInputMainTerminal].mHeight, false, + true, true) : + CameraUtils::getFrameSize(mTerminalFrameInfos[termIndex].mFormat, + mTerminalFrameInfos[mInputMainTerminal].mWidth, + mTerminalFrameInfos[mInputMainTerminal].mHeight); for (int32_t i = 0; i < bufferCount; i++) { uint8_t* buffer = nullptr; @@ -1097,9 +1096,9 @@ int PGCommon::prepareTerminalBuffers(const ia_binary_data* ipuParameters, flush = false; } ciprBuf = - (buffer->getMemory() == V4L2_MEMORY_DMABUF) - ? registerUserBuffer(buffer->getBufferSize(), buffer->getFd(), flush) - : registerUserBuffer(buffer->getBufferSize(), buffer->getBufferAddr(), flush); + (buffer->getMemory() == V4L2_MEMORY_DMABUF) ? 
+ registerUserBuffer(buffer->getBufferSize(), buffer->getFd(), flush) : + registerUserBuffer(buffer->getBufferSize(), buffer->getBufferAddr(), flush); CheckAndLogError(!ciprBuf, NO_MEMORY, "%s, register buffer size %d for terminal %d fail", __func__, buffer->getBufferSize(), termIdx); @@ -1135,7 +1134,6 @@ int PGCommon::prepareTerminalBuffers(const ia_binary_data* ipuParameters, mTerminalBuffers[pair.outId]->getMemoryCpuPtr(&mParamPayload[pair.outId].data); } - return mPGParamAdapt->updatePALAndEncode(ipuParameters, mTerminalCount, mParamPayload); } @@ -1328,8 +1326,8 @@ int PGCommon::getManifest(int pgId) { kernelBitmap = ia_css_program_group_manifest_get_kernel_bitmap(mf); LOG1("%s: pgIndex: %d, programGroupId: %d, manifestSize: %d, programCount: %d," - "terminalCount: %d", __func__, i, programGroupId, manifestSize, programCount, - terminalCount); + "terminalCount: %d", + __func__, i, programGroupId, manifestSize, programCount, terminalCount); if (pgId == programGroupId) { mProgramCount = programCount; @@ -1508,6 +1506,7 @@ void PGCommon::dumpTerminalPyldAndDesc(int pgId, int64_t sequence, } if (IS_DATA_TERMINAL(terminal->terminal_type)) continue; + // clang-format off void* ptr = getCiprBufferPtr(mTerminalBuffers[terminal->tm_index]); int size = getCiprBufferSize(mTerminalBuffers[terminal->tm_index]); const char* typeStr = @@ -1526,6 +1525,7 @@ void PGCommon::dumpTerminalPyldAndDesc(int pgId, int64_t sequence, : (terminal->terminal_type == IA_CSS_TERMINAL_TYPE_PROGRAM_CONTROL_INIT) ? 
"PROGRAM_CONTROL_INIT" : "UNKNOWN"; + // clang-format on printPtr = (const unsigned int*)ptr; fprintf(fp, "::terminal %d dump size %d(0x%x), line %d, type %s\n", terminal->tm_index, size, size, PAGE_ALIGN(size) / 4, typeStr); diff --git a/src/core/psysprocessor/PGUtils.cpp b/src/core/psysprocessor/PGUtils.cpp index ef81e2a4..838a3260 100644 --- a/src/core/psysprocessor/PGUtils.cpp +++ b/src/core/psysprocessor/PGUtils.cpp @@ -81,7 +81,7 @@ static const FormatMap sFormatMapping[] = { {GET_FOURCC_FMT('V', '4', '2', '0'), IA_CSS_DATA_FORMAT_YUV420, 24, 16}, {GET_FOURCC_FMT('b', 'V', '0', 'K'), IA_CSS_DATA_FORMAT_BAYER_VECTORIZED, 16, 16}, {GET_FOURCC_FMT('b', 'V', '0', 'G'), IA_CSS_DATA_FORMAT_BAYER_VECTORIZED, 16, 16}, - {GET_FOURCC_FMT('C', 'S', 'L', '6'), IA_CSS_DATA_FORMAT_BAYER_LINE_INTERLEAVED, 12, 10}, + {GET_FOURCC_FMT('C', 'S', 'L', '6'), IA_CSS_DATA_FORMAT_BAYER_LINE_INTERLEAVED, 16, 16}, {GET_FOURCC_FMT('C', 'S', '4', '2'), IA_CSS_DATA_FORMAT_YUV420, 18, 12}, {GET_FOURCC_FMT('G', 'R', '1', '0'), IA_CSS_DATA_FORMAT_BAYER_GRBG, 16, 16}, {GET_FOURCC_FMT('R', 'G', '1', '0'), IA_CSS_DATA_FORMAT_BAYER_RGGB, 16, 16}, @@ -244,23 +244,23 @@ int getStride(int cssFmt, int width) { #define PG_PSYS_IPU6_ISL 198 // the below terminals belong to PG_PSYS_IPU6_BB -#define PG_BB_TERMINAL_ID_TNR_REF_IN 4 // data_terminal -#define PG_BB_TERMINAL_ID_TNR_REF_OUT 6 // data_terminal -#define PG_BB_TERMINAL_ID_TNR_SIM_REF_IN 5 // spetial_terminal +#define PG_BB_TERMINAL_ID_TNR_REF_IN 4 // data_terminal +#define PG_BB_TERMINAL_ID_TNR_REF_OUT 6 // data_terminal +#define PG_BB_TERMINAL_ID_TNR_SIM_REF_IN 5 // spetial_terminal #define PG_BB_TERMINAL_ID_TNR_SIM_REF_OUT 7 // spetial_terminal // the below terminals belong to PG_PSYS_IPU6_ISA_LB #if defined(IPU_SYSVER_ipu6v5) || defined(IPU_SYSVER_ipu6v6) -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L0 20 // program_terminal -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L1 21 // program_terminal -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L2 22 // 
program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L0 20 // program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L1 21 // program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L2 22 // program_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L0 23 // param_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L1 24 // param_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L2 25 // param_terminal #else -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L0 21 // program_terminal -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L1 22 // program_terminal -#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L2 23 // program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L0 21 // program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L1 22 // program_terminal +#define ISA_LB_TERMINAL_ID_DVS_FE_IN_L2 23 // program_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L0 24 // param_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L1 25 // param_terminal #define ISA_LB_TERMINAL_ID_DVS_FE_OUT_L2 26 // param_terminal @@ -300,12 +300,15 @@ bool getTerminalPairs(int pgId, TERMINAL_PAIR_TYPE type, std::vector& inputInfo, void PSysDAG::releasePipeExecutors() { for (auto& executor : mExecutorsPool) { + if (mScheduler) mScheduler->unregisterNode(executor); delete executor; } mExecutorsPool.clear(); @@ -100,6 +103,7 @@ int PSysDAG::createPipeExecutors(bool useTnrOutBuffer) { int graphId = gc->getGraphId(); PolicyConfig* cfg = PlatformData::getExecutorPolicyConfig(graphId); CheckAndLogError(!cfg, UNKNOWN_ERROR, "Failed to get PolicyConfig in PSysDAG!"); + if (mScheduler) mScheduler->configurate(graphId); #ifdef USE_PG_LITE_PIPE configShareReferPool(gc); @@ -147,12 +151,18 @@ int PSysDAG::createPipeExecutors(bool useTnrOutBuffer) { executor = new PipeExecutor(mCameraId, item, cfg->exclusivePgs, this, gc); if (streamId == STILL_STREAM_ID) mStillExecutor = executor; } + executor->setPolicyManager(mPolicyManager); #else PipeExecutor* executor = new PipeExecutor(mCameraId, item, cfg->exclusivePgs, this, gc); + if (mScheduler) { + 
mScheduler->registerNode(executor); + } else { + // Use PolicyManager to sync iteration if no scheduler + executor->setPolicyManager(mPolicyManager); + } #endif executor->setIspParamAdaptor(mIspParamAdaptor); executor->setStreamId(streamId); - executor->setPolicyManager(mPolicyManager); executor->setNotifyPolicy(item.notifyPolicy); #ifdef USE_PG_LITE_PIPE executor->setShareReferPool(mShareReferPool); @@ -417,17 +427,46 @@ int PSysDAG::registerInternalBufs(std::map& internalBufs) return OK; } +int PSysDAG::getActiveStreamIds(const PSysTaskData& taskData, + std::vector* activeStreamIds) { + // According to the output port to filter the valid executor stream Ids, and then run AIC + for (auto& outputFrame : taskData.mOutputBuffers) { + if (outputFrame.second.get() == nullptr) continue; + + std::map >::iterator it = + mOutputPortToStreamIds.find(outputFrame.first); + CheckAndLogError(it == mOutputPortToStreamIds.end(), UNKNOWN_ERROR, + "%s, failed to find streamIds for output port: %d", __func__, + outputFrame.first); + + for (auto& streamId : it->second) { + if (isInactiveStillStream(streamId, &taskData, outputFrame.first)) continue; + if (std::find(activeStreamIds->begin(), activeStreamIds->end(), streamId) == + activeStreamIds->end()) { + activeStreamIds->push_back(streamId); + } + } + } + LOG2("%s, The active streamId size for current task: %zu", __func__, activeStreamIds->size()); + + return OK; +} + /** * Queue the buffers in PSysTaskData to the cooresponding executors. */ int PSysDAG::queueBuffers(const PSysTaskData& task) { LOG2("@%s", mCameraId, __func__); + + std::vector activeStreamIds; + getActiveStreamIds(task, &activeStreamIds); + // Provide the input buffers for the input edge executor. 
for (auto& inputFrame : task.mInputBuffers) { for (auto& inputMap : mInputMaps) { if (inputMap.mDagPort == inputFrame.first) { - if (isInactiveStillStream(mExecutorStreamId[inputMap.mExecutor], &task, - inputFrame.first)) + if (std::find(activeStreamIds.begin(), activeStreamIds.end(), + mExecutorStreamId[inputMap.mExecutor]) == activeStreamIds.end()) continue; inputMap.mExecutor->onFrameAvailable(inputMap.mExecutorPort, inputFrame.second); LOG2("%s, queue input buffer: dagPort: %d, executorPort: %d, name: %s", __func__, @@ -440,8 +479,8 @@ int PSysDAG::queueBuffers(const PSysTaskData& task) { for (auto& outputFrame : task.mOutputBuffers) { for (auto& outputMap : mOutputMaps) { if (outputMap.mDagPort == outputFrame.first) { - if (isInactiveStillStream(mExecutorStreamId[outputMap.mExecutor], &task, - outputFrame.first)) + if (std::find(activeStreamIds.begin(), activeStreamIds.end(), + mExecutorStreamId[outputMap.mExecutor]) == activeStreamIds.end()) continue; outputMap.mExecutor->qbuf(outputMap.mExecutorPort, outputFrame.second); LOG2("%s, queue output buffer, dagPort: %d, executorPort: %d, name: %s", __func__, @@ -715,6 +754,7 @@ void PSysDAG::onStatsDone(int64_t sequence) { int PSysDAG::prepareIpuParams(int64_t sequence, bool forceUpdate, TaskInfo* task) { TRACE_LOG_PROCESS("PSysDAG", __func__, MAKE_COLOR(sequence), sequence); + if (task == nullptr) { AutoMutex taskLock(mTaskLock); for (size_t i = 0; i < mOngoingTasks.size(); i++) { @@ -728,29 +768,8 @@ int PSysDAG::prepareIpuParams(int64_t sequence, bool forceUpdate, TaskInfo* task CheckAndLogError(!task, UNKNOWN_ERROR, "%s, Failed to find the task", __func__, sequence); - // According to the output port to filter the valid executor stream Ids, and then run AIC std::vector activeStreamIds; - for (auto& outputFrame : task->mTaskData.mOutputBuffers) { - if (outputFrame.second.get() == nullptr) continue; - - std::map >::iterator it = - mOutputPortToStreamIds.find(outputFrame.first); - CheckAndLogError(it == 
mOutputPortToStreamIds.end(), UNKNOWN_ERROR, - "%s, failed to find streamIds for output port: %d", __func__, - outputFrame.first); - - for (auto& streamId : it->second) { - if (isInactiveStillStream(streamId, &(task->mTaskData), outputFrame.first)) continue; - if (std::find(activeStreamIds.begin(), activeStreamIds.end(), streamId) == - activeStreamIds.end()) { - activeStreamIds.push_back(streamId); - } - } - } - LOG2("%s, the active streamId size for aic is %zu", __func__, sequence, - activeStreamIds.size()); - - int ret = OK; + getActiveStreamIds(task->mTaskData, &activeStreamIds); for (auto& id : activeStreamIds) { // Make sure the AIC is executed once. if (!forceUpdate) { @@ -765,7 +784,7 @@ int PSysDAG::prepareIpuParams(int64_t sequence, bool forceUpdate, TaskInfo* task } } - ret = mIspParamAdaptor->runIspAdapt(&task->mTaskData.mIspSettings, sequence, id); + int ret = mIspParamAdaptor->runIspAdapt(&task->mTaskData.mIspSettings, sequence, id); CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to run AIC: streamId: %d", __func__, sequence, id); diff --git a/src/core/psysprocessor/PSysDAG.h b/src/core/psysprocessor/PSysDAG.h index c680c7e2..eb978e49 100644 --- a/src/core/psysprocessor/PSysDAG.h +++ b/src/core/psysprocessor/PSysDAG.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2021 Intel Corporation + * Copyright (C) 2017-2022 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,6 +21,7 @@ #include #include "CameraBuffer.h" +#include "CameraScheduler.h" #include "IspParamAdaptor.h" #include "Parameters.h" #include "PlatformData.h" @@ -73,7 +74,7 @@ class PSysDagCallback { class PSysDAG { public: - PSysDAG(int cameraId, PSysDagCallback* psysDagCB); + PSysDAG(int cameraId, CameraScheduler* scheduler, PSysDagCallback* psysDagCB); virtual ~PSysDAG(); void setFrameInfo(const std::map& inputInfo, const std::map& outputInfo); @@ -123,11 +124,13 @@ class PSysDAG { int queueBuffers(const PSysTaskData& task); int returnBuffers(PSysTaskData& result); bool isInactiveStillStream(int streamId, const PSysTaskData* task, Port port); + int getActiveStreamIds(const PSysTaskData& taskData, std::vector* activeStreamIds); void dumpExternalPortMap(); private: int mCameraId; + CameraScheduler* mScheduler; PSysDagCallback* mPSysDagCB; // Used to callback notify frame done handling PolicyManager* mPolicyManager; ConfigMode mConfigMode; // It is actually real config mode. diff --git a/src/core/psysprocessor/PipeLiteExecutor.cpp b/src/core/psysprocessor/PipeLiteExecutor.cpp index 76909a88..0ea47000 100644 --- a/src/core/psysprocessor/PipeLiteExecutor.cpp +++ b/src/core/psysprocessor/PipeLiteExecutor.cpp @@ -48,7 +48,8 @@ static const int32_t sSisKernels[] = {ia_pal_uuid_isp_sis_1_0_a}; PipeLiteExecutor::PipeLiteExecutor(int cameraId, const ExecutorPolicy& policy, vector exclusivePGs, PSysDAG* psysDag, shared_ptr gc) - : mCameraId(cameraId), + : ISchedulerNode(policy.exeName.c_str()), + mCameraId(cameraId), mStreamId(-1), mName(policy.exeName), mPGNames(policy.pgList), @@ -63,7 +64,8 @@ PipeLiteExecutor::PipeLiteExecutor(int cameraId, const ExecutorPolicy& policy, mLastStatsSequence(-1), mExclusivePGs(exclusivePGs), mPSysDag(psysDag), - mkernelsCountWithStats(0) {} + mkernelsCountWithStats(0) { +} PipeLiteExecutor::~PipeLiteExecutor() { while (!mPGExecutors.empty()) { @@ -231,7 +233,7 @@ int PipeLiteExecutor::createPGs() { int 
PipeLiteExecutor::configurePGs(const vector& tnrPortFormat) { FrameInfo tnrFormatInfo = {}; - for (auto &tnrFormat : tnrPortFormat) { + for (auto& tnrFormat : tnrPortFormat) { if (tnrFormat.streamId == mStreamId) { tnrFormatInfo.mWidth = tnrFormat.formatSetting.width; tnrFormatInfo.mHeight = tnrFormat.formatSetting.height; @@ -382,7 +384,8 @@ int PipeLiteExecutor::setInputTerminals(const std::map& sourceTerm int PipeLiteExecutor::start() { LOG1("%s executor:%s", __func__, mName.c_str()); - mProcessThread = new ProcessThread(this); + // Need thread when PolicyManager takes responsibility. Otherwise Scheduler will handle. + if (mPolicyManager) mProcessThread = new ProcessThread(this); AutoMutex l(mBufferQueueLock); allocBuffers(); @@ -390,8 +393,10 @@ int PipeLiteExecutor::start() { mLastStatsSequence = -1; - mThreadRunning = true; - mProcessThread->run(mName.c_str(), PRIORITY_NORMAL); + if (mProcessThread) { + mThreadRunning = true; + mProcessThread->run(mName.c_str(), PRIORITY_NORMAL); + } return OK; } @@ -399,11 +404,11 @@ int PipeLiteExecutor::start() { void PipeLiteExecutor::stop() { LOG1("%s executor:%s", __func__, mName.c_str()); - mProcessThread->requestExitAndWait(); + if (mProcessThread) mProcessThread->requestExitAndWait(); // Thread is not running. 
It is safe to clear the Queue clearBufferQueues(); - delete mProcessThread; + if (mProcessThread) delete mProcessThread; // Clear the buffer pool of pg Uint for (auto& unit : mPGExecutors) { @@ -414,6 +419,7 @@ void PipeLiteExecutor::stop() { void PipeLiteExecutor::notifyStop() { LOG1("%s executor:%s", __func__, mName.c_str()); + if (!mProcessThread) return; mProcessThread->requestExit(); { @@ -525,6 +531,34 @@ bool PipeLiteExecutor::hasValidBuffers(const CameraBufferPortMap& buffers) { return false; } +bool PipeLiteExecutor::fetchBuffersInQueue(map >& cInBuffer, + map >& cOutBuffer) { + for (auto& input : mInputQueue) { + Port port = input.first; + CameraBufQ& inputQueue = input.second; + if (inputQueue.empty()) { + LOG2("%s: No buffer input port %d", __func__, port); + cInBuffer.clear(); + return false; + } + cInBuffer[port] = inputQueue.front(); + } + + for (auto& output : mOutputQueue) { + Port port = output.first; + CameraBufQ& outputQueue = output.second; + if (outputQueue.empty()) { + LOG2("%s: No buffer output port %d", __func__, port); + cInBuffer.clear(); + cOutBuffer.clear(); + return false; + } + + cOutBuffer[port] = outputQueue.front(); + } + return true; +} + int PipeLiteExecutor::processNewFrame() { PERF_CAMERA_ATRACE(); @@ -533,14 +567,20 @@ int PipeLiteExecutor::processNewFrame() { // Wait frame buffers. { ConditionLock lock(mBufferQueueLock); - ret = waitFreeBuffersInQueue(lock, inBuffers, outBuffers); - // Already stopped - if (!mThreadRunning) return -1; + if (mPolicyManager) { + // Prepare frames at first, then mPolicyManager decides when to run + ret = waitFreeBuffersInQueue(lock, inBuffers, outBuffers); + // Already stopped + if (!mThreadRunning) return -1; - if (ret != OK) return OK; // Wait frame buffer error should not involve thread exit. + if (ret != OK) return OK; // Wait frame buffer error should not involve thread exit. 
- CheckAndLogError(inBuffers.empty() || outBuffers.empty(), UNKNOWN_ERROR, - "Failed to get input or output buffers."); + CheckAndLogError(inBuffers.empty() || outBuffers.empty(), UNKNOWN_ERROR, + "Failed to get input or output buffers."); + } else { + // Triggered by scheduler, will run if frames are ready + if (!fetchBuffersInQueue(inBuffers, outBuffers)) return OK; + } for (auto& output : mOutputQueue) { output.second.pop(); @@ -598,24 +638,24 @@ int PipeLiteExecutor::processNewFrame() { int seq = cInBuffer->getSequence(); SyncManager::getInstance()->printVcSyncCount(); - LOG2(" [start runPipe], CPU-timestamp:%lu, vc:%d, kernel-timestamp:%.3l", seq, - CameraUtils::systemTime(), cInBuffer->getVirtualChannel(), - cInBuffer->getTimestamp().tv_sec * 1000.0 + - cInBuffer->getTimestamp().tv_usec / 1000.0); + LOG2( + " [start runPipe], CPU-timestamp:%lu, vc:%d, kernel-timestamp:%.3l", seq, + CameraUtils::systemTime(), cInBuffer->getVirtualChannel(), + cInBuffer->getTimestamp().tv_sec * 1000.0 + cInBuffer->getTimestamp().tv_usec / 1000.0); SyncManager::getInstance()->updateVcSyncCount(vc); // Run pipe with buffers ret = runPipe(inBuffers, outBuffers, outStatsBuffers, eventType); - LOG2(" [done runPipe], CPU-timestamp:%lu, vc:%d, kernel-timestamp:%.3lf", - cInBuffer->getSequence(), CameraUtils::systemTime(), cInBuffer->getVirtualChannel(), - cInBuffer->getTimestamp().tv_sec * 1000.0 + - cInBuffer->getTimestamp().tv_usec / 1000.0); + LOG2( + " [done runPipe], CPU-timestamp:%lu, vc:%d, kernel-timestamp:%.3lf", + cInBuffer->getSequence(), CameraUtils::systemTime(), cInBuffer->getVirtualChannel(), + cInBuffer->getTimestamp().tv_sec * 1000.0 + cInBuffer->getTimestamp().tv_usec / 1000.0); } else { - // FRAME_SYNC_E + // FRAME_SYNC_E // Run pipe with buffers ret = runPipe(inBuffers, outBuffers, outStatsBuffers, eventType); - // FRAME_SYNC_S + // FRAME_SYNC_S } // FRAME_SYNC_E CheckAndLogError((ret != OK), UNKNOWN_ERROR, "@%s: failed to run pipe", __func__); @@ -844,6 +884,14 @@ 
int PipeLiteExecutor::notifyStatsDone(TuningMode tuningMode, const v4l2_buffer_t if (!statsBuf) continue; if (mStreamId != VIDEO_STREAM_ID) { + // DVS Zoom without STAT buffer. + { + EventData eventData; + eventData.type = EVENT_DVS_READY; + eventData.data.dvsRunReady.streamId = mStreamId; + notifyListeners(eventData); + } + if (!PlatformData::isStillOnlyPipeEnabled(mCameraId)) { LOG2("%s: Drop still pipe statistics data", __func__); releaseStatsBuffer(statsBuf); @@ -929,9 +977,9 @@ int PipeLiteExecutor::allocBuffers() { bool isCompression = PlatformData::getPSACompression(mCameraId) && PGUtils::isCompressionTerminal(termDesc.terminal); - int size = isCompression - ? PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, false, true, true) - : PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, true); + int size = isCompression ? + PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, false, true, true) : + PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, true); shared_ptr buf = CameraBuffer::create(mCameraId, BUFFER_USAGE_PSYS_INPUT, V4L2_MEMORY_USERPTR, size, 0, @@ -987,9 +1035,9 @@ int PipeLiteExecutor::allocBuffers() { bool isCompression = PlatformData::getPSACompression(mCameraId) && PGUtils::isCompressionTerminal(terminal); - int size = isCompression - ? PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, false, true, true) - : PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, true); + int size = isCompression ? + PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, false, true, true) : + PGCommon::getFrameSize(srcFmt, srcWidth, srcHeight, true); for (int i = 0; i < MAX_BUFFER_COUNT; i++) { // Prepare internal frame buffer for its producer. 
diff --git a/src/core/psysprocessor/PipeLiteExecutor.h b/src/core/psysprocessor/PipeLiteExecutor.h index 4e3bdd94..a41343e6 100644 --- a/src/core/psysprocessor/PipeLiteExecutor.h +++ b/src/core/psysprocessor/PipeLiteExecutor.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2019-2021 Intel Corporation + * Copyright (C) 2019-2022 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ #include "Parameters.h" #include "PolicyManager.h" #include "ShareReferBufferPool.h" +#include "ISchedulerNode.h" #include "psysprocessor/PGCommon.h" namespace icamera { @@ -37,7 +38,7 @@ class PSysDAG; typedef std::map> CameraBufferPortMap; -class PipeLiteExecutor : public BufferQueue { +class PipeLiteExecutor : public BufferQueue, public ISchedulerNode { public: PipeLiteExecutor(int cameraId, const ExecutorPolicy& policy, std::vector exclusivePGs, PSysDAG* psysDag, @@ -79,7 +80,10 @@ class PipeLiteExecutor : public BufferQueue { bool isInputEdge() { return mIsInputEdge; } bool isOutputEdge() { return mIsOutputEdge; } - const char* getName() const { return mName.c_str(); } + // ISchedulerNode + virtual bool process(int64_t triggerId) { + return processNewFrame() == OK; + } private: DISALLOW_COPY_AND_ASSIGN(PipeLiteExecutor); @@ -137,6 +141,9 @@ class PipeLiteExecutor : public BufferQueue { void dumpPGs() const; private: + bool fetchBuffersInQueue(std::map >& cInBuffer, + std::map >& cOutBuffer); + int processNewFrame(); int runPipe(std::map>& inBuffers, std::map>& outBuffers, diff --git a/src/core/psysprocessor/PolicyManager.cpp b/src/core/psysprocessor/PolicyManager.cpp index d46b55e2..0b3f89b2 100644 --- a/src/core/psysprocessor/PolicyManager.cpp +++ b/src/core/psysprocessor/PolicyManager.cpp @@ -22,20 +22,17 @@ namespace icamera { -PolicyManager::PolicyManager(int cameraId) : mCameraId(cameraId), mIsActive(false) -{ +PolicyManager::PolicyManager(int cameraId) : 
mCameraId(cameraId), mIsActive(false) { LOG1("@%s: camera id:%d", __func__, mCameraId); } -PolicyManager::~PolicyManager() -{ +PolicyManager::~PolicyManager() { LOG1("@%s: camera id:%d", __func__, mCameraId); releaseBundles(); } -void PolicyManager::releaseBundles() -{ +void PolicyManager::releaseBundles() { LOG1("@%s: camera id:%d", __func__, mCameraId); for (const auto& bundle : mBundles) { @@ -45,14 +42,13 @@ void PolicyManager::releaseBundles() mBundles.clear(); } -void PolicyManager::setActive(bool isActive) -{ +void PolicyManager::setActive(bool isActive) { AutoMutex lock(mPolicyLock); - LOG1("@%s: camera id:%d update active mode from %d to %d", - __func__, mCameraId, mIsActive, isActive); + LOG1("@%s: camera id:%d update active mode from %d to %d", __func__, mCameraId, mIsActive, + isActive); - if (mIsActive == isActive) return; // No action is needed if the mode unchanged. + if (mIsActive == isActive) return; // No action is needed if the mode unchanged. for (auto& bundle : mBundles) { AutoMutex lock(bundle->mLock); @@ -79,8 +75,8 @@ int PolicyManager::addExecutorBundle(const std::vector& executors, AutoMutex lock(mPolicyLock); uint8_t size = executors.size(); - CheckAndLogError(size != depths.size(), - BAD_VALUE, "The size for executor and its depth not match"); + CheckAndLogError(size != depths.size(), BAD_VALUE, + "The size for executor and its depth not match"); int maxDepth = 0; std::map executorData; @@ -106,8 +102,7 @@ int PolicyManager::addExecutorBundle(const std::vector& executors, return OK; } -int PolicyManager::wait(std::string executorName, int64_t sequence) -{ +int PolicyManager::wait(std::string executorName, int64_t sequence) { ExecutorBundle* bundle = nullptr; { AutoMutex lock(mPolicyLock); @@ -163,5 +158,4 @@ int PolicyManager::wait(std::string executorName, int64_t sequence) return OK; } -} // end of namespace icamera - +} // end of namespace icamera diff --git a/src/core/psysprocessor/PolicyManager.h 
b/src/core/psysprocessor/PolicyManager.h index 4bed24cd..a8e6b668 100644 --- a/src/core/psysprocessor/PolicyManager.h +++ b/src/core/psysprocessor/PolicyManager.h @@ -26,7 +26,7 @@ namespace icamera { class PolicyManager { -public: + public: PolicyManager(int cameraId); ~PolicyManager(); @@ -34,9 +34,8 @@ class PolicyManager { * Create a bundle for the given set of executors, and add the bundle into mBundles. * These executors are guaranteed running at the same time. */ - int addExecutorBundle(const std::vector& executors, - const std::vector& depths, - int64_t startSequence); + int addExecutorBundle(const std::vector& executors, const std::vector& depths, + int64_t startSequence); void setActive(bool isActive); @@ -48,35 +47,36 @@ class PolicyManager { */ int wait(std::string executorName, int64_t sequence = 0); -private: + private: DISALLOW_COPY_AND_ASSIGN(PolicyManager); void releaseBundles(); -private: + private: struct ExecutorData { ExecutorData(int depth = 0) : mRunCount(0), mDepth(depth) {} - long mRunCount; // How many times the executor has run. - int mDepth; // Indicates how many direct dependencies the executor has. + long mRunCount; // How many times the executor has run. + int mDepth; // Indicates how many direct dependencies the executor has. }; struct ExecutorBundle { - std::map mExecutorData; // The index of the map is executor name. - int mMaxDepth; // The max depth among all executors. - int mExecutorNum; // Indicates how many executors the bundle has. - int mWaitingCount; // How many executors have already waited. + std::map + mExecutorData; // The index of the map is executor name. + int mMaxDepth; // The max depth among all executors. + int mExecutorNum; // Indicates how many executors the bundle has. + int mWaitingCount; // How many executors have already waited. 
bool mIsActive; int64_t mStartSequence; - //Guard for the Bundle data + // Guard for the Bundle data Mutex mLock; Condition mCondition; }; int mCameraId; - //Guard for the PolicyManager public API + // Guard for the PolicyManager public API Mutex mPolicyLock; std::vector mBundles; bool mIsActive; }; -} +} // namespace icamera diff --git a/src/evcp/EvcpManager.cpp b/src/evcp/EvcpManager.cpp index 3d14a573..f056f580 100644 --- a/src/evcp/EvcpManager.cpp +++ b/src/evcp/EvcpManager.cpp @@ -188,9 +188,9 @@ void EvcpManager::runEvcpL(const camera_buffer_t& buffer) { #ifdef ENABLE_SANDBOXING bool ret = mEvcp->runEvcpFrame(buffer.dmafd, size); #else - void* pBuf = (buffer.s.memType == V4L2_MEMORY_DMABUF) - ? CameraBuffer::mapDmaBufferAddr(buffer.dmafd, size) - : buffer.addr; + void* pBuf = (buffer.s.memType == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(buffer.dmafd, size) : + buffer.addr; bool ret = mEvcp->runEvcpFrame(pBuf, size); diff --git a/src/fd/FaceDetection.cpp b/src/fd/FaceDetection.cpp index 7d9dbfc1..a68c51b5 100644 --- a/src/fd/FaceDetection.cpp +++ b/src/fd/FaceDetection.cpp @@ -153,12 +153,13 @@ void FaceDetection::printfFDRunRate() { mRequestRunTime = curTime; } -void FaceDetection::runFaceDetection(const std::shared_ptr& ccBuf) { +void FaceDetection::runFaceDetection(const std::shared_ptr& ccBuf, + bool forceSync) { CheckAndLogError(mInitialized == false, VOID_VALUE, "mInitialized is false"); if (!faceRunningByCondition()) return; - if (PlatformData::isFaceEngineSyncRunning(mCameraId)) { + if (forceSync || PlatformData::isFaceEngineSyncRunning(mCameraId)) { runFaceDetectionBySync(ccBuf); } else { runFaceDetectionByAsync(ccBuf); @@ -190,18 +191,17 @@ void FaceDetection::initRatioInfo(struct RatioInfo* ratioInfo) { horizontalCrop = mHeight * activeWidth / activeHeight - mWidth; } } - LOG2( - "%s, imageRotationChanged:%d, height:%d, width:%d, activeWidth:%d, activeHeight:%d, " - "verticalCrop:%d, horizontalCrop:%d", - __func__, 
imageRotationChanged, mHeight, mWidth, activeWidth, activeHeight, verticalCrop, - horizontalCrop); + LOG2("%s, imageRotationChanged:%d, height:%d, width:%d, activeWidth:%d, activeHeight:%d, " + "verticalCrop:%d, horizontalCrop:%d", + __func__, imageRotationChanged, mHeight, mWidth, activeWidth, activeHeight, verticalCrop, + horizontalCrop); *ratioInfo = { {0, 0, activeWidth, activeHeight}, verticalCrop, horizontalCrop, imageRotationChanged}; } void FaceDetection::convertFaceCoordinate(camera_coordinate_system_t& sysCoord, int* left, int* top, - int* right, int* bottom) { + int* right, int* bottom) { int verticalCrop = mRatioInfo.verticalCrop; int horizontalCrop = mRatioInfo.horizontalCrop; bool imageRotationChanged = mRatioInfo.imageRotationChanged; diff --git a/src/fd/FaceDetection.h b/src/fd/FaceDetection.h index e16838c3..c6c5febb 100644 --- a/src/fd/FaceDetection.h +++ b/src/fd/FaceDetection.h @@ -62,7 +62,8 @@ class FaceDetection : public Thread { static void destoryInstance(int cameraId); static FaceDetection* getInstance(int cameraId); - void runFaceDetection(const std::shared_ptr& ccBuf); + void runFaceDetection(const std::shared_ptr& ccBuf, + bool forceSync = false); virtual void runFaceDetectionBySync(const std::shared_ptr& ccBuf) = 0; virtual void runFaceDetectionByAsync(const std::shared_ptr& ccBuf) = 0; static int getResult(int cameraId, cca::cca_face_state* faceState); diff --git a/src/hal/CameraHal.cpp b/src/hal/CameraHal.cpp index 7130c92f..a0d51e8d 100644 --- a/src/hal/CameraHal.cpp +++ b/src/hal/CameraHal.cpp @@ -145,8 +145,10 @@ int CameraHal::deviceOpen(int cameraId) { if (mCameraOpenNum == 1) { MediaControl* mc = MediaControl::getInstance(); CheckAndLogError(!mc, UNKNOWN_ERROR, "MediaControl init failed"); - if (PlatformData::isResetLinkRoute(cameraId)) - mc->resetAllLinks(); + if (PlatformData::isResetLinkRoute(cameraId)) { + int ret = mc->resetAllLinks(); + CheckAndLogError(ret != OK, DEV_BUSY, "resetAllLinks failed"); + } } return 
mCameraDevices[cameraId]->init(); @@ -188,14 +190,16 @@ int CameraHal::deviceOpen(int cameraId, int vcNum) { if (mCameraOpenNum == 1) { MediaControl* mc = MediaControl::getInstance(); CheckAndLogError(!mc, UNKNOWN_ERROR, "MediaControl init failed"); - if (PlatformData::isResetLinkRoute(cameraId)) - mc->resetAllLinks(); + + if (PlatformData::isResetLinkRoute(cameraId)) { + int ret = mc->resetAllLinks(); + CheckAndLogError(ret != OK, DEV_BUSY, "resetAllLinks failed"); + } // VIRTUAL_CHANNEL_S if (info.vc.total_num) { // when the sensor belongs to virtual channel, reset the routes - if (PlatformData::isResetLinkRoute(cameraId)) - mc->resetAllRoutes(cameraId); + if (PlatformData::isResetLinkRoute(cameraId)) mc->resetAllRoutes(cameraId); } // VIRTUAL_CHANNEL_E } diff --git a/src/hal/ICamera.cpp b/src/hal/ICamera.cpp index f8e02092..700599bc 100644 --- a/src/hal/ICamera.cpp +++ b/src/hal/ICamera.cpp @@ -35,13 +35,13 @@ namespace icamera { static CameraHal* gCameraHal = nullptr; -#define CheckCameraId(camera_id, err_code) \ - do { \ - int max_cam = PlatformData::numberOfCameras(); \ - if (((camera_id) < 0) || (camera_id) >= max_cam) { \ - LOGE(" is invalid, max_cam:%d", camera_id, max_cam); \ - return err_code; \ - } \ +#define CheckCameraId(camera_id, err_code) \ + do { \ + int max_cam = PlatformData::numberOfCameras(); \ + if (((camera_id) < 0) || (camera_id) >= max_cam) { \ + LOGE(" is invalid, max_cam:%d", camera_id, max_cam); \ + return err_code; \ + } \ } while (0) /** diff --git a/src/image_process/IImageProcessor.h b/src/image_process/IImageProcessor.h index c4b2d2fb..e7880d3a 100644 --- a/src/image_process/IImageProcessor.h +++ b/src/image_process/IImageProcessor.h @@ -24,23 +24,24 @@ namespace icamera { class IImageProcessor { -public: - IImageProcessor() {}; - virtual ~IImageProcessor() {}; + public: + IImageProcessor(){}; + virtual ~IImageProcessor(){}; static std::unique_ptr createImageProcessor(); static bool isProcessingTypeSupported(PostProcessType type); 
- virtual status_t cropFrame(const std::shared_ptr &input, - std::shared_ptr &output) = 0; - virtual status_t scaleFrame(const std::shared_ptr &input, - std::shared_ptr &output) = 0; - virtual status_t rotateFrame(const std::shared_ptr &input, - std::shared_ptr &output, - int angle, std::vector &rotateBuf) = 0; - virtual status_t convertFrame(const std::shared_ptr &input, - std::shared_ptr &output) = 0; -private: + virtual status_t cropFrame(const std::shared_ptr& input, + std::shared_ptr& output) = 0; + virtual status_t scaleFrame(const std::shared_ptr& input, + std::shared_ptr& output) = 0; + virtual status_t rotateFrame(const std::shared_ptr& input, + std::shared_ptr& output, int angle, + std::vector& rotateBuf) = 0; + virtual status_t convertFrame(const std::shared_ptr& input, + std::shared_ptr& output) = 0; + + private: DISALLOW_COPY_AND_ASSIGN(IImageProcessor); }; -} // namespace icamera +} // namespace icamera diff --git a/src/image_process/ImageConverter.cpp b/src/image_process/ImageConverter.cpp index 0cef1e2c..7eb48331 100644 --- a/src/image_process/ImageConverter.cpp +++ b/src/image_process/ImageConverter.cpp @@ -27,16 +27,15 @@ namespace icamera { namespace ImageConverter { -void YUV420ToRGB565(int width, int height, void *src, void *dst) -{ +void YUV420ToRGB565(int width, int height, void* src, void* dst) { int line, col, linewidth; int y, u, v, yy, vr, ug, vg, ub; int r, g, b; const unsigned char *py, *pu, *pv; - unsigned short *rgbs = (unsigned short *) dst; + unsigned short* rgbs = (unsigned short*)dst; linewidth = width >> 1; - py = (unsigned char *) src; + py = (unsigned char*)src; pu = py + (width * height); pv = pu + (width * height) / 4; @@ -53,15 +52,15 @@ void YUV420ToRGB565(int width, int height, void *src, void *dst) for (col = 0; col < width; col++) { r = (yy + vr) >> 8; g = (yy - ug - vg) >> 8; - b = (yy + ub ) >> 8; + b = (yy + ub) >> 8; if (r < 0) r = 0; if (r > 255) r = 255; if (g < 0) g = 0; if (g > 255) g = 255; if (b < 0) b = 0; if 
(b > 255) b = 255; - *rgbs++ = (((unsigned short)r>>3)<<11) | (((unsigned short)g>>2)<<5) - | (((unsigned short)b>>3)<<0); + *rgbs++ = (((unsigned short)r >> 3) << 11) | (((unsigned short)g >> 2) << 5) | + (((unsigned short)b >> 3) << 0); y = *py++; yy = y << 8; @@ -83,66 +82,81 @@ void YUV420ToRGB565(int width, int height, void *src, void *dst) } } -void trimConvertNV12ToRGB565(int width, int height, int srcStride, void *src, void *dst) -{ +void trimConvertNV12ToRGB565(int width, int height, int srcStride, void* src, void* dst) { + unsigned char* yuvs = (unsigned char*)src; + unsigned char* rgbs = (unsigned char*)dst; - unsigned char *yuvs = (unsigned char *) src; - unsigned char *rgbs = (unsigned char *) dst; - - //the end of the luminance data + // the end of the luminance data int lumEnd = srcStride * height; int i = 0, j = 0; - for( i=0; i < height; i++) { - //points to the next luminance value pair + for (i = 0; i < height; i++) { + // points to the next luminance value pair int lumPtr = i * srcStride; - //points to the next chromiance value pair + // points to the next chromiance value pair int chrPtr = i / 2 * srcStride + lumEnd; - for ( j=0; j < width; j+=2 ) { - //read the luminance and chromiance values + for (j = 0; j < width; j += 2) { + // read the luminance and chromiance values int Y1 = yuvs[lumPtr++] & 0xff; int Y2 = yuvs[lumPtr++] & 0xff; int Cb = (yuvs[chrPtr++] & 0xff) - 128; int Cr = (yuvs[chrPtr++] & 0xff) - 128; int R, G, B; - //generate first RGB components + // generate first RGB components B = Y1 + ((454 * Cb) >> 8); - if(B < 0) B = 0; else if(B > 255) B = 255; + if (B < 0) + B = 0; + else if (B > 255) + B = 255; G = Y1 - ((88 * Cb + 183 * Cr) >> 8); - if(G < 0) G = 0; else if(G > 255) G = 255; + if (G < 0) + G = 0; + else if (G > 255) + G = 255; R = Y1 + ((359 * Cr) >> 8); - if(R < 0) R = 0; else if(R > 255) R = 255; - //NOTE: this assume little-endian encoding - *rgbs++ = (unsigned char) (((G & 0x3c) << 3) | (B >> 3)); - *rgbs++ = 
(unsigned char) ((R & 0xf8) | (G >> 5)); - - //generate second RGB components + if (R < 0) + R = 0; + else if (R > 255) + R = 255; + // NOTE: this assume little-endian encoding + *rgbs++ = (unsigned char)(((G & 0x3c) << 3) | (B >> 3)); + *rgbs++ = (unsigned char)((R & 0xf8) | (G >> 5)); + + // generate second RGB components B = Y2 + ((454 * Cb) >> 8); - if(B < 0) B = 0; else if(B > 255) B = 255; + if (B < 0) + B = 0; + else if (B > 255) + B = 255; G = Y2 - ((88 * Cb + 183 * Cr) >> 8); - if(G < 0) G = 0; else if(G > 255) G = 255; + if (G < 0) + G = 0; + else if (G > 255) + G = 255; R = Y2 + ((359 * Cr) >> 8); - if(R < 0) R = 0; else if(R > 255) R = 255; - //NOTE: this assume little-endian encoding - *rgbs++ = (unsigned char) (((G & 0x3c) << 3) | (B >> 3)); - *rgbs++ = (unsigned char) ((R & 0xf8) | (G >> 5)); + if (R < 0) + R = 0; + else if (R > 255) + R = 255; + // NOTE: this assume little-endian encoding + *rgbs++ = (unsigned char)(((G & 0x3c) << 3) | (B >> 3)); + *rgbs++ = (unsigned char)((R & 0xf8) | (G >> 5)); } } } // covert YV12 (Y plane, V plane, U plane) to NV21 (Y plane, interlaced VU bytes) -void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void *src, void *dst) -{ - const int cStride = srcStride>>1; +void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void* src, void* dst) { + const int cStride = srcStride >> 1; const int vuStride = dstStride; - const int hhalf = height>>1; - const int whalf = width>>1; + const int hhalf = height >> 1; + const int whalf = width >> 1; // copy the entire Y plane - unsigned char *srcPtr = (unsigned char *)src; - unsigned char *dstPtr = (unsigned char *)dst; + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* dstPtr = (unsigned char*)dst; if (srcStride == dstStride) { - MEMCPY_S(dstPtr, dstStride*height, srcPtr, dstStride*height); + MEMCPY_S(dstPtr, dstStride * height, srcPtr, dstStride * height); } else { for (int i = 0; i < height; i++) { MEMCPY_S(dstPtr, width, 
srcPtr, width); @@ -152,16 +166,16 @@ void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void } // interlace the VU data - unsigned char *srcPtrV = (unsigned char *)src + height*srcStride; - unsigned char *srcPtrU = srcPtrV + cStride*hhalf; - dstPtr = (unsigned char *)dst + dstStride*height; + unsigned char* srcPtrV = (unsigned char*)src + height * srcStride; + unsigned char* srcPtrU = srcPtrV + cStride * hhalf; + dstPtr = (unsigned char*)dst + dstStride * height; for (int i = 0; i < hhalf; ++i) { - unsigned char *pDstVU = dstPtr; - unsigned char *pSrcV = srcPtrV; - unsigned char *pSrcU = srcPtrU; + unsigned char* pDstVU = dstPtr; + unsigned char* pSrcV = srcPtrV; + unsigned char* pSrcU = srcPtrU; for (int j = 0; j < whalf; ++j) { - *pDstVU ++ = *pSrcV ++; - *pDstVU ++ = *pSrcU ++; + *pDstVU++ = *pSrcV++; + *pDstVU++ = *pSrcU++; } dstPtr += vuStride; srcPtrV += cStride; @@ -170,15 +184,14 @@ void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void } // copy YV12 to YV12 (Y plane, V plan, U plan) in case of different stride length -void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void *src, void *dst) -{ +void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void* src, void* dst) { // copy the entire Y plane if (srcStride == dstStride) { MEMCPY_S(dst, dstStride * height, src, dstStride * height); } else { - unsigned char *srcPtrY = (unsigned char *)src; - unsigned char *dstPtrY = (unsigned char *)dst; - for (int i = 0; i < height; i ++) { + unsigned char* srcPtrY = (unsigned char*)src; + unsigned char* dstPtrY = (unsigned char*)dst; + for (int i = 0; i < height; i++) { MEMCPY_S(dstPtrY, width, srcPtrY, width); srcPtrY += srcStride; dstPtrY += dstStride; @@ -187,19 +200,20 @@ void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void *s // copy VU plane const int scStride = srcStride >> 1; - const int dcStride = ALIGN_16(dstStride >> 1); // Android CTS 
required: U/V plane needs 16 bytes aligned! + const int dcStride = + ALIGN_16(dstStride >> 1); // Android CTS required: U/V plane needs 16 bytes aligned! if (dcStride == scStride) { - unsigned char *srcPtrVU = (unsigned char *)src + height * srcStride; - unsigned char *dstPtrVU = (unsigned char *)dst + height * dstStride; + unsigned char* srcPtrVU = (unsigned char*)src + height * srcStride; + unsigned char* dstPtrVU = (unsigned char*)dst + height * dstStride; MEMCPY_S(dstPtrVU, height * dcStride, srcPtrVU, height * dcStride); } else { const int wHalf = width >> 1; const int hHalf = height >> 1; - unsigned char *srcPtrV = (unsigned char *)src + height * srcStride; - unsigned char *srcPtrU = srcPtrV + scStride * hHalf; - unsigned char *dstPtrV = (unsigned char *)dst + height * dstStride; - unsigned char *dstPtrU = dstPtrV + dcStride * hHalf; - for (int i = 0; i < hHalf; i ++) { + unsigned char* srcPtrV = (unsigned char*)src + height * srcStride; + unsigned char* srcPtrU = srcPtrV + scStride * hHalf; + unsigned char* dstPtrV = (unsigned char*)dst + height * dstStride; + unsigned char* dstPtrU = dstPtrV + dcStride * hHalf; + for (int i = 0; i < hHalf; i++) { MEMCPY_S(dstPtrU, wHalf, srcPtrU, wHalf); MEMCPY_S(dstPtrV, wHalf, srcPtrV, wHalf); dstPtrU += dcStride, srcPtrU += scStride; @@ -210,18 +224,17 @@ void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void *s // covert NV12 (Y plane, interlaced UV bytes) to // NV21 (Y plane, interlaced VU bytes) and trim stride width to real width -void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void *dst) -{ +void trimConvertNV12ToNV21(int width, int height, int srcStride, void* src, void* dst) { const int ysize = width * height; - unsigned const char *pSrc = (unsigned char *)src; - unsigned char *pDst = (unsigned char *)dst; + unsigned const char* pSrc = (unsigned char*)src; + unsigned char* pDst = (unsigned char*)dst; // Copy Y component if (srcStride == width) { MEMCPY_S(pDst, 
ysize, pSrc, ysize); } else if (srcStride > width) { int j = height; - while(j--) { + while (j--) { MEMCPY_S(pDst, width, pSrc, width); pSrc += srcStride; pDst += width; @@ -232,17 +245,17 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void } // Convert UV to VU - pSrc = (unsigned char *)src + srcStride * height; - pDst = (unsigned char *)dst + width * height; + pSrc = (unsigned char*)src + srcStride * height; + pDst = (unsigned char*)dst + width * height; for (int j = 0; j < height / 2; j++) { if (width >= 16) { - const uint32_t *ptr0 = (const uint32_t *)(pSrc); - uint32_t *ptr1 = (uint32_t *)(pDst); - int bNotLastLine = ((j+1) == (height/2)) ? 0 : 1; + const uint32_t* ptr0 = (const uint32_t*)(pSrc); + uint32_t* ptr1 = (uint32_t*)(pDst); + int bNotLastLine = ((j + 1) == (height / 2)) ? 0 : 1; int width_16 = (width + 15 * bNotLastLine) & ~0xf; - if ((((uint64_t)(pSrc)) & 0xf) == 0 && (((uint64_t)(pDst)) & 0xf) == 0) { // 16 bytes aligned for both src and dest - __asm__ volatile(\ - "movl %0, %%eax \n\t" + if ((((uint64_t)(pSrc)) & 0xf) == 0 && + (((uint64_t)(pDst)) & 0xf) == 0) { // 16 bytes aligned for both src and dest + __asm__ volatile("movl %0, %%eax \n\t" "movl %1, %%edx \n\t" "movl %2, %%ecx \n\t" "1: \n\t" @@ -258,12 +271,9 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void "jnz 1b \n\t" : "+m"(ptr0), "+m"(ptr1), "+m"(width_16) : - : "eax", "ecx", "edx", "xmm0", "xmm1" - ); - } - else { // either src or dest is not 16-bytes aligned - __asm__ volatile(\ - "movl %0, %%eax \n\t" + : "eax", "ecx", "edx", "xmm0", "xmm1"); + } else { // either src or dest is not 16-bytes aligned + __asm__ volatile("movl %0, %%eax \n\t" "movl %1, %%edx \n\t" "movl %2, %%ecx \n\t" "1: \n\t" @@ -279,8 +289,7 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void "jnz 1b \n\t" : "+m"(ptr0), "+m"(ptr1), "+m"(width_16) : - : "eax", "ecx", "edx", "xmm0", "xmm1" - ); + : "eax", "ecx", "edx", 
"xmm0", "xmm1"); } // process remaining data of less than 16 bytes of last row @@ -288,10 +297,10 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void pDst[i] = pSrc[i + 1]; pDst[i + 1] = pSrc[i]; } - } - else if ((((uint64_t)(pSrc)) & 0x3) == 0 && (((uint64_t)(pDst)) & 0x3) == 0){ // 4 bytes aligned for both src and dest - const uint32_t *ptr0 = (const uint32_t *)(pSrc); - uint32_t *ptr1 = (uint32_t *)(pDst); + } else if ((((uint64_t)(pSrc)) & 0x3) == 0 && + (((uint64_t)(pDst)) & 0x3) == 0) { // 4 bytes aligned for both src and dest + const uint32_t* ptr0 = (const uint32_t*)(pSrc); + uint32_t* ptr1 = (uint32_t*)(pDst); int width_4 = width & ~3; for (int i = 0; i < width_4; i += 4) { uint32_t data0 = *ptr0++; @@ -304,10 +313,9 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void pDst[i] = pSrc[i + 1]; pDst[i + 1] = pSrc[i]; } - } - else { - unsigned const char *ptr0 = pSrc; - unsigned char *ptr1 = pDst; + } else { + unsigned const char* ptr0 = pSrc; + unsigned char* ptr1 = pDst; for (int i = 0; i < width; i += 2) { *ptr1++ = ptr0[1]; *ptr1++ = ptr0[0]; @@ -321,17 +329,16 @@ void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void // convert NV12 (Y plane, interlaced UV bytes) to YV12 (Y plane, V plane, U plane) // without Y and C 16 bytes aligned -void convertNV12ToYV12(int width, int height, int srcStride, void *src, void *dst) -{ +void convertNV12ToYV12(int width, int height, int srcStride, void* src, void* dst) { int yStride = width; size_t ySize = yStride * height; - int cStride = yStride/2; - size_t cSize = cStride * height/2; + int cStride = yStride / 2; + size_t cSize = cStride * height / 2; - unsigned char *srcPtr = (unsigned char *) src; - unsigned char *dstPtr = (unsigned char *) dst; - unsigned char *dstPtrV = (unsigned char *) dst + ySize; - unsigned char *dstPtrU = (unsigned char *) dst + ySize + cSize; + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* 
dstPtr = (unsigned char*)dst; + unsigned char* dstPtrV = (unsigned char*)dst + ySize; + unsigned char* dstPtrU = (unsigned char*)dst + ySize + cSize; // copy the entire Y plane if (srcStride == yStride) { @@ -351,8 +358,8 @@ void convertNV12ToYV12(int width, int height, int srcStride, void *src, void *ds // deinterlace the UV data int halfHeight = height / 2; int halfWidth = width / 2; - for ( int i = 0; i < halfHeight; ++i) { - for ( int j = 0; j < halfWidth; ++j) { + for (int i = 0; i < halfHeight; ++i) { + for (int j = 0; j < halfWidth; ++j) { dstPtrV[j] = srcPtr[j * 2 + 1]; dstPtrU[j] = srcPtr[j * 2]; } @@ -364,17 +371,16 @@ void convertNV12ToYV12(int width, int height, int srcStride, void *src, void *ds // convert NV12 (Y plane, interlaced UV bytes) to YV12 (Y plane, V plane, U plane) // with Y and C 16 bytes aligned -void align16ConvertNV12ToYV12(int width, int height, int srcStride, void *src, void *dst) -{ +void align16ConvertNV12ToYV12(int width, int height, int srcStride, void* src, void* dst) { int yStride = ALIGN_16(width); size_t ySize = yStride * height; - int cStride = ALIGN_16(yStride/2); - size_t cSize = cStride * height/2; + int cStride = ALIGN_16(yStride / 2); + size_t cSize = cStride * height / 2; - unsigned char *srcPtr = (unsigned char *) src; - unsigned char *dstPtr = (unsigned char *) dst; - unsigned char *dstPtrV = (unsigned char *) dst + ySize; - unsigned char *dstPtrU = (unsigned char *) dst + ySize + cSize; + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* dstPtr = (unsigned char*)dst; + unsigned char* dstPtrV = (unsigned char*)dst + ySize; + unsigned char* dstPtrU = (unsigned char*)dst + ySize + cSize; // copy the entire Y plane if (srcStride == yStride) { @@ -392,8 +398,8 @@ void align16ConvertNV12ToYV12(int width, int height, int srcStride, void *src, v } // deinterlace the UV data - for ( int i = 0; i < height / 2; ++i) { - for ( int j = 0; j < width / 2; ++j) { + for (int i = 0; i < height / 2; ++i) { + for (int j = 0; 
j < width / 2; ++j) { dstPtrV[j] = srcPtr[j * 2 + 1]; dstPtrU[j] = srcPtr[j * 2]; } @@ -404,33 +410,32 @@ void align16ConvertNV12ToYV12(int width, int height, int srcStride, void *src, v } // P411's Y, U, V are seperated. But the YUY2's Y, U and V are interleaved. -void YUY2ToP411(int width, int height, int stride, void *src, void *dst) -{ +void YUY2ToP411(int width, int height, int stride, void* src, void* dst) { int ySize = width * height; int cSize = width * height / 4; int wHalf = width >> 1; - unsigned char *srcPtr = (unsigned char *) src; - unsigned char *dstPtr = (unsigned char *) dst; - unsigned char *dstPtrU = (unsigned char *) dst + ySize; - unsigned char *dstPtrV = (unsigned char *) dst + ySize + cSize; + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* dstPtr = (unsigned char*)dst; + unsigned char* dstPtrU = (unsigned char*)dst + ySize; + unsigned char* dstPtrV = (unsigned char*)dst + ySize + cSize; for (int i = 0; i < height; i++) { - //The first line of the source - //Copy first Y Plane first - for (int j=0; j < width; j++) { - dstPtr[j] = srcPtr[j*2]; + // The first line of the source + // Copy first Y Plane first + for (int j = 0; j < width; j++) { + dstPtr[j] = srcPtr[j * 2]; } if (i & 1) { - //Copy the V plane + // Copy the V plane for (int k = 0; k < wHalf; k++) { dstPtrV[k] = srcPtr[k * 4 + 3]; } dstPtrV = dstPtrV + wHalf; } else { - //Copy the U plane - for (int k = 0; k< wHalf; k++) { + // Copy the U plane + for (int k = 0; k < wHalf; k++) { dstPtrU[k] = srcPtr[k * 4 + 1]; } dstPtrU = dstPtrU + wHalf; @@ -442,14 +447,12 @@ void YUY2ToP411(int width, int height, int stride, void *src, void *dst) } // P411's Y, U, V are separated. But the NV12's U and V are interleaved. 
-void NV12ToP411Separate(int width, int height, int stride, - void *srcY, void *srcUV, void *dst) -{ +void NV12ToP411Separate(int width, int height, int stride, void* srcY, void* srcUV, void* dst) { int i, j, p, q; - unsigned char *psrcY = (unsigned char *) srcY; - unsigned char *pdstY = (unsigned char *) dst; + unsigned char* psrcY = (unsigned char*)srcY; + unsigned char* pdstY = (unsigned char*)dst; unsigned char *pdstU, *pdstV; - unsigned char *psrcUV; + unsigned char* psrcUV; // copy Y data for (i = 0; i < height; i++) { @@ -459,8 +462,8 @@ void NV12ToP411Separate(int width, int height, int stride, } // copy U data and V data - psrcUV = (unsigned char *)srcUV; - pdstU = (unsigned char *)dst + width * height; + psrcUV = (unsigned char*)srcUV; + pdstU = (unsigned char*)dst + width * height; pdstV = pdstU + width * height / 4; p = q = 0; for (i = 0; i < height / 2; i++) { @@ -468,7 +471,7 @@ void NV12ToP411Separate(int width, int height, int stride, if (j % 2 == 0) { pdstU[p] = (psrcUV[i * stride + j] & 0xFF); p++; - } else { + } else { pdstV[q] = (psrcUV[i * stride + j] & 0xFF); q++; } @@ -477,21 +480,18 @@ void NV12ToP411Separate(int width, int height, int stride, } // P411's Y, U, V are seperated. But the NV12's U and V are interleaved. -void NV12ToP411(int width, int height, int stride, void *src, void *dst) -{ - NV12ToP411Separate(width, height, stride, - src, (void *)((unsigned char *)src + width * height), dst); +void NV12ToP411(int width, int height, int stride, void* src, void* dst) { + NV12ToP411Separate(width, height, stride, src, (void*)((unsigned char*)src + width * height), + dst); } // P411's Y, U, V are separated. But the NV21's U and V are interleaved. 
-void NV21ToP411Separate(int width, int height, int stride, - void *srcY, void *srcUV, void *dst) -{ +void NV21ToP411Separate(int width, int height, int stride, void* srcY, void* srcUV, void* dst) { int i, j, p, q; - unsigned char *psrcY = (unsigned char *) srcY; - unsigned char *pdstY = (unsigned char *) dst; + unsigned char* psrcY = (unsigned char*)srcY; + unsigned char* pdstY = (unsigned char*)dst; unsigned char *pdstU, *pdstV; - unsigned char *psrcUV; + unsigned char* psrcUV; // copy Y data for (i = 0; i < height; i++) { @@ -501,8 +501,8 @@ void NV21ToP411Separate(int width, int height, int stride, } // copy U data and V data - psrcUV = (unsigned char *)srcUV; - pdstU = (unsigned char *)dst + width * height; + psrcUV = (unsigned char*)srcUV; + pdstU = (unsigned char*)dst + width * height; pdstV = pdstU + width * height / 4; p = q = 0; for (i = 0; i < height / 2; i++) { @@ -510,7 +510,7 @@ void NV21ToP411Separate(int width, int height, int stride, if ((j & 1) == 0) { pdstV[p] = (psrcUV[i * stride + j] & 0xFF); p++; - } else { + } else { pdstU[q] = (psrcUV[i * stride + j] & 0xFF); q++; } @@ -519,82 +519,78 @@ void NV21ToP411Separate(int width, int height, int stride, } // P411's Y, U, V are seperated. But the NV21's U and V are interleaved. -void NV21ToP411(int width, int height, int stride, void *src, void *dst) -{ - NV21ToP411Separate(width, height, stride, - src, (void *)((unsigned char *)src + width * height), dst); +void NV21ToP411(int width, int height, int stride, void* src, void* dst) { + NV21ToP411Separate(width, height, stride, src, (void*)((unsigned char*)src + width * height), + dst); } // IMC3 Y, U, V are separated,the stride for U/V is the same as Y. // about IMC3 detail, please refer to http://www.fourcc.org/yuv.php // But the NV12's U and V are interleaved. 
-void NV12ToIMC3(int width, int height, int stride, void *srcY, void *srcUV, void *dst) -{ +void NV12ToIMC3(int width, int height, int stride, void* srcY, void* srcUV, void* dst) { int i, j, p, q; unsigned char *pdstU, *pdstV; - unsigned char *psrcUV; + unsigned char* psrcUV; // copy Y data even with stride MEMCPY_S(dst, stride * height, srcY, stride * height); // copy U data and V data - psrcUV = (unsigned char *)srcUV; - pdstU = (unsigned char *)dst + stride * height; + psrcUV = (unsigned char*)srcUV; + pdstU = (unsigned char*)dst + stride * height; pdstV = pdstU + stride * height / 2; p = q = 0; for (i = 0; i < height / 2; i++) { for (j = 0; j < width; j++) { if (j % 2 == 0) { - pdstU[p]= (psrcUV[i * stride + j] & 0xFF) ; + pdstU[p] = (psrcUV[i * stride + j] & 0xFF); p++; - } else { - pdstV[q]= (psrcUV[i * stride + j] & 0xFF); + } else { + pdstV[q] = (psrcUV[i * stride + j] & 0xFF); q++; } } - p += stride - width/2; - q += stride - width/2; + p += stride - width / 2; + q += stride - width / 2; } } // IMC1 Y, V,U are separated,the stride for U/V is the same as Y. // IMC's V is before U // But the NV12's U and V are interleaved. 
-void NV12ToIMC1(int width, int height, int stride, void *srcY, void *srcUV, void *dst) -{ +void NV12ToIMC1(int width, int height, int stride, void* srcY, void* srcUV, void* dst) { int i, j, p, q; unsigned char *pdstU, *pdstV; - unsigned char *psrcUV; + unsigned char* psrcUV; // copy Y data even with stride MEMCPY_S(dst, stride * height, srcY, stride * height); // copy U data and V data - psrcUV = (unsigned char *)srcUV; - pdstV = (unsigned char *)dst + stride * height; + psrcUV = (unsigned char*)srcUV; + pdstV = (unsigned char*)dst + stride * height; pdstU = pdstV + stride * height / 2; p = q = 0; for (i = 0; i < height / 2; i++) { for (j = 0; j < width; j++) { if (j % 2 == 0) { - pdstU[p]= (psrcUV[i * stride + j] & 0xFF) ; + pdstU[p] = (psrcUV[i * stride + j] & 0xFF); p++; - } else { - pdstV[q]= (psrcUV[i * stride + j] & 0xFF); + } else { + pdstV[q] = (psrcUV[i * stride + j] & 0xFF); q++; } } - p += stride - width/2; - q += stride - width/2; + p += stride - width / 2; + q += stride - width / 2; } } // Re-pad YUV420 format image, the format can be YV12, YU12 or YUV420 planar. // If buffer size: (height*dstStride*1.5) > (height*srcStride*1.5), src and dst // buffer start addresses are same, the re-padding can be done inplace. 
-void repadYUV420(int width, int height, int srcStride, int dstStride, void *src, void *dst) -{ - unsigned char *dptr; - unsigned char *sptr; - void * (*myCopy)(void *dst, const void *src, size_t n); +void repadYUV420(int width, int height, int srcStride, int dstStride, void* src, void* dst) { + unsigned char* dptr; + unsigned char* sptr; + void* (*myCopy)(void* dst, const void* src, size_t n); const int whalf = width >> 1; const int hhalf = height >> 1; @@ -607,36 +603,36 @@ void repadYUV420(int width, int height, int srcStride, int dstStride, void *src, // directly copy, if (srcStride == dstStride) if (srcStride == dstStride) { - MEMCPY_S(dst, dySize + 2*dcSize, src, dySize + 2*dcSize); + MEMCPY_S(dst, dySize + 2 * dcSize, src, dySize + 2 * dcSize); return; } // copy V(YV12 case) or U(YU12 case) plane line by line - sptr = (unsigned char *)src + sySize + 2*scSize - scStride; - dptr = (unsigned char *)dst + dySize + 2*dcSize - dcStride; + sptr = (unsigned char*)src + sySize + 2 * scSize - scStride; + dptr = (unsigned char*)dst + dySize + 2 * dcSize - dcStride; // try to avoid overlapped memcpy() - myCopy = (abs(sptr -dptr) > dstStride) ? memcpy : memmove; + myCopy = (abs(sptr - dptr) > dstStride) ? 
memcpy : memmove; - for (int i = 0; i < hhalf; i ++) { + for (int i = 0; i < hhalf; i++) { myCopy(dptr, sptr, whalf); sptr -= scStride; dptr -= dcStride; } // copy V(YV12 case) or U(YU12 case) U/V plane line by line - sptr = (unsigned char *)src + sySize + scSize - scStride; - dptr = (unsigned char *)dst + dySize + dcSize - dcStride; - for (int i = 0; i < hhalf; i ++) { + sptr = (unsigned char*)src + sySize + scSize - scStride; + dptr = (unsigned char*)dst + dySize + dcSize - dcStride; + for (int i = 0; i < hhalf; i++) { myCopy(dptr, sptr, whalf); sptr -= scStride; dptr -= dcStride; } // copy Y plane line by line - sptr = (unsigned char *)src + sySize - srcStride; - dptr = (unsigned char *)dst + dySize - dstStride; - for (int i = 0; i < height; i ++) { + sptr = (unsigned char*)src + sySize - srcStride; + dptr = (unsigned char*)dst + dySize - dstStride; + for (int i = 0; i < height; i++) { myCopy(dptr, sptr, width); sptr -= srcStride; dptr -= dstStride; @@ -644,36 +640,35 @@ void repadYUV420(int width, int height, int srcStride, int dstStride, void *src, } // covert YUYV(YUY2, YUV422 format) to YV12 (Y plane, V plane, U plane) -void convertYUYVToYV12(int width, int height, int srcStride, int dstStride, void *src, void *dst) -{ +void convertYUYVToYV12(int width, int height, int srcStride, int dstStride, void* src, void* dst) { int ySize = width * height; - int cSize = ALIGN_16(dstStride/2) * height / 2; + int cSize = ALIGN_16(dstStride / 2) * height / 2; int wHalf = width >> 1; - unsigned char *srcPtr = (unsigned char *) src; - unsigned char *dstPtr = (unsigned char *) dst; - unsigned char *dstPtrV = (unsigned char *) dst + ySize; - unsigned char *dstPtrU = (unsigned char *) dst + ySize + cSize; + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* dstPtr = (unsigned char*)dst; + unsigned char* dstPtrV = (unsigned char*)dst + ySize; + unsigned char* dstPtrU = (unsigned char*)dst + ySize + cSize; for (int i = 0; i < height; i++) { - //The first line of the 
source - //Copy first Y Plane first - for (int j=0; j < width; j++) { - dstPtr[j] = srcPtr[j*2]; + // The first line of the source + // Copy first Y Plane first + for (int j = 0; j < width; j++) { + dstPtr[j] = srcPtr[j * 2]; } if (i & 1) { - //Copy the V plane - for (int k = 0; k< wHalf; k++) { + // Copy the V plane + for (int k = 0; k < wHalf; k++) { dstPtrV[k] = srcPtr[k * 4 + 3]; } - dstPtrV = dstPtrV + ALIGN_16(dstStride>>1); + dstPtrV = dstPtrV + ALIGN_16(dstStride >> 1); } else { - //Copy the U plane - for (int k = 0; k< wHalf; k++) { + // Copy the U plane + for (int k = 0; k < wHalf; k++) { dstPtrU[k] = srcPtr[k * 4 + 1]; } - dstPtrU = dstPtrU + ALIGN_16(dstStride>>1); + dstPtrU = dstPtrU + ALIGN_16(dstStride >> 1); } srcPtr = srcPtr + srcStride * 2; @@ -682,28 +677,26 @@ void convertYUYVToYV12(int width, int height, int srcStride, int dstStride, void } // covert YUYV(YUY2, YUV422 format) to NV21 (Y plane, interlaced VU bytes) -void convertYUYVToNV21(int width, int height, int srcStride, void *src, void *dst) -{ +void convertYUYVToNV21(int width, int height, int srcStride, void* src, void* dst) { int ySize = width * height; - int u_counter=1, v_counter=0; - - unsigned char *srcPtr = (unsigned char *) src; - unsigned char *dstPtr = (unsigned char *) dst; - unsigned char *dstPtrUV = (unsigned char *) dst + ySize; - - for (int i=0; i < height; i++) { - //The first line of the source - //Copy first Y Plane first - for (int j=0; j < width * 2; j++) { - if (j % 2 == 0) - dstPtr[j/2] = srcPtr[j]; - if (i%2) { - if (( j % 4 ) == 3) { - dstPtrUV[v_counter] = srcPtr[j]; //V plane + int u_counter = 1, v_counter = 0; + + unsigned char* srcPtr = (unsigned char*)src; + unsigned char* dstPtr = (unsigned char*)dst; + unsigned char* dstPtrUV = (unsigned char*)dst + ySize; + + for (int i = 0; i < height; i++) { + // The first line of the source + // Copy first Y Plane first + for (int j = 0; j < width * 2; j++) { + if (j % 2 == 0) dstPtr[j / 2] = srcPtr[j]; + if (i % 2) { + 
if ((j % 4) == 3) { + dstPtrUV[v_counter] = srcPtr[j]; // V plane v_counter += 2; } - if (( j % 4 ) == 1) { - dstPtrUV[u_counter] = srcPtr[j]; //U plane + if ((j % 4) == 1) { + dstPtrUV[u_counter] = srcPtr[j]; // U plane u_counter += 2; } } @@ -714,22 +707,22 @@ void convertYUYVToNV21(int width, int height, int srcStride, void *src, void *ds } } -void convertNV12ToYUYV(int srcWidth, int srcHeight, int srcStride, int dstStride, const void *src, void *dst) -{ +void convertNV12ToYUYV(int srcWidth, int srcHeight, int srcStride, int dstStride, const void* src, + void* dst) { int y_counter = 0, u_counter = 1, v_counter = 3, uv_counter = 0; - unsigned char *srcYPtr = (unsigned char *) src; - unsigned char *srcUVPtr = (unsigned char *)src + srcWidth * srcHeight; - unsigned char *dstPtr = (unsigned char *) dst; + unsigned char* srcYPtr = (unsigned char*)src; + unsigned char* srcUVPtr = (unsigned char*)src + srcWidth * srcHeight; + unsigned char* dstPtr = (unsigned char*)dst; for (int i = 0; i < srcHeight; i++) { for (int k = 0; k < srcWidth; k++) { - dstPtr[y_counter] = srcYPtr[k]; - y_counter += 2; - dstPtr[u_counter] = srcUVPtr[uv_counter]; - u_counter += 4; - dstPtr[v_counter] = srcUVPtr[uv_counter + 1]; - v_counter += 4; - uv_counter += 2; + dstPtr[y_counter] = srcYPtr[k]; + y_counter += 2; + dstPtr[u_counter] = srcUVPtr[uv_counter]; + u_counter += 4; + dstPtr[v_counter] = srcUVPtr[uv_counter + 1]; + v_counter += 4; + uv_counter += 2; } if ((i % 2) == 0) { srcUVPtr = srcUVPtr + srcStride; @@ -744,56 +737,53 @@ void convertNV12ToYUYV(int srcWidth, int srcHeight, int srcStride, int dstStride } } -void convertBuftoYV12(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst, bool align16) -{ +void convertBuftoYV12(int format, int width, int height, int srcStride, int dstStride, void* src, + void* dst, bool align16) { switch (format) { - case V4L2_PIX_FMT_NV12: - align16 ? 
align16ConvertNV12ToYV12(width, height, srcStride, src, dst) - : convertNV12ToYV12(width, height, srcStride, src, dst); - break; - case V4L2_PIX_FMT_YVU420: - copyYV12ToYV12(width, height, srcStride, dstStride, src, dst); - break; - case V4L2_PIX_FMT_YUYV: - convertYUYVToYV12(width, height, srcStride, dstStride, src, dst); - break; - default: - ALOGE("%s: unsupported format %d", __func__, format); - break; + case V4L2_PIX_FMT_NV12: + align16 ? align16ConvertNV12ToYV12(width, height, srcStride, src, dst) : + convertNV12ToYV12(width, height, srcStride, src, dst); + break; + case V4L2_PIX_FMT_YVU420: + copyYV12ToYV12(width, height, srcStride, dstStride, src, dst); + break; + case V4L2_PIX_FMT_YUYV: + convertYUYVToYV12(width, height, srcStride, dstStride, src, dst); + break; + default: + ALOGE("%s: unsupported format %d", __func__, format); + break; } } -void convertBuftoNV21(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst) -{ +void convertBuftoNV21(int format, int width, int height, int srcStride, int dstStride, void* src, + void* dst) { switch (format) { - case V4L2_PIX_FMT_NV12: - trimConvertNV12ToNV21(width, height, srcStride, src, dst); - break; - case V4L2_PIX_FMT_YVU420: - convertYV12ToNV21(width, height, srcStride, dstStride, src, dst); - break; - case V4L2_PIX_FMT_YUYV: - convertYUYVToNV21(width, height, srcStride, src, dst); - break; - default: - ALOGE("%s: unsupported format %d", __func__, format); - break; + case V4L2_PIX_FMT_NV12: + trimConvertNV12ToNV21(width, height, srcStride, src, dst); + break; + case V4L2_PIX_FMT_YVU420: + convertYV12ToNV21(width, height, srcStride, dstStride, src, dst); + break; + case V4L2_PIX_FMT_YUYV: + convertYUYVToNV21(width, height, srcStride, src, dst); + break; + default: + ALOGE("%s: unsupported format %d", __func__, format); + break; } } -void convertBuftoYUYV(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst) -{ +void convertBuftoYUYV(int 
format, int width, int height, int srcStride, int dstStride, void* src, + void* dst) { switch (format) { - case V4L2_PIX_FMT_NV12: - convertNV12ToYUYV(width, height, srcStride, dstStride, src, dst); - break; - default: - LOGE("%s: unsupported format %d", __func__, format); - break; + case V4L2_PIX_FMT_NV12: + convertNV12ToYUYV(width, height, srcStride, dstStride, src, dst); + break; + default: + LOGE("%s: unsupported format %d", __func__, format); + break; } } -} // namespace ImageConverter -} // namespace icamera +} // namespace ImageConverter +} // namespace icamera diff --git a/src/image_process/ImageConverter.h b/src/image_process/ImageConverter.h index f55479a5..55a045bf 100644 --- a/src/image_process/ImageConverter.h +++ b/src/image_process/ImageConverter.h @@ -19,41 +19,40 @@ namespace icamera { namespace ImageConverter { -void YUV420ToRGB565(int width, int height, void *src, void *dst); +void YUV420ToRGB565(int width, int height, void* src, void* dst); -void trimConvertNV12ToRGB565(int width, int height, int srcStride, void *src, void *dst); +void trimConvertNV12ToRGB565(int width, int height, int srcStride, void* src, void* dst); -void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void *src, void *dst); -void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void *src, void *dst); +void convertYV12ToNV21(int width, int height, int srcStride, int dstStride, void* src, void* dst); +void copyYV12ToYV12(int width, int height, int srcStride, int dstStride, void* src, void* dst); -void trimConvertNV12ToNV21(int width, int height, int srcStride, void *src, void *dst); +void trimConvertNV12ToNV21(int width, int height, int srcStride, void* src, void* dst); -void convertNV12ToYV12(int width, int height, int srcStride, void *src, void *dst); -void align16ConvertNV12ToYV12(int width, int height, int srcStride, void *src, void *dst); +void convertNV12ToYV12(int width, int height, int srcStride, void* src, void* dst); +void 
align16ConvertNV12ToYV12(int width, int height, int srcStride, void* src, void* dst); -void NV12ToP411(int width, int height, int stride, void *src, void *dst); -void NV21ToP411(int width, int height, int stride, void *src, void *dst); -void NV12ToP411Separate(int width, int height, int stride, - void *srcY, void *srcUV, void *dst); -void NV21ToP411Separate(int width, int height, int stride, - void *srcY, void *srcUV, void *dst); +void NV12ToP411(int width, int height, int stride, void* src, void* dst); +void NV21ToP411(int width, int height, int stride, void* src, void* dst); +void NV12ToP411Separate(int width, int height, int stride, void* srcY, void* srcUV, void* dst); +void NV21ToP411Separate(int width, int height, int stride, void* srcY, void* srcUV, void* dst); -void YUY2ToP411(int width, int height, int stride, void *src, void *dst); -void NV12ToIMC3(int width, int height, int stride,void *srcY, void *srcUV, void *dst); -void NV12ToIMC1(int width, int height, int stride, void *srcY, void *srcUV, void *dst); -void convertYUYVToYV12(int width, int height, int srcStride, int dstStride, void *src, void *dst); +void YUY2ToP411(int width, int height, int stride, void* src, void* dst); +void NV12ToIMC3(int width, int height, int stride, void* srcY, void* srcUV, void* dst); +void NV12ToIMC1(int width, int height, int stride, void* srcY, void* srcUV, void* dst); +void convertYUYVToYV12(int width, int height, int srcStride, int dstStride, void* src, void* dst); -void convertYUYVToNV21(int width, int height, int srcStride, void *src, void *dst); -void convertNV12ToYUYV(int srcWidth, int srcHeight, int srcStride, int dstStride, const void *src, void *dst); +void convertYUYVToNV21(int width, int height, int srcStride, void* src, void* dst); +void convertNV12ToYUYV(int srcWidth, int srcHeight, int srcStride, int dstStride, const void* src, + void* dst); -void convertBuftoYV12(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst, bool 
align16 = true); -void convertBuftoNV21(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst); -void convertBuftoYUYV(int format, int width, int height, int srcStride, - int dstStride, void *src, void *dst); +void convertBuftoYV12(int format, int width, int height, int srcStride, int dstStride, void* src, + void* dst, bool align16 = true); +void convertBuftoNV21(int format, int width, int height, int srcStride, int dstStride, void* src, + void* dst); +void convertBuftoYUYV(int format, int width, int height, int srcStride, int dstStride, void* src, + void* dst); -void repadYUV420(int width, int height, int srcStride, int dstStride, void *src, void *dst); +void repadYUV420(int width, int height, int srcStride, int dstStride, void* src, void* dst); -} // namespace ImageConverter -} // namespace icamera +} // namespace ImageConverter +} // namespace icamera diff --git a/src/image_process/ImageScalerCore.cpp b/src/image_process/ImageScalerCore.cpp index ac5aceee..70ffde71 100644 --- a/src/image_process/ImageScalerCore.cpp +++ b/src/image_process/ImageScalerCore.cpp @@ -23,46 +23,45 @@ #include "iutils/CameraLog.h" #include "ImageScalerCore.h" -#define RESOLUTION_VGA_WIDTH 640 -#define RESOLUTION_VGA_HEIGHT 480 -#define RESOLUTION_QVGA_WIDTH 320 -#define RESOLUTION_QVGA_HEIGHT 240 -#define RESOLUTION_QCIF_WIDTH 176 -#define RESOLUTION_QCIF_HEIGHT 144 -#define MIN(a,b) ((a)<(b)?(a):(b)) +#define RESOLUTION_VGA_WIDTH 640 +#define RESOLUTION_VGA_HEIGHT 480 +#define RESOLUTION_QVGA_WIDTH 320 +#define RESOLUTION_QVGA_HEIGHT 240 +#define RESOLUTION_QCIF_WIDTH 176 +#define RESOLUTION_QCIF_HEIGHT 144 +#define MIN(a, b) ((a) < (b) ? 
(a) : (b)) namespace icamera { -void ImageScalerCore::downScaleImage(void *src, void *dest, - int dest_w, int dest_h, int dest_stride, - int src_w, int src_h, int src_stride, - int format, int src_skip_lines_top, // number of lines that are skipped from src image start pointer - int src_skip_lines_bottom) // number of lines that are skipped after reading src_h (should be set always to reach full image height) -{ - unsigned char *m_dest = (unsigned char *)dest; - const unsigned char * m_src = (const unsigned char *)src; +void ImageScalerCore::downScaleImage( + void* src, void* dest, int dest_w, int dest_h, int dest_stride, int src_w, int src_h, + int src_stride, + // number of lines that are skipped from src image start pointer + int format, int src_skip_lines_top, + // number of lines that are skipped after reading src_h + // (should be set always to reach full image height) + int src_skip_lines_bottom) { + unsigned char* m_dest = (unsigned char*)dest; + const unsigned char* m_src = (const unsigned char*)src; switch (format) { case V4L2_PIX_FMT_NV21: case V4L2_PIX_FMT_NV12: { if ((dest_w == src_w && dest_h <= src_h) || (dest_w <= src_w && dest_h == src_h)) { // trim only if only src_h is larger than dest_h or src_w is larger than dest_w - ImageScalerCore::trimNv12Image(m_dest, m_src, - dest_w, dest_h, dest_stride, - src_w, src_h, src_stride, - src_skip_lines_top, src_skip_lines_bottom); + ImageScalerCore::trimNv12Image(m_dest, m_src, dest_w, dest_h, dest_stride, src_w, + src_h, src_stride, src_skip_lines_top, + src_skip_lines_bottom); } else { // downscale & crop - ImageScalerCore::downScaleAndCropNv12Image(m_dest, m_src, - dest_w, dest_h, dest_stride, - src_w, src_h, src_stride, - src_skip_lines_top, src_skip_lines_bottom); + ImageScalerCore::downScaleAndCropNv12Image( + m_dest, m_src, dest_w, dest_h, dest_stride, src_w, src_h, src_stride, + src_skip_lines_top, src_skip_lines_bottom); } break; } case V4L2_PIX_FMT_YUYV: { - 
ImageScalerCore::downScaleYUY2Image(m_dest, m_src, - dest_w, dest_h, dest_stride, - src_w, src_h, src_stride); + ImageScalerCore::downScaleYUY2Image(m_dest, m_src, dest_w, dest_h, dest_stride, src_w, + src_h, src_stride); break; } default: { @@ -72,67 +71,77 @@ void ImageScalerCore::downScaleImage(void *src, void *dest, } } -void ImageScalerCore::downScaleYUY2Image(unsigned char *dest, const unsigned char *src, +void ImageScalerCore::downScaleYUY2Image(unsigned char* dest, const unsigned char* src, const int dest_w, const int dest_h, const int dest_stride, - const int src_w, const int src_h, const int src_stride) -{ - if (dest==NULL || dest_w <=0 || dest_h <=0 || src==NULL || src_w <=0 || src_h <= 0 ) + const int src_w, const int src_h, const int src_stride) { + if (dest == NULL || dest_w <= 0 || dest_h <= 0 || src == NULL || src_w <= 0 || src_h <= 0) return; - if (dest_w%2 != 0) // if the dest_w is not an even number, exit + if (dest_w % 2 != 0) // if the dest_w is not an even number, exit return; - const int scale_w = (src_w<<8) / dest_w; // scale factors - const int scale_h = (src_h<<8) / dest_h; + const int scale_w = (src_w << 8) / dest_w; // scale factors + const int scale_h = (src_h << 8) / dest_h; int macro_pixel_width = dest_w >> 1; - unsigned int val_1, val_2; // for bi-linear-interpolation - int i,j,k; + unsigned int val_1, val_2; // for bi-linear-interpolation + int i, j, k; - for(i=0; i < dest_h; ++i) { + for (i = 0; i < dest_h; ++i) { int src_i = i * scale_h; int dy = src_i & 0xff; src_i >>= 8; - for(j=0; j < macro_pixel_width; ++j) { + for (j = 0; j < macro_pixel_width; ++j) { int src_j = j * scale_w; int dx = src_j & 0xff; src_j = src_j >> 8; - for(k = 0; k < 4; ++k) { + for (k = 0; k < 4; ++k) { // bi-linear-interpolation - if(dx == 0 && dy == 0) { - dest[i * 2 * dest_stride + 4 * j + k] = src[src_i * 2 * src_stride + src_j * 4 + k]; - } else if(dx == 0 && dy != 0){ + if (dx == 0 && dy == 0) { + dest[i * 2 * dest_stride + 4 * j + k] = + src[src_i * 
2 * src_stride + src_j * 4 + k]; + } else if (dx == 0 && dy != 0) { val_1 = (unsigned int)src[src_i * 2 * src_stride + src_j * 4 + k]; val_2 = (unsigned int)src[(src_i + 1) * 2 * src_stride + src_j * 4 + k]; val_1 = (val_1 * (256 - dy) + val_2 * dy) >> 8; - dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? val_1: 255); - } else if(dx != 0 && dy == 0) { - val_1 = ((unsigned int)src[src_i * 2 * src_stride + src_j * 4 + k] * (256 - dx) - + (unsigned int)src[src_i * 2 * src_stride + (src_j +1) * 4 + k] * dx) >> 8; - dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? val_1: 255); + dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? val_1 : 255); + } else if (dx != 0 && dy == 0) { + val_1 = + ((unsigned int)src[src_i * 2 * src_stride + src_j * 4 + k] * (256 - dx) + + (unsigned int)src[src_i * 2 * src_stride + (src_j + 1) * 4 + k] * dx) >> + 8; + dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? val_1 : 255); } else { - val_1 = ((unsigned int)src[src_i * 2 * src_stride + src_j * 4 + k] * (256 - dx) - + (unsigned int)src[src_i * 2 * src_stride + (src_j +1) * 4 + k] * dx) >> 8; - val_2 = ((unsigned int)src[(src_i + 1) * 2 * src_stride + src_j * 4 + k] * (256 - dx) - + (unsigned int)src[(src_i + 1) * 2 * src_stride + (src_j+1) * 4 + k] * dx) >> 8; + val_1 = + ((unsigned int)src[src_i * 2 * src_stride + src_j * 4 + k] * (256 - dx) + + (unsigned int)src[src_i * 2 * src_stride + (src_j + 1) * 4 + k] * dx) >> + 8; + val_2 = ((unsigned int)src[(src_i + 1) * 2 * src_stride + src_j * 4 + k] * + (256 - dx) + + (unsigned int)src[(src_i + 1) * 2 * src_stride + (src_j + 1) * 4 + k] * + dx) >> + 8; val_1 = (val_1 * (256 - dy) + val_2 * dy) >> 8; - dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? val_1: 255); + dest[i * 2 * dest_stride + 4 * j + k] = ((val_1 <= 255) ? 
val_1 : 255); } } } } } -void ImageScalerCore::trimNv12Image(unsigned char *dest, const unsigned char *src, - const int dest_w, const int dest_h, const int dest_stride, - const int src_w, const int src_h, const int src_stride, - const int src_skip_lines_top, // number of lines that are skipped from src image start pointer - const int src_skip_lines_bottom) // number of lines that are skipped after reading src_h (should be set always to reach full image height) +void ImageScalerCore::trimNv12Image( + unsigned char* dest, const unsigned char* src, const int dest_w, const int dest_h, + const int dest_stride, const int src_w, const int src_h, const int src_stride, + const int src_skip_lines_top, // number of lines that are skipped from src image start pointer + const int src_skip_lines_bottom) // number of lines that are skipped after reading src_h + // (should be set always to reach full image height) { - LOG1("@%s: dest_w: %d, dest_h: %d, dest_stride:%d, src_w: %d, src_h: %d, src_stride: %d, skip_top: %d, skip_bottom: %d", - __func__, dest_w,dest_h,dest_stride,src_w,src_h,src_stride,src_skip_lines_top,src_skip_lines_bottom); + LOG1("@%s: dest_w: %d, dest_h: %d, dest_stride:%d, src_w: %d, src_h: %d, src_stride: %d, " + "skip_top: %d, skip_bottom: %d", + __func__, dest_w, dest_h, dest_stride, src_w, src_h, src_stride, src_skip_lines_top, + src_skip_lines_bottom); - const unsigned char *y = src; - const unsigned char *uv = src + src_h * src_stride; + const unsigned char* y = src; + const unsigned char* uv = src + src_h * src_stride; if (dest_w < src_w) { /* * src_w @@ -195,36 +204,39 @@ void ImageScalerCore::trimNv12Image(unsigned char *dest, const unsigned char *sr } // VGA-QCIF begin (Enzo specific) -void ImageScalerCore::downScaleAndCropNv12Image(unsigned char *dest, const unsigned char *src, - const int dest_w, const int dest_h, const int dest_stride, - const int src_w, const int src_h, const int src_stride, - const int src_skip_lines_top, // number of lines that 
are skipped from src image start pointer - const int src_skip_lines_bottom) // number of lines that are skipped after reading src_h (should be set always to reach full image height) -{ - LOG1("@%s: dest_w: %d, dest_h: %d, dest_stride: %d, src_w: %d, src_h: %d, src_stride: %d, skip_top: %d, skip_bottom: %d, dest: %p, src: %p", - __func__, dest_w, dest_h, dest_stride, src_w, src_h, src_stride, src_skip_lines_top, src_skip_lines_bottom, dest, src); - - if (src_w == 800 && src_h == 600 && src_skip_lines_top == 0 && src_skip_lines_bottom == 0 - && dest_w == RESOLUTION_QVGA_WIDTH && dest_h == RESOLUTION_QVGA_HEIGHT) { +void ImageScalerCore::downScaleAndCropNv12Image( + unsigned char* dest, const unsigned char* src, const int dest_w, const int dest_h, + const int dest_stride, const int src_w, const int src_h, const int src_stride, + // number of lines that are skipped from src image start pointer + const int src_skip_lines_top, + // number of lines that are skipped after reading src_h + // (should be set always to reach full image height) + const int src_skip_lines_bottom) { + LOG1("@%s: dest_w: %d, dest_h: %d, dest_stride: %d, src_w: %d, src_h: %d, src_stride: %d, " + "skip_top: %d, skip_bottom: %d, dest: %p, src: %p", + __func__, dest_w, dest_h, dest_stride, src_w, src_h, src_stride, src_skip_lines_top, + src_skip_lines_bottom, dest, src); + + if (src_w == 800 && src_h == 600 && src_skip_lines_top == 0 && src_skip_lines_bottom == 0 && + dest_w == RESOLUTION_QVGA_WIDTH && dest_h == RESOLUTION_QVGA_HEIGHT) { downScaleNv12ImageFrom800x600ToQvga(dest, src, dest_stride, src_stride); return; } - if (src_w == RESOLUTION_VGA_WIDTH && src_h == RESOLUTION_VGA_HEIGHT - && src_skip_lines_top == 0 && src_skip_lines_bottom == 0 - && dest_w == RESOLUTION_QVGA_WIDTH && dest_h == RESOLUTION_QVGA_HEIGHT) { + if (src_w == RESOLUTION_VGA_WIDTH && src_h == RESOLUTION_VGA_HEIGHT && + src_skip_lines_top == 0 && src_skip_lines_bottom == 0 && dest_w == RESOLUTION_QVGA_WIDTH && + dest_h == 
RESOLUTION_QVGA_HEIGHT) { downScaleAndCropNv12ImageQvga(dest, src, dest_stride, src_stride); return; } - if (src_w == RESOLUTION_VGA_WIDTH && src_h == RESOLUTION_VGA_HEIGHT - && src_skip_lines_top == 0 && src_skip_lines_bottom == 0 - && dest_w == RESOLUTION_QCIF_WIDTH && dest_h == RESOLUTION_QCIF_WIDTH) { + if (src_w == RESOLUTION_VGA_WIDTH && src_h == RESOLUTION_VGA_HEIGHT && + src_skip_lines_top == 0 && src_skip_lines_bottom == 0 && dest_w == RESOLUTION_QCIF_WIDTH && + dest_h == RESOLUTION_QCIF_WIDTH) { downScaleAndCropNv12ImageQcif(dest, src, dest_stride, src_stride); return; } // skip lines from top - if (src_skip_lines_top > 0) - src += src_skip_lines_top * src_stride; + if (src_skip_lines_top > 0) src += src_skip_lines_top * src_stride; // Correct aspect ratio is defined by destination buffer long int aspect_ratio = (dest_w << 16) / dest_h; @@ -264,10 +276,12 @@ void ImageScalerCore::downScaleAndCropNv12Image(unsigned char *dest, const unsig x1 = j * scaling_w; dx = x1 & 0xff; x2 = (x1 >> 8) + l_skip; - val_1 = ((unsigned int)src[y2 * src_stride + x2] * (256 - dx) - + (unsigned int)src[y2 * src_stride + x2 + 1] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_stride + x2] * (256 - dx) - + (unsigned int)src[(y2 + 1) * src_stride + x2 + 1] * dx) >> 8; + val_1 = ((unsigned int)src[y2 * src_stride + x2] * (256 - dx) + + (unsigned int)src[y2 * src_stride + x2 + 1] * dx) >> + 8; + val_2 = ((unsigned int)src[(y2 + 1) * src_stride + x2] * (256 - dx) + + (unsigned int)src[(y2 + 1) * src_stride + x2 + 1] * dx) >> + 8; dest[i * dest_stride + j] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); } } @@ -275,7 +289,7 @@ void ImageScalerCore::downScaleAndCropNv12Image(unsigned char *dest, const unsig j = 0; width = dest_w >> 1; height = dest_h >> 1; - //get UV data + // get UV data for (i = 0; i < height; i++) { y1 = i * scaling_h; dy = y1 & 0xff; @@ -284,25 +298,33 @@ void ImageScalerCore::downScaleAndCropNv12Image(unsigned char *dest, const unsig x1 = j * 
scaling_w; dx = x1 & 0xff; x2 = (x1 >> 8) + l_skip / 2; - //fill U data - val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) - + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + src_Y_data] * (256 -dx) - + (unsigned int)src[(y2 +1) * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> 8; - dest[i * dest_stride + (j << 1) + dest_Y_data] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); - //fill V data - val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + 1 + src_Y_data] * (256 - dx) - + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + 1 + src_Y_data] * (256 -dx) - + (unsigned int)src[(y2 +1) * src_stride + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> 8; - dest[i * dest_stride + (j << 1) + 1 + dest_Y_data] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); + // fill U data + val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) + + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> + 8; + val_2 = + ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) + + (unsigned int)src[(y2 + 1) * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> + 8; + dest[i * dest_stride + (j << 1) + dest_Y_data] = + MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); + // fill V data + val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + 1 + src_Y_data] * (256 - dx) + + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> + 8; + val_2 = ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + 1 + src_Y_data] * + (256 - dx) + + (unsigned int)src[(y2 + 1) * src_stride + ((x2 + 1) << 1) + 1 + src_Y_data] * + dx) >> + 8; + dest[i * dest_stride + (j << 1) + 1 + dest_Y_data] = + MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); } } } -void 
ImageScalerCore::downScaleAndCropNv12ImageQvga(unsigned char *dest, const unsigned char *src, - const int dest_stride, const int src_stride) -{ +void ImageScalerCore::downScaleAndCropNv12ImageQvga(unsigned char* dest, const unsigned char* src, + const int dest_stride, const int src_stride) { LOG1("@%s", __func__); const int dest_w = RESOLUTION_QVGA_WIDTH; const int dest_h = RESOLUTION_QVGA_HEIGHT; @@ -311,22 +333,29 @@ void ImageScalerCore::downScaleAndCropNv12ImageQvga(unsigned char *dest, const u // Y component for (int i = 0; i < dest_h; i++) { - u_int32_t *s1 = (u_int32_t *)(&src[(i * scale + 0) * src_stride]); - u_int32_t *s2 = (u_int32_t *)(&src[(i * scale + 1) * src_stride]); - u_int32_t *d = (u_int32_t *)(&dest[i * dest_stride]); + u_int32_t* s1 = (u_int32_t*)(&src[(i * scale + 0) * src_stride]); + u_int32_t* s2 = (u_int32_t*)(&src[(i * scale + 1) * src_stride]); + u_int32_t* d = (u_int32_t*)(&dest[i * dest_stride]); // This processes 4 dest pixels at a time - for (int j = 0; j < dest_w; j+=4) { - u_int32_t a1; // Input data upper row - u_int32_t a2; // Input data lower row - u_int32_t b; // Output data + for (int j = 0; j < dest_w; j += 4) { + u_int32_t a1; // Input data upper row + u_int32_t a2; // Input data lower row + u_int32_t b; // Output data a1 = *s1++; a2 = *s2++; - b = ((a1 & 0xff) + ((a1 >> 8) & 0xff) + (a2 & 0xff) + ((a2 >> 8) & 0xff) + 2) / 4; - b |= ((((a1 >> 16) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 16) & 0xff) + ((a2 >> 24) & 0xff) + 2) / 4) << 8; + b = ((a1 & 0xff) + ((a1 >> 8) & 0xff) + (a2 & 0xff) + ((a2 >> 8) & 0xff) + 2) / 4; + b |= ((((a1 >> 16) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 16) & 0xff) + + ((a2 >> 24) & 0xff) + 2) / + 4) + << 8; a1 = *s1++; a2 = *s2++; - b |= (((a1 & 0xff) + ((a1 >> 8) & 0xff) + (a2 & 0xff) + ((a2 >> 8) & 0xff) + 2) / 4) << 16; - b |= ((((a1 >> 16) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 16) & 0xff) + ((a2 >> 24) & 0xff) + 2) / 4) << 24; + b |= (((a1 & 0xff) + ((a1 >> 8) & 0xff) + (a2 & 0xff) + ((a2 >> 8) 
& 0xff) + 2) / 4) + << 16; + b |= ((((a1 >> 16) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 16) & 0xff) + + ((a2 >> 24) & 0xff) + 2) / + 4) + << 24; *d++ = b; } } @@ -335,31 +364,37 @@ void ImageScalerCore::downScaleAndCropNv12ImageQvga(unsigned char *dest, const u src = &src[src_stride * src_h]; dest = &dest[dest_stride * dest_h]; - for (int i = 0; i < dest_h/2; i++) { - u_int32_t *s1 = (u_int32_t *)(&src[(i * scale + 0) * src_stride]); - u_int32_t *s2 = (u_int32_t *)(&src[(i * scale + 1) * src_stride]); - u_int32_t *d = (u_int32_t *)(&dest[i * dest_stride]); + for (int i = 0; i < dest_h / 2; i++) { + u_int32_t* s1 = (u_int32_t*)(&src[(i * scale + 0) * src_stride]); + u_int32_t* s2 = (u_int32_t*)(&src[(i * scale + 1) * src_stride]); + u_int32_t* d = (u_int32_t*)(&dest[i * dest_stride]); // This processes 2 dest UV pairs at a time - for (int j = 0; j < dest_w/2; j+=2) { - u_int32_t a1; // Input data upper row - u_int32_t a2; // Input data lower row - u_int32_t b; // Output data + for (int j = 0; j < dest_w / 2; j += 2) { + u_int32_t a1; // Input data upper row + u_int32_t a2; // Input data lower row + u_int32_t b; // Output data a1 = *s1++; a2 = *s2++; - b = ((a1 & 0xff) + ((a1 >> 16) & 0xff) + (a2 & 0xff) + ((a2 >> 16) & 0xff) + 2) / 4; - b |= ((((a1 >> 8) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 8) & 0xff) + ((a2 >> 24) & 0xff) + 2) / 4) << 8; + b = ((a1 & 0xff) + ((a1 >> 16) & 0xff) + (a2 & 0xff) + ((a2 >> 16) & 0xff) + 2) / 4; + b |= ((((a1 >> 8) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 8) & 0xff) + + ((a2 >> 24) & 0xff) + 2) / + 4) + << 8; a1 = *s1++; a2 = *s2++; - b |= (((a1 & 0xff) + ((a1 >> 16) & 0xff) + (a2 & 0xff) + ((a2 >> 16) & 0xff) + 2) / 4) << 16; - b |= ((((a1 >> 8) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 8) & 0xff) + ((a2 >> 24) & 0xff) + 2) / 4) << 24; + b |= (((a1 & 0xff) + ((a1 >> 16) & 0xff) + (a2 & 0xff) + ((a2 >> 16) & 0xff) + 2) / 4) + << 16; + b |= ((((a1 >> 8) & 0xff) + ((a1 >> 24) & 0xff) + ((a2 >> 8) & 0xff) + + ((a2 >> 24) & 0xff) + 2) / + 4) 
+ << 24; *d++ = b; } } } -void ImageScalerCore::downScaleAndCropNv12ImageQcif(unsigned char *dest, const unsigned char *src, - const int dest_stride, const int src_stride) -{ +void ImageScalerCore::downScaleAndCropNv12ImageQcif(unsigned char* dest, const unsigned char* src, + const int dest_stride, const int src_stride) { LOG1("@%s", __func__); const int dest_w = RESOLUTION_QCIF_WIDTH; const int dest_h = RESOLUTION_QCIF_HEIGHT; @@ -401,10 +436,12 @@ void ImageScalerCore::downScaleAndCropNv12ImageQcif(unsigned char *dest, const u x1 = j * scaling_w; dx = x1 & 0xff; x2 = (x1 >> 8) + l_skip; - val_1 = ((unsigned int)src[y2 * src_stride + x2] * (256 - dx) - + (unsigned int)src[y2 * src_stride + x2 + 1] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_stride + x2] * (256 - dx) - + (unsigned int)src[(y2 + 1) * src_stride + x2 + 1] * dx) >> 8; + val_1 = ((unsigned int)src[y2 * src_stride + x2] * (256 - dx) + + (unsigned int)src[y2 * src_stride + x2 + 1] * dx) >> + 8; + val_2 = ((unsigned int)src[(y2 + 1) * src_stride + x2] * (256 - dx) + + (unsigned int)src[(y2 + 1) * src_stride + x2 + 1] * dx) >> + 8; dest[i * dest_stride + j] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); } } @@ -412,7 +449,7 @@ void ImageScalerCore::downScaleAndCropNv12ImageQcif(unsigned char *dest, const u j = 0; width = dest_w >> 1; height = dest_h >> 1; - //get UV data + // get UV data for (i = 0; i < height; i++) { y1 = i * scaling_h; dy = y1 & 0xff; @@ -421,25 +458,33 @@ void ImageScalerCore::downScaleAndCropNv12ImageQcif(unsigned char *dest, const u x1 = j * scaling_w; dx = x1 & 0xff; x2 = (x1 >> 8) + l_skip / 2; - //fill U data - val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) - + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + src_Y_data] * (256 -dx) - + (unsigned int)src[(y2 +1) * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> 8; - dest[i * 
dest_stride + (j << 1) + dest_Y_data] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); - //fill V data - val_1 = ((unsigned int)src[y2 * src_w + (x2 << 1) + 1 + src_Y_data] * (256 - dx) - + (unsigned int)src[y2 * src_w + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> 8; - val_2 = ((unsigned int)src[(y2 + 1) * src_w + (x2 << 1) + 1 + src_Y_data] * (256 -dx) - + (unsigned int)src[(y2 +1) * src_w + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> 8; - dest[i * dest_stride + (j << 1) + 1 + dest_Y_data] = MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); + // fill U data + val_1 = ((unsigned int)src[y2 * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) + + (unsigned int)src[y2 * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> + 8; + val_2 = + ((unsigned int)src[(y2 + 1) * src_stride + (x2 << 1) + src_Y_data] * (256 - dx) + + (unsigned int)src[(y2 + 1) * src_stride + ((x2 + 1) << 1) + src_Y_data] * dx) >> + 8; + dest[i * dest_stride + (j << 1) + dest_Y_data] = + MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); + // fill V data + val_1 = ((unsigned int)src[y2 * src_w + (x2 << 1) + 1 + src_Y_data] * (256 - dx) + + (unsigned int)src[y2 * src_w + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> + 8; + val_2 = ((unsigned int)src[(y2 + 1) * src_w + (x2 << 1) + 1 + src_Y_data] * (256 - dx) + + (unsigned int)src[(y2 + 1) * src_w + ((x2 + 1) << 1) + 1 + src_Y_data] * dx) >> + 8; + dest[i * dest_stride + (j << 1) + 1 + dest_Y_data] = + MIN(((val_1 * (256 - dy) + val_2 * dy) >> 8), 0xff); } } } -void ImageScalerCore::downScaleNv12ImageFrom800x600ToQvga(unsigned char *dest, const unsigned char *src, - const int dest_stride, const int src_stride) -{ +void ImageScalerCore::downScaleNv12ImageFrom800x600ToQvga(unsigned char* dest, + const unsigned char* src, + const int dest_stride, + const int src_stride) { LOG1("@%s", __func__); const int dest_w = RESOLUTION_QVGA_WIDTH; const int dest_h = RESOLUTION_QVGA_HEIGHT; @@ -449,166 +494,198 @@ void 
ImageScalerCore::downScaleNv12ImageFrom800x600ToQvga(unsigned char *dest, c // Processing 2 dest rows and 5 src rows at a time for (int i = 0; i < dest_h / 2; i++) { - u_int32_t *s1 = (u_int32_t *)(&src[(i * 5 + 0) * src_stride]); - u_int32_t *s2 = (u_int32_t *)(&src[(i * 5 + 1) * src_stride]); - u_int32_t *s3 = (u_int32_t *)(&src[(i * 5 + 2) * src_stride]); - u_int32_t *d = (u_int32_t *)(&dest[(i * 2 + 0) * dest_stride]); + u_int32_t* s1 = (u_int32_t*)(&src[(i * 5 + 0) * src_stride]); + u_int32_t* s2 = (u_int32_t*)(&src[(i * 5 + 1) * src_stride]); + u_int32_t* s3 = (u_int32_t*)(&src[(i * 5 + 2) * src_stride]); + u_int32_t* d = (u_int32_t*)(&dest[(i * 2 + 0) * dest_stride]); // This processes 8 dest pixels at a time - for (int j = 0; j < dest_w; j+=8) { - u_int32_t a1; // Input data upper row - u_int32_t a2; // Input data middle row - u_int32_t a3; // Input data lower row - u_int32_t t; // Temp data (for constructing the output) - u_int32_t b; // Output data + for (int j = 0; j < dest_w; j += 8) { + u_int32_t a1; // Input data upper row + u_int32_t a2; // Input data middle row + u_int32_t a3; // Input data lower row + u_int32_t t; // Temp data (for constructing the output) + u_int32_t b; // Output data a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b = t; // First pixel - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 2 * 
((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b = t; // First pixel + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 8; // Second pixel - t = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 1 * ((a3 >> 24) & 0xff)); + b |= t << 8; // Second pixel + t = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 2 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 1 * ((a3 >> 24) & 0xff)); t 
= (t + 12) / 25; - b |= t << 16; // Third pixel - t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + b |= t << 16; // Third pixel + t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 24; // Fourth pixel + b |= t << 24; // Fourth pixel *d++ = b; - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 4 * 
((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b = t; // Fifth pixel - t = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + b = t; // Fifth pixel + t = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b |= t << 8; // Sixth pixel - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b |= t << 8; // Sixth pixel + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 4 * ((a1 
>> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 16; // Seventh pixel - t = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b |= t << 16; // Seventh pixel + t = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b |= t << 24; // Eigth pixel + b |= t << 24; // Eigth pixel *d++ = b; } - s1 = (u_int32_t *)(&src[(i * 5 + 4) * src_stride]); - s2 = (u_int32_t *)(&src[(i * 5 + 3) * src_stride]); - s3 = (u_int32_t *)(&src[(i * 5 + 2) * src_stride]); - d = (u_int32_t *)(&dest[(i * 2 + 1) * dest_stride]); + s1 = (u_int32_t*)(&src[(i * 5 + 4) * src_stride]); + s2 = 
(u_int32_t*)(&src[(i * 5 + 3) * src_stride]); + s3 = (u_int32_t*)(&src[(i * 5 + 2) * src_stride]); + d = (u_int32_t*)(&dest[(i * 2 + 1) * dest_stride]); // This processes 8 dest pixels at a time - for (int j = 0; j < dest_w; j+=8) { - u_int32_t a1; // Input data lower row - u_int32_t a2; // Input data middle row - u_int32_t a3; // Input data upper row - u_int32_t t; // Temp data (for constructing the output) - u_int32_t b; // Output data + for (int j = 0; j < dest_w; j += 8) { + u_int32_t a1; // Input data lower row + u_int32_t a2; // Input data middle row + u_int32_t a3; // Input data upper row + u_int32_t t; // Temp data (for constructing the output) + u_int32_t b; // Output data a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b = t; // First pixel - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b = t; // First pixel + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 
>> 8) & 0xff) + 1 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 8; // Second pixel - t = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 1 * ((a3 >> 24) & 0xff)); + b |= t << 8; // Second pixel + t = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 2 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 1 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b |= t << 16; // Third pixel - t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + b |= t << 16; // Third pixel + t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 
0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 24; // Fourth pixel + b |= t << 24; // Fourth pixel *d++ = b; - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 
0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b = t; // Fifth pixel - t = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); + b = t; // Fifth pixel + t = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 0) & 0xff) + + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b |= t << 8; // Sixth pixel - t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b |= t << 8; // Sixth pixel + t = (0 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 0 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - t = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 0 * 
((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ t; + t = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 16) & 0xff) + + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 0) & 0xff) + + 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 16) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + t; t = (t + 12) / 25; - b |= t << 16; // Seventh pixel - t = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + b |= t << 16; // Seventh pixel + t = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 16) & 0xff) + + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 0) & 0xff) + + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 16) & 0xff) + 2 * ((a3 >> 24) & 0xff)); t = (t + 12) / 25; - b |= t << 24; // Eigth pixel + b |= t << 24; // Eigth pixel *d++ = b; } } @@ -618,198 +695,181 @@ void ImageScalerCore::downScaleNv12ImageFrom800x600ToQvga(unsigned char *dest, c dest = &dest[dest_stride * dest_h]; // Processing 2 dest rows and 5 src rows at a time - for (int i = 0; i < (dest_h/2) / 2; i++) { - u_int32_t *s1 = (u_int32_t *)(&src[(i * 5 + 0) * src_stride]); - u_int32_t *s2 = (u_int32_t *)(&src[(i * 5 + 1) * src_stride]); - u_int32_t *s3 = (u_int32_t *)(&src[(i * 5 + 2) * src_stride]); - u_int16_t *d = (u_int16_t *)(&dest[(i * 2 + 0) * dest_stride]); + for (int i = 0; i < (dest_h / 2) / 2; i++) { + u_int32_t* s1 = (u_int32_t*)(&src[(i * 5 + 0) * src_stride]); + u_int32_t* s2 = (u_int32_t*)(&src[(i * 5 + 1) * src_stride]); + u_int32_t* s3 = (u_int32_t*)(&src[(i * 5 + 2) * src_stride]); + u_int16_t* d = (u_int16_t*)(&dest[(i * 2 + 0) * dest_stride]); // 
This processes 4 dest UV pairs at a time - for (int j = 0; j < dest_w/2; j+=4) { - u_int32_t a1; // Input data upper row - u_int32_t a2; // Input data middle row - u_int32_t a3; // Input data lower row - u_int32_t u; // Temp data (for constructing the output) - u_int32_t v; // Temp data (for constructing the output) + for (int j = 0; j < dest_w / 2; j += 4) { + u_int32_t a1; // Input data upper row + u_int32_t a2; // Input data middle row + u_int32_t a3; // Input data lower row + u_int32_t u; // Temp data (for constructing the output) + u_int32_t v; // Temp data (for constructing the output) a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 16) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff))+ u; - v = (2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ v; + u = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 2 * ((a2 >> 0) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff)) + + u; + v = (2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + v; u = (u 
+ 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // First uv pair; - u = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // First uv pair; + u = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 2 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // Second uv pair; - u = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // Second uv pair; + u = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 0 * ((a2 >> 0) & 
0xff) + + 4 * ((a2 >> 16) & 0xff) + 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // Third uv pair; - u = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 16) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)); - v = (0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // Third uv pair; + u = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 0 * ((a2 >> 0) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)); + v = (0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 4 * ((a2 
>> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // Fourth uv pair; + *d++ = u | (v << 8); // Fourth uv pair; } - s1 = (u_int32_t *)(&src[(i * 5 + 4) * src_stride]); - s2 = (u_int32_t *)(&src[(i * 5 + 3) * src_stride]); - s3 = (u_int32_t *)(&src[(i * 5 + 2) * src_stride]); - d = (u_int16_t *)(&dest[(i * 2 + 1) * dest_stride]); + s1 = (u_int32_t*)(&src[(i * 5 + 4) * src_stride]); + s2 = (u_int32_t*)(&src[(i * 5 + 3) * src_stride]); + s3 = (u_int32_t*)(&src[(i * 5 + 2) * src_stride]); + d = (u_int16_t*)(&dest[(i * 2 + 1) * dest_stride]); // This processes 4 dest UV pairs at a time - for (int j = 0; j < dest_w/2; j+=4) { - u_int32_t a1; // Input data lower row - u_int32_t a2; // Input data middle row - u_int32_t a3; // Input data upper row - u_int32_t u; // Temp data (for constructing the output) - u_int32_t v; // Temp data (for constructing the output) + for (int j = 0; j < dest_w / 2; j += 4) { + u_int32_t a1; // Input data lower row + u_int32_t a2; // Input data middle row + u_int32_t a3; // Input data upper row + u_int32_t u; // Temp data (for constructing the output) + u_int32_t v; // Temp data (for constructing the output) a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 
16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 16) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff))+ u; - v = (2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ v; + u = (2 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 2 * ((a2 >> 0) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 1 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff)) + + u; + v = (2 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // First uv pair; - u = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 2 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 2 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // First uv pair; + u = (2 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 2 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 1 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (2 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 2 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 1 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 0 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 0 * 
((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 0 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 0 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 0 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 0 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 0 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 0 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 0 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // Second uv pair; - u = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); - v = (0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // Second uv pair; + u = (0 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 0 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 0 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)); + v = (0 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | 
(v << 8); // Third uv pair; - u = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + - 0 * ((a2 >> 0) & 0xff) + 2 * ((a2 >> 16) & 0xff) + - 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)); - v = (0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + - 0 * ((a2 >> 8) & 0xff) + 2 * ((a2 >> 24) & 0xff) + - 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)); + *d++ = u | (v << 8); // Third uv pair; + u = (0 * ((a1 >> 0) & 0xff) + 2 * ((a1 >> 16) & 0xff) + 0 * ((a2 >> 0) & 0xff) + + 2 * ((a2 >> 16) & 0xff) + 0 * ((a3 >> 0) & 0xff) + 1 * ((a3 >> 16) & 0xff)); + v = (0 * ((a1 >> 8) & 0xff) + 2 * ((a1 >> 24) & 0xff) + 0 * ((a2 >> 8) & 0xff) + + 2 * ((a2 >> 24) & 0xff) + 0 * ((a3 >> 8) & 0xff) + 1 * ((a3 >> 24) & 0xff)); a1 = *s1++; a2 = *s2++; a3 = *s3++; - u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + - 4 * ((a2 >> 0) & 0xff) + 4 * ((a2 >> 16) & 0xff) + - 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff))+ u; - v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + - 4 * ((a2 >> 8) & 0xff) + 4 * ((a2 >> 24) & 0xff) + - 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff))+ v; + u = (4 * ((a1 >> 0) & 0xff) + 4 * ((a1 >> 16) & 0xff) + 4 * ((a2 >> 0) & 0xff) + + 4 * ((a2 >> 16) & 0xff) + 2 * ((a3 >> 0) & 0xff) + 2 * ((a3 >> 16) & 0xff)) + + u; + v = (4 * ((a1 >> 8) & 0xff) + 4 * ((a1 >> 24) & 0xff) + 4 * ((a2 >> 8) & 0xff) + + 4 * ((a2 >> 24) & 0xff) + 2 * ((a3 >> 8) & 0xff) + 2 * ((a3 >> 24) & 0xff)) + + v; u = (u + 12) / 25; v = (v + 12) / 25; - *d++ = u | (v << 8); // Fourth uv pair; + *d++ = u | (v << 8); // Fourth uv pair; } } - } // VGA-QCIF end -int ImageScalerCore::cropCompose( - void *src, unsigned int srcW, unsigned int srcH, unsigned int srcStride, int srcFormat, - void *dst, unsigned int dstW, unsigned int dstH, unsigned int dstStride, int dstFormat, - unsigned int srcCropW, unsigned int srcCropH, unsigned int srcCropLeft, unsigned int srcCropTop, - unsigned int dstCropW, unsigned int dstCropH, unsigned int dstCropLeft, unsigned int dstCropTop) -{ +int 
ImageScalerCore::cropCompose(void* src, unsigned int srcW, unsigned int srcH, + unsigned int srcStride, int srcFormat, void* dst, + unsigned int dstW, unsigned int dstH, unsigned int dstStride, + int dstFormat, unsigned int srcCropW, unsigned int srcCropH, + unsigned int srcCropLeft, unsigned int srcCropTop, + unsigned int dstCropW, unsigned int dstCropH, + unsigned int dstCropLeft, unsigned int dstCropTop) { static const unsigned int MAXVAL = 65536; static const int ALLOW_DOWNSCALING = 1; // Check that we support the formats - if ((srcFormat != V4L2_PIX_FMT_NV12 && - srcFormat != V4L2_PIX_FMT_NV21) || + if ((srcFormat != V4L2_PIX_FMT_NV12 && srcFormat != V4L2_PIX_FMT_NV21) || srcFormat != dstFormat) { LOGE("Format conversion is not yet supported"); return UNKNOWN_ERROR; @@ -834,25 +894,23 @@ int ImageScalerCore::cropCompose( return UNKNOWN_ERROR; } - if (srcStride == dstStride && srcCropW == srcW && srcW == dstW && - srcCropH == srcH && srcH == dstH && - dstCropW == dstW && dstCropH == dstH) { + if (srcStride == dstStride && srcCropW == srcW && srcW == dstW && srcCropH == srcH && + srcH == dstH && dstCropW == dstW && dstCropH == dstH) { // If no cropping/scaling is requested, just copy data cropComposeCopy(src, dst, srcStride * srcH * 3 / 2); return 0; } - if (!ALLOW_DOWNSCALING && - (dstCropH < srcCropH || dstCropW < srcCropW)) { + if (!ALLOW_DOWNSCALING && (dstCropH < srcCropH || dstCropW < srcCropW)) { LOGE("Trying to downscale when it is disabled"); return UNKNOWN_ERROR; } if (srcStride == srcW && dstStride == dstW) { // Upscaling both horizontally and vertically - cropComposeUpscaleNV12_bl( - src, srcH, srcStride, srcCropLeft, srcCropTop, srcCropW, srcCropH, - dst, dstH, dstStride, dstCropLeft, dstCropTop, dstCropW, dstCropH); + cropComposeUpscaleNV12_bl(src, srcH, srcStride, srcCropLeft, srcCropTop, srcCropW, srcCropH, + dst, dstH, dstStride, dstCropLeft, dstCropTop, dstCropW, + dstCropH); return 0; } @@ -868,40 +926,36 @@ int ImageScalerCore::cropCompose( * 
the crop rectangle in the source image is scaled to fill whole image * in the destination image. */ -int ImageScalerCore::cropComposeZoom(void *src, void *dst, - unsigned int width, unsigned int height, unsigned int stride, int format, - unsigned int srcCropW, unsigned int srcCropH, unsigned int srcCropLeft, unsigned int srcCropTop) -{ - return cropCompose(src, width, height, stride, format, - dst, width, height, stride, format, - srcCropW, srcCropH, srcCropLeft, srcCropTop, - width, height, 0, 0); +int ImageScalerCore::cropComposeZoom(void* src, void* dst, unsigned int width, unsigned int height, + unsigned int stride, int format, unsigned int srcCropW, + unsigned int srcCropH, unsigned int srcCropLeft, + unsigned int srcCropTop) { + return cropCompose(src, width, height, stride, format, dst, width, height, stride, format, + srcCropW, srcCropH, srcCropLeft, srcCropTop, width, height, 0, 0); } -void ImageScalerCore::cropComposeCopy(void *src, void *dst, unsigned int size) -{ - MEMCPY_S((int8_t *) dst, size, (int8_t *) src, size); +void ImageScalerCore::cropComposeCopy(void* src, void* dst, unsigned int size) { + MEMCPY_S((int8_t*)dst, size, (int8_t*)src, size); } // Bilinear scaling, chrominance with nearest neighbor -void ImageScalerCore::cropComposeUpscaleNV12_bl( - void *src, unsigned int srcH, unsigned int srcStride, - unsigned int srcCropLeft, unsigned int srcCropTop, - unsigned int srcCropW, unsigned int srcCropH, - void *dst, unsigned int dstH, unsigned int dstStride, - unsigned int dstCropLeft, unsigned int dstCropTop, - unsigned int dstCropW, unsigned int dstCropH) -{ +void ImageScalerCore::cropComposeUpscaleNV12_bl(void* src, unsigned int srcH, + unsigned int srcStride, unsigned int srcCropLeft, + unsigned int srcCropTop, unsigned int srcCropW, + unsigned int srcCropH, void* dst, unsigned int dstH, + unsigned int dstStride, unsigned int dstCropLeft, + unsigned int dstCropTop, unsigned int dstCropW, + unsigned int dstCropH) { static const int BILINEAR = 1; 
- static const unsigned int FP_1 = 1 << MFP; // Fixed point 1.0 - static const unsigned int FRACT = (1 << MFP) - 1; // Fractional part mask + static const unsigned int FP_1 = 1 << MFP; // Fixed point 1.0 + static const unsigned int FRACT = (1 << MFP) - 1; // Fractional part mask unsigned int dx, dy, sx, sy; - unsigned char *s = (unsigned char *)src; - unsigned char *d = (unsigned char *)dst; + unsigned char* s = (unsigned char*)src; + unsigned char* d = (unsigned char*)dst; unsigned int sx0, sy0, dx0, dy0, dx1, dy1; - unsigned int sxd = ((srcCropW<>1)) / dstCropW; - unsigned int syd = ((srcCropH<>1)) / dstCropH; + unsigned int sxd = ((srcCropW << MFP) + (dstCropW >> 1)) / dstCropW; + unsigned int syd = ((srcCropH << MFP) + (dstCropH >> 1)) / dstCropH; if (!src || !dst) { LOGE("buffer pointer is NULL"); @@ -919,26 +973,26 @@ void ImageScalerCore::cropComposeUpscaleNV12_bl( for (dx = dx0, sx = sx0; dx < dx1; dx++, sx += sxd) { unsigned int sxi = sx >> MFP; unsigned int syi = sy >> MFP; - unsigned int s0 = s[srcStride*syi+sxi]; + unsigned int s0 = s[srcStride * syi + sxi]; if (BILINEAR) { - unsigned int fx = sx & FRACT; // Fractional part + unsigned int fx = sx & FRACT; // Fractional part unsigned int fy = sy & FRACT; - unsigned int fx1 = FP_1 - fx; // 1 - fractional part + unsigned int fx1 = FP_1 - fx; // 1 - fractional part unsigned int fy1 = FP_1 - fy; - unsigned int s1 = s[srcStride*syi+sxi+1]; - unsigned int s2 = s[srcStride*(syi+1)+sxi]; - unsigned int s3 = s[srcStride*(syi+1)+sxi+1]; + unsigned int s1 = s[srcStride * syi + sxi + 1]; + unsigned int s2 = s[srcStride * (syi + 1) + sxi]; + unsigned int s3 = s[srcStride * (syi + 1) + sxi + 1]; unsigned int s4 = (s0 * fx1 + s1 * fx) >> MFP; unsigned int s5 = (s2 * fx1 + s3 * fx) >> MFP; s0 = (s4 * fy1 + s5 * fy) >> MFP; } - d[dstStride*dy+dx] = s0; + d[dstStride * dy + dx] = s0; } } // Upscale chrominance - s = (unsigned char *)src + srcStride*srcH; - d = (unsigned char *)dst + dstStride*dstH; + s = (unsigned 
char*)src + srcStride * srcH; + d = (unsigned char*)dst + dstStride * dstH; sx0 = srcCropLeft << (MFP - 1); sy0 = srcCropTop << (MFP - 1); dx0 = dstCropLeft >> 1; @@ -949,11 +1003,10 @@ void ImageScalerCore::cropComposeUpscaleNV12_bl( for (dx = dx0, sx = sx0; dx < dx1; dx++, sx += sxd) { unsigned int sxi = sx >> MFP; unsigned int syi = sy >> MFP; - d[dstStride*dy+dx*2+0] = s[srcStride*syi+sxi*2+0]; - d[dstStride*dy+dx*2+1] = s[srcStride*syi+sxi*2+1]; + d[dstStride * dy + dx * 2 + 0] = s[srcStride * syi + sxi * 2 + 0]; + d[dstStride * dy + dx * 2 + 1] = s[srcStride * syi + sxi * 2 + 1]; } } } -} // namespace icamera - +} // namespace icamera diff --git a/src/image_process/ImageScalerCore.h b/src/image_process/ImageScalerCore.h index eaeb536e..0aac8071 100644 --- a/src/image_process/ImageScalerCore.h +++ b/src/image_process/ImageScalerCore.h @@ -21,58 +21,57 @@ namespace icamera { * */ class ImageScalerCore { -public: - static void downScaleImage(void *src, void *dest, - int dest_w, int dest_h, int dest_stride, - int src_w, int src_h, int src_stride, - int format, int src_skip_lines_top = 0, - int src_skip_lines_bottom = 0); - static int cropCompose(void *src, unsigned int srcW, unsigned int srcH, unsigned int srcStride, int srcFormat, - void *dst, unsigned int dstW, unsigned int dstH, unsigned int dstStride, int dstFormat, - unsigned int srcCropW, unsigned int srcCropH, unsigned int srcCropLeft, unsigned int srcCropTop, - unsigned int dstCropW, unsigned int dstCropH, unsigned int dstCropLeft, unsigned int dstCropTop); - static int cropComposeZoom(void *src, void *dst, - unsigned int width, unsigned int height, unsigned int stride, int format, - unsigned int srcCropW, unsigned int srcCropH, unsigned int srcCropLeft, unsigned int srcCropTop); + public: + static void downScaleImage(void* src, void* dest, int dest_w, int dest_h, int dest_stride, + int src_w, int src_h, int src_stride, int format, + int src_skip_lines_top = 0, int src_skip_lines_bottom = 0); + static int 
cropCompose(void* src, unsigned int srcW, unsigned int srcH, unsigned int srcStride, + int srcFormat, void* dst, unsigned int dstW, unsigned int dstH, + unsigned int dstStride, int dstFormat, unsigned int srcCropW, + unsigned int srcCropH, unsigned int srcCropLeft, unsigned int srcCropTop, + unsigned int dstCropW, unsigned int dstCropH, unsigned int dstCropLeft, + unsigned int dstCropTop); + static int cropComposeZoom(void* src, void* dst, unsigned int width, unsigned int height, + unsigned int stride, int format, unsigned int srcCropW, + unsigned int srcCropH, unsigned int srcCropLeft, + unsigned int srcCropTop); -protected: - static void downScaleYUY2Image(unsigned char *dest, const unsigned char *src, - const int dest_w, const int dest_h, const int dest_stride, - const int src_w, const int src_h, const int src_stride); + protected: + static void downScaleYUY2Image(unsigned char* dest, const unsigned char* src, const int dest_w, + const int dest_h, const int dest_stride, const int src_w, + const int src_h, const int src_stride); - static void downScaleAndCropNv12Image(unsigned char *dest, const unsigned char *src, + static void downScaleAndCropNv12Image(unsigned char* dest, const unsigned char* src, const int dest_w, const int dest_h, const int dest_stride, const int src_w, const int src_h, const int src_stride, const int src_skip_lines_top = 0, const int src_skip_lines_bottom = 0); - static void trimNv12Image(unsigned char *dest, const unsigned char *src, - const int dest_w, const int dest_h, const int dest_stride, - const int src_w, const int src_h, const int src_stride, + static void trimNv12Image(unsigned char* dest, const unsigned char* src, const int dest_w, + const int dest_h, const int dest_stride, const int src_w, + const int src_h, const int src_stride, const int src_skip_lines_top = 0, const int src_skip_lines_bottom = 0); - static void downScaleAndCropNv12ImageQvga(unsigned char *dest, const unsigned char *src, + static void 
downScaleAndCropNv12ImageQvga(unsigned char* dest, const unsigned char* src, const int dest_stride, const int src_stride); - static void downScaleAndCropNv12ImageQcif(unsigned char *dest, const unsigned char *src, + static void downScaleAndCropNv12ImageQcif(unsigned char* dest, const unsigned char* src, const int dest_stride, const int src_stride); - static void downScaleNv12ImageFrom800x600ToQvga(unsigned char *dest, const unsigned char *src, + static void downScaleNv12ImageFrom800x600ToQvga(unsigned char* dest, const unsigned char* src, const int dest_stride, const int src_stride); -private: - static const int MFP = 16; // Fractional bits for fixed point calculations - -private: - static void cropComposeCopy(void *src, void *dst, unsigned int size); - static void cropComposeUpscaleNV12_bl( - void *src, unsigned int srcH, unsigned int srcStride, - unsigned int srcCropLeft, unsigned int srcCropTop, - unsigned int srcCropW, unsigned int srcCropH, - void *dst, unsigned int dstH, unsigned int dstStride, - unsigned int dstCropLeft, unsigned int dstCropTop, - unsigned int dstCropW, unsigned int dstCropH); + private: + static const int MFP = 16; // Fractional bits for fixed point calculations + private: + static void cropComposeCopy(void* src, void* dst, unsigned int size); + static void cropComposeUpscaleNV12_bl(void* src, unsigned int srcH, unsigned int srcStride, + unsigned int srcCropLeft, unsigned int srcCropTop, + unsigned int srcCropW, unsigned int srcCropH, void* dst, + unsigned int dstH, unsigned int dstStride, + unsigned int dstCropLeft, unsigned int dstCropTop, + unsigned int dstCropW, unsigned int dstCropH); }; -} // namespace icamera +} // namespace icamera diff --git a/src/image_process/PostProcessorBase.cpp b/src/image_process/PostProcessorBase.cpp index d0f9e2e1..70a289d9 100644 --- a/src/image_process/PostProcessorBase.cpp +++ b/src/image_process/PostProcessorBase.cpp @@ -277,10 +277,9 @@ status_t JpegProcess::doPostProcessing(const shared_ptr& 
thumbnailPackage.quality > 0); if (!isEncoded || thumbnailPackage.quality < 0) { - LOGW( - "Failed to generate thumbnail, isEncoded: %d, encoded thumbnail size: %d, " - "quality:%d", - isEncoded, thumbnailPackage.encodedDataSize, thumbnailPackage.quality); + LOGW("Failed to generate thumbnail, isEncoded: %d, encoded thumbnail size: %d, " + "quality:%d", + isEncoded, thumbnailPackage.encodedDataSize, thumbnailPackage.quality); } } diff --git a/src/image_process/ProcessType.h b/src/image_process/ProcessType.h index d7354ddd..afc156d9 100644 --- a/src/image_process/ProcessType.h +++ b/src/image_process/ProcessType.h @@ -27,4 +27,4 @@ enum PostProcessType { POST_PROCESS_JPEG_ENCODING = 1 << 4 }; -} // namespace icamera +} // namespace icamera diff --git a/src/image_process/chrome/ImageProcessorCore.cpp b/src/image_process/chrome/ImageProcessorCore.cpp index 58678ce3..514a0c74 100644 --- a/src/image_process/chrome/ImageProcessorCore.cpp +++ b/src/image_process/chrome/ImageProcessorCore.cpp @@ -75,9 +75,9 @@ status_t ImageProcessorCore::cropFrame(const std::shared_ptr(srcI420Buf.get()), srcI420BufSize, static_cast(output->data()), dstW, dstI420BufU, - (dstW + 1) / 2, dstI420BufV, (dstW + 1) / 2, left, top, - srcW, srcH, output->width(), dstH, - libyuv::RotationMode::kRotate0, libyuv::FourCC::FOURCC_I420); + (dstW + 1) / 2, dstI420BufV, (dstW + 1) / 2, left, top, srcW, srcH, + output->width(), dstH, libyuv::RotationMode::kRotate0, + libyuv::FourCC::FOURCC_I420); CheckAndLogError(ret != 0, UNKNOWN_ERROR, "ConvertToI420 failed"); uint8_t* dstBufUV = static_cast(output->data()) + dstW * dstH; diff --git a/src/image_process/chrome/ImageProcessorCore.h b/src/image_process/chrome/ImageProcessorCore.h index 54d4f1ef..e1dc1b46 100644 --- a/src/image_process/chrome/ImageProcessorCore.h +++ b/src/image_process/chrome/ImageProcessorCore.h @@ -24,21 +24,21 @@ namespace icamera { class ImageProcessorCore : public IImageProcessor { -public: + public: ImageProcessorCore(); 
~ImageProcessorCore() {} - virtual status_t cropFrame(const std::shared_ptr &input, - std::shared_ptr &output); - virtual status_t scaleFrame(const std::shared_ptr &input, - std::shared_ptr &output); - virtual status_t rotateFrame(const std::shared_ptr &input, - std::shared_ptr &output, - int angle, std::vector &rotateBuf); - virtual status_t convertFrame(const std::shared_ptr &input, - std::shared_ptr &output); - -private: + virtual status_t cropFrame(const std::shared_ptr& input, + std::shared_ptr& output); + virtual status_t scaleFrame(const std::shared_ptr& input, + std::shared_ptr& output); + virtual status_t rotateFrame(const std::shared_ptr& input, + std::shared_ptr& output, int angle, + std::vector& rotateBuf); + virtual status_t convertFrame(const std::shared_ptr& input, + std::shared_ptr& output); + + private: DISALLOW_COPY_AND_ASSIGN(ImageProcessorCore); std::unordered_map mRotationMode; diff --git a/src/isp_control/IspControlInfoMap.cpp b/src/isp_control/IspControlInfoMap.cpp index 69350d0d..1c3aa52a 100644 --- a/src/isp_control/IspControlInfoMap.cpp +++ b/src/isp_control/IspControlInfoMap.cpp @@ -27,18 +27,33 @@ struct IspControlInfo { }; static const IspControlInfo gIspControlInfoMap[] = { - { "wb_gains", camera_control_isp_ctrl_id_wb_gains, sizeof(camera_control_isp_wb_gains_t), INTEL_CONTROL_ISP_WB_GAINS }, - { "color_correction_matrix", camera_control_isp_ctrl_id_color_correction_matrix, sizeof(camera_control_isp_color_correction_matrix_t), INTEL_CONTROL_ISP_COLOR_CORRECTION_MATRIX }, - { "advanced_color_correction_matrix", camera_control_isp_ctrl_id_advanced_color_correction_matrix, sizeof(camera_control_isp_advanced_color_correction_matrix_t), INTEL_CONTROL_ISP_ADVANCED_COLOR_CORRECTION_MATRIX }, - { "bxt_csc", camera_control_isp_ctrl_id_bxt_csc, sizeof(camera_control_isp_bxt_csc_t), INTEL_CONTROL_ISP_BXT_CSC }, - { "bxt_demosaic", camera_control_isp_ctrl_id_bxt_demosaic, sizeof(camera_control_isp_bxt_demosaic_t), INTEL_CONTROL_ISP_BXT_DEMOSAIC 
}, - { "sc_iefd", camera_control_isp_ctrl_id_sc_iefd, sizeof(camera_control_isp_sc_iefd_t), INTEL_CONTROL_ISP_SC_IEFD }, - { "see", camera_control_isp_ctrl_id_see, sizeof(camera_control_isp_see_t), INTEL_CONTROL_ISP_SEE }, - { "bnlm", camera_control_isp_ctrl_id_bnlm, sizeof(camera_control_isp_bnlm_t), INTEL_CONTROL_ISP_BNLM }, - { "tnr5_21", camera_control_isp_ctrl_id_tnr5_21, sizeof(camera_control_isp_tnr5_21_t), INTEL_CONTROL_ISP_TNR5_21 }, - { "xnr_dss", camera_control_isp_ctrl_id_xnr_dss, sizeof(camera_control_isp_xnr_dss_t), INTEL_CONTROL_ISP_XNR_DSS }, - { "gamma_tone_map", camera_control_isp_ctrl_id_gamma_tone_map, sizeof(camera_control_isp_gamma_tone_map_t), INTEL_CONTROL_ISP_GAMMA_TONE_MAP }, - { "tnr5_22", camera_control_isp_ctrl_id_tnr5_22, sizeof(camera_control_isp_tnr5_22_t), INTEL_CONTROL_ISP_TNR5_22 }, - { "tnr5_25", camera_control_isp_ctrl_id_tnr5_25, sizeof(camera_control_isp_tnr5_25_t), INTEL_CONTROL_ISP_TNR5_25 }, + {"wb_gains", camera_control_isp_ctrl_id_wb_gains, sizeof(camera_control_isp_wb_gains_t), + INTEL_CONTROL_ISP_WB_GAINS}, + {"color_correction_matrix", camera_control_isp_ctrl_id_color_correction_matrix, + sizeof(camera_control_isp_color_correction_matrix_t), + INTEL_CONTROL_ISP_COLOR_CORRECTION_MATRIX}, + {"advanced_color_correction_matrix", + camera_control_isp_ctrl_id_advanced_color_correction_matrix, + sizeof(camera_control_isp_advanced_color_correction_matrix_t), + INTEL_CONTROL_ISP_ADVANCED_COLOR_CORRECTION_MATRIX}, + {"bxt_csc", camera_control_isp_ctrl_id_bxt_csc, sizeof(camera_control_isp_bxt_csc_t), + INTEL_CONTROL_ISP_BXT_CSC}, + {"bxt_demosaic", camera_control_isp_ctrl_id_bxt_demosaic, + sizeof(camera_control_isp_bxt_demosaic_t), INTEL_CONTROL_ISP_BXT_DEMOSAIC}, + {"sc_iefd", camera_control_isp_ctrl_id_sc_iefd, sizeof(camera_control_isp_sc_iefd_t), + INTEL_CONTROL_ISP_SC_IEFD}, + {"see", camera_control_isp_ctrl_id_see, sizeof(camera_control_isp_see_t), + INTEL_CONTROL_ISP_SEE}, + {"bnlm", camera_control_isp_ctrl_id_bnlm, 
sizeof(camera_control_isp_bnlm_t), + INTEL_CONTROL_ISP_BNLM}, + {"tnr5_21", camera_control_isp_ctrl_id_tnr5_21, sizeof(camera_control_isp_tnr5_21_t), + INTEL_CONTROL_ISP_TNR5_21}, + {"xnr_dss", camera_control_isp_ctrl_id_xnr_dss, sizeof(camera_control_isp_xnr_dss_t), + INTEL_CONTROL_ISP_XNR_DSS}, + {"gamma_tone_map", camera_control_isp_ctrl_id_gamma_tone_map, + sizeof(camera_control_isp_gamma_tone_map_t), INTEL_CONTROL_ISP_GAMMA_TONE_MAP}, + {"tnr5_22", camera_control_isp_ctrl_id_tnr5_22, sizeof(camera_control_isp_tnr5_22_t), + INTEL_CONTROL_ISP_TNR5_22}, + {"tnr5_25", camera_control_isp_ctrl_id_tnr5_25, sizeof(camera_control_isp_tnr5_25_t), + INTEL_CONTROL_ISP_TNR5_25}, }; - diff --git a/src/isp_control/IspControlUtils.cpp b/src/isp_control/IspControlUtils.cpp index 6264b688..66fb8b8d 100644 --- a/src/isp_control/IspControlUtils.cpp +++ b/src/isp_control/IspControlUtils.cpp @@ -28,8 +28,7 @@ namespace icamera { #include "IspControlInfoMap.cpp" -const char* IspControlUtils::getNameById(uint32_t ctrlId) -{ +const char* IspControlUtils::getNameById(uint32_t ctrlId) { int size = ARRAY_SIZE(gIspControlInfoMap); for (int i = 0; i < size; i++) { if (gIspControlInfoMap[i].ctrlId == ctrlId) { @@ -40,8 +39,7 @@ const char* IspControlUtils::getNameById(uint32_t ctrlId) return nullptr; } -uint32_t IspControlUtils::getIdByName(const char* name) -{ +uint32_t IspControlUtils::getIdByName(const char* name) { int size = ARRAY_SIZE(gIspControlInfoMap); for (int i = 0; i < size; i++) { if (strcmp(gIspControlInfoMap[i].name, name) == 0) { @@ -52,8 +50,7 @@ uint32_t IspControlUtils::getIdByName(const char* name) return 0; } -uint32_t IspControlUtils::getSizeById(uint32_t ctrlId) -{ +uint32_t IspControlUtils::getSizeById(uint32_t ctrlId) { int size = ARRAY_SIZE(gIspControlInfoMap); for (int i = 0; i < size; i++) { if (gIspControlInfoMap[i].ctrlId == ctrlId) { @@ -64,8 +61,7 @@ uint32_t IspControlUtils::getSizeById(uint32_t ctrlId) return 0; } -uint32_t 
IspControlUtils::getTagById(uint32_t ctrlId) -{ +uint32_t IspControlUtils::getTagById(uint32_t ctrlId) { int size = ARRAY_SIZE(gIspControlInfoMap); for (int i = 0; i < size; i++) { if (gIspControlInfoMap[i].ctrlId == ctrlId) { @@ -78,8 +74,7 @@ uint32_t IspControlUtils::getTagById(uint32_t ctrlId) #include "ia_types.h" -void* IspControlUtils::findDataById(uint32_t ctrlId, void* fullData, uint32_t size) -{ +void* IspControlUtils::findDataById(uint32_t ctrlId, void* fullData, uint32_t size) { CheckAndLogError(fullData == nullptr || size == 0, nullptr, "Invalid input parameters"); char* pData = (char*)fullData; @@ -100,5 +95,4 @@ void* IspControlUtils::findDataById(uint32_t ctrlId, void* fullData, uint32_t si return nullptr; } -} // end of icamera - +} // namespace icamera diff --git a/src/isp_control/IspControlUtils.h b/src/isp_control/IspControlUtils.h index 3a9e4538..70308f7c 100644 --- a/src/isp_control/IspControlUtils.h +++ b/src/isp_control/IspControlUtils.h @@ -21,20 +21,20 @@ namespace icamera { namespace IspControlUtils { - const char* getNameById(uint32_t ctrlId); +const char* getNameById(uint32_t ctrlId); - uint32_t getIdByName(const char* name); +uint32_t getIdByName(const char* name); - uint32_t getSizeById(uint32_t ctrlId); +uint32_t getSizeById(uint32_t ctrlId); - uint32_t getTagById(uint32_t ctrlId); +uint32_t getTagById(uint32_t ctrlId); - /** - * Find the data pointer which belongs to ctrlId in fullData - * - * Return NULL if cannot find ctrlId in fullData, otherwise will return the data pointer. - */ - void* findDataById(uint32_t ctrlId, void* fullData, uint32_t size); -} // end of IspControlUtils +/** + * Find the data pointer which belongs to ctrlId in fullData + * + * Return NULL if cannot find ctrlId in fullData, otherwise will return the data pointer. 
+ */ +void* findDataById(uint32_t ctrlId, void* fullData, uint32_t size); +} // namespace IspControlUtils -} // end of icamera +} // namespace icamera diff --git a/src/iutils/CameraDump.cpp b/src/iutils/CameraDump.cpp index bfd80325..e16d51ca 100644 --- a/src/iutils/CameraDump.cpp +++ b/src/iutils/CameraDump.cpp @@ -394,8 +394,9 @@ void CameraDump::dumpImage(int cameraId, const shared_ptr& camBuff int fd = camBuffer->getFd(); int bufferSize = camBuffer->getBufferSize(); int memoryType = camBuffer->getMemory(); - void* pBuf = (memoryType == V4L2_MEMORY_DMABUF) ? CameraBuffer::mapDmaBufferAddr(fd, bufferSize) - : camBuffer->getBufferAddr(); + void* pBuf = (memoryType == V4L2_MEMORY_DMABUF) ? + CameraBuffer::mapDmaBufferAddr(fd, bufferSize) : + camBuffer->getBufferAddr(); LOG1("@%s, fd:%d, buffersize:%d, buf:%p, memoryType:%d, fileName:%s", __func__, fd, bufferSize, pBuf, memoryType, fileName.c_str()); writeData(pBuf, bufferSize, fileName.c_str()); diff --git a/src/iutils/CameraDump.h b/src/iutils/CameraDump.h index c75c5ca2..cb8f583f 100644 --- a/src/iutils/CameraDump.h +++ b/src/iutils/CameraDump.h @@ -38,35 +38,35 @@ extern char gDumpPath[50]; // Dump bit mask definition enum { // IPU Buffer dump (bit[0-3]), export cameraDump=0xf - DUMP_ISYS_BUFFER = 1 << 0, - DUMP_PSYS_OUTPUT_BUFFER = 1 << 1, - DUMP_PSYS_INTERM_BUFFER = 1 << 2, // dump Psys intermediate buffers like PreGDC output - DUMP_EXECUTOR_OUTPUT = 1 << 3, + DUMP_ISYS_BUFFER = 1 << 0, + DUMP_PSYS_OUTPUT_BUFFER = 1 << 1, + DUMP_PSYS_INTERM_BUFFER = 1 << 2, // dump Psys intermediate buffers like PreGDC output + DUMP_EXECUTOR_OUTPUT = 1 << 3, // Other buffer dump (bit[4-7]), export cameraDump=0xf0 - DUMP_JPEG_BUFFER = 1 << 4, - DUMP_UT_BUFFER = 1 << 5, - DUMP_SW_IMG_PROC_OUTPUT = 1 << 6, - DUMP_GPU_TNR = 1 << 7, + DUMP_JPEG_BUFFER = 1 << 4, + DUMP_UT_BUFFER = 1 << 5, + DUMP_SW_IMG_PROC_OUTPUT = 1 << 6, + DUMP_GPU_TNR = 1 << 7, // PG/PAL/Stats dump (bit[8-11]), export cameraDump=0xf00 - DUMP_PSYS_PAL = 1 << 8, 
// ISP param binary - DUMP_PSYS_PG = 1 << 9, // PSYS whole PG dump assisted by libiacss - DUMP_PSYS_DECODED_STAT = 1 << 10, // p2p decoded statistics + DUMP_PSYS_PAL = 1 << 8, // ISP param binary + DUMP_PSYS_PG = 1 << 9, // PSYS whole PG dump assisted by libiacss + DUMP_PSYS_DECODED_STAT = 1 << 10, // p2p decoded statistics // AAL dump (bit[12-15]), export cameraDump=0xf000 - DUMP_AAL_OUTPUT = 1 << 12, - DUMP_AAL_INPUT = 1 << 13, + DUMP_AAL_OUTPUT = 1 << 12, + DUMP_AAL_INPUT = 1 << 13, // Other dump (bit[16-19]), export cameraDump=0xf0000 - DUMP_NVM_DATA = 1 << 16, - DUMP_MAKER_NOTE = 1 << 17, - DUMP_EMBEDDED_METADATA = 1 << 18, + DUMP_NVM_DATA = 1 << 16, + DUMP_MAKER_NOTE = 1 << 17, + DUMP_EMBEDDED_METADATA = 1 << 18, }; enum { - DUMP_FORMAT_NORMAL = 1 << 0, // Normal format - DUMP_FORMAT_IQSTUDIO = 1 << 1, // IQStudio format + DUMP_FORMAT_NORMAL = 1 << 0, // Normal format + DUMP_FORMAT_IQSTUDIO = 1 << 1, // IQStudio format }; const int MAX_NAME_LEN = 256; diff --git a/src/iutils/CameraLog.cpp b/src/iutils/CameraLog.cpp index 33c2426f..876a27b3 100644 --- a/src/iutils/CameraLog.cpp +++ b/src/iutils/CameraLog.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,6 +29,10 @@ #include #endif +#ifdef CAMERA_SYS_LOG +#include +#endif + #include "CameraLog.h" #include "Trace.h" #include "iutils/Utils.h" @@ -87,7 +91,53 @@ __attribute__((__format__(__printf__, 3, 0))) static void printLog(const char* m break; } } -#else +#endif + +#ifdef CAMERA_SYS_LOG +__attribute__((__format__(__printf__, 3, 0))) static void printLog(const char* module, int level, + const char* fmt, va_list ap) { + const char* levelStr = nullptr; + int priority; + + switch (level) { + case CAMERA_DEBUG_LOG_LEVEL1: + levelStr = "LV1"; + priority = LOG_DEBUG; + break; + case CAMERA_DEBUG_LOG_LEVEL2: + levelStr = "LV2"; + priority = LOG_DEBUG; + break; + case CAMERA_DEBUG_LOG_LEVEL3: + levelStr = "LV3"; + priority = LOG_DEBUG; + break; + case CAMERA_DEBUG_LOG_INFO: + levelStr = "INF"; + priority = LOG_INFO; + break; + case CAMERA_DEBUG_LOG_ERR: + levelStr = "ERR"; + priority = LOG_ERR; + break; + case CAMERA_DEBUG_LOG_WARNING: + levelStr = "WAR"; + priority = LOG_WARNING; + break; + default: + levelStr = "UKN"; + priority = LOG_DEBUG; + break; + } + + char format[1024] = {0}; + snprintf(format, sizeof(format), "[%s]: CamHAL_%s: %s", levelStr, module, fmt); + openlog("cameraHal", LOG_PID | LOG_CONS, LOG_USER); + vsyslog(priority, format, ap); + closelog(); +} +#endif + static void getLogTime(char* timeBuf, int bufLen) { // The format of time is: 01-22 15:24:53.071 struct timeval tv; @@ -114,7 +164,6 @@ __attribute__((__format__(__printf__, 3, 0))) static void printLog(const char* m vfprintf(stdout, fmt, ap); fprintf(stdout, "\n"); } -#endif void doLogBody(int logTag, int level, int grpPosition, const char* fmt, ...) { if (!(level & globalGroupsDescp[grpPosition].level)) return; @@ -143,7 +192,7 @@ void doLogBody(int logTag, int level, const char* fmt, ...) 
{ namespace Log { #define DEFAULT_LOG_SINK "GLOG" -#define FILELOG_SINK "FILELOG" +#define FILELOG_SINK "FILELOG" static void initLogSinks() { #ifdef CAL_BUILD @@ -160,9 +209,25 @@ static void initLogSinks() { } else { globalLogSink = new StdconLogSink(); } -#else - globalLogSink = new StdconLogSink(); #endif + +#ifdef CAMERA_SYS_LOG + const char* sinkName = ::getenv("logSink"); + + if (!sinkName) { + sinkName = DEFAULT_LOG_SINK; + } + + if (!::strcmp(sinkName, DEFAULT_LOG_SINK)) { + globalLogSink = new SysLogSink(); + } else if (!::strcmp(sinkName, FILELOG_SINK)) { + globalLogSink = new FileLogSink; + } else { + globalLogSink = new StdconLogSink(); + } +#endif + + globalLogSink = new StdconLogSink(); } static void setLogTagLevel() { @@ -260,8 +325,7 @@ bool isDebugLevelEnable(int level) { bool isLogTagEnabled(int tag, int level) { if (tag < 0 || tag >= TAGS_MAX_NUM) return false; - return level ? (globalGroupsDescp[tag].level & level) - : (globalGroupsDescp[tag].level > 0); + return level ? 
(globalGroupsDescp[tag].level & level) : (globalGroupsDescp[tag].level > 0); } // DUMP_ENTITY_TOPOLOGY_S diff --git a/src/iutils/CameraShm.cpp b/src/iutils/CameraShm.cpp index 1ba8254e..403402a8 100644 --- a/src/iutils/CameraShm.cpp +++ b/src/iutils/CameraShm.cpp @@ -233,7 +233,8 @@ void CameraSharedMemory::openSemLock() { // Wait the semaphore lock for 2 seconds clock_gettime(CLOCK_REALTIME, &ts); ts.tv_sec += CAMERA_SHM_LOCK_TIME; - while ((ret = sem_timedwait(mSemLock, &ts)) == -1 && errno == EINTR); + while ((ret = sem_timedwait(mSemLock, &ts)) == -1 && errno == EINTR) { + } if (ret == 0) { sem_post(mSemLock); return; @@ -259,7 +260,8 @@ int CameraSharedMemory::lock() { // Wait the semaphore lock for 2 seconds clock_gettime(CLOCK_REALTIME, &ts); ts.tv_sec += CAMERA_SHM_LOCK_TIME; - while (((ret = sem_timedwait(mSemLock, &ts)) == -1) && errno == EINTR); + while (((ret = sem_timedwait(mSemLock, &ts)) == -1) && errno == EINTR) { + } CheckAndLogError(ret != 0, UNKNOWN_ERROR, "Lock failed or timed out"); return OK; diff --git a/src/iutils/CameraTrace.cpp b/src/iutils/CameraTrace.cpp index f419295b..dbcd260a 100644 --- a/src/iutils/CameraTrace.cpp +++ b/src/iutils/CameraTrace.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2020-2021 Intel Corporation. + * Copyright (C) 2020-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ #include "src/iutils/CameraTrace.h" #include +#include #include #include diff --git a/src/iutils/Errors.h b/src/iutils/Errors.h index 8ef824f0..5bd608c9 100644 --- a/src/iutils/Errors.h +++ b/src/iutils/Errors.h @@ -22,7 +22,7 @@ namespace icamera { -typedef int status_t; +typedef int status_t; /* * Error codes. @@ -30,36 +30,37 @@ typedef int status_t; */ enum { - OK = 0, // Everything's swell. + OK = 0, // Everything's swell. 
- UNKNOWN_ERROR = (-2147483647-1), // INT32_MIN value + UNKNOWN_ERROR = (-2147483647 - 1), // INT32_MIN value - NO_MEMORY = -ENOMEM, - INVALID_OPERATION = -ENOSYS, - BAD_VALUE = -EINVAL, - BAD_TYPE = (UNKNOWN_ERROR + 1), - NAME_NOT_FOUND = -ENOENT, - PERMISSION_DENIED = -EPERM, - NO_INIT = -ENODEV, - ALREADY_EXISTS = -EEXIST, - DEAD_OBJECT = -EPIPE, - FAILED_TRANSACTION = (UNKNOWN_ERROR + 2), - JPARKS_BROKE_IT = -EPIPE, + NO_MEMORY = -ENOMEM, + INVALID_OPERATION = -ENOSYS, + BAD_VALUE = -EINVAL, + BAD_TYPE = (UNKNOWN_ERROR + 1), + NAME_NOT_FOUND = -ENOENT, + PERMISSION_DENIED = -EPERM, + NO_INIT = -ENODEV, + ALREADY_EXISTS = -EEXIST, + DEAD_OBJECT = -EPIPE, + FAILED_TRANSACTION = (UNKNOWN_ERROR + 2), + JPARKS_BROKE_IT = -EPIPE, + DEV_BUSY = -EBUSY, #if !defined(HAVE_MS_C_RUNTIME) - BAD_INDEX = -EOVERFLOW, - NOT_ENOUGH_DATA = -ENODATA, - WOULD_BLOCK = -EWOULDBLOCK, - TIMED_OUT = -ETIMEDOUT, + BAD_INDEX = -EOVERFLOW, + NOT_ENOUGH_DATA = -ENODATA, + WOULD_BLOCK = -EWOULDBLOCK, + TIMED_OUT = -ETIMEDOUT, UNKNOWN_TRANSACTION = -EBADMSG, #else - BAD_INDEX = -E2BIG, - NOT_ENOUGH_DATA = (UNKNOWN_ERROR + 3), - WOULD_BLOCK = (UNKNOWN_ERROR + 4), - TIMED_OUT = (UNKNOWN_ERROR + 5), + BAD_INDEX = -E2BIG, + NOT_ENOUGH_DATA = (UNKNOWN_ERROR + 3), + WOULD_BLOCK = (UNKNOWN_ERROR + 4), + TIMED_OUT = (UNKNOWN_ERROR + 5), UNKNOWN_TRANSACTION = (UNKNOWN_ERROR + 6), #endif - FDS_NOT_ALLOWED = (UNKNOWN_ERROR + 7), - NO_ENTRY = (UNKNOWN_ERROR + 8), + FDS_NOT_ALLOWED = (UNKNOWN_ERROR + 7), + NO_ENTRY = (UNKNOWN_ERROR + 8), }; -} // namespace icamera +} // namespace icamera diff --git a/src/iutils/LogSink.cpp b/src/iutils/LogSink.cpp index 1a55d2d5..19dc6e34 100644 --- a/src/iutils/LogSink.cpp +++ b/src/iutils/LogSink.cpp @@ -27,10 +27,15 @@ #include #include - #include "iutils/LogSink.h" #include "iutils/Utils.h" +#ifdef CAMERA_SYS_LOG +#include +#include +#include +#include +#endif namespace icamera { extern const char* cameraDebugLogToString(int level); #define CAMERA_DEBUG_LOG_ERR (1 << 
5) @@ -43,8 +48,8 @@ const char* GLogSink::getName() const { void GLogSink::sendOffLog(LogItem logItem) { char prefix[32]; - ::snprintf(prefix, sizeof(prefix), "CamHAL[%s]: ", - icamera::cameraDebugLogToString(logItem.level)); + ::snprintf(prefix, sizeof(prefix), + "CamHAL[%s]: ", icamera::cameraDebugLogToString(logItem.level)); switch (logItem.level) { case CAMERA_DEBUG_LOG_ERR: @@ -61,8 +66,8 @@ void GLogSink::sendOffLog(LogItem logItem) { break; } } - #endif + const char* StdconLogSink::getName() const { return "Stdcon LOG"; } @@ -132,4 +137,30 @@ void FileLogSink::sendOffLog(LogItem logItem) { fflush(mFp); } +#ifdef CAMERA_SYS_LOG +SysLogSink::SysLogSink() {} + +SysLogSink::~SysLogSink() {} + +const char* SysLogSink::getName() const { + return "SYS LOG"; +} + +void SysLogSink::sendOffLog(LogItem logItem) { +#define TIME_BUF_SIZE 128 + char logMsg[500] = {0}; + char timeInfo[TIME_BUF_SIZE] = {0}; + setLogTime(timeInfo); + const char* levelStr = icamera::cameraDebugLogToString(logItem.level); + snprintf(logMsg, sizeof(logMsg), "[%s] CamHAL[%s] %s\n", timeInfo, levelStr, logItem.logEntry); + std::map levelMap{ + {"LV1", LOG_DEBUG}, {"LV2", LOG_DEBUG}, {"LV3", LOG_DEBUG}, {"INF", LOG_INFO}, + {"ERR", LOG_ERR}, {"WAR", LOG_WARNING}, {"UKN", LOG_DEBUG}}; + + openlog("cameraHal", LOG_PID | LOG_CONS, LOG_USER); + syslog(levelMap[levelStr], "%s", logMsg); + closelog(); +} +#endif + }; // namespace icamera diff --git a/src/iutils/LogSink.h b/src/iutils/LogSink.h index f6f38050..edf507b4 100644 --- a/src/iutils/LogSink.h +++ b/src/iutils/LogSink.h @@ -62,6 +62,16 @@ class StdconLogSink : public LogOutputSink { void sendOffLog(LogItem logItem) override; }; +#ifdef CAMERA_SYS_LOG +class SysLogSink : public LogOutputSink { + public: + SysLogSink(); + ~SysLogSink(); + const char* getName() const override; + void sendOffLog(LogItem logItem) override; +}; +#endif + class FileLogSink : public LogOutputSink { public: FileLogSink(); diff --git a/src/iutils/ModuleTags.cpp 
b/src/iutils/ModuleTags.cpp index 49993e98..48f0db62 100644 --- a/src/iutils/ModuleTags.cpp +++ b/src/iutils/ModuleTags.cpp @@ -102,7 +102,6 @@ const char* tagNames[] = { "HAL_jpeg", "HAL_multi_streams_test", "HAL_rotation_test", - "HAL_supported_streams_test", "HAL_yuv", "HalV3Utils", "I3AControlFactory", @@ -129,6 +128,7 @@ const char* tagNames[] = { "IntelFaceDetection", "IntelFaceDetectionClient", "IntelGPUAlgoServer", + "IntelIC2", "IntelPGParam", "IntelPGParamClient", "IntelPGParamS", @@ -177,6 +177,8 @@ const char* tagNames[] = { "ResultProcessor", "SWJpegEncoder", "SWPostProcessor", + "SchedPolicy", + "Scheduler", "SensorHwCtrl", "SensorManager", "SensorOB", diff --git a/src/iutils/ModuleTags.h b/src/iutils/ModuleTags.h index b114979a..bb5bea21 100644 --- a/src/iutils/ModuleTags.h +++ b/src/iutils/ModuleTags.h @@ -110,33 +110,33 @@ enum ModuleTags { GENERATED_TAGS_HAL_jpeg = 80, GENERATED_TAGS_HAL_multi_streams_test = 81, GENERATED_TAGS_HAL_rotation_test = 82, - GENERATED_TAGS_HAL_supported_streams_test = 83, - GENERATED_TAGS_HAL_yuv = 84, - GENERATED_TAGS_HalV3Utils = 85, - GENERATED_TAGS_I3AControlFactory = 86, - GENERATED_TAGS_IA_CIPR_UTILS = 87, - GENERATED_TAGS_ICamera = 88, - GENERATED_TAGS_IFaceDetection = 89, - GENERATED_TAGS_IPCIntelPGParam = 90, - GENERATED_TAGS_IPC_FACE_DETECTION = 91, - GENERATED_TAGS_IPC_GRAPH_CONFIG = 92, - GENERATED_TAGS_ImageProcessorCore = 93, - GENERATED_TAGS_ImageScalerCore = 94, - GENERATED_TAGS_Intel3AParameter = 95, - GENERATED_TAGS_IntelAEStateMachine = 96, - GENERATED_TAGS_IntelAFStateMachine = 97, - GENERATED_TAGS_IntelAWBStateMachine = 98, - GENERATED_TAGS_IntelAlgoClient = 99, - GENERATED_TAGS_IntelAlgoCommonClient = 100, - GENERATED_TAGS_IntelAlgoServer = 101, - GENERATED_TAGS_IntelCPUAlgoServer = 102, - GENERATED_TAGS_IntelCca = 103, - GENERATED_TAGS_IntelCcaClient = 104, - GENERATED_TAGS_IntelCcaServer = 105, - GENERATED_TAGS_IntelFDServer = 106, - GENERATED_TAGS_IntelFaceDetection = 107, - 
GENERATED_TAGS_IntelFaceDetectionClient = 108, - GENERATED_TAGS_IntelGPUAlgoServer = 109, + GENERATED_TAGS_HAL_yuv = 83, + GENERATED_TAGS_HalV3Utils = 84, + GENERATED_TAGS_I3AControlFactory = 85, + GENERATED_TAGS_IA_CIPR_UTILS = 86, + GENERATED_TAGS_ICamera = 87, + GENERATED_TAGS_IFaceDetection = 88, + GENERATED_TAGS_IPCIntelPGParam = 89, + GENERATED_TAGS_IPC_FACE_DETECTION = 90, + GENERATED_TAGS_IPC_GRAPH_CONFIG = 91, + GENERATED_TAGS_ImageProcessorCore = 92, + GENERATED_TAGS_ImageScalerCore = 93, + GENERATED_TAGS_Intel3AParameter = 94, + GENERATED_TAGS_IntelAEStateMachine = 95, + GENERATED_TAGS_IntelAFStateMachine = 96, + GENERATED_TAGS_IntelAWBStateMachine = 97, + GENERATED_TAGS_IntelAlgoClient = 98, + GENERATED_TAGS_IntelAlgoCommonClient = 99, + GENERATED_TAGS_IntelAlgoServer = 100, + GENERATED_TAGS_IntelCPUAlgoServer = 101, + GENERATED_TAGS_IntelCca = 102, + GENERATED_TAGS_IntelCcaClient = 103, + GENERATED_TAGS_IntelCcaServer = 104, + GENERATED_TAGS_IntelFDServer = 105, + GENERATED_TAGS_IntelFaceDetection = 106, + GENERATED_TAGS_IntelFaceDetectionClient = 107, + GENERATED_TAGS_IntelGPUAlgoServer = 108, + GENERATED_TAGS_IntelIC2 = 109, GENERATED_TAGS_IntelPGParam = 110, GENERATED_TAGS_IntelPGParamClient = 111, GENERATED_TAGS_IntelPGParamS = 112, @@ -185,35 +185,37 @@ enum ModuleTags { GENERATED_TAGS_ResultProcessor = 155, GENERATED_TAGS_SWJpegEncoder = 156, GENERATED_TAGS_SWPostProcessor = 157, - GENERATED_TAGS_SensorHwCtrl = 158, - GENERATED_TAGS_SensorManager = 159, - GENERATED_TAGS_SensorOB = 160, - GENERATED_TAGS_ShareRefer = 161, - GENERATED_TAGS_SofSource = 162, - GENERATED_TAGS_StreamBuffer = 163, - GENERATED_TAGS_SwImageConverter = 164, - GENERATED_TAGS_SwImageProcessor = 165, - GENERATED_TAGS_SyncManager = 166, - GENERATED_TAGS_SysCall = 167, - GENERATED_TAGS_TCPServer = 168, - GENERATED_TAGS_Thread = 169, - GENERATED_TAGS_Trace = 170, - GENERATED_TAGS_TunningParser = 171, - GENERATED_TAGS_Utils = 172, - GENERATED_TAGS_V4l2DeviceFactory = 173, - 
GENERATED_TAGS_V4l2_device_cc = 174, - GENERATED_TAGS_V4l2_subdevice_cc = 175, - GENERATED_TAGS_V4l2_video_node_cc = 176, - GENERATED_TAGS_VendorTags = 177, - GENERATED_TAGS_camera_metadata_tests = 178, - GENERATED_TAGS_icamera_metadata_base = 179, - GENERATED_TAGS_metadata_test = 180, - ST_FPS = 181, - ST_GPU_TNR = 182, - ST_STATS = 183, + GENERATED_TAGS_SchedPolicy = 158, + GENERATED_TAGS_Scheduler = 159, + GENERATED_TAGS_SensorHwCtrl = 160, + GENERATED_TAGS_SensorManager = 161, + GENERATED_TAGS_SensorOB = 162, + GENERATED_TAGS_ShareRefer = 163, + GENERATED_TAGS_SofSource = 164, + GENERATED_TAGS_StreamBuffer = 165, + GENERATED_TAGS_SwImageConverter = 166, + GENERATED_TAGS_SwImageProcessor = 167, + GENERATED_TAGS_SyncManager = 168, + GENERATED_TAGS_SysCall = 169, + GENERATED_TAGS_TCPServer = 170, + GENERATED_TAGS_Thread = 171, + GENERATED_TAGS_Trace = 172, + GENERATED_TAGS_TunningParser = 173, + GENERATED_TAGS_Utils = 174, + GENERATED_TAGS_V4l2DeviceFactory = 175, + GENERATED_TAGS_V4l2_device_cc = 176, + GENERATED_TAGS_V4l2_subdevice_cc = 177, + GENERATED_TAGS_V4l2_video_node_cc = 178, + GENERATED_TAGS_VendorTags = 179, + GENERATED_TAGS_camera_metadata_tests = 180, + GENERATED_TAGS_icamera_metadata_base = 181, + GENERATED_TAGS_metadata_test = 182, + ST_FPS = 183, + ST_GPU_TNR = 184, + ST_STATS = 185, }; -#define TAGS_MAX_NUM 184 +#define TAGS_MAX_NUM 186 #endif // !!! DO NOT EDIT THIS FILE !!! diff --git a/src/iutils/RWLock.h b/src/iutils/RWLock.h index 2082811e..d9611d0d 100644 --- a/src/iutils/RWLock.h +++ b/src/iutils/RWLock.h @@ -36,38 +36,40 @@ namespace icamera { * recursive, i.e. the same thread can't lock it multiple times. 
*/ class RWLock { -public: - RWLock() {}; - ~RWLock(); + public: + RWLock(){}; + ~RWLock(); - status_t readLock(); - status_t tryReadLock(); - status_t writeLock(); - status_t tryWriteLock(); - void unlock(); + status_t readLock(); + status_t tryReadLock(); + status_t writeLock(); + status_t tryWriteLock(); + void unlock(); class AutoRLock { - public: - inline AutoRLock(RWLock& rwlock) : mLock(rwlock) { mLock.readLock(); } + public: + inline AutoRLock(RWLock& rwlock) : mLock(rwlock) { mLock.readLock(); } inline ~AutoRLock() { mLock.unlock(); } - private: + + private: RWLock& mLock; }; class AutoWLock { - public: - inline AutoWLock(RWLock& rwlock) : mLock(rwlock) { mLock.writeLock(); } + public: + inline AutoWLock(RWLock& rwlock) : mLock(rwlock) { mLock.writeLock(); } inline ~AutoWLock() { mLock.unlock(); } - private: + + private: RWLock& mLock; }; -private: + private: // A RWLock cannot be copied - RWLock(const RWLock&); - RWLock& operator = (const RWLock&); + RWLock(const RWLock&); + RWLock& operator=(const RWLock&); - pthread_rwlock_t mRWLock = PTHREAD_RWLOCK_INITIALIZER; + pthread_rwlock_t mRWLock = PTHREAD_RWLOCK_INITIALIZER; }; inline RWLock::~RWLock() { @@ -89,10 +91,10 @@ inline void RWLock::unlock() { pthread_rwlock_unlock(&mRWLock); } -#endif // HAVE_PTHREADS +#endif // HAVE_PTHREADS typedef RWLock::AutoRLock AutoRMutex; typedef RWLock::AutoWLock AutoWMutex; // --------------------------------------------------------------------------- -} // namespace icamera +} // namespace icamera // --------------------------------------------------------------------------- diff --git a/src/iutils/Utils.cpp b/src/iutils/Utils.cpp index 86eb8bd3..bac49883 100644 --- a/src/iutils/Utils.cpp +++ b/src/iutils/Utils.cpp @@ -473,7 +473,7 @@ int CameraUtils::getCompressedFrameSize(int format, int width, int height) { break; } case V4L2_PIX_FMT_NV12: - case V4L2_PIX_FMT_P010: { + case V4L2_PIX_FMT_P010: { int bpl = 0, heightAlignment = 0, tsBit = 0, tileSize = 0; if (format == 
V4L2_PIX_FMT_NV12) { bpl = width; @@ -489,8 +489,8 @@ int CameraUtils::getCompressedFrameSize(int format, int width, int height) { int alignedBpl = ALIGN(bpl, PSYS_COMPRESSION_TNR_STRIDE_ALIGNMENT); int alignedHeight = ALIGN(height, heightAlignment); int alignedHeightUV = ALIGN(height / UV_HEIGHT_DIVIDER, heightAlignment); - int imageBufferSize = ALIGN(alignedBpl * (alignedHeight + alignedHeightUV), - PSYS_COMPRESSION_PAGE_SIZE); + int imageBufferSize = + ALIGN(alignedBpl * (alignedHeight + alignedHeightUV), PSYS_COMPRESSION_PAGE_SIZE); int planarYTileStatus = CAMHAL_CEIL_DIV((alignedBpl * alignedHeight / tileSize) * tsBit, 8); planarYTileStatus = ALIGN(planarYTileStatus, PSYS_COMPRESSION_PAGE_SIZE); @@ -499,8 +499,9 @@ int CameraUtils::getCompressedFrameSize(int format, int width, int height) { planarUVTileStatus = ALIGN(planarUVTileStatus, PSYS_COMPRESSION_PAGE_SIZE); LOG1("@%s: format: %s, stride:%d height:%d imageSize:%d, tile_status_Y:%d, " - "tile_status_UV:%d", __func__, pixelCode2String(format), alignedBpl, - alignedHeight, imageBufferSize, planarYTileStatus, planarUVTileStatus); + "tile_status_UV:%d", + __func__, pixelCode2String(format), alignedBpl, alignedHeight, imageBufferSize, + planarYTileStatus, planarUVTileStatus); frameSize = imageBufferSize + planarYTileStatus + planarUVTileStatus; break; } @@ -556,9 +557,9 @@ int CameraUtils::getFrameSize(int format, int width, int height, bool needAligne } // Extra size should be at least one alignedBpl - int extraSize = isPlanarFormat(format) - ? (alignedBpl * getBpp(format) / 8 / getPlanarByte(format)) - : alignedBpl; + int extraSize = isPlanarFormat(format) ? 
+ (alignedBpl * getBpp(format) / 8 / getPlanarByte(format)) : + alignedBpl; extraSize = std::max(extraSize, 1024); return alignedBpl * bufferHeight + extraSize; diff --git a/src/metadata/ParameterGenerator.cpp b/src/metadata/ParameterGenerator.cpp index 052736ac..146471f3 100644 --- a/src/metadata/ParameterGenerator.cpp +++ b/src/metadata/ParameterGenerator.cpp @@ -182,19 +182,15 @@ int ParameterGenerator::getParameters(int64_t sequence, Parameters* param, bool AutoMutex l(mParamsLock); if (sequence < 0) { *param = mLastParam; - } else if (mRequestParamMap.find(sequence) != mRequestParamMap.end()) { - *param = mRequestParamMap[sequence]->param; } else { // Find nearest parameter - bool found = false; - for (auto it = mRequestParamMap.crbegin(); it != mRequestParamMap.crend(); ++it) { - if (it->first <= sequence) { - *param = mRequestParamMap[it->first]->param; - found = true; - break; - } + // The sequence of parameter should <= sequence + auto it = mRequestParamMap.upper_bound(sequence); + if (it == mRequestParamMap.begin()) { + LOGE("Can't find settings for seq %ld", sequence); + } else { + *param = (--it)->second->param; } - if (!found) LOGE("Can't find settings for seq %ld", sequence); } } @@ -266,22 +262,21 @@ int ParameterGenerator::updateWithAiqResultsL(int64_t sequence, Parameters* para colorGains.color_gains_rggb[3] = aiqResult->mPaResults.color_gains.b; params->setColorGains(colorGains); - camera_awb_state_t awbState = (fabs(aiqResult->mAwbResults.distance_from_convergence) < 0.001) - ? AWB_STATE_CONVERGED - : AWB_STATE_NOT_CONVERGED; + camera_awb_state_t awbState = (fabs(aiqResult->mAwbResults.distance_from_convergence) < 0.001) ? + AWB_STATE_CONVERGED : + AWB_STATE_NOT_CONVERGED; params->setAwbState(awbState); // Update AF related parameters camera_af_state_t afState = - (aiqResult->mAfResults.status == ia_aiq_af_status_local_search) - ? AF_STATE_LOCAL_SEARCH - : (aiqResult->mAfResults.status == ia_aiq_af_status_extended_search) - ? 
AF_STATE_EXTENDED_SEARCH - : ((aiqResult->mAfResults.status == ia_aiq_af_status_success) && - aiqResult->mAfResults.final_lens_position_reached) - ? AF_STATE_SUCCESS - : (aiqResult->mAfResults.status == ia_aiq_af_status_fail) ? AF_STATE_FAIL - : AF_STATE_IDLE; + (aiqResult->mAfResults.status == ia_aiq_af_status_local_search) ? + AF_STATE_LOCAL_SEARCH : + (aiqResult->mAfResults.status == ia_aiq_af_status_extended_search) ? + AF_STATE_EXTENDED_SEARCH : + ((aiqResult->mAfResults.status == ia_aiq_af_status_success) && + aiqResult->mAfResults.final_lens_position_reached) ? + AF_STATE_SUCCESS : + (aiqResult->mAfResults.status == ia_aiq_af_status_fail) ? AF_STATE_FAIL : AF_STATE_IDLE; params->setAfState(afState); bool lensMoving = false; @@ -440,19 +435,20 @@ int ParameterGenerator::updateCommonMetadata(Parameters* params, const AiqResult ParameterHelper::mergeTag(entry, params); if (Log::isLogTagEnabled(ST_STATS)) { - const cca::cca_out_stats *outStats = &aiqResult->mOutStats; - const rgbs_grid_block *rgbsPtr = aiqResult->mOutStats.rgbs_blocks; + const cca::cca_out_stats* outStats = &aiqResult->mOutStats; + const rgbs_grid_block* rgbsPtr = aiqResult->mOutStats.rgbs_blocks; int size = outStats->rgbs_grid.grid_width * outStats->rgbs_grid.grid_height; int sumLuma = 0; for (int j = 0; j < size; j++) { - sumLuma += (rgbsPtr[j].avg_b + rgbsPtr[j].avg_r + - (rgbsPtr[j].avg_gb + rgbsPtr[j].avg_gr) / 2) / 3; + sumLuma += ((rgbsPtr[j].avg_b + rgbsPtr[j].avg_r + + (rgbsPtr[j].avg_gb + rgbsPtr[j].avg_gr) / 2) / + 3); } LOG2(ST_STATS, "RGB stat %dx%d, sequence %lld, y_mean %d", outStats->rgbs_grid.grid_width, outStats->rgbs_grid.grid_height, - aiqResult->mSequence, size > 0 ? sumLuma/size : 0); + aiqResult->mSequence, size > 0 ? 
sumLuma / size : 0); } entry.tag = INTEL_VENDOR_CAMERA_RGBS_STATS_BLOCKS; @@ -463,9 +459,8 @@ int ParameterGenerator::updateCommonMetadata(Parameters* params, const AiqResult } if (aiqResult->mAiqParam.manualExpTimeUs <= 0 && aiqResult->mAiqParam.manualIso <= 0) { - int64_t range[] = - { aiqResult->mAeResults.exposures[0].exposure[0].low_limit_total_exposure, - aiqResult->mAeResults.exposures[0].exposure[0].up_limit_total_exposure }; + int64_t range[] = {aiqResult->mAeResults.exposures[0].exposure[0].low_limit_total_exposure, + aiqResult->mAeResults.exposures[0].exposure[0].up_limit_total_exposure}; LOG2("total et limits [%ldx%ld]", range[0], range[1]); entry.tag = INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET_RANGE; entry.type = ICAMERA_TYPE_INT64; diff --git a/src/metadata/ParameterHelper.cpp b/src/metadata/ParameterHelper.cpp index c1e043cc..d0b86946 100644 --- a/src/metadata/ParameterHelper.cpp +++ b/src/metadata/ParameterHelper.cpp @@ -24,14 +24,12 @@ namespace icamera { -void ParameterHelper::merge(const Parameters& src, Parameters* dst) -{ +void ParameterHelper::merge(const Parameters& src, Parameters* dst) { AutoRLock rl(src.mData); merge(getMetadata(src.mData), dst); } -void ParameterHelper::merge(const CameraMetadata& metadata, Parameters* dst) -{ +void ParameterHelper::merge(const CameraMetadata& metadata, Parameters* dst) { if (metadata.isEmpty()) { // Nothing needs to be merged return; @@ -47,34 +45,33 @@ void ParameterHelper::merge(const CameraMetadata& metadata, Parameters* dst) continue; } switch (entry.type) { - case ICAMERA_TYPE_BYTE: - getMetadata(dst->mData).update(entry.tag, entry.data.u8, entry.count); - break; - case ICAMERA_TYPE_INT32: - getMetadata(dst->mData).update(entry.tag, entry.data.i32, entry.count); - break; - case ICAMERA_TYPE_FLOAT: - getMetadata(dst->mData).update(entry.tag, entry.data.f, entry.count); - break; - case ICAMERA_TYPE_INT64: - getMetadata(dst->mData).update(entry.tag, entry.data.i64, entry.count); - break; - case 
ICAMERA_TYPE_DOUBLE: - getMetadata(dst->mData).update(entry.tag, entry.data.d, entry.count); - break; - case ICAMERA_TYPE_RATIONAL: - getMetadata(dst->mData).update(entry.tag, entry.data.r, entry.count); - break; - default: - LOGW("Invalid entry type, should never happen"); - break; + case ICAMERA_TYPE_BYTE: + getMetadata(dst->mData).update(entry.tag, entry.data.u8, entry.count); + break; + case ICAMERA_TYPE_INT32: + getMetadata(dst->mData).update(entry.tag, entry.data.i32, entry.count); + break; + case ICAMERA_TYPE_FLOAT: + getMetadata(dst->mData).update(entry.tag, entry.data.f, entry.count); + break; + case ICAMERA_TYPE_INT64: + getMetadata(dst->mData).update(entry.tag, entry.data.i64, entry.count); + break; + case ICAMERA_TYPE_DOUBLE: + getMetadata(dst->mData).update(entry.tag, entry.data.d, entry.count); + break; + case ICAMERA_TYPE_RATIONAL: + getMetadata(dst->mData).update(entry.tag, entry.data.r, entry.count); + break; + default: + LOGW("Invalid entry type, should never happen"); + break; } } const_cast(&metadata)->unlock(src); } -void ParameterHelper::copyMetadata(const Parameters& source, CameraMetadata* metadata) -{ +void ParameterHelper::copyMetadata(const Parameters& source, CameraMetadata* metadata) { CheckAndLogError((!metadata), VOID_VALUE, "null metadata to be updated!"); AutoRLock rl(source.mData); @@ -114,4 +111,4 @@ void ParameterHelper::mergeTag(const icamera_metadata_ro_entry& entry, Parameter } } -} // end of namespace icamera +} // end of namespace icamera diff --git a/src/metadata/ParameterHelper.h b/src/metadata/ParameterHelper.h index fcc9a141..7f99c337 100644 --- a/src/metadata/ParameterHelper.h +++ b/src/metadata/ParameterHelper.h @@ -31,7 +31,7 @@ class Parameters; * 2. Provide some interface for HAL to access and modify Parameters internal data. */ class ParameterHelper { -public: + public: /** * \brief Merge and update dst parameter buffer with another parameter instance. 
* @@ -93,7 +93,7 @@ class ParameterHelper { */ static const CameraMetadata& getMetadata(const Parameters& source); -private: + private: // The definitions and interfaces in this private section are only for Parameters internal // use, HAL other code shouldn't and cannot access them. friend class Parameters; @@ -105,7 +105,7 @@ class ParameterHelper { * details of Parameters. */ class ParameterData { - public: + public: ParameterData() {} ~ParameterData() {} @@ -115,24 +115,26 @@ class ParameterHelper { return *this; } - CameraMetadata mMetadata; // The data structure to save all of the parameters. - RWLock mRwLock; // Read-write lock to make Parameters class thread-safe + CameraMetadata mMetadata; // The data structure to save all of the parameters. + RWLock mRwLock; // Read-write lock to make Parameters class thread-safe }; // Customized wrappers of RWLock to make the implementation of Parameters much cleaner. class AutoRLock { - public: + public: AutoRLock(void* data) : mLock(getInternalData(data).mRwLock) { mLock.readLock(); } ~AutoRLock() { mLock.unlock(); } - private: + + private: RWLock& mLock; }; class AutoWLock { - public: + public: AutoWLock(void* data) : mLock(getInternalData(data).mRwLock) { mLock.writeLock(); } ~AutoWLock() { mLock.unlock(); } - private: + + private: RWLock& mLock; }; @@ -140,29 +142,23 @@ class ParameterHelper { return *reinterpret_cast(data); } - static void* createParameterData() { - return new ParameterData(); - } + static void* createParameterData() { return new ParameterData(); } static void* createParameterData(void* data) { return new ParameterData(getInternalData(data)); } - static void releaseParameterData(void* data) { - delete &getInternalData(data); - } + static void releaseParameterData(void* data) { delete &getInternalData(data); } static void deepCopy(void* srcData, void* dstData) { getInternalData(dstData) = getInternalData(srcData); } - static CameraMetadata& getMetadata(void* data) { - return 
getInternalData(data).mMetadata; - } + static CameraMetadata& getMetadata(void* data) { return getInternalData(data).mMetadata; } static icamera_metadata_ro_entry_t getMetadataEntry(void* data, uint32_t tag) { return const_cast(&getMetadata(data))->find(tag); } }; -} // namespace icamera +} // namespace icamera diff --git a/src/metadata/Parameters.cpp b/src/metadata/Parameters.cpp index 0839c00e..943cd48d 100644 --- a/src/metadata/Parameters.cpp +++ b/src/metadata/Parameters.cpp @@ -36,36 +36,31 @@ namespace icamera { Parameters::Parameters() : mData(ParameterHelper::createParameterData()) {} -Parameters::Parameters(const Parameters& other) : - mData(ParameterHelper::createParameterData(other.mData)) {} +Parameters::Parameters(const Parameters& other) + : mData(ParameterHelper::createParameterData(other.mData)) {} -Parameters& Parameters::operator=(const Parameters& other) -{ +Parameters& Parameters::operator=(const Parameters& other) { ParameterHelper::AutoWLock wl(mData); ParameterHelper::deepCopy(other.mData, mData); return *this; } -Parameters::~Parameters() -{ +Parameters::~Parameters() { ParameterHelper::releaseParameterData(mData); mData = nullptr; } -void Parameters::merge(const Parameters& other) -{ +void Parameters::merge(const Parameters& other) { ParameterHelper::merge(other, this); } -int Parameters::setAeMode(camera_ae_mode_t aeMode) -{ +int Parameters::setAeMode(camera_ae_mode_t aeMode) { uint8_t mode = aeMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AE_MODE, &mode, 1); } -int Parameters::getAeMode(camera_ae_mode_t& aeMode) const -{ +int Parameters::getAeMode(camera_ae_mode_t& aeMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_MODE); if (entry.count != 1) { @@ -75,15 +70,13 @@ int Parameters::getAeMode(camera_ae_mode_t& aeMode) const return OK; } -int Parameters::setAeState(camera_ae_state_t aeState) -{ +int 
Parameters::setAeState(camera_ae_state_t aeState) { uint8_t state = aeState; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AE_STATE, &state, 1); } -int Parameters::getAeState(camera_ae_state_t& aeState) const -{ +int Parameters::getAeState(camera_ae_state_t& aeState) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_STATE); if (entry.count != 1) { @@ -93,8 +86,7 @@ int Parameters::getAeState(camera_ae_state_t& aeState) const return OK; } -static int setRegions(CameraMetadata& metadata, camera_window_list_t regions, int tag) -{ +static int setRegions(CameraMetadata& metadata, camera_window_list_t regions, int tag) { if (regions.empty()) { // Nothing to do with an empty parameter. return INVALID_OPERATION; @@ -113,8 +105,7 @@ static int setRegions(CameraMetadata& metadata, camera_window_list_t regions, in return metadata.update(tag, values, ARRAY_SIZE(values)); } -static int getRegions(icamera_metadata_ro_entry_t entry, camera_window_list_t& regions) -{ +static int getRegions(icamera_metadata_ro_entry_t entry, camera_window_list_t& regions) { regions.clear(); const int ELEM_NUM = sizeof(camera_window_t) / sizeof(int); if (entry.count == 0 || entry.count % ELEM_NUM != 0) { @@ -134,27 +125,23 @@ static int getRegions(icamera_metadata_ro_entry_t entry, camera_window_list_t& r return OK; } -int Parameters::setAeRegions(camera_window_list_t aeRegions) -{ +int Parameters::setAeRegions(camera_window_list_t aeRegions) { ParameterHelper::AutoWLock wl(mData); return setRegions(ParameterHelper::getMetadata(mData), aeRegions, CAMERA_AE_REGIONS); } -int Parameters::getAeRegions(camera_window_list_t& aeRegions) const -{ +int Parameters::getAeRegions(camera_window_list_t& aeRegions) const { ParameterHelper::AutoRLock rl(mData); return getRegions(ParameterHelper::getMetadataEntry(mData, CAMERA_AE_REGIONS), aeRegions); } -int Parameters::setAeLock(bool lock) -{ +int 
Parameters::setAeLock(bool lock) { uint8_t lockValue = lock ? 1 : 0; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AE_LOCK, &lockValue, 1); } -int Parameters::getAeLock(bool& lock) const -{ +int Parameters::getAeLock(bool& lock) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_LOCK); if (entry.count != 1) { @@ -164,14 +151,13 @@ int Parameters::getAeLock(bool& lock) const return OK; } -int Parameters::setExposureTime(int64_t exposureTime) -{ +int Parameters::setExposureTime(int64_t exposureTime) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_SENSOR_EXPOSURE_TIME, &exposureTime, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_SENSOR_EXPOSURE_TIME, &exposureTime, + 1); } -int Parameters::getExposureTime(int64_t& exposureTime) const -{ +int Parameters::getExposureTime(int64_t& exposureTime) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_EXPOSURE_TIME); if (entry.count != 1) { @@ -181,14 +167,12 @@ int Parameters::getExposureTime(int64_t& exposureTime) const return OK; } -int Parameters::setSensitivityGain(float gain) -{ +int Parameters::setSensitivityGain(float gain) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SENSITIVITY_GAIN, &gain, 1); } -int Parameters::getSensitivityGain(float& gain) const -{ +int Parameters::getSensitivityGain(float& gain) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_SENSITIVITY_GAIN); if (entry.count != 1) { @@ -198,14 +182,12 @@ int Parameters::getSensitivityGain(float& gain) const return OK; } -int Parameters::setSensitivityIso(int32_t iso) -{ +int Parameters::setSensitivityIso(int32_t iso) { ParameterHelper::AutoWLock wl(mData); return 
ParameterHelper::getMetadata(mData).update(CAMERA_SENSOR_SENSITIVITY, &iso, 1); } -int Parameters::getSensitivityIso(int32_t& iso) const -{ +int Parameters::getSensitivityIso(int32_t& iso) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_SENSITIVITY); if (entry.count != 1) { @@ -215,14 +197,12 @@ int Parameters::getSensitivityIso(int32_t& iso) const return OK; } -int Parameters::setAeCompensation(int ev) -{ +int Parameters::setAeCompensation(int ev) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AE_COMPENSATION, &ev, 1); } -int Parameters::getAeCompensation(int& ev) const -{ +int Parameters::getAeCompensation(int& ev) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_COMPENSATION); if (entry.count != 1) { @@ -232,14 +212,12 @@ int Parameters::getAeCompensation(int& ev) const return OK; } -int Parameters::setFrameRate(float fps) -{ +int Parameters::setFrameRate(float fps) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_FRAME_RATE, &fps, 1); } -int Parameters::getFrameRate(float& fps) const -{ +int Parameters::getFrameRate(float& fps) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_FRAME_RATE); if (entry.count != 1) { @@ -249,15 +227,13 @@ int Parameters::getFrameRate(float& fps) const return OK; } -int Parameters::setAntiBandingMode(camera_antibanding_mode_t bandingMode) -{ +int Parameters::setAntiBandingMode(camera_antibanding_mode_t bandingMode) { uint8_t mode = bandingMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AE_ANTIBANDING_MODE, &mode, 1); } -int Parameters::getAntiBandingMode(camera_antibanding_mode_t& bandingMode) const -{ +int Parameters::getAntiBandingMode(camera_antibanding_mode_t& bandingMode) const { 
ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_ANTIBANDING_MODE); if (entry.count != 1) { @@ -267,15 +243,13 @@ int Parameters::getAntiBandingMode(camera_antibanding_mode_t& bandingMode) const return OK; } -int Parameters::setAwbMode(camera_awb_mode_t awbMode) -{ +int Parameters::setAwbMode(camera_awb_mode_t awbMode) { uint8_t mode = awbMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_MODE, &mode, 1); } -int Parameters::getAwbMode(camera_awb_mode_t& awbMode) const -{ +int Parameters::getAwbMode(camera_awb_mode_t& awbMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_MODE); if (entry.count != 1) { @@ -285,15 +259,13 @@ int Parameters::getAwbMode(camera_awb_mode_t& awbMode) const return OK; } -int Parameters::setAwbState(camera_awb_state_t awbState) -{ +int Parameters::setAwbState(camera_awb_state_t awbState) { uint8_t state = awbState; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_STATE, &state, 1); } -int Parameters::getAwbState(camera_awb_state_t& awbState) const -{ +int Parameters::getAwbState(camera_awb_state_t& awbState) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_STATE); if (entry.count != 1) { @@ -303,15 +275,13 @@ int Parameters::getAwbState(camera_awb_state_t& awbState) const return OK; } -int Parameters::setAwbLock(bool lock) -{ +int Parameters::setAwbLock(bool lock) { uint8_t lockValue = lock ? 
1 : 0; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_LOCK, &lockValue, 1); } -int Parameters::getAwbLock(bool& lock) const -{ +int Parameters::getAwbLock(bool& lock) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_LOCK); if (entry.count != 1) { @@ -321,15 +291,14 @@ int Parameters::getAwbLock(bool& lock) const return OK; } -int Parameters::setAwbCctRange(camera_range_t cct) -{ +int Parameters::setAwbCctRange(camera_range_t cct) { int range[] = {(int)cct.min, (int)cct.max}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_CCT_RANGE, range, ARRAY_SIZE(range)); + return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_CCT_RANGE, range, + ARRAY_SIZE(range)); } -int Parameters::getAwbCctRange(camera_range_t& cct) const -{ +int Parameters::getAwbCctRange(camera_range_t& cct) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_CCT_RANGE); const size_t ELEM_NUM = sizeof(camera_range_t) / sizeof(int); @@ -341,15 +310,13 @@ int Parameters::getAwbCctRange(camera_range_t& cct) const return OK; } -int Parameters::setAwbGains(camera_awb_gains_t awbGains) -{ +int Parameters::setAwbGains(camera_awb_gains_t awbGains) { int values[] = {awbGains.r_gain, awbGains.g_gain, awbGains.b_gain}; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_GAINS, values, ARRAY_SIZE(values)); } -int Parameters::getAwbGains(camera_awb_gains_t& awbGains) const -{ +int Parameters::getAwbGains(camera_awb_gains_t& awbGains) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_GAINS); const size_t ELEM_NUM = sizeof(camera_awb_gains_t) / sizeof(int); @@ -362,8 +329,7 @@ int Parameters::getAwbGains(camera_awb_gains_t& awbGains) const return OK; } -int 
Parameters::setAwbResult(void* data) -{ +int Parameters::setAwbResult(void* data) { uint32_t size = sizeof(camera_awb_result_t); uint32_t tag = CAMERA_AWB_RESULT; ParameterHelper::AutoWLock wl(mData); @@ -374,8 +340,7 @@ int Parameters::setAwbResult(void* data) return ParameterHelper::getMetadata(mData).update(tag, (uint8_t*)data, size); } -int Parameters::getAwbResult(void* data) const -{ +int Parameters::getAwbResult(void* data) const { if (data == NULL) { return BAD_VALUE; } @@ -394,15 +359,14 @@ int Parameters::getAwbResult(void* data) const return OK; } -int Parameters::setAwbGainShift(camera_awb_gains_t awbGainShift) -{ +int Parameters::setAwbGainShift(camera_awb_gains_t awbGainShift) { int values[] = {awbGainShift.r_gain, awbGainShift.g_gain, awbGainShift.b_gain}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_GAIN_SHIFT, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_GAIN_SHIFT, values, + ARRAY_SIZE(values)); } -int Parameters::getAwbGainShift(camera_awb_gains_t& awbGainShift) const -{ +int Parameters::getAwbGainShift(camera_awb_gains_t& awbGainShift) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_GAIN_SHIFT); const size_t ELEM_NUM = sizeof(camera_awb_gains_t) / sizeof(int); @@ -415,15 +379,14 @@ int Parameters::getAwbGainShift(camera_awb_gains_t& awbGainShift) const return OK; } -int Parameters::setAwbWhitePoint(camera_coordinate_t whitePoint) -{ +int Parameters::setAwbWhitePoint(camera_coordinate_t whitePoint) { int values[] = {whitePoint.x, whitePoint.y}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_WHITE_POINT, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_WHITE_POINT, values, + ARRAY_SIZE(values)); } -int Parameters::getAwbWhitePoint(camera_coordinate_t& whitePoint) const -{ +int 
Parameters::getAwbWhitePoint(camera_coordinate_t& whitePoint) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_WHITE_POINT); const size_t ELEM_NUM = sizeof(camera_coordinate_t) / sizeof(int); @@ -437,15 +400,13 @@ int Parameters::getAwbWhitePoint(camera_coordinate_t& whitePoint) const return OK; } -int Parameters::setColorTransform(camera_color_transform_t colorTransform) -{ +int Parameters::setColorTransform(camera_color_transform_t colorTransform) { float* transform = (float*)colorTransform.color_transform; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_COLOR_TRANSFORM, transform, 3 * 3); } -int Parameters::getColorTransform(camera_color_transform_t& colorTransform) const -{ +int Parameters::getColorTransform(camera_color_transform_t& colorTransform) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_COLOR_TRANSFORM); const size_t ELEM_NUM = 3 * 3; @@ -459,15 +420,13 @@ int Parameters::getColorTransform(camera_color_transform_t& colorTransform) cons return OK; } -int Parameters::setColorGains(camera_color_gains_t colorGains) -{ +int Parameters::setColorGains(camera_color_gains_t colorGains) { float* gains = colorGains.color_gains_rggb; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_COLOR_GAINS, gains, 4); } -int Parameters::getColorGains(camera_color_gains_t& colorGains) const -{ +int Parameters::getColorGains(camera_color_gains_t& colorGains) const { ParameterHelper::AutoRLock rl(mData); icamera_metadata_ro_entry_t entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_COLOR_GAINS); @@ -481,27 +440,23 @@ int Parameters::getColorGains(camera_color_gains_t& colorGains) const return OK; } -int Parameters::setAwbRegions(camera_window_list_t awbRegions) -{ +int Parameters::setAwbRegions(camera_window_list_t awbRegions) { 
ParameterHelper::AutoWLock wl(mData); return setRegions(ParameterHelper::getMetadata(mData), awbRegions, CAMERA_AWB_REGIONS); } -int Parameters::getAwbRegions(camera_window_list_t& awbRegions) const -{ +int Parameters::getAwbRegions(camera_window_list_t& awbRegions) const { ParameterHelper::AutoRLock rl(mData); return getRegions(ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_REGIONS), awbRegions); } -int Parameters::setEdgeMode(camera_edge_mode_t edgeMode) -{ +int Parameters::setEdgeMode(camera_edge_mode_t edgeMode) { uint8_t mode = edgeMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_EDGE_MODE, &mode, 1); } -int Parameters::getEdgeMode(camera_edge_mode_t& edgeMode) const -{ +int Parameters::getEdgeMode(camera_edge_mode_t& edgeMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_EDGE_MODE); if (entry.count != 1) { @@ -511,15 +466,13 @@ int Parameters::getEdgeMode(camera_edge_mode_t& edgeMode) const return OK; } -int Parameters::setNrMode(camera_nr_mode_t nrMode) -{ +int Parameters::setNrMode(camera_nr_mode_t nrMode) { uint8_t mode = nrMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_NR_MODE, &mode, 1); } -int Parameters::getNrMode(camera_nr_mode_t& nrMode) const -{ +int Parameters::getNrMode(camera_nr_mode_t& nrMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_NR_MODE); if (entry.count != 1) { @@ -529,15 +482,14 @@ int Parameters::getNrMode(camera_nr_mode_t& nrMode) const return OK; } -int Parameters::setNrLevel(camera_nr_level_t level) -{ - int values [] = {level.overall, level.spatial, level.temporal}; +int Parameters::setNrLevel(camera_nr_level_t level) { + int values[] = {level.overall, level.spatial, level.temporal}; ParameterHelper::AutoWLock wl(mData); - return 
ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_NR_LEVEL, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_NR_LEVEL, values, + ARRAY_SIZE(values)); } -int Parameters::getNrLevel(camera_nr_level_t& level) const -{ +int Parameters::getNrLevel(camera_nr_level_t& level) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_NR_LEVEL); const size_t ELEM_NUM = sizeof(camera_nr_level_t) / sizeof(int); @@ -550,15 +502,13 @@ int Parameters::getNrLevel(camera_nr_level_t& level) const return OK; } -int Parameters::setIrisMode(camera_iris_mode_t irisMode) -{ +int Parameters::setIrisMode(camera_iris_mode_t irisMode) { uint8_t mode = irisMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_IRIS_MODE, &mode, 1); } -int Parameters::getIrisMode(camera_iris_mode_t& irisMode) -{ +int Parameters::getIrisMode(camera_iris_mode_t& irisMode) { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_IRIS_MODE); if (entry.count != 1) { @@ -568,14 +518,12 @@ int Parameters::getIrisMode(camera_iris_mode_t& irisMode) return OK; } -int Parameters::setIrisLevel(int level) -{ +int Parameters::setIrisLevel(int level) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_IRIS_LEVEL, &level, 1); } -int Parameters::getIrisLevel(int& level) -{ +int Parameters::getIrisLevel(int& level) { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_IRIS_LEVEL); if (entry.count != 1) { @@ -586,15 +534,13 @@ int Parameters::getIrisLevel(int& level) } // HDR_FEATURE_S -int Parameters::setWdrMode(camera_wdr_mode_t wdrMode) -{ +int Parameters::setWdrMode(camera_wdr_mode_t wdrMode) { uint8_t mode = wdrMode; ParameterHelper::AutoWLock wl(mData); return 
ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_WDR_MODE, &mode, 1); } -int Parameters::getWdrMode(camera_wdr_mode_t& wdrMode) const -{ +int Parameters::getWdrMode(camera_wdr_mode_t& wdrMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_WDR_MODE); if (entry.count != 1) { @@ -605,14 +551,12 @@ int Parameters::getWdrMode(camera_wdr_mode_t& wdrMode) const } // HDR_FEATURE_E -int Parameters::setWdrLevel(uint8_t level) -{ +int Parameters::setWdrLevel(uint8_t level) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_WDR_LEVEL, &level, 1); } -int Parameters::getWdrLevel(uint8_t& level) const -{ +int Parameters::getWdrLevel(uint8_t& level) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_WDR_LEVEL); if (entry.count != 1) { @@ -622,16 +566,14 @@ int Parameters::getWdrLevel(uint8_t& level) const return OK; } -int Parameters::setEffectSceneMode(camera_scene_mode_t sceneMode) -{ +int Parameters::setEffectSceneMode(camera_scene_mode_t sceneMode) { uint8_t mode = sceneMode; LOGW("Effect scene mode is deprecated. Please use setSceneMode() instead."); ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SCENE_MODE, &mode, 1); } -int Parameters::getEffectSceneMode(camera_scene_mode_t& sceneMode) const -{ +int Parameters::getEffectSceneMode(camera_scene_mode_t& sceneMode) const { LOGW("Effect scene mode is deprecated. 
Please use getSceneMode() instead."); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_SCENE_MODE); @@ -642,15 +584,13 @@ int Parameters::getEffectSceneMode(camera_scene_mode_t& sceneMode) const return OK; } -int Parameters::setSceneMode(camera_scene_mode_t sceneMode) -{ +int Parameters::setSceneMode(camera_scene_mode_t sceneMode) { uint8_t mode = sceneMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SCENE_MODE, &mode, 1); } -int Parameters::getSceneMode(camera_scene_mode_t& sceneMode) const -{ +int Parameters::getSceneMode(camera_scene_mode_t& sceneMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_SCENE_MODE); if (entry.count != 1) { @@ -660,15 +600,13 @@ int Parameters::getSceneMode(camera_scene_mode_t& sceneMode) const return OK; } -int Parameters::setWeightGridMode(camera_weight_grid_mode_t weightGridMode) -{ +int Parameters::setWeightGridMode(camera_weight_grid_mode_t weightGridMode) { uint8_t mode = (uint8_t)weightGridMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_WEIGHT_GRID_MODE, &mode, 1); } -int Parameters::getWeightGridMode(camera_weight_grid_mode_t& weightGridMode) const -{ +int Parameters::getWeightGridMode(camera_weight_grid_mode_t& weightGridMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_WEIGHT_GRID_MODE); if (entry.count != 1) { @@ -678,15 +616,13 @@ int Parameters::getWeightGridMode(camera_weight_grid_mode_t& weightGridMode) con return OK; } -int Parameters::setBlcAreaMode(camera_blc_area_mode_t blcAreaMode) -{ +int Parameters::setBlcAreaMode(camera_blc_area_mode_t blcAreaMode) { uint8_t mode = blcAreaMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_BLC_AREA_MODE, &mode, 1); } 
-int Parameters::getBlcAreaMode(camera_blc_area_mode_t& blcAreaMode) const -{ +int Parameters::getBlcAreaMode(camera_blc_area_mode_t& blcAreaMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_BLC_AREA_MODE); if (entry.count != 1) { @@ -696,15 +632,14 @@ int Parameters::getBlcAreaMode(camera_blc_area_mode_t& blcAreaMode) const return OK; } -int Parameters::setFpsRange(camera_range_t fpsRange) -{ +int Parameters::setFpsRange(camera_range_t fpsRange) { float range[] = {fpsRange.min, fpsRange.max}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_AE_TARGET_FPS_RANGE, range, ARRAY_SIZE(range)); + return ParameterHelper::getMetadata(mData).update(CAMERA_AE_TARGET_FPS_RANGE, range, + ARRAY_SIZE(range)); } -int Parameters::getFpsRange(camera_range_t& fpsRange) const -{ +int Parameters::getFpsRange(camera_range_t& fpsRange) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_TARGET_FPS_RANGE); const size_t ELEM_NUM = sizeof(camera_range_t) / sizeof(float); @@ -716,15 +651,15 @@ int Parameters::getFpsRange(camera_range_t& fpsRange) const return OK; } -int Parameters::setImageEnhancement(camera_image_enhancement_t effects) -{ - int values[] = {effects.sharpness, effects.brightness, effects.contrast, effects.hue, effects.saturation}; +int Parameters::setImageEnhancement(camera_image_enhancement_t effects) { + int values[] = {effects.sharpness, effects.brightness, effects.contrast, effects.hue, + effects.saturation}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_IMAGE_ENHANCEMENT, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_IMAGE_ENHANCEMENT, values, + ARRAY_SIZE(values)); } -int Parameters::getImageEnhancement(camera_image_enhancement_t& effects) const -{ +int 
Parameters::getImageEnhancement(camera_image_enhancement_t& effects) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_IMAGE_ENHANCEMENT); size_t number_of_effects = sizeof(camera_image_enhancement_t) / sizeof(int); @@ -740,15 +675,13 @@ int Parameters::getImageEnhancement(camera_image_enhancement_t& effects) const return OK; } -int Parameters::setDeinterlaceMode(camera_deinterlace_mode_t deinterlaceMode) -{ +int Parameters::setDeinterlaceMode(camera_deinterlace_mode_t deinterlaceMode) { uint8_t mode = deinterlaceMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_DEINTERLACE_MODE, &mode, 1); } -int Parameters::getDeinterlaceMode(camera_deinterlace_mode_t &deinterlaceMode) const -{ +int Parameters::getDeinterlaceMode(camera_deinterlace_mode_t& deinterlaceMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_DEINTERLACE_MODE); if (entry.count != 1) { @@ -758,11 +691,12 @@ int Parameters::getDeinterlaceMode(camera_deinterlace_mode_t &deinterlaceMode) c return OK; } -int Parameters::getSupportedVideoStabilizationMode(camera_video_stabilization_list_t &supportedModes) const -{ +int Parameters::getSupportedVideoStabilizationMode( + camera_video_stabilization_list_t& supportedModes) const { supportedModes.clear(); ParameterHelper::AutoRLock rl(mData); - auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); + auto entry = ParameterHelper::getMetadataEntry( + mData, CAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); for (size_t i = 0; i < entry.count; i++) { supportedModes.push_back((camera_video_stabilization_mode_t)entry.data.u8[i]); } @@ -770,8 +704,7 @@ int Parameters::getSupportedVideoStabilizationMode(camera_video_stabilization_li return OK; } -int Parameters::getSupportedAeMode(vector &supportedAeModes) const -{ +int 
Parameters::getSupportedAeMode(vector& supportedAeModes) const { supportedAeModes.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_AVAILABLE_MODES); @@ -782,8 +715,7 @@ int Parameters::getSupportedAeMode(vector &supportedAeModes) return OK; } -int Parameters::getSupportedAwbMode(vector &supportedAwbModes) const -{ +int Parameters::getSupportedAwbMode(vector& supportedAwbModes) const { supportedAwbModes.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_AVAILABLE_MODES); @@ -794,8 +726,7 @@ int Parameters::getSupportedAwbMode(vector &supportedAwbMode return OK; } -int Parameters::getSupportedAfMode(vector &supportedAfModes) const -{ +int Parameters::getSupportedAfMode(vector& supportedAfModes) const { supportedAfModes.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AF_AVAILABLE_MODES); @@ -806,8 +737,7 @@ int Parameters::getSupportedAfMode(vector &supportedAfModes) return OK; } -int Parameters::getSupportedSceneMode(vector &supportedSceneModes) const -{ +int Parameters::getSupportedSceneMode(vector& supportedSceneModes) const { supportedSceneModes.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_CONTROL_AVAILABLE_SCENE_MODES); @@ -818,8 +748,8 @@ int Parameters::getSupportedSceneMode(vector &supportedSce return OK; } -int Parameters::getSupportedAntibandingMode(vector &supportedAntibindingModes) const -{ +int Parameters::getSupportedAntibandingMode( + vector& supportedAntibindingModes) const { supportedAntibindingModes.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_AVAILABLE_ANTIBANDING_MODES); @@ -830,8 +760,7 @@ int Parameters::getSupportedAntibandingMode(vector & return OK; } -int Parameters::getSupportedFpsRange(camera_range_array_t& ranges) const -{ +int 
Parameters::getSupportedFpsRange(camera_range_array_t& ranges) const { ranges.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_AVAILABLE_TARGET_FPS_RANGES); @@ -849,8 +778,7 @@ int Parameters::getSupportedFpsRange(camera_range_array_t& ranges) const return OK; } -int Parameters::getSupportedStreamConfig(stream_array_t& config) const -{ +int Parameters::getSupportedStreamConfig(stream_array_t& config) const { config.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_INFO_AVAILABLE_CONFIGURATIONS); @@ -865,14 +793,13 @@ int Parameters::getSupportedStreamConfig(stream_array_t& config) const for (size_t i = 0; i < entry.count; i += streamConfMemberNum) { MEMCPY_S(&cfg, sizeof(stream_t), &entry.data.i32[i], sizeof(stream_t)); cfg.stride = CameraUtils::getStride(cfg.format, cfg.width); - cfg.size = CameraUtils::getFrameSize(cfg.format, cfg.width, cfg.height); + cfg.size = CameraUtils::getFrameSize(cfg.format, cfg.width, cfg.height); config.push_back(cfg); } return OK; } -int Parameters::getSupportedSensorExposureTimeRange(camera_range_t& range) const -{ +int Parameters::getSupportedSensorExposureTimeRange(camera_range_t& range) const { CLEAR(range); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE); @@ -885,8 +812,7 @@ int Parameters::getSupportedSensorExposureTimeRange(camera_range_t& range) const return OK; } -int Parameters::getSupportedSensorSensitivityRange(camera_range_t& range) const -{ +int Parameters::getSupportedSensorSensitivityRange(camera_range_t& range) const { CLEAR(range); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_INFO_SENSITIVITY_RANGE); @@ -899,8 +825,7 @@ int Parameters::getSupportedSensorSensitivityRange(camera_range_t& range) const return OK; } -int 
Parameters::getSupportedFeatures(camera_features_list_t& features) const -{ +int Parameters::getSupportedFeatures(camera_features_list_t& features) const { features.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_INFO_AVAILABLE_FEATURES); @@ -911,8 +836,7 @@ int Parameters::getSupportedFeatures(camera_features_list_t& features) const } // ISP_CONTROL_S -int Parameters::getSupportedIspControlFeatures(vector& controls) const -{ +int Parameters::getSupportedIspControlFeatures(vector& controls) const { controls.clear(); ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_ISP_SUPPORTED_CTRL_IDS); @@ -924,8 +848,7 @@ int Parameters::getSupportedIspControlFeatures(vector& controls) const } // ISP_CONTROL_E -int Parameters::getAeCompensationRange(camera_range_t& evRange) const -{ +int Parameters::getAeCompensationRange(camera_range_t& evRange) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_COMPENSATION_RANGE); const size_t ELEM_NUM = sizeof(camera_range_t) / sizeof(int); @@ -938,8 +861,7 @@ int Parameters::getAeCompensationRange(camera_range_t& evRange) const return OK; } -int Parameters::getAeCompensationStep(camera_rational_t& evStep) const -{ +int Parameters::getAeCompensationStep(camera_rational_t& evStep) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_COMPENSATION_STEP); if (entry.count != 1) { @@ -951,8 +873,8 @@ int Parameters::getAeCompensationStep(camera_rational_t& evStep) const return OK; } -int Parameters::getSupportedAeExposureTimeRange(std::vector & etRanges) const -{ +int Parameters::getSupportedAeExposureTimeRange( + std::vector& etRanges) const { ParameterHelper::AutoRLock rl(mData); const int MEMBER_COUNT = 3; @@ -973,8 +895,7 @@ int Parameters::getSupportedAeExposureTimeRange(std::vector& gainRanges) const -{ +int 
Parameters::getSupportedAeGainRange(std::vector& gainRanges) const { ParameterHelper::AutoRLock rl(mData); const int MEMBER_COUNT = 3; @@ -997,8 +918,7 @@ int Parameters::getSupportedAeGainRange(std::vector& gai return OK; } -bool Parameters::getAeLockAvailable() const -{ +bool Parameters::getAeLockAvailable() const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AE_LOCK_AVAILABLE); if (entry.count != 1) { @@ -1008,8 +928,7 @@ bool Parameters::getAeLockAvailable() const return (entry.data.u8[0] == CAMERA_AE_LOCK_AVAILABLE_TRUE); } -bool Parameters::getAwbLockAvailable() const -{ +bool Parameters::getAwbLockAvailable() const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_LOCK_AVAILABLE); if (entry.count != 1) { @@ -1019,16 +938,15 @@ bool Parameters::getAwbLockAvailable() const return (entry.data.u8[0] == CAMERA_AWB_LOCK_AVAILABLE_TRUE); } -int Parameters::setExposureTimeRange(camera_range_t exposureTimeRange) -{ +int Parameters::setExposureTimeRange(camera_range_t exposureTimeRange) { ParameterHelper::AutoWLock wl(mData); const int MEMBER_COUNT = 2; int values[MEMBER_COUNT] = {(int)exposureTimeRange.min, (int)exposureTimeRange.max}; - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_EXPOSURE_TIME_RANGE, values, MEMBER_COUNT); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_EXPOSURE_TIME_RANGE, values, + MEMBER_COUNT); } -int Parameters::getExposureTimeRange(camera_range_t& exposureTimeRange) const -{ +int Parameters::getExposureTimeRange(camera_range_t& exposureTimeRange) const { ParameterHelper::AutoRLock rl(mData); const int MEMBER_COUNT = 2; @@ -1042,16 +960,15 @@ int Parameters::getExposureTimeRange(camera_range_t& exposureTimeRange) const return OK; } -int Parameters::setSensitivityGainRange(camera_range_t sensitivityGainRange) -{ +int Parameters::setSensitivityGainRange(camera_range_t sensitivityGainRange) { 
ParameterHelper::AutoWLock wl(mData); float values[] = {sensitivityGainRange.min, sensitivityGainRange.max}; - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SENSITIVITY_GAIN_RANGE, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SENSITIVITY_GAIN_RANGE, values, + ARRAY_SIZE(values)); } -int Parameters::getSensitivityGainRange(camera_range_t& sensitivityGainRange) const -{ +int Parameters::getSensitivityGainRange(camera_range_t& sensitivityGainRange) const { ParameterHelper::AutoRLock rl(mData); const int MEMBER_COUNT = 2; @@ -1065,15 +982,13 @@ int Parameters::getSensitivityGainRange(camera_range_t& sensitivityGainRange) co return OK; } -int Parameters::setAeConvergeSpeed(camera_converge_speed_t speed) -{ +int Parameters::setAeConvergeSpeed(camera_converge_speed_t speed) { uint8_t aeSpeed = speed; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_AE_CONVERGE_SPEED, &aeSpeed, 1); } -int Parameters::getAeConvergeSpeed(camera_converge_speed_t& speed) const -{ +int Parameters::getAeConvergeSpeed(camera_converge_speed_t& speed) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_AE_CONVERGE_SPEED); if (entry.count != 1) { @@ -1084,15 +999,13 @@ int Parameters::getAeConvergeSpeed(camera_converge_speed_t& speed) const return OK; } -int Parameters::setAwbConvergeSpeed(camera_converge_speed_t speed) -{ +int Parameters::setAwbConvergeSpeed(camera_converge_speed_t speed) { uint8_t awbSpeed = speed; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_CONVERGE_SPEED, &awbSpeed, 1); } -int Parameters::getAwbConvergeSpeed(camera_converge_speed_t& speed) const -{ +int Parameters::getAwbConvergeSpeed(camera_converge_speed_t& speed) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_CONVERGE_SPEED); if 
(entry.count != 1) { @@ -1103,15 +1016,14 @@ int Parameters::getAwbConvergeSpeed(camera_converge_speed_t& speed) const return OK; } -int Parameters::setAeConvergeSpeedMode(camera_converge_speed_mode_t mode) -{ +int Parameters::setAeConvergeSpeedMode(camera_converge_speed_mode_t mode) { uint8_t speedMode = mode; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_AE_CONVERGE_SPEED_MODE, &speedMode, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_AE_CONVERGE_SPEED_MODE, + &speedMode, 1); } -int Parameters::getAeConvergeSpeedMode(camera_converge_speed_mode_t& mode) const -{ +int Parameters::getAeConvergeSpeedMode(camera_converge_speed_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_AE_CONVERGE_SPEED_MODE); if (entry.count != 1) { @@ -1122,15 +1034,14 @@ int Parameters::getAeConvergeSpeedMode(camera_converge_speed_mode_t& mode) const return OK; } -int Parameters::setAwbConvergeSpeedMode(camera_converge_speed_mode_t mode) -{ +int Parameters::setAwbConvergeSpeedMode(camera_converge_speed_mode_t mode) { uint8_t speedMode = mode; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_CONVERGE_SPEED_MODE, &speedMode, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_AWB_CONVERGE_SPEED_MODE, &speedMode, + 1); } -int Parameters::getAwbConvergeSpeedMode(camera_converge_speed_mode_t& mode) const -{ +int Parameters::getAwbConvergeSpeedMode(camera_converge_speed_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AWB_CONVERGE_SPEED_MODE); if (entry.count != 1) { @@ -1141,16 +1052,15 @@ int Parameters::getAwbConvergeSpeedMode(camera_converge_speed_mode_t& mode) cons return OK; } -int Parameters::setMakernoteData(const void* data, unsigned int size) -{ +int Parameters::setMakernoteData(const void* data, 
unsigned int size) { CheckAndLogError(!data || size == 0, BAD_VALUE, "%s, invalid parameters", __func__); ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_MAKERNOTE_DATA, (uint8_t*)data, size); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_MAKERNOTE_DATA, (uint8_t*)data, + size); } -int Parameters::getMakernoteData(void* data, unsigned int* size) const -{ +int Parameters::getMakernoteData(void* data, unsigned int* size) const { CheckAndLogError(!data || !size, BAD_VALUE, "%s, invalid parameters", __func__); ParameterHelper::AutoRLock rl(mData); @@ -1165,16 +1075,15 @@ int Parameters::getMakernoteData(void* data, unsigned int* size) const return OK; } -int Parameters::setCustomAicParam(const void* data, unsigned int length) -{ +int Parameters::setCustomAicParam(const void* data, unsigned int length) { CheckAndLogError(!data, BAD_VALUE, "%s, invalid parameters", __func__); ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_CUSTOM_AIC_PARAM, (uint8_t*)data, length); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_CUSTOM_AIC_PARAM, + (uint8_t*)data, length); } -int Parameters::getCustomAicParam(void* data, unsigned int* length) const -{ +int Parameters::getCustomAicParam(void* data, unsigned int* length) const { CheckAndLogError(!data || !length, BAD_VALUE, "%s, invalid parameters", __func__); ParameterHelper::AutoRLock rl(mData); @@ -1189,15 +1098,13 @@ int Parameters::getCustomAicParam(void* data, unsigned int* length) const return OK; } -int Parameters::setMakernoteMode(camera_makernote_mode_t mode) -{ +int Parameters::setMakernoteMode(camera_makernote_mode_t mode) { uint8_t mknMode = mode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_MAKERNOTE_MODE, &mknMode, 1); } -int Parameters::getMakernoteMode(camera_makernote_mode_t &mode) const -{ +int 
Parameters::getMakernoteMode(camera_makernote_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_MAKERNOTE_MODE); if (entry.count != 1) { @@ -1210,8 +1117,7 @@ int Parameters::getMakernoteMode(camera_makernote_mode_t &mode) const } // ISP_CONTROL_S -int Parameters::setIspControl(uint32_t ctrlId, void* data) -{ +int Parameters::setIspControl(uint32_t ctrlId, void* data) { uint32_t size = IspControlUtils::getSizeById(ctrlId); uint32_t tag = IspControlUtils::getTagById(ctrlId); CheckAndLogError(size == 0, BAD_VALUE, "Unsupported ISP control id:%u", ctrlId); @@ -1223,8 +1129,7 @@ int Parameters::setIspControl(uint32_t ctrlId, void* data) return ParameterHelper::getMetadata(mData).update(tag, (uint8_t*)data, size); } -int Parameters::getIspControl(uint32_t ctrlId, void* data) const -{ +int Parameters::getIspControl(uint32_t ctrlId, void* data) const { uint32_t size = IspControlUtils::getSizeById(ctrlId); uint32_t tag = IspControlUtils::getTagById(ctrlId); CheckAndLogError(size == 0, BAD_VALUE, "Unsupported ISP control id:%u", ctrlId); @@ -1243,8 +1148,7 @@ int Parameters::getIspControl(uint32_t ctrlId, void* data) const return OK; } -int Parameters::setEnabledIspControls(const std::set& ctrlIds) -{ +int Parameters::setEnabledIspControls(const std::set& ctrlIds) { ParameterHelper::AutoWLock wl(mData); size_t size = ctrlIds.size(); @@ -1259,11 +1163,11 @@ int Parameters::setEnabledIspControls(const std::set& ctrlIds) index++; } - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_ISP_ENABLED_CTRL_IDS, data, size); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_ISP_ENABLED_CTRL_IDS, data, + size); } -int Parameters::getEnabledIspControls(std::set& ctrlIds) const -{ +int Parameters::getEnabledIspControls(std::set& ctrlIds) const { ctrlIds.clear(); ParameterHelper::AutoRLock rl(mData); @@ -1276,14 +1180,12 @@ int Parameters::getEnabledIspControls(std::set& ctrlIds) 
const } // ISP_CONTROL_E -int Parameters::setDigitalZoomRatio(float ratio) -{ +int Parameters::setDigitalZoomRatio(float ratio) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_DIGITAL_ZOOM_RATIO, &ratio, 1); } -int Parameters::getDigitalZoomRatio(float& ratio) const -{ +int Parameters::getDigitalZoomRatio(float& ratio) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_DIGITAL_ZOOM_RATIO); if (entry.count != 1) { @@ -1293,15 +1195,13 @@ int Parameters::getDigitalZoomRatio(float& ratio) const return OK; } -int Parameters::setLdcMode(camera_ldc_mode_t mode) -{ +int Parameters::setLdcMode(camera_ldc_mode_t mode) { uint8_t ldcMode = mode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_LDC_MODE, &ldcMode, 1); } -int Parameters::getLdcMode(camera_ldc_mode_t &mode) const -{ +int Parameters::getLdcMode(camera_ldc_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_LDC_MODE); if (entry.count != 1) { @@ -1311,15 +1211,13 @@ int Parameters::getLdcMode(camera_ldc_mode_t &mode) const return OK; } -int Parameters::setRscMode(camera_rsc_mode_t mode) -{ +int Parameters::setRscMode(camera_rsc_mode_t mode) { uint8_t rscMode = mode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_RSC_MODE, &rscMode, 1); } -int Parameters::getRscMode(camera_rsc_mode_t &mode) const -{ +int Parameters::getRscMode(camera_rsc_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_RSC_MODE); if (entry.count != 1) { @@ -1329,15 +1227,13 @@ int Parameters::getRscMode(camera_rsc_mode_t &mode) const return OK; } -int Parameters::setFlipMode(camera_flip_mode_t mode) -{ +int Parameters::setFlipMode(camera_flip_mode_t mode) { uint8_t flipMode 
= mode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_FLIP_MODE, &flipMode, 1); } -int Parameters::getFlipMode(camera_flip_mode_t &mode) const -{ +int Parameters::getFlipMode(camera_flip_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_FLIP_MODE); if (entry.count != 1) { @@ -1347,15 +1243,13 @@ int Parameters::getFlipMode(camera_flip_mode_t &mode) const return OK; } -int Parameters::setMonoDsMode(camera_mono_downscale_mode_t mode) -{ +int Parameters::setMonoDsMode(camera_mono_downscale_mode_t mode) { uint8_t monoDsMode = mode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_MONO_DOWNSCALE, &monoDsMode, 1); } -int Parameters::getMonoDsMode(camera_mono_downscale_mode_t &mode) const -{ +int Parameters::getMonoDsMode(camera_mono_downscale_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_MONO_DOWNSCALE); if (entry.count != 1) { @@ -1365,14 +1259,12 @@ int Parameters::getMonoDsMode(camera_mono_downscale_mode_t &mode) const return OK; } -int Parameters::setRun3ACadence(int cadence) -{ +int Parameters::setRun3ACadence(int cadence) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_RUN3_A_CADENCE, &cadence, 1); } -int Parameters::getRun3ACadence(int &cadence) const -{ +int Parameters::getRun3ACadence(int& cadence) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_RUN3_A_CADENCE); if (entry.count != 1) { @@ -1382,15 +1274,14 @@ int Parameters::getRun3ACadence(int &cadence) const return OK; } -int Parameters::setFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t mode) -{ +int Parameters::setFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t mode) { uint8_t dewarpingMode = mode; 
ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_FISHEYE_DEWARPING_MODE, &dewarpingMode, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_FISHEYE_DEWARPING_MODE, + &dewarpingMode, 1); } -int Parameters::getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t &mode) const -{ +int Parameters::getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_FISHEYE_DEWARPING_MODE); if (entry.count != 1) { @@ -1400,15 +1291,14 @@ int Parameters::getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t &mode) c return OK; } -int Parameters::setAeDistributionPriority(camera_ae_distribution_priority_t priority) -{ +int Parameters::setAeDistributionPriority(camera_ae_distribution_priority_t priority) { uint8_t distributionPriority = priority; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_AE_DISTRIBUTION_PRIORITY, &distributionPriority, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_AE_DISTRIBUTION_PRIORITY, + &distributionPriority, 1); } -int Parameters::getAeDistributionPriority(camera_ae_distribution_priority_t& priority) const -{ +int Parameters::getAeDistributionPriority(camera_ae_distribution_priority_t& priority) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_AE_DISTRIBUTION_PRIORITY); if (entry.count != 1) { @@ -1419,15 +1309,13 @@ int Parameters::getAeDistributionPriority(camera_ae_distribution_priority_t& pri return OK; } -int Parameters::setYuvColorRangeMode(camera_yuv_color_range_mode_t colorRange) -{ +int Parameters::setYuvColorRangeMode(camera_yuv_color_range_mode_t colorRange) { uint8_t mode = colorRange; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_YUV_COLOR_RANGE, &mode, 
1); } -int Parameters::getYuvColorRangeMode(camera_yuv_color_range_mode_t& colorRange) const -{ +int Parameters::getYuvColorRangeMode(camera_yuv_color_range_mode_t& colorRange) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_CONTROL_YUV_COLOR_RANGE); if (entry.count != 1) { @@ -1438,14 +1326,12 @@ int Parameters::getYuvColorRangeMode(camera_yuv_color_range_mode_t& colorRange) return OK; } -int Parameters::setJpegQuality(uint8_t quality) -{ +int Parameters::setJpegQuality(uint8_t quality) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_QUALITY, &quality, 1); } -int Parameters::getJpegQuality(uint8_t *quality) const -{ +int Parameters::getJpegQuality(uint8_t* quality) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_QUALITY); if (entry.count != 1) { @@ -1455,14 +1341,12 @@ int Parameters::getJpegQuality(uint8_t *quality) const return OK; } -int Parameters::setJpegThumbnailQuality(uint8_t quality) -{ +int Parameters::setJpegThumbnailQuality(uint8_t quality) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_THUMBNAIL_QUALITY, &quality, 1); } -int Parameters::getJpegThumbnailQuality(uint8_t *quality) const -{ +int Parameters::getJpegThumbnailQuality(uint8_t* quality) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_THUMBNAIL_QUALITY); if (entry.count != 1) { @@ -1472,33 +1356,29 @@ int Parameters::getJpegThumbnailQuality(uint8_t *quality) const return OK; } -int Parameters::setJpegThumbnailSize(const camera_resolution_t& res) -{ +int Parameters::setJpegThumbnailSize(const camera_resolution_t& res) { int size[2] = {res.width, res.height}; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_THUMBNAIL_SIZE, size, 2); } -int 
Parameters::getJpegThumbnailSize(camera_resolution_t& res) const -{ +int Parameters::getJpegThumbnailSize(camera_resolution_t& res) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_THUMBNAIL_SIZE); if (entry.count != 2) { return NAME_NOT_FOUND; } - res.width = entry.data.i32[0]; + res.width = entry.data.i32[0]; res.height = entry.data.i32[1]; return OK; } -int Parameters::setJpegRotation(int rotation) -{ +int Parameters::setJpegRotation(int rotation) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_ORIENTATION, &rotation, 1); } -int Parameters::getJpegRotation(int &rotation) const -{ +int Parameters::getJpegRotation(int& rotation) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_ORIENTATION); if (entry.count != 1) { @@ -1508,14 +1388,12 @@ int Parameters::getJpegRotation(int &rotation) const return OK; } -int Parameters::setJpegGpsCoordinates(const double *coordinates) -{ +int Parameters::setJpegGpsCoordinates(const double* coordinates) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_COORDINATES, coordinates, 3); } -int Parameters::getJpegGpsLatitude(double &latitude) const -{ +int Parameters::getJpegGpsLatitude(double& latitude) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_COORDINATES); if (entry.count != 3) { @@ -1525,8 +1403,7 @@ int Parameters::getJpegGpsLatitude(double &latitude) const return OK; } -int Parameters::getJpegGpsLongitude(double &longitude) const -{ +int Parameters::getJpegGpsLongitude(double& longitude) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_COORDINATES); if (entry.count != 3) { @@ -1536,8 +1413,7 @@ int Parameters::getJpegGpsLongitude(double &longitude) const return 
OK; } -int Parameters::getJpegGpsAltitude(double &altitude) const -{ +int Parameters::getJpegGpsAltitude(double& altitude) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_COORDINATES); if (entry.count != 3) { @@ -1547,14 +1423,12 @@ int Parameters::getJpegGpsAltitude(double &altitude) const return OK; } -int Parameters::setJpegGpsTimeStamp(int64_t timestamp) -{ +int Parameters::setJpegGpsTimeStamp(int64_t timestamp) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_TIMESTAMP, ×tamp, 1); } -int Parameters::getJpegGpsTimeStamp(int64_t ×tamp) const -{ +int Parameters::getJpegGpsTimeStamp(int64_t& timestamp) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_TIMESTAMP); if (entry.count != 1) { @@ -1564,14 +1438,13 @@ int Parameters::getJpegGpsTimeStamp(int64_t ×tamp) const return OK; } -int Parameters::setJpegGpsProcessingMethod(int processMethod) -{ +int Parameters::setJpegGpsProcessingMethod(int processMethod) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_PROCESSING_METHOD, &processMethod, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_PROCESSING_METHOD, + &processMethod, 1); } -int Parameters::getJpegGpsProcessingMethod(int &processMethod) const -{ +int Parameters::getJpegGpsProcessingMethod(int& processMethod) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_PROCESSING_METHOD); if (entry.count != 1) { @@ -1581,14 +1454,14 @@ int Parameters::getJpegGpsProcessingMethod(int &processMethod) const return OK; } -int Parameters::setJpegGpsProcessingMethod(const char* processMethod) -{ +int Parameters::setJpegGpsProcessingMethod(const char* processMethod) { ParameterHelper::AutoWLock wl(mData); - return 
ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_PROCESSING_METHOD, (const uint8_t*)processMethod, strlen(processMethod) + 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_JPEG_GPS_PROCESSING_METHOD, + (const uint8_t*)processMethod, + strlen(processMethod) + 1); } -int Parameters::getJpegGpsProcessingMethod(int size, char* processMethod) const -{ +int Parameters::getJpegGpsProcessingMethod(int size, char* processMethod) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_JPEG_GPS_PROCESSING_METHOD); if (entry.count <= 0) { @@ -1598,15 +1471,13 @@ int Parameters::getJpegGpsProcessingMethod(int size, char* processMethod) const return OK; } -int Parameters::setImageEffect(camera_effect_mode_t effect) -{ +int Parameters::setImageEffect(camera_effect_mode_t effect) { uint8_t effectmode = effect; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_EFFECT_MODE, &effectmode, 1); } -int Parameters::getImageEffect(camera_effect_mode_t &effect) const -{ +int Parameters::getImageEffect(camera_effect_mode_t& effect) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_CONTROL_EFFECT_MODE); if (entry.count != 1) { @@ -1616,15 +1487,14 @@ int Parameters::getImageEffect(camera_effect_mode_t &effect) const return OK; } -int Parameters::setVideoStabilizationMode(camera_video_stabilization_mode_t mode) -{ +int Parameters::setVideoStabilizationMode(camera_video_stabilization_mode_t mode) { uint8_t dvsMode = mode; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_VIDEO_STABILIZATION_MODE, &dvsMode, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_VIDEO_STABILIZATION_MODE, + &dvsMode, 1); } -int Parameters::getVideoStabilizationMode(camera_video_stabilization_mode_t &mode) const -{ +int 
Parameters::getVideoStabilizationMode(camera_video_stabilization_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_CONTROL_VIDEO_STABILIZATION_MODE); if (entry.count != 1) { @@ -1634,8 +1504,7 @@ int Parameters::getVideoStabilizationMode(camera_video_stabilization_mode_t &mod return OK; } -int Parameters::getFocalLength(float &focal) const -{ +int Parameters::getFocalLength(float& focal) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_FOCAL_LENGTH); if (entry.count != 1) { @@ -1645,14 +1514,12 @@ int Parameters::getFocalLength(float &focal) const return OK; } -int Parameters::setFocalLength(float focal) -{ +int Parameters::setFocalLength(float focal) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_FOCAL_LENGTH, &focal, 1); } -int Parameters::getAperture(float &aperture) const -{ +int Parameters::getAperture(float& aperture) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_APERTURE); if (entry.count != 1) { @@ -1662,13 +1529,12 @@ int Parameters::getAperture(float &aperture) const return OK; } -int Parameters::setAperture(float aperture) -{ +int Parameters::setAperture(float aperture) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_APERTURE, &aperture, 1); } -int Parameters::getFocusDistance(float &distance) const { +int Parameters::getFocusDistance(float& distance) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_FOCUS_DISTANCE); if (entry.count != 1) { @@ -1683,16 +1549,14 @@ int Parameters::setFocusDistance(float distance) { return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_FOCUS_DISTANCE, &distance, 1); } -int Parameters::setFocusRange(const camera_range_t &focusRange) -{ +int 
Parameters::setFocusRange(const camera_range_t& focusRange) { float range[] = {focusRange.min, focusRange.max}; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_FOCUS_RANGE, - range, ARRAY_SIZE(range)); + return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_FOCUS_RANGE, range, + ARRAY_SIZE(range)); } -int Parameters::getFocusRange(camera_range_t& focusRange) const -{ +int Parameters::getFocusRange(camera_range_t& focusRange) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_FOCUS_RANGE); if (entry.count != (sizeof(camera_range_t) / sizeof(float))) { @@ -1703,15 +1567,13 @@ int Parameters::getFocusRange(camera_range_t& focusRange) const return OK; } -int Parameters::setAfMode(camera_af_mode_t afMode) -{ +int Parameters::setAfMode(camera_af_mode_t afMode) { uint8_t mode = afMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AF_MODE, &mode, 1); } -int Parameters::getAfMode(camera_af_mode_t& afMode) const -{ +int Parameters::getAfMode(camera_af_mode_t& afMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AF_MODE); if (entry.count != 1) { @@ -1721,15 +1583,13 @@ int Parameters::getAfMode(camera_af_mode_t& afMode) const return OK; } -int Parameters::setAfTrigger(camera_af_trigger_t afTrigger) -{ +int Parameters::setAfTrigger(camera_af_trigger_t afTrigger) { uint8_t trigger = afTrigger; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AF_TRIGGER, &trigger, 1); } -int Parameters::getAfTrigger(camera_af_trigger_t& afTrigger) const -{ +int Parameters::getAfTrigger(camera_af_trigger_t& afTrigger) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AF_TRIGGER); if (entry.count != 1) { @@ -1739,27 +1599,23 @@ int 
Parameters::getAfTrigger(camera_af_trigger_t& afTrigger) const return OK; } -int Parameters::setAfRegions(camera_window_list_t afRegions) -{ +int Parameters::setAfRegions(camera_window_list_t afRegions) { ParameterHelper::AutoWLock wl(mData); return setRegions(ParameterHelper::getMetadata(mData), afRegions, CAMERA_AF_REGIONS); } -int Parameters::getAfRegions(camera_window_list_t& afRegions) const -{ +int Parameters::getAfRegions(camera_window_list_t& afRegions) const { ParameterHelper::AutoRLock rl(mData); return getRegions(ParameterHelper::getMetadataEntry(mData, CAMERA_AF_REGIONS), afRegions); } -int Parameters::setAfState(camera_af_state_t afState) -{ +int Parameters::setAfState(camera_af_state_t afState) { uint8_t state = afState; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_AF_STATE, &state, 1); } -int Parameters::getAfState(camera_af_state_t& afState) const -{ +int Parameters::getAfState(camera_af_state_t& afState) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_AF_STATE); if (entry.count != 1) { @@ -1769,15 +1625,13 @@ int Parameters::getAfState(camera_af_state_t& afState) const return OK; } -int Parameters::setLensState(bool lensMoving) -{ +int Parameters::setLensState(bool lensMoving) { uint8_t state = (lensMoving) ? 
1 : 0; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_LENS_STATE, &state, 1); } -int Parameters::getLensState(bool& lensMoving) const -{ +int Parameters::getLensState(bool& lensMoving) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_STATE); if (entry.count != 1) { @@ -1787,8 +1641,7 @@ int Parameters::getLensState(bool& lensMoving) const return OK; } -int Parameters::getLensAperture(float &aperture) const -{ +int Parameters::getLensAperture(float& aperture) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_INFO_AVAILABLE_APERTURES); if (entry.count != 1) { @@ -1798,11 +1651,10 @@ int Parameters::getLensAperture(float &aperture) const return OK; } -int Parameters::getLensFilterDensity(float &filterDensity) const -{ +int Parameters::getLensFilterDensity(float& filterDensity) const { ParameterHelper::AutoRLock rl(mData); - auto entry = ParameterHelper::getMetadataEntry(mData, - CAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES); + auto entry = + ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES); if (entry.count != 1) { return NAME_NOT_FOUND; } @@ -1810,8 +1662,7 @@ int Parameters::getLensFilterDensity(float &filterDensity) const return OK; } -int Parameters::getLensMinFocusDistance(float &minFocusDistance) const -{ +int Parameters::getLensMinFocusDistance(float& minFocusDistance) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE); if (entry.count != 1) { @@ -1821,8 +1672,7 @@ int Parameters::getLensMinFocusDistance(float &minFocusDistance) const return OK; } -int Parameters::getLensHyperfocalDistance(float &hyperfocalDistance) const -{ +int Parameters::getLensHyperfocalDistance(float& hyperfocalDistance) const { ParameterHelper::AutoRLock rl(mData); auto entry = 
ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_INFO_HYPERFOCAL_DISTANCE); if (entry.count != 1) { @@ -1832,8 +1682,7 @@ int Parameters::getLensHyperfocalDistance(float &hyperfocalDistance) const return OK; } -int Parameters::getSensorMountType(camera_mount_type_t& sensorMountType) const -{ +int Parameters::getSensorMountType(camera_mount_type_t& sensorMountType) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_INFO_SENSOR_MOUNT_TYPE); if (entry.count != 1) { @@ -1845,22 +1694,20 @@ int Parameters::getSensorMountType(camera_mount_type_t& sensorMountType) const } // User can set envrionment and then call api to update the debug level. -int Parameters::updateDebugLevel() -{ +int Parameters::updateDebugLevel() { Log::setDebugLevel(); CameraDump::setDumpLevel(); return OK; } -int Parameters::setTestPatternMode(camera_test_pattern_mode_t mode) -{ +int Parameters::setTestPatternMode(camera_test_pattern_mode_t mode) { int32_t testPatterMode = mode; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_SENSOR_TEST_PATTERN_MODE, &testPatterMode, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_SENSOR_TEST_PATTERN_MODE, + &testPatterMode, 1); } -int Parameters::getTestPatternMode(camera_test_pattern_mode_t& mode) const -{ +int Parameters::getTestPatternMode(camera_test_pattern_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_TEST_PATTERN_MODE); if (entry.count != 1) { @@ -1870,15 +1717,14 @@ int Parameters::getTestPatternMode(camera_test_pattern_mode_t& mode) const return OK; } -int Parameters::setCropRegion(camera_crop_region_t cropRegion) -{ +int Parameters::setCropRegion(camera_crop_region_t cropRegion) { int values[] = {cropRegion.flag, cropRegion.x, cropRegion.y}; ParameterHelper::AutoWLock wl(mData); - return 
ParameterHelper::getMetadata(mData).update(CAMERA_SCALER_CROP_REGION, values, ARRAY_SIZE(values)); + return ParameterHelper::getMetadata(mData).update(CAMERA_SCALER_CROP_REGION, values, + ARRAY_SIZE(values)); } -int Parameters::getCropRegion(camera_crop_region_t& cropRegion) const -{ +int Parameters::getCropRegion(camera_crop_region_t& cropRegion) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SCALER_CROP_REGION); if (entry.count <= 0) { @@ -1890,20 +1736,19 @@ int Parameters::getCropRegion(camera_crop_region_t& cropRegion) const return OK; } -int Parameters::setControlSceneMode(uint8_t sceneModeValue) -{ +int Parameters::setControlSceneMode(uint8_t sceneModeValue) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_SCENE_MODE, &sceneModeValue, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_SCENE_MODE, &sceneModeValue, + 1); } -int Parameters::setFaceDetectMode(uint8_t faceDetectMode) -{ +int Parameters::setFaceDetectMode(uint8_t faceDetectMode) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_STATISTICS_FACE_DETECT_MODE, + &faceDetectMode, 1); } -int Parameters::getFaceDetectMode(uint8_t& faceDetectMode) const -{ +int Parameters::getFaceDetectMode(uint8_t& faceDetectMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_STATISTICS_FACE_DETECT_MODE); if (entry.count != 1) { @@ -1913,14 +1758,12 @@ int Parameters::getFaceDetectMode(uint8_t& faceDetectMode) const return OK; } -int Parameters::setFaceIds(int *faceIds, int faceNum) -{ +int Parameters::setFaceIds(int* faceIds, int faceNum) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_STATISTICS_FACE_IDS, faceIds, 
faceNum); } -int Parameters::getSensorActiveArraySize(camera_coordinate_system_t& arraySize) const -{ +int Parameters::getSensorActiveArraySize(camera_coordinate_system_t& arraySize) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE); if (entry.count <= 0) { @@ -1928,20 +1771,18 @@ int Parameters::getSensorActiveArraySize(camera_coordinate_system_t& arraySize) } arraySize.left = entry.data.i32[0]; arraySize.top = entry.data.i32[1]; - arraySize.right = arraySize.left + entry.data.i32[2]; //width - arraySize.bottom = arraySize.top + entry.data.i32[3]; //height + arraySize.right = arraySize.left + entry.data.i32[2]; // width + arraySize.bottom = arraySize.top + entry.data.i32[3]; // height return OK; } -int Parameters::setShadingMode(camera_shading_mode_t shadingMode) -{ +int Parameters::setShadingMode(camera_shading_mode_t shadingMode) { uint8_t mode = shadingMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_SHADING_MODE, &mode, 1); } -int Parameters::getShadingMode(camera_shading_mode_t& shadingMode) const -{ +int Parameters::getShadingMode(camera_shading_mode_t& shadingMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_SHADING_MODE); if (entry.count != 1) { @@ -1951,8 +1792,7 @@ int Parameters::getShadingMode(camera_shading_mode_t& shadingMode) const return OK; } -int Parameters::setLensShadingMapMode(camera_lens_shading_map_mode_type_t lensShadingMapMode) -{ +int Parameters::setLensShadingMapMode(camera_lens_shading_map_mode_type_t lensShadingMapMode) { uint8_t mode = lensShadingMapMode; ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_STATISTICS_LENS_SHADING_MAP_MODE, @@ -1960,8 +1800,7 @@ int Parameters::setLensShadingMapMode(camera_lens_shading_map_mode_type_t lensSh } int Parameters::getLensShadingMapMode( - 
camera_lens_shading_map_mode_type_t &lensShadingMapMode) const -{ + camera_lens_shading_map_mode_type_t& lensShadingMapMode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_STATISTICS_LENS_SHADING_MAP_MODE); if (entry.count != 1) { @@ -1971,15 +1810,13 @@ int Parameters::getLensShadingMapMode( return OK; } -int Parameters::setLensShadingMap(const float *lensShadingMap, size_t lensShadingMapSize) -{ +int Parameters::setLensShadingMap(const float* lensShadingMap, size_t lensShadingMapSize) { ParameterHelper::AutoWLock wl(mData); return ParameterHelper::getMetadata(mData).update(CAMERA_STATISTICS_LENS_SHADING_MAP, lensShadingMap, lensShadingMapSize); } -int Parameters::getLensShadingMap(float **lensShadingMap, size_t &lensShadingMapSize) const -{ +int Parameters::getLensShadingMap(float** lensShadingMap, size_t& lensShadingMapSize) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_STATISTICS_LENS_SHADING_MAP); @@ -1992,8 +1829,7 @@ int Parameters::getLensShadingMap(float **lensShadingMap, size_t &lensShadingMap return OK; } -int Parameters::getLensInfoShadingMapSize(camera_coordinate_t &shadingMapSize) const -{ +int Parameters::getLensInfoShadingMapSize(camera_coordinate_t& shadingMapSize) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_LENS_INFO_SHADING_MAP_SIZE); if (entry.count <= 0) { @@ -2075,12 +1911,12 @@ int Parameters::getTonemapMaxCurvePoints(int32_t& number) const { int Parameters::setTonemapCurves(const camera_tonemap_curves_t& curves) { ParameterHelper::AutoWLock wl(mData); - int ret = ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_RED, - curves.rCurve, curves.rSize); - ret |= ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_BLUE, - curves.bCurve, curves.bSize); - ret |= ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_GREEN, - 
curves.gCurve, curves.gSize); + int ret = ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_RED, curves.rCurve, + curves.rSize); + ret |= ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_BLUE, curves.bCurve, + curves.bSize); + ret |= ParameterHelper::getMetadata(mData).update(CAMERA_TONEMAP_CURVE_GREEN, curves.gCurve, + curves.gSize); return ret; } @@ -2111,11 +1947,11 @@ int Parameters::getTonemapCurves(camera_tonemap_curves_t& curves) const { int Parameters::setRawDataOutput(raw_data_output_t mode) { uint8_t value = mode; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT, - &value, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT, &value, + 1); } -int Parameters::getRawDataOutput(raw_data_output_t &mode) const { +int Parameters::getRawDataOutput(raw_data_output_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT); if (entry.count != 1) { @@ -2132,7 +1968,7 @@ int Parameters::setPowerMode(camera_power_mode_t mode) { return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_POWER_MODE, &value, 1); } -int Parameters::getPowerMode(camera_power_mode_t &mode) const { +int Parameters::getPowerMode(camera_power_mode_t& mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_POWER_MODE); if (entry.count != 1) { @@ -2149,10 +1985,10 @@ int Parameters::setTotalExposureTarget(int64_t totalExposureTarget) { &totalExposureTarget, 1); } -int Parameters::getTotalExposureTarget(int64_t &totalExposureTarget) const { +int Parameters::getTotalExposureTarget(int64_t& totalExposureTarget) const { ParameterHelper::AutoRLock rl(mData); - auto entry = ParameterHelper::getMetadataEntry(mData, - INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET); + auto entry = + 
ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET); if (entry.count != 1) { return NAME_NOT_FOUND; } @@ -2179,15 +2015,13 @@ int Parameters::getUserRequestId(int32_t& userRequestId) const { return OK; } -int Parameters::setCaptureIntent(uint8_t captureIntent) -{ +int Parameters::setCaptureIntent(uint8_t captureIntent) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_CAPTUREINTENT, - &captureIntent, 1); + return ParameterHelper::getMetadata(mData).update(CAMERA_CONTROL_CAPTUREINTENT, &captureIntent, + 1); } -int Parameters::getCaptureIntent(uint8_t& captureIntent) const -{ +int Parameters::getCaptureIntent(uint8_t& captureIntent) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, CAMERA_CONTROL_CAPTUREINTENT); if (entry.count != 1) { @@ -2197,16 +2031,14 @@ int Parameters::getCaptureIntent(uint8_t& captureIntent) const return OK; } -int Parameters::setCallbackRgbs(bool enabled) -{ +int Parameters::setCallbackRgbs(bool enabled) { uint8_t lockValue = enabled ? 1 : 0; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_CALLBACK_RGBS, - &lockValue, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_CALLBACK_RGBS, &lockValue, + 1); } -int Parameters::getCallbackRgbs(bool *enabled) const -{ +int Parameters::getCallbackRgbs(bool* enabled) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_CALLBACK_RGBS); if (entry.count != 1) { @@ -2216,16 +2048,14 @@ int Parameters::getCallbackRgbs(bool *enabled) const return OK; } -int Parameters::setCallbackTmCurve(bool enabled) -{ +int Parameters::setCallbackTmCurve(bool enabled) { uint8_t value = enabled ? 
1 : 0; ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_CALLBACK_TM_CURVE, - &value, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_CALLBACK_TM_CURVE, &value, + 1); } -int Parameters::getCallbackTmCurve(bool *enabled) const -{ +int Parameters::getCallbackTmCurve(bool* enabled) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_CALLBACK_TM_CURVE); if (entry.count != 1) { @@ -2236,14 +2066,11 @@ int Parameters::getCallbackTmCurve(bool *enabled) const } // ENABLE_EVCP_S -int Parameters::setEvcpEccMode(uint8_t enabled) -{ +int Parameters::setEvcpEccMode(uint8_t enabled) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_ECC_MODE, - &enabled, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_ECC_MODE, &enabled, 1); } -int Parameters::getEvcpEccMode(uint8_t* enabled) const -{ +int Parameters::getEvcpEccMode(uint8_t* enabled) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_IC_ECC_MODE); @@ -2255,14 +2082,11 @@ int Parameters::getEvcpEccMode(uint8_t* enabled) const return OK; } -int Parameters::setEvcpBCMode(uint8_t mode) -{ +int Parameters::setEvcpBCMode(uint8_t mode) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_BC_MODE, - &mode, 1); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_BC_MODE, &mode, 1); } -int Parameters::getEvcpBCMode(uint8_t* mode) const -{ +int Parameters::getEvcpBCMode(uint8_t* mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_IC_BC_MODE); @@ -2275,16 +2099,14 @@ int Parameters::getEvcpBCMode(uint8_t* mode) const return OK; } -int Parameters::setEvcpBRParameters(int 
height, int width, int fd) -{ +int Parameters::setEvcpBRParameters(int height, int width, int fd) { ParameterHelper::AutoWLock wl(mData); int32_t values[3] = {width, height, fd}; - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_BR_PARAMETERS, - values, 3); + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_BR_PARAMETERS, values, + 3); } -int Parameters::getEvcpBRParameters(int* height, int* width, int* fd) const -{ +int Parameters::getEvcpBRParameters(int* height, int* width, int* fd) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_IC_BR_PARAMETERS); @@ -2299,14 +2121,11 @@ int Parameters::getEvcpBRParameters(int* height, int* width, int* fd) const return OK; } -int Parameters::setEvcpFFMode(uint8_t mode) -{ +int Parameters::setEvcpFFMode(uint8_t mode) { ParameterHelper::AutoWLock wl(mData); - return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_FF_MODE, - &mode, 1);; + return ParameterHelper::getMetadata(mData).update(INTEL_VENDOR_CAMERA_IC_FF_MODE, &mode, 1); } -int Parameters::getEvcpFFMode(uint8_t* mode) const -{ +int Parameters::getEvcpFFMode(uint8_t* mode) const { ParameterHelper::AutoRLock rl(mData); auto entry = ParameterHelper::getMetadataEntry(mData, INTEL_VENDOR_CAMERA_IC_FF_MODE); @@ -2322,8 +2141,7 @@ int Parameters::getEvcpFFMode(uint8_t* mode) const int Parameters::setZoomRegion(const camera_zoom_region_t& region) { ParameterHelper::AutoWLock wl(mData); int32_t values[4] = {region.left, region.top, region.right, region.bottom}; - return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SCALER_CROP_REGION, - values, 4); + return ParameterHelper::getMetadata(mData).update(INTEL_CONTROL_SCALER_CROP_REGION, values, 4); } int Parameters::getZoomRegion(camera_zoom_region_t* region) const { @@ -2342,4 +2160,4 @@ int Parameters::getZoomRegion(camera_zoom_region_t* region) const { return OK; } -} // end of 
namespace icamera +} // end of namespace icamera diff --git a/src/metadata/icamera_metadata_base.cpp b/src/metadata/icamera_metadata_base.cpp index 96d3b425..1d9bb0e4 100644 --- a/src/metadata/icamera_metadata_base.cpp +++ b/src/metadata/icamera_metadata_base.cpp @@ -33,16 +33,16 @@ * array; otherwise, it can found in the parent's data array at index * data.offset. */ -#define ENTRY_ALIGNMENT ((size_t) 4) +#define ENTRY_ALIGNMENT ((size_t)4) typedef struct camera_metadata_buffer_entry { uint32_t tag; uint32_t count; union { uint32_t offset; - uint8_t value[4]; + uint8_t value[4]; } data; - uint8_t type; - uint8_t reserved[3]; + uint8_t type; + uint8_t reserved[3]; } camera_metadata_buffer_entry_t; typedef uint32_t metadata_uptrdiff_t; @@ -82,18 +82,18 @@ typedef uint32_t metadata_size_t; * In short, the entries and data are contiguous in memory after the metadata * header. */ -#define METADATA_ALIGNMENT ((size_t) 4) +#define METADATA_ALIGNMENT ((size_t)4) struct icamera_metadata { - metadata_size_t size; - uint32_t version; - uint32_t flags; - metadata_size_t entry_count; - metadata_size_t entry_capacity; - metadata_uptrdiff_t entries_start; // Offset from camera_metadata - metadata_size_t data_count; - metadata_size_t data_capacity; - metadata_uptrdiff_t data_start; // Offset from camera_metadata - uint8_t reserved[]; + metadata_size_t size; + uint32_t version; + uint32_t flags; + metadata_size_t entry_count; + metadata_size_t entry_capacity; + metadata_uptrdiff_t entries_start; // Offset from camera_metadata + metadata_size_t data_count; + metadata_size_t data_capacity; + metadata_uptrdiff_t data_start; // Offset from camera_metadata + uint8_t reserved[]; }; /** @@ -102,13 +102,13 @@ struct icamera_metadata { * non-pointer type description in order to figure out the largest alignment * requirement for data (DATA_ALIGNMENT). 
*/ -#define DATA_ALIGNMENT ((size_t) 8) +#define DATA_ALIGNMENT ((size_t)8) typedef union camera_metadata_data { uint8_t u8; int32_t i32; - float f; + float f; int64_t i64; - double d; + double d; icamera_metadata_rational_t r; } camera_metadata_data_t; @@ -131,30 +131,28 @@ typedef union camera_metadata_data { #include "vendor_metadata_tag_info.c" const size_t icamera_metadata_type_size[ICAMERA_NUM_TYPES] = { - sizeof(uint8_t), // ICAMERA_TYPE_BYTE - sizeof(int32_t), // ICAMERA_TYPE_INT32 - sizeof(float), // ICAMERA_TYPE_FLOAT - sizeof(int64_t), // ICAMERA_TYPE_INT64 - sizeof(double), // ICAMERA_TYPE_DOUBLE - sizeof(icamera_metadata_rational_t) // ICAMERA_TYPE_RATIONAL + sizeof(uint8_t), // ICAMERA_TYPE_BYTE + sizeof(int32_t), // ICAMERA_TYPE_INT32 + sizeof(float), // ICAMERA_TYPE_FLOAT + sizeof(int64_t), // ICAMERA_TYPE_INT64 + sizeof(double), // ICAMERA_TYPE_DOUBLE + sizeof(icamera_metadata_rational_t) // ICAMERA_TYPE_RATIONAL }; -const char *icamera_metadata_type_names[ICAMERA_NUM_TYPES] = { - "byte", // ICAMERA_TYPE_BYTE - "int32", // ICAMERA_TYPE_INT32 - "float", // ICAMERA_TYPE_FLOAT - "int64", // ICAMERA_TYPE_INT64 - "double", // ICAMERA_TYPE_DOUBLE - "rational" // ICAMERA_TYPE_RATIONAL +const char* icamera_metadata_type_names[ICAMERA_NUM_TYPES] = { + "byte", // ICAMERA_TYPE_BYTE + "int32", // ICAMERA_TYPE_INT32 + "float", // ICAMERA_TYPE_FLOAT + "int64", // ICAMERA_TYPE_INT64 + "double", // ICAMERA_TYPE_DOUBLE + "rational" // ICAMERA_TYPE_RATIONAL }; -static camera_metadata_buffer_entry_t *get_entries( - const icamera_metadata_t *metadata) { - return (camera_metadata_buffer_entry_t*) - ((uint8_t*)metadata + metadata->entries_start); +static camera_metadata_buffer_entry_t* get_entries(const icamera_metadata_t* metadata) { + return (camera_metadata_buffer_entry_t*)((uint8_t*)metadata + metadata->entries_start); } -static uint8_t *get_data(const icamera_metadata_t *metadata) { +static uint8_t* get_data(const icamera_metadata_t* metadata) { return 
(uint8_t*)metadata + metadata->data_start; } @@ -162,18 +160,16 @@ size_t get_icamera_metadata_alignment() { return METADATA_PACKET_ALIGNMENT; } -icamera_metadata_t *allocate_copy_icamera_metadata_checked( - const icamera_metadata_t *src, - size_t src_size) { - +icamera_metadata_t* allocate_copy_icamera_metadata_checked(const icamera_metadata_t* src, + size_t src_size) { if (src == NULL) { return NULL; } - void *buffer = malloc(src_size); + void* buffer = malloc(src_size); MEMCPY_S(buffer, src_size, src, src_size); - icamera_metadata_t *metadata = (icamera_metadata_t*) buffer; + icamera_metadata_t* metadata = (icamera_metadata_t*)buffer; if (validate_icamera_metadata_structure(metadata, &src_size) != icamera::OK) { free(buffer); return NULL; @@ -182,50 +178,40 @@ icamera_metadata_t *allocate_copy_icamera_metadata_checked( return metadata; } -icamera_metadata_t *allocate_icamera_metadata(size_t entry_capacity, - size_t data_capacity) { - - size_t memory_needed = calculate_icamera_metadata_size(entry_capacity, - data_capacity); - void *buffer = malloc(memory_needed); - return place_icamera_metadata(buffer, memory_needed, - entry_capacity, - data_capacity); +icamera_metadata_t* allocate_icamera_metadata(size_t entry_capacity, size_t data_capacity) { + size_t memory_needed = calculate_icamera_metadata_size(entry_capacity, data_capacity); + void* buffer = malloc(memory_needed); + return place_icamera_metadata(buffer, memory_needed, entry_capacity, data_capacity); } -icamera_metadata_t *place_icamera_metadata(void *dst, - size_t dst_size, - size_t entry_capacity, +icamera_metadata_t* place_icamera_metadata(void* dst, size_t dst_size, size_t entry_capacity, size_t data_capacity) { if (dst == NULL) return NULL; - size_t memory_needed = calculate_icamera_metadata_size(entry_capacity, - data_capacity); + size_t memory_needed = calculate_icamera_metadata_size(entry_capacity, data_capacity); if (memory_needed > dst_size) return NULL; - icamera_metadata_t *metadata = 
(icamera_metadata_t*)dst; + icamera_metadata_t* metadata = (icamera_metadata_t*)dst; metadata->version = CURRENT_METADATA_VERSION; metadata->flags = 0; metadata->entry_count = 0; metadata->entry_capacity = entry_capacity; - metadata->entries_start = - ALIGN_TO(sizeof(icamera_metadata_t), ENTRY_ALIGNMENT); + metadata->entries_start = ALIGN_TO(sizeof(icamera_metadata_t), ENTRY_ALIGNMENT); metadata->data_count = 0; metadata->data_capacity = data_capacity; metadata->size = memory_needed; - size_t data_unaligned = (uint8_t*)(get_entries(metadata) + - metadata->entry_capacity) - (uint8_t*)metadata; + size_t data_unaligned = + (uint8_t*)(get_entries(metadata) + metadata->entry_capacity) - (uint8_t*)metadata; metadata->data_start = ALIGN_TO(data_unaligned, DATA_ALIGNMENT); assert(validate_icamera_metadata_structure(metadata, NULL) == icamera::OK); return metadata; } -void free_icamera_metadata(icamera_metadata_t *metadata) { +void free_icamera_metadata(icamera_metadata_t* metadata) { free(metadata); } -size_t calculate_icamera_metadata_size(size_t entry_count, - size_t data_count) { +size_t calculate_icamera_metadata_size(size_t entry_count, size_t data_count) { size_t memory_needed = sizeof(icamera_metadata_t); // Start entry list at aligned boundary memory_needed = ALIGN_TO(memory_needed, ENTRY_ALIGNMENT); @@ -236,43 +222,42 @@ size_t calculate_icamera_metadata_size(size_t entry_count, return memory_needed; } -size_t get_icamera_metadata_size(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_size(const icamera_metadata_t* metadata) { if (metadata == NULL) return icamera::UNKNOWN_ERROR; return metadata->size; } -size_t get_icamera_metadata_compact_size(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_compact_size(const icamera_metadata_t* metadata) { if (metadata == NULL) return icamera::UNKNOWN_ERROR; - return calculate_icamera_metadata_size(metadata->entry_count, - metadata->data_count); + return 
calculate_icamera_metadata_size(metadata->entry_count, metadata->data_count); } -size_t get_icamera_metadata_entry_count(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_entry_count(const icamera_metadata_t* metadata) { return metadata->entry_count; } -size_t get_icamera_metadata_entry_capacity(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_entry_capacity(const icamera_metadata_t* metadata) { return metadata->entry_capacity; } -size_t get_icamera_metadata_data_count(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_data_count(const icamera_metadata_t* metadata) { return metadata->data_count; } -size_t get_icamera_metadata_data_capacity(const icamera_metadata_t *metadata) { +size_t get_icamera_metadata_data_capacity(const icamera_metadata_t* metadata) { return metadata->data_capacity; } -icamera_metadata_t* copy_icamera_metadata(void *dst, size_t dst_size, - const icamera_metadata_t *src) { +icamera_metadata_t* copy_icamera_metadata(void* dst, size_t dst_size, + const icamera_metadata_t* src) { size_t memory_needed = get_icamera_metadata_compact_size(src); if (dst == NULL) return NULL; if (dst_size < memory_needed) return NULL; - icamera_metadata_t *metadata = + icamera_metadata_t* metadata = place_icamera_metadata(dst, dst_size, src->entry_count, src->data_count); if (metadata == NULL) { @@ -283,19 +268,18 @@ icamera_metadata_t* copy_icamera_metadata(void *dst, size_t dst_size, metadata->entry_count = src->entry_count; metadata->data_count = src->data_count; - MEMCPY_S(get_entries(metadata),sizeof(camera_metadata_buffer_entry_t[metadata->entry_count]), + MEMCPY_S(get_entries(metadata), sizeof(camera_metadata_buffer_entry_t[metadata->entry_count]), get_entries(src), sizeof(camera_metadata_buffer_entry_t[metadata->entry_count])); - MEMCPY_S(get_data(metadata), sizeof(uint8_t[metadata->data_count]), - get_data(src), sizeof(uint8_t[metadata->data_count])); + MEMCPY_S(get_data(metadata), 
sizeof(uint8_t[metadata->data_count]), get_data(src), + sizeof(uint8_t[metadata->data_count])); assert(validate_icamera_metadata_structure(metadata, NULL) == icamera::OK); return metadata; } -int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, - const size_t *expected_size) { - if (!icamera::Log::isDebugLevelEnable(icamera::CAMERA_DEBUG_LOG_METADATA)) - return icamera::OK; +int validate_icamera_metadata_structure(const icamera_metadata_t* metadata, + const size_t* expected_size) { + if (!icamera::Log::isDebugLevelEnable(icamera::CAMERA_DEBUG_LOG_METADATA)) return icamera::OK; if (metadata == NULL) { LOGE("%s: metadata is null!", __func__); @@ -305,31 +289,22 @@ int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, // Check that the metadata pointer is well-aligned first. { static const struct { - const char *name; + const char* name; size_t alignment; } alignments[] = { - { - .name = "icamera_metadata", - .alignment = METADATA_ALIGNMENT - }, - { - .name = "camera_metadata_buffer_entry", - .alignment = ENTRY_ALIGNMENT - }, - { - .name = "camera_metadata_data", - .alignment = DATA_ALIGNMENT - }, + {.name = "icamera_metadata", .alignment = METADATA_ALIGNMENT}, + {.name = "camera_metadata_buffer_entry", .alignment = ENTRY_ALIGNMENT}, + {.name = "camera_metadata_data", .alignment = DATA_ALIGNMENT}, }; size_t i = 0; - for (i = 0; i < sizeof(alignments)/sizeof(alignments[0]); ++i) { + for (i = 0; i < sizeof(alignments) / sizeof(alignments[0]); ++i) { uintptr_t aligned_ptr = ALIGN_TO(metadata, alignments[i].alignment); if ((uintptr_t)metadata != aligned_ptr) { LOGE("%s: Metadata pointer is not aligned (actual %p, " - "expected %p) to type %s", __func__, metadata, - (void*)aligned_ptr, alignments[i].name); + "expected %p) to type %s", + __func__, metadata, (void*)aligned_ptr, alignments[i].name); return icamera::UNKNOWN_ERROR; } } @@ -340,8 +315,8 @@ int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, */ 
if (expected_size != NULL && metadata->size > *expected_size) { - LOGE("%s: Metadata size (%" PRIu32 ") should be <= expected size (%zu)", - __func__, metadata->size, *expected_size); + LOGE("%s: Metadata size (%" PRIu32 ") should be <= expected size (%zu)", __func__, + metadata->size, *expected_size); return icamera::UNKNOWN_ERROR; } @@ -352,38 +327,30 @@ int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, return icamera::UNKNOWN_ERROR; } - const metadata_uptrdiff_t entries_end = - metadata->entries_start + metadata->entry_capacity; - if (entries_end < metadata->entries_start || // overflow check + const metadata_uptrdiff_t entries_end = metadata->entries_start + metadata->entry_capacity; + if (entries_end < metadata->entries_start || // overflow check entries_end > metadata->data_start) { - LOGE("%s: Entry start + capacity (%" PRIu32 ") should be <= data start " - "(%" PRIu32 ")", __func__, - (metadata->entries_start + metadata->entry_capacity), - metadata->data_start); + "(%" PRIu32 ")", + __func__, (metadata->entries_start + metadata->entry_capacity), metadata->data_start); return icamera::UNKNOWN_ERROR; } - const metadata_uptrdiff_t data_end = - metadata->data_start + metadata->data_capacity; - if (data_end < metadata->data_start || // overflow check + const metadata_uptrdiff_t data_end = metadata->data_start + metadata->data_capacity; + if (data_end < metadata->data_start || // overflow check data_end > metadata->size) { - LOGE("%s: Data start + capacity (%" PRIu32 ") should be <= total size " "(%" PRIu32 ")", - __func__, - (metadata->data_start + metadata->data_capacity), - metadata->size); + __func__, (metadata->data_start + metadata->data_capacity), metadata->size); return icamera::UNKNOWN_ERROR; } // Validate each entry const metadata_size_t entry_count = metadata->entry_count; - camera_metadata_buffer_entry_t *entries = get_entries(metadata); + camera_metadata_buffer_entry_t* entries = get_entries(metadata); size_t i = 0; for (i = 
0; i < entry_count; ++i) { - if ((uintptr_t)&entries[i] != ALIGN_TO(&entries[i], ENTRY_ALIGNMENT)) { LOGE("%s: Entry index %zu had bad alignment (address %p)," " expected alignment %zu", @@ -394,62 +361,56 @@ int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, camera_metadata_buffer_entry_t entry = entries[i]; if (entry.type >= ICAMERA_NUM_TYPES) { - LOGE("%s: Entry index %zu had a bad type %d", - __func__, i, entry.type); + LOGE("%s: Entry index %zu had a bad type %d", __func__, i, entry.type); return icamera::UNKNOWN_ERROR; } int tag_type = get_icamera_metadata_tag_type(entry.tag); if (tag_type != (int)entry.type) { - LOGE("%s: Entry index %zu had tag type %d, but the type was %d", - __func__, i, tag_type, entry.type); + LOGE("%s: Entry index %zu had tag type %d, but the type was %d", __func__, i, tag_type, + entry.type); return icamera::UNKNOWN_ERROR; } - size_t data_size = - calculate_icamera_metadata_entry_data_size(entry.type, - entry.count); + size_t data_size = calculate_icamera_metadata_entry_data_size(entry.type, entry.count); if (data_size != 0) { - camera_metadata_data_t *data = - (camera_metadata_data_t*) (get_data(metadata) + - entry.data.offset); + camera_metadata_data_t* data = + (camera_metadata_data_t*)(get_data(metadata) + entry.data.offset); if ((uintptr_t)data != ALIGN_TO(data, DATA_ALIGNMENT)) { LOGE("%s: Entry index %zu had bad data alignment (address %p)," " expected align %zu, (tag name %s, data size %zu)", __func__, i, data, DATA_ALIGNMENT, - get_icamera_metadata_tag_name(entry.tag) ?: "unknown", - data_size); + get_icamera_metadata_tag_name(entry.tag) ?: "unknown", data_size); return icamera::UNKNOWN_ERROR; } size_t data_entry_end = entry.data.offset + data_size; - if (data_entry_end < entry.data.offset || // overflow check + if (data_entry_end < entry.data.offset || // overflow check data_entry_end > metadata->data_capacity) { - LOGE("%s: Entry index %zu data ends (%zu) beyond the capacity " - "%" PRIu32, 
__func__, i, data_entry_end, - metadata->data_capacity); + "%" PRIu32, + __func__, i, data_entry_end, metadata->data_capacity); return icamera::UNKNOWN_ERROR; } } else if (entry.count == 0) { if (entry.data.offset != 0) { LOGE("%s: Entry index %zu had 0 items, but offset was non-0 " - "(%" PRIu32 "), tag name: %s", __func__, i, entry.data.offset, + "(%" PRIu32 "), tag name: %s", + __func__, i, entry.data.offset, get_icamera_metadata_tag_name(entry.tag) ?: "unknown"); return icamera::UNKNOWN_ERROR; } - } // else data stored inline, so we look at value which can be anything. + } // else data stored inline, so we look at value which can be anything. } return icamera::OK; } -int append_icamera_metadata(icamera_metadata_t *dst, - const icamera_metadata_t *src) { - if (dst == NULL || src == NULL ) return icamera::UNKNOWN_ERROR; +int append_icamera_metadata(icamera_metadata_t* dst, const icamera_metadata_t* src) { + if (dst == NULL || src == NULL) return icamera::UNKNOWN_ERROR; if (dst->entry_capacity < src->entry_count + dst->entry_count) return icamera::UNKNOWN_ERROR; if (dst->data_capacity < src->data_count + dst->data_count) return icamera::UNKNOWN_ERROR; @@ -467,11 +428,10 @@ int append_icamera_metadata(icamera_metadata_t *dst, MEMCPY_S(get_data(dst) + dst->data_count, sizeof(uint8_t[dst->data_capacity - dst->data_count]), get_data(src), sizeof(uint8_t[src->data_count])); if (dst->data_count != 0) { - camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count; + camera_metadata_buffer_entry_t* entry = get_entries(dst) + dst->entry_count; size_t i = 0; for (i = 0; i < src->entry_count; i++, entry++) { - if ( calculate_icamera_metadata_entry_data_size(entry->type, - entry->count) > 0 ) { + if (calculate_icamera_metadata_entry_data_size(entry->type, entry->count) > 0) { entry->data.offset += dst->data_count; } } @@ -492,11 +452,10 @@ int append_icamera_metadata(icamera_metadata_t *dst, return icamera::OK; } -icamera_metadata_t 
*clone_icamera_metadata(const icamera_metadata_t *src) { +icamera_metadata_t* clone_icamera_metadata(const icamera_metadata_t* src) { if (src == NULL) return NULL; - icamera_metadata_t *clone = allocate_icamera_metadata( - get_icamera_metadata_entry_count(src), - get_icamera_metadata_data_count(src)); + icamera_metadata_t* clone = allocate_icamera_metadata(get_icamera_metadata_entry_count(src), + get_icamera_metadata_data_count(src)); if (clone != NULL) { int res = append_icamera_metadata(clone, src); if (res != icamera::OK) { @@ -508,35 +467,27 @@ icamera_metadata_t *clone_icamera_metadata(const icamera_metadata_t *src) { return clone; } -size_t calculate_icamera_metadata_entry_data_size(uint8_t type, - size_t data_count) { +size_t calculate_icamera_metadata_entry_data_size(uint8_t type, size_t data_count) { if (type >= ICAMERA_NUM_TYPES) return 0; - size_t data_bytes = data_count * - icamera_metadata_type_size[type]; + size_t data_bytes = data_count * icamera_metadata_type_size[type]; return data_bytes <= 4 ? 
0 : ALIGN_TO(data_bytes, DATA_ALIGNMENT); } -static int add_camera_metadata_entry_raw(icamera_metadata_t *dst, - uint32_t tag, - uint8_t type, - const void *data, - size_t data_count) { - +static int add_camera_metadata_entry_raw(icamera_metadata_t* dst, uint32_t tag, uint8_t type, + const void* data, size_t data_count) { if (dst == NULL) return icamera::UNKNOWN_ERROR; if (dst->entry_count == dst->entry_capacity) return icamera::UNKNOWN_ERROR; if (data == NULL) return icamera::UNKNOWN_ERROR; - size_t data_bytes = - calculate_icamera_metadata_entry_data_size(type, data_count); + size_t data_bytes = calculate_icamera_metadata_entry_data_size(type, data_count); if (data_bytes + dst->data_count > dst->data_capacity) return icamera::UNKNOWN_ERROR; if (type >= ICAMERA_NUM_TYPES) { LOGE("%s: Bad type %d", __func__, type); return icamera::UNKNOWN_ERROR; } - size_t data_payload_bytes = - data_count * icamera_metadata_type_size[type]; - camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count; + size_t data_payload_bytes = data_count * icamera_metadata_type_size[type]; + camera_metadata_buffer_entry_t* entry = get_entries(dst) + dst->entry_count; memset(entry, 0, sizeof(camera_metadata_buffer_entry_t)); entry->tag = tag; entry->type = type; @@ -555,59 +506,47 @@ static int add_camera_metadata_entry_raw(icamera_metadata_t *dst, return icamera::OK; } -int add_icamera_metadata_entry(icamera_metadata_t *dst, - uint32_t tag, - const void *data, - size_t data_count) { - +int add_icamera_metadata_entry(icamera_metadata_t* dst, uint32_t tag, const void* data, + size_t data_count) { int type = get_icamera_metadata_tag_type(tag); if (type == -1) { LOGE("%s: Unknown tag %04x.", __func__, tag); return icamera::UNKNOWN_ERROR; } - return add_camera_metadata_entry_raw(dst, - tag, - type, - data, - data_count); + return add_camera_metadata_entry_raw(dst, tag, type, data, data_count); } -static int compare_entry_tags(const void *p1, const void *p2) { +static int 
compare_entry_tags(const void* p1, const void* p2) { uint32_t tag1 = ((camera_metadata_buffer_entry_t*)p1)->tag; uint32_t tag2 = ((camera_metadata_buffer_entry_t*)p2)->tag; - return tag1 < tag2 ? -1 : - tag1 == tag2 ? 0 : - 1; + return tag1 < tag2 ? -1 : tag1 == tag2 ? 0 : 1; } -int sort_icamera_metadata(icamera_metadata_t *dst) { +int sort_icamera_metadata(icamera_metadata_t* dst) { if (dst == NULL) return icamera::UNKNOWN_ERROR; if (dst->flags & FLAG_SORTED) return icamera::OK; - qsort(get_entries(dst), dst->entry_count, - sizeof(camera_metadata_buffer_entry_t), - compare_entry_tags); + qsort(get_entries(dst), dst->entry_count, sizeof(camera_metadata_buffer_entry_t), + compare_entry_tags); dst->flags |= FLAG_SORTED; assert(validate_icamera_metadata_structure(dst, NULL) == icamera::OK); return icamera::OK; } -int get_icamera_metadata_entry(icamera_metadata_t *src, - size_t index, - icamera_metadata_entry_t *entry) { +int get_icamera_metadata_entry(icamera_metadata_t* src, size_t index, + icamera_metadata_entry_t* entry) { if (src == NULL || entry == NULL) return icamera::UNKNOWN_ERROR; if (index >= src->entry_count) return icamera::UNKNOWN_ERROR; - camera_metadata_buffer_entry_t *buffer_entry = get_entries(src) + index; + camera_metadata_buffer_entry_t* buffer_entry = get_entries(src) + index; entry->index = index; entry->tag = buffer_entry->tag; entry->type = buffer_entry->type; entry->count = buffer_entry->count; - if (buffer_entry->count * - icamera_metadata_type_size[buffer_entry->type] > 4) { + if (buffer_entry->count * icamera_metadata_type_size[buffer_entry->type] > 4) { entry->data.u8 = get_data(src) + buffer_entry->data.offset; } else { entry->data.u8 = buffer_entry->data.value; @@ -615,34 +554,30 @@ int get_icamera_metadata_entry(icamera_metadata_t *src, return icamera::OK; } -int get_icamera_metadata_ro_entry(const icamera_metadata_t *src, - size_t index, - icamera_metadata_ro_entry_t *entry) { +int get_icamera_metadata_ro_entry(const 
icamera_metadata_t* src, size_t index, + icamera_metadata_ro_entry_t* entry) { return get_icamera_metadata_entry((icamera_metadata_t*)src, index, - (icamera_metadata_entry_t*)entry); + (icamera_metadata_entry_t*)entry); } -int find_icamera_metadata_entry(icamera_metadata_t *src, - uint32_t tag, - icamera_metadata_entry_t *entry) { +int find_icamera_metadata_entry(icamera_metadata_t* src, uint32_t tag, + icamera_metadata_entry_t* entry) { if (src == NULL) return icamera::UNKNOWN_ERROR; uint32_t index; if (src->flags & FLAG_SORTED) { // Sorted entries, do a binary search - camera_metadata_buffer_entry_t *search_entry = NULL; + camera_metadata_buffer_entry_t* search_entry = NULL; camera_metadata_buffer_entry_t key; key.tag = tag; - search_entry = (camera_metadata_buffer_entry_t *)bsearch(&key, - get_entries(src), - src->entry_count, - sizeof(camera_metadata_buffer_entry_t), - compare_entry_tags); + search_entry = (camera_metadata_buffer_entry_t*)bsearch( + &key, get_entries(src), src->entry_count, sizeof(camera_metadata_buffer_entry_t), + compare_entry_tags); if (search_entry == NULL) return icamera::NAME_NOT_FOUND; index = search_entry - get_entries(src); } else { // Not sorted, linear search - camera_metadata_buffer_entry_t *search_entry = get_entries(src); + camera_metadata_buffer_entry_t* search_entry = get_entries(src); for (index = 0; index < src->entry_count; index++, search_entry++) { if (search_entry->tag == tag) { break; @@ -651,40 +586,35 @@ int find_icamera_metadata_entry(icamera_metadata_t *src, if (index == src->entry_count) return icamera::NAME_NOT_FOUND; } - return get_icamera_metadata_entry(src, index, - entry); + return get_icamera_metadata_entry(src, index, entry); } -int find_icamera_metadata_ro_entry(const icamera_metadata_t *src, - uint32_t tag, - icamera_metadata_ro_entry_t *entry) { +int find_icamera_metadata_ro_entry(const icamera_metadata_t* src, uint32_t tag, + icamera_metadata_ro_entry_t* entry) { return 
find_icamera_metadata_entry((icamera_metadata_t*)src, tag, - (icamera_metadata_entry_t*)entry); + (icamera_metadata_entry_t*)entry); } -int delete_icamera_metadata_entry(icamera_metadata_t *dst, - size_t index) { +int delete_icamera_metadata_entry(icamera_metadata_t* dst, size_t index) { if (dst == NULL) return icamera::UNKNOWN_ERROR; if (index >= dst->entry_count) return icamera::UNKNOWN_ERROR; - camera_metadata_buffer_entry_t *entry = get_entries(dst) + index; - size_t data_bytes = calculate_icamera_metadata_entry_data_size(entry->type, - entry->count); + camera_metadata_buffer_entry_t* entry = get_entries(dst) + index; + size_t data_bytes = calculate_icamera_metadata_entry_data_size(entry->type, entry->count); if (data_bytes > 0) { // Shift data buffer to overwrite deleted data - uint8_t *start = get_data(dst) + entry->data.offset; - uint8_t *end = start + data_bytes; + uint8_t* start = get_data(dst) + entry->data.offset; + uint8_t* end = start + data_bytes; size_t length = dst->data_count - entry->data.offset - data_bytes; memmove(start, end, length); // Update all entry indices to account for shift - camera_metadata_buffer_entry_t *e = get_entries(dst); + camera_metadata_buffer_entry_t* e = get_entries(dst); size_t i; for (i = 0; i < dst->entry_count; i++) { - if (calculate_icamera_metadata_entry_data_size( - e->type, e->count) > 0 && - e->data.offset > entry->data.offset) { + if (calculate_icamera_metadata_entry_data_size(e->type, e->count) > 0 && + e->data.offset > entry->data.offset) { e->data.offset -= data_bytes; } ++e; @@ -693,34 +623,25 @@ int delete_icamera_metadata_entry(icamera_metadata_t *dst, } // Shift entry array memmove(entry, entry + 1, - sizeof(camera_metadata_buffer_entry_t) * - (dst->entry_count - index - 1) ); + sizeof(camera_metadata_buffer_entry_t) * (dst->entry_count - index - 1)); dst->entry_count -= 1; assert(validate_icamera_metadata_structure(dst, NULL) == icamera::OK); return icamera::OK; } -int 
update_icamera_metadata_entry(icamera_metadata_t *dst, - size_t index, - const void *data, - size_t data_count, - icamera_metadata_entry_t *updated_entry) { +int update_icamera_metadata_entry(icamera_metadata_t* dst, size_t index, const void* data, + size_t data_count, icamera_metadata_entry_t* updated_entry) { if (dst == NULL) return icamera::UNKNOWN_ERROR; if (index >= dst->entry_count) return icamera::UNKNOWN_ERROR; - camera_metadata_buffer_entry_t *entry = get_entries(dst) + index; + camera_metadata_buffer_entry_t* entry = get_entries(dst) + index; if (entry->type >= ICAMERA_NUM_TYPES) return icamera::UNKNOWN_ERROR; - size_t data_bytes = - calculate_icamera_metadata_entry_data_size(entry->type, - data_count); - size_t data_payload_bytes = - data_count * icamera_metadata_type_size[entry->type]; + size_t data_bytes = calculate_icamera_metadata_entry_data_size(entry->type, data_count); + size_t data_payload_bytes = data_count * icamera_metadata_type_size[entry->type]; - size_t entry_bytes = - calculate_icamera_metadata_entry_data_size(entry->type, - entry->count); + size_t entry_bytes = calculate_icamera_metadata_entry_data_size(entry->type, entry->count); if (data_bytes != entry_bytes) { // May need to shift/add to data array if (dst->data_capacity < dst->data_count + data_bytes - entry_bytes) { @@ -729,19 +650,18 @@ int update_icamera_metadata_entry(icamera_metadata_t *dst, } if (entry_bytes != 0) { // Remove old data - uint8_t *start = get_data(dst) + entry->data.offset; - uint8_t *end = start + entry_bytes; + uint8_t* start = get_data(dst) + entry->data.offset; + uint8_t* end = start + entry_bytes; size_t length = dst->data_count - entry->data.offset - entry_bytes; memmove(start, end, length); dst->data_count -= entry_bytes; // Update all entry indices to account for shift - camera_metadata_buffer_entry_t *e = get_entries(dst); + camera_metadata_buffer_entry_t* e = get_entries(dst); size_t i; for (i = 0; i < dst->entry_count; i++) { - if 
(calculate_icamera_metadata_entry_data_size( - e->type, e->count) > 0 && - e->data.offset > entry->data.offset) { + if (calculate_icamera_metadata_entry_data_size(e->type, e->count) > 0 && + e->data.offset > entry->data.offset) { e->data.offset -= entry_bytes; } ++e; @@ -752,7 +672,8 @@ int update_icamera_metadata_entry(icamera_metadata_t *dst, // Append new data entry->data.offset = dst->data_count; - MEMCPY_S(get_data(dst) + entry->data.offset, data_payload_bytes, data, data_payload_bytes); + MEMCPY_S(get_data(dst) + entry->data.offset, data_payload_bytes, data, + data_payload_bytes); dst->data_count += data_bytes; } } else if (data_bytes != 0) { @@ -768,16 +689,14 @@ int update_icamera_metadata_entry(icamera_metadata_t *dst, entry->count = data_count; if (updated_entry != NULL) { - get_icamera_metadata_entry(dst, - index, - updated_entry); + get_icamera_metadata_entry(dst, index, updated_entry); } assert(validate_icamera_metadata_structure(dst, NULL) == icamera::OK); return icamera::OK; } -const char *get_icamera_metadata_section_name(uint32_t tag) { +const char* get_icamera_metadata_section_name(uint32_t tag) { uint32_t tag_section = tag >> 16; if (tag_section < CAMERA_SECTION_COUNT) { return icamera_metadata_section_names[tag_section]; @@ -789,7 +708,7 @@ const char *get_icamera_metadata_section_name(uint32_t tag) { return nullptr; } -const char *get_icamera_metadata_tag_name(uint32_t tag) { +const char* get_icamera_metadata_tag_name(uint32_t tag) { uint32_t tag_section = tag >> 16; uint32_t tag_index = tag & 0xFFFF; @@ -829,37 +748,29 @@ int get_icamera_metadata_tag_type(uint32_t tag) { return -1; } -static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type, - int count, - int indentation); +static void print_data(int fd, const uint8_t* data_ptr, uint32_t tag, int type, int count, + int indentation); -void dump_icamera_metadata(const icamera_metadata_t *metadata, - int fd, - int verbosity) { +void dump_icamera_metadata(const 
icamera_metadata_t* metadata, int fd, int verbosity) { dump_indented_icamera_metadata(metadata, fd, verbosity, 0); } -void dump_indented_icamera_metadata(const icamera_metadata_t *metadata, - int fd, - int verbosity, - int indentation) { +void dump_indented_icamera_metadata(const icamera_metadata_t* metadata, int fd, int verbosity, + int indentation) { if (metadata == NULL) { - dprintf(fd, "%*sDumping camera metadata array: Not allocated\n", - indentation, ""); + dprintf(fd, "%*sDumping camera metadata array: Not allocated\n", indentation, ""); return; } unsigned int i; dprintf(fd, "%*sDumping camera metadata array: %" PRIu32 " / %" PRIu32 " entries, " - "%" PRIu32 " / %" PRIu32 " bytes of extra data.\n", indentation, "", - metadata->entry_count, metadata->entry_capacity, - metadata->data_count, metadata->data_capacity); - dprintf(fd, "%*sVersion: %d, Flags: %08x\n", - indentation + 2, "", - metadata->version, metadata->flags); - camera_metadata_buffer_entry_t *entry = get_entries(metadata); - for (i=0; i < metadata->entry_count; i++, entry++) { - + "%" PRIu32 " / %" PRIu32 " bytes of extra data.\n", + indentation, "", metadata->entry_count, metadata->entry_capacity, metadata->data_count, + metadata->data_capacity); + dprintf(fd, "%*sVersion: %d, Flags: %08x\n", indentation + 2, "", metadata->version, + metadata->flags); + camera_metadata_buffer_entry_t* entry = get_entries(metadata); + for (i = 0; i < metadata->entry_count; i++, entry++) { const char *tag_name, *tag_section; tag_section = get_icamera_metadata_section_name(entry->tag); if (tag_section == NULL) { @@ -869,32 +780,25 @@ void dump_indented_icamera_metadata(const icamera_metadata_t *metadata, if (tag_name == NULL) { tag_name = "unknownTag"; } - const char *type_name; + const char* type_name; if (entry->type >= ICAMERA_NUM_TYPES) { type_name = "unknown"; } else { type_name = icamera_metadata_type_names[entry->type]; } - dprintf(fd, "%*s%s.%s (%05x): %s[%" PRIu32 "]\n", - indentation + 2, "", - 
tag_section, - tag_name, - entry->tag, - type_name, - entry->count); + dprintf(fd, "%*s%s.%s (%05x): %s[%" PRIu32 "]\n", indentation + 2, "", tag_section, + tag_name, entry->tag, type_name, entry->count); if (verbosity < 1) continue; if (entry->type >= ICAMERA_NUM_TYPES) continue; size_t type_size = icamera_metadata_type_size[entry->type]; - uint8_t *data_ptr; - if ( type_size * entry->count > 4 ) { + uint8_t* data_ptr; + if (type_size * entry->count > 4) { if (entry->data.offset >= metadata->data_count) { - LOGE("%s: Malformed entry data offset: %" PRIu32 " (max %" PRIu32 ")", - __func__, - entry->data.offset, - metadata->data_count); + LOGE("%s: Malformed entry data offset: %" PRIu32 " (max %" PRIu32 ")", __func__, + entry->data.offset, metadata->data_count); continue; } data_ptr = get_data(metadata) + entry->data.offset; @@ -908,15 +812,15 @@ void dump_indented_icamera_metadata(const icamera_metadata_t *metadata, } } -static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, - int type, int count, int indentation) { +static void print_data(int fd, const uint8_t* data_ptr, uint32_t tag, int type, int count, + int indentation) { static int values_per_line[ICAMERA_NUM_TYPES] = { - 16, // ICAMERA_TYPE_BYTE - 4, // ICAMERA_TYPE_INT32 - 8, // ICAMERA_TYPE_FLOAT - 2, // ICAMERA_TYPE_INT64 - 4, // ICAMERA_TYPE_DOUBLE - 2, // ICAMERA_TYPE_RATIONAL + 16, // ICAMERA_TYPE_BYTE + 4, // ICAMERA_TYPE_INT32 + 8, // ICAMERA_TYPE_FLOAT + 2, // ICAMERA_TYPE_INT64 + 4, // ICAMERA_TYPE_DOUBLE + 2, // ICAMERA_TYPE_RATIONAL }; size_t type_size = icamera_metadata_type_size[type]; char value_string_tmp[ICAMERA_METADATA_ENUM_STRING_MAX_SIZE]; @@ -929,10 +833,7 @@ static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int j, k; for (j = 0; j < lines; j++) { dprintf(fd, "%*s[", indentation + 4, ""); - for (k = 0; - k < values_per_line[type] && count > 0; - k++, count--, index += type_size) { - + for (k = 0; k < values_per_line[type] && count > 0; k++, count--, 
index += type_size) { switch (type) { case ICAMERA_TYPE_BYTE: value = *(data_ptr + index); @@ -940,8 +841,8 @@ static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, sizeof(value_string_tmp)) == icamera::OK) { dprintf(fd, "%s ", value_string_tmp); } else if (vendor_metadata_enum_snprint(tag, value, value_string_tmp, - sizeof(value_string_tmp)) - == icamera::OK) { + sizeof(value_string_tmp)) == + icamera::OK) { dprintf(fd, "%s ", value_string_tmp); } else { dprintf(fd, "%hhu ", *(data_ptr + index)); @@ -953,8 +854,8 @@ static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, sizeof(value_string_tmp)) == icamera::OK) { dprintf(fd, "%s ", value_string_tmp); } else if (vendor_metadata_enum_snprint(tag, value, value_string_tmp, - sizeof(value_string_tmp)) - == icamera::OK) { + sizeof(value_string_tmp)) == + icamera::OK) { dprintf(fd, "%s ", value_string_tmp); } else { dprintf(fd, "%" PRId32 " ", *(int32_t*)(data_ptr + index)); diff --git a/src/metadata/icamera_metadata_base.h b/src/metadata/icamera_metadata_base.h index 14f85967..da14e000 100644 --- a/src/metadata/icamera_metadata_base.h +++ b/src/metadata/icamera_metadata_base.h @@ -24,8 +24,7 @@ extern "C" { #endif -#define ALIGN_TO(val, alignment) \ - (((uintptr_t)(val) + ((alignment) - 1)) & ~((alignment) - 1)) +#define ALIGN_TO(val, alignment) (((uintptr_t)(val) + ((alignment)-1)) & ~((alignment)-1)) /** * Tag hierarchy and enum definitions for camera_metadata_entry @@ -45,7 +44,7 @@ extern "C" { extern unsigned int icamera_metadata_section_bounds[CAMERA_SECTION_COUNT][2]; -extern const char *icamera_metadata_section_names[CAMERA_SECTION_COUNT]; +extern const char* icamera_metadata_section_names[CAMERA_SECTION_COUNT]; /** * Type definitions for camera_metadata_entry @@ -75,8 +74,8 @@ typedef struct icamera_metadata_rational { /** Tag information */ typedef struct tag_info { - const char *tag_name; - uint8_t tag_type; + const char* tag_name; + uint8_t tag_type; } tag_info_t; /** @@ -87,17 
+86,17 @@ typedef struct tag_info { * number of entries in data of the entry's type, not a count of bytes. */ typedef struct icamera_metadata_entry { - size_t index; + size_t index; uint32_t tag; - uint8_t type; - size_t count; + uint8_t type; + size_t count; union { - uint8_t *u8; - int32_t *i32; - float *f; - int64_t *i64; - double *d; - icamera_metadata_rational_t *r; + uint8_t* u8; + int32_t* i32; + float* f; + int64_t* i64; + double* d; + icamera_metadata_rational_t* r; } data; } icamera_metadata_entry_t; @@ -106,17 +105,17 @@ typedef struct icamera_metadata_entry { * camera_metadata_entry in layout */ typedef struct icamera_metadata_ro_entry { - size_t index; + size_t index; uint32_t tag; - uint8_t type; - size_t count; + uint8_t type; + size_t count; union { - const uint8_t *u8; - const int32_t *i32; - const float *f; - const int64_t *i64; - const double *d; - const icamera_metadata_rational_t *r; + const uint8_t* u8; + const int32_t* i32; + const float* f; + const int64_t* i64; + const double* d; + const icamera_metadata_rational_t* r; } data; } icamera_metadata_ro_entry_t; @@ -170,8 +169,7 @@ typedef struct icamera_metadata icamera_metadata_t; * data_capacity in bytes. The resulting structure is all contiguous in memory, * and can be freed with free_camera_metadata(). */ -icamera_metadata_t *allocate_icamera_metadata(size_t entry_capacity, - size_t data_capacity); +icamera_metadata_t* allocate_icamera_metadata(size_t entry_capacity, size_t data_capacity); /** * Get the required alignment of a packet of camera metadata, which is the @@ -187,9 +185,8 @@ size_t get_icamera_metadata_alignment(); * * The resulting pointer can be freed with free_camera_metadata(). */ -icamera_metadata_t *allocate_copy_icamera_metadata_checked( - const icamera_metadata_t *src, - size_t src_size); +icamera_metadata_t* allocate_copy_icamera_metadata_checked(const icamera_metadata_t* src, + size_t src_size); /** * Place a camera metadata structure into an existing buffer. 
Returns NULL if @@ -201,35 +198,33 @@ icamera_metadata_t *allocate_copy_icamera_metadata_checked( * responsibility to free the original buffer; do not call * free_camera_metadata() with the returned pointer. */ -icamera_metadata_t *place_icamera_metadata(void *dst, size_t dst_size, - size_t entry_capacity, - size_t data_capacity); +icamera_metadata_t* place_icamera_metadata(void* dst, size_t dst_size, size_t entry_capacity, + size_t data_capacity); /** * Free a camera_metadata structure. Should only be used with structures * allocated with allocate_camera_metadata(). */ -void free_icamera_metadata(icamera_metadata_t *metadata); +void free_icamera_metadata(icamera_metadata_t* metadata); /** * Calculate the buffer size needed for a metadata structure of entry_count * metadata entries, needing a total of data_count bytes of extra data storage. */ -size_t calculate_icamera_metadata_size(size_t entry_count, - size_t data_count); +size_t calculate_icamera_metadata_size(size_t entry_count, size_t data_count); /** * Get current size of entire metadata structure in bytes, including reserved * but unused space. */ -size_t get_icamera_metadata_size(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_size(const icamera_metadata_t* metadata); /** * Get size of entire metadata buffer in bytes, not including reserved but * unused space. This is the amount of space needed by copy_camera_metadata for * its dst buffer. */ -size_t get_icamera_metadata_compact_size(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_compact_size(const icamera_metadata_t* metadata); /** * Get the current number of entries in the metadata packet. @@ -237,23 +232,23 @@ size_t get_icamera_metadata_compact_size(const icamera_metadata_t *metadata); * metadata packet must be valid, which can be checked before the call with * validate_camera_metadata_structure(). 
*/ -size_t get_icamera_metadata_entry_count(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_entry_count(const icamera_metadata_t* metadata); /** * Get the maximum number of entries that could fit in the metadata packet. */ -size_t get_icamera_metadata_entry_capacity(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_entry_capacity(const icamera_metadata_t* metadata); /** * Get the current count of bytes used for value storage in the metadata packet. */ -size_t get_icamera_metadata_data_count(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_data_count(const icamera_metadata_t* metadata); /** * Get the maximum count of bytes that could be used for value storage in the * metadata packet. */ -size_t get_icamera_metadata_data_capacity(const icamera_metadata_t *metadata); +size_t get_icamera_metadata_data_capacity(const icamera_metadata_t* metadata); /** * Copy a metadata structure to a memory buffer, compacting it along the @@ -269,8 +264,8 @@ size_t get_icamera_metadata_data_capacity(const icamera_metadata_t *metadata); * responsible for freeing the underlying buffer when needed; do not call * free_camera_metadata. */ -icamera_metadata_t *copy_icamera_metadata(void *dst, size_t dst_size, - const icamera_metadata_t *src); +icamera_metadata_t* copy_icamera_metadata(void* dst, size_t dst_size, + const icamera_metadata_t* src); /** * Validate that a metadata is structurally sane. That is, its internal @@ -285,8 +280,8 @@ icamera_metadata_t *copy_icamera_metadata(void *dst, size_t dst_size, * * Returns 0 on success. A non-0 value is returned on error. */ -int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, - const size_t *expected_size); +int validate_icamera_metadata_structure(const icamera_metadata_t* metadata, + const size_t* expected_size); /** * Append camera metadata in src to an existing metadata structure in dst. 
This @@ -294,7 +289,7 @@ int validate_icamera_metadata_structure(const icamera_metadata_t *metadata, * value is returned. On success, 0 is returned. Appending onto a sorted * structure results in a non-sorted combined structure. */ -int append_icamera_metadata(icamera_metadata_t *dst, const icamera_metadata_t *src); +int append_icamera_metadata(icamera_metadata_t* dst, const icamera_metadata_t* src); /** * Clone an existing metadata buffer, compacting along the way. This is @@ -303,15 +298,14 @@ int append_icamera_metadata(icamera_metadata_t *dst, const icamera_metadata_t *s * can be freed with free_camera_metadata(). Returns NULL if cloning failed. */ -icamera_metadata_t *clone_icamera_metadata(const icamera_metadata_t *src); +icamera_metadata_t* clone_icamera_metadata(const icamera_metadata_t* src); /** * Calculate the number of bytes of extra data a given metadata entry will take * up. That is, if entry of 'type' with a payload of 'data_count' values is * added, how much will the value returned by get_camera_metadata_data_count() * be increased? This value may be zero, if no extra data storage is needed. */ -size_t calculate_icamera_metadata_entry_data_size(uint8_t type, - size_t data_count); +size_t calculate_icamera_metadata_entry_data_size(uint8_t type, size_t data_count); /** * Add a metadata entry to a metadata structure. Returns 0 if the addition @@ -323,10 +317,8 @@ size_t calculate_icamera_metadata_entry_data_size(uint8_t type, * * Returns 0 on success. A non-0 value is returned on error. */ -int add_icamera_metadata_entry(icamera_metadata_t *dst, - uint32_t tag, - const void *data, - size_t data_count); +int add_icamera_metadata_entry(icamera_metadata_t* dst, uint32_t tag, const void* data, + size_t data_count); /** * Sort the metadata buffer for fast searching. If already marked as sorted, @@ -335,7 +327,7 @@ int add_icamera_metadata_entry(icamera_metadata_t *dst, * * Returns 0 on success. A non-0 value is returned on error. 
*/ -int sort_icamera_metadata(icamera_metadata_t *dst); +int sort_icamera_metadata(icamera_metadata_t* dst); /** * Get metadata entry at position index in the metadata buffer. @@ -348,16 +340,14 @@ int sort_icamera_metadata(icamera_metadata_t *dst); * * Returns 0 on success. A non-0 value is returned on error. */ -int get_icamera_metadata_entry(icamera_metadata_t *src, - size_t index, - icamera_metadata_entry_t *entry); +int get_icamera_metadata_entry(icamera_metadata_t* src, size_t index, + icamera_metadata_entry_t* entry); /** * Get metadata entry at position index, but disallow editing the data. */ -int get_icamera_metadata_ro_entry(const icamera_metadata_t *src, - size_t index, - icamera_metadata_ro_entry_t *entry); +int get_icamera_metadata_ro_entry(const icamera_metadata_t* src, size_t index, + icamera_metadata_ro_entry_t* entry); /** * Find an entry with given tag value. If not found, returns -ENOENT. Otherwise, @@ -367,16 +357,14 @@ int get_icamera_metadata_ro_entry(const icamera_metadata_t *src, * which is returned. To speed up searching for tags, sort the metadata * structure first by calling sort_camera_metadata(). */ -int find_icamera_metadata_entry(icamera_metadata_t *src, - uint32_t tag, - icamera_metadata_entry_t *entry); +int find_icamera_metadata_entry(icamera_metadata_t* src, uint32_t tag, + icamera_metadata_entry_t* entry); /** * Find an entry with given tag value, but disallow editing the data */ -int find_icamera_metadata_ro_entry(const icamera_metadata_t *src, - uint32_t tag, - icamera_metadata_ro_entry_t *entry); +int find_icamera_metadata_ro_entry(const icamera_metadata_t* src, uint32_t tag, + icamera_metadata_ro_entry_t* entry); /** * Delete an entry at given index. This is an expensive operation, since it @@ -384,8 +372,7 @@ int find_icamera_metadata_ro_entry(const icamera_metadata_t *src, * existing camera_metadata_entry.data pointers to this buffer. Sorting is * maintained. 
*/ -int delete_icamera_metadata_entry(icamera_metadata_t *dst, - size_t index); +int delete_icamera_metadata_entry(icamera_metadata_t* dst, size_t index); /** * Updates a metadata entry with new data. If the data size is changing, may @@ -396,23 +383,20 @@ int delete_icamera_metadata_entry(icamera_metadata_t *dst, * is updated to match the new buffer state. Returns a non-zero value if there * is no room for the new data in the buffer. */ -int update_icamera_metadata_entry(icamera_metadata_t *dst, - size_t index, - const void *data, - size_t data_count, - icamera_metadata_entry_t *updated_entry); +int update_icamera_metadata_entry(icamera_metadata_t* dst, size_t index, const void* data, + size_t data_count, icamera_metadata_entry_t* updated_entry); /** * Retrieve human-readable name of section the tag is in. Returns NULL if * no such tag is defined. */ -const char *get_icamera_metadata_section_name(uint32_t tag); +const char* get_icamera_metadata_section_name(uint32_t tag); /** * Retrieve human-readable name of tag (not including section). Returns NULL if * no such tag is defined. */ -const char *get_icamera_metadata_tag_name(uint32_t tag); +const char* get_icamera_metadata_tag_name(uint32_t tag); /** * Retrieve the type of a tag. Returns -1 if no such tag is defined. 
@@ -425,27 +409,20 @@ int get_icamera_metadata_tag_type(uint32_t tag); * verbosity = 1: Tag entry information plus at most 16 data values * verbosity = 2: All information */ -void dump_icamera_metadata(const icamera_metadata_t *metadata, - int fd, - int verbosity); +void dump_icamera_metadata(const icamera_metadata_t* metadata, int fd, int verbosity); /** * Print fields in the metadata to the log; adds indentation parameter, which * specifies the number of spaces to insert before each line of the dump */ -void dump_indented_icamera_metadata(const icamera_metadata_t *metadata, - int fd, - int verbosity, - int indentation); +void dump_indented_icamera_metadata(const icamera_metadata_t* metadata, int fd, int verbosity, + int indentation); /** * Prints the specified tag value as a string. Only works for enum tags. * Returns 0 on success, -1 on failure. */ -int icamera_metadata_enum_snprint(uint32_t tag, - int32_t value, - char *dst, - size_t size); +int icamera_metadata_enum_snprint(uint32_t tag, int32_t value, char* dst, size_t size); #ifdef __cplusplus } diff --git a/src/metadata/icamera_metadata_tags.h b/src/metadata/icamera_metadata_tags.h index f8223c89..46938991 100644 --- a/src/metadata/icamera_metadata_tags.h +++ b/src/metadata/icamera_metadata_tags.h @@ -70,35 +70,35 @@ typedef enum icamera_metadata_section { * Hierarchy positions in enum space. 
*/ typedef enum icamera_metadata_section_start { - CAMERA_AE_START = CAMERA_AE << 16, - CAMERA_AWB_START = CAMERA_AWB << 16, - CAMERA_AF_START = CAMERA_AF << 16, - CAMERA_CONTROL_START = CAMERA_CONTROL << 16, - CAMERA_DEMOSAIC_START = CAMERA_DEMOSAIC << 16, - CAMERA_EDGE_START = CAMERA_EDGE << 16, - CAMERA_FLASH_START = CAMERA_FLASH << 16, - CAMERA_FLASH_INFO_START = CAMERA_FLASH_INFO << 16, - CAMERA_HOT_PIXEL_START = CAMERA_HOT_PIXEL << 16, - CAMERA_JPEG_START = CAMERA_JPEG << 16, - CAMERA_LENS_START = CAMERA_LENS << 16, - CAMERA_LENS_INFO_START = CAMERA_LENS_INFO << 16, - CAMERA_NOISE_REDUCTION_START = CAMERA_NOISE_REDUCTION << 16, - CAMERA_REQUEST_START = CAMERA_REQUEST << 16, - CAMERA_SCALER_START = CAMERA_SCALER << 16, - CAMERA_SENSOR_START = CAMERA_SENSOR << 16, - CAMERA_SENSOR_INFO_START = CAMERA_SENSOR_INFO << 16, - CAMERA_SHADING_START = CAMERA_SHADING << 16, - CAMERA_STATISTICS_START = CAMERA_STATISTICS << 16, - CAMERA_STATISTICS_INFO_START = CAMERA_STATISTICS_INFO << 16, - CAMERA_TONEMAP_START = CAMERA_TONEMAP << 16, - CAMERA_LED_START = CAMERA_LED << 16, - CAMERA_INFO_START = CAMERA_INFO << 16, - CAMERA_BLACK_LEVEL_START = CAMERA_BLACK_LEVEL << 16, - CAMERA_SYNC_START = CAMERA_SYNC << 16, - CAMERA_REPROCESS_START = CAMERA_REPROCESS << 16, - INTEL_INFO_START = INTEL_INFO << 16, - INTEL_CONTROL_START = INTEL_CONTROL << 16, - INTEL_CONTROL_ISP_START = INTEL_CONTROL_ISP << 16, + CAMERA_AE_START = CAMERA_AE << 16, + CAMERA_AWB_START = CAMERA_AWB << 16, + CAMERA_AF_START = CAMERA_AF << 16, + CAMERA_CONTROL_START = CAMERA_CONTROL << 16, + CAMERA_DEMOSAIC_START = CAMERA_DEMOSAIC << 16, + CAMERA_EDGE_START = CAMERA_EDGE << 16, + CAMERA_FLASH_START = CAMERA_FLASH << 16, + CAMERA_FLASH_INFO_START = CAMERA_FLASH_INFO << 16, + CAMERA_HOT_PIXEL_START = CAMERA_HOT_PIXEL << 16, + CAMERA_JPEG_START = CAMERA_JPEG << 16, + CAMERA_LENS_START = CAMERA_LENS << 16, + CAMERA_LENS_INFO_START = CAMERA_LENS_INFO << 16, + CAMERA_NOISE_REDUCTION_START = CAMERA_NOISE_REDUCTION << 
16, + CAMERA_REQUEST_START = CAMERA_REQUEST << 16, + CAMERA_SCALER_START = CAMERA_SCALER << 16, + CAMERA_SENSOR_START = CAMERA_SENSOR << 16, + CAMERA_SENSOR_INFO_START = CAMERA_SENSOR_INFO << 16, + CAMERA_SHADING_START = CAMERA_SHADING << 16, + CAMERA_STATISTICS_START = CAMERA_STATISTICS << 16, + CAMERA_STATISTICS_INFO_START = CAMERA_STATISTICS_INFO << 16, + CAMERA_TONEMAP_START = CAMERA_TONEMAP << 16, + CAMERA_LED_START = CAMERA_LED << 16, + CAMERA_INFO_START = CAMERA_INFO << 16, + CAMERA_BLACK_LEVEL_START = CAMERA_BLACK_LEVEL << 16, + CAMERA_SYNC_START = CAMERA_SYNC << 16, + CAMERA_REPROCESS_START = CAMERA_REPROCESS << 16, + INTEL_INFO_START = INTEL_INFO << 16, + INTEL_CONTROL_START = INTEL_CONTROL << 16, + INTEL_CONTROL_ISP_START = INTEL_CONTROL_ISP << 16, } icamera_metadata_section_start_t; /** @@ -108,340 +108,340 @@ typedef enum icamera_metadata_section_start { * src/metadata/icamera_metadata_tag_info.c */ typedef enum icamera_metadata_tag { - CAMERA_AE_MODE = // enum | public - CAMERA_AE_START, - CAMERA_AE_LOCK, // enum | public - CAMERA_AE_REGIONS, // int32[] | public - CAMERA_AE_ANTIBANDING_MODE, // enum | public - CAMERA_AE_COMPENSATION, // int32 | public - CAMERA_AE_TARGET_FPS_RANGE, // float[] | public - CAMERA_AE_PRECAPTURE_TRIGGER, // enum | public - CAMERA_AE_STATE, // enum | public - CAMERA_AE_AVAILABLE_MODES, // byte[] | public - CAMERA_AE_AVAILABLE_ANTIBANDING_MODES, // byte[] | public - CAMERA_AE_COMPENSATION_STEP, // rational | public - CAMERA_AE_COMPENSATION_RANGE, // int32[] | public - CAMERA_AE_AVAILABLE_TARGET_FPS_RANGES, // float[] | public - CAMERA_AE_LOCK_AVAILABLE, // enum | public + CAMERA_AE_MODE = // enum | public + CAMERA_AE_START, + CAMERA_AE_LOCK, // enum | public + CAMERA_AE_REGIONS, // int32[] | public + CAMERA_AE_ANTIBANDING_MODE, // enum | public + CAMERA_AE_COMPENSATION, // int32 | public + CAMERA_AE_TARGET_FPS_RANGE, // float[] | public + CAMERA_AE_PRECAPTURE_TRIGGER, // enum | public + CAMERA_AE_STATE, // enum | public + 
CAMERA_AE_AVAILABLE_MODES, // byte[] | public + CAMERA_AE_AVAILABLE_ANTIBANDING_MODES, // byte[] | public + CAMERA_AE_COMPENSATION_STEP, // rational | public + CAMERA_AE_COMPENSATION_RANGE, // int32[] | public + CAMERA_AE_AVAILABLE_TARGET_FPS_RANGES, // float[] | public + CAMERA_AE_LOCK_AVAILABLE, // enum | public CAMERA_AE_END, - CAMERA_AWB_MODE = // enum | public - CAMERA_AWB_START, - CAMERA_AWB_COLOR_TRANSFORM, // float[] | public - CAMERA_AWB_COLOR_GAINS, // float[] | public - CAMERA_AWB_LOCK, // enum | public - CAMERA_AWB_REGIONS, // int32[] | public - CAMERA_AWB_CCT_RANGE, // int32[] | public - CAMERA_AWB_GAINS, // int32[] | public - CAMERA_AWB_GAIN_SHIFT, // int32[] | public - CAMERA_AWB_WHITE_POINT, // int32[] | public - CAMERA_AWB_CONVERGE_SPEED, // enum | public - CAMERA_AWB_CONVERGE_SPEED_MODE, // enum | public - CAMERA_AWB_STATE, // enum | public - CAMERA_AWB_RESULT, // byte[] | public - CAMERA_AWB_AVAILABLE_MODES, // byte[] | public - CAMERA_AWB_LOCK_AVAILABLE, // enum | public + CAMERA_AWB_MODE = // enum | public + CAMERA_AWB_START, + CAMERA_AWB_COLOR_TRANSFORM, // float[] | public + CAMERA_AWB_COLOR_GAINS, // float[] | public + CAMERA_AWB_LOCK, // enum | public + CAMERA_AWB_REGIONS, // int32[] | public + CAMERA_AWB_CCT_RANGE, // int32[] | public + CAMERA_AWB_GAINS, // int32[] | public + CAMERA_AWB_GAIN_SHIFT, // int32[] | public + CAMERA_AWB_WHITE_POINT, // int32[] | public + CAMERA_AWB_CONVERGE_SPEED, // enum | public + CAMERA_AWB_CONVERGE_SPEED_MODE, // enum | public + CAMERA_AWB_STATE, // enum | public + CAMERA_AWB_RESULT, // byte[] | public + CAMERA_AWB_AVAILABLE_MODES, // byte[] | public + CAMERA_AWB_LOCK_AVAILABLE, // enum | public CAMERA_AWB_END, - CAMERA_AF_MODE = // enum | public - CAMERA_AF_START, - CAMERA_AF_REGIONS, // int32[] | public - CAMERA_AF_TRIGGER, // enum | public - CAMERA_AF_AVAILABLE_MODES, // byte[] | public - CAMERA_AF_STATE, // enum | public + CAMERA_AF_MODE = // enum | public + CAMERA_AF_START, + CAMERA_AF_REGIONS, // 
int32[] | public + CAMERA_AF_TRIGGER, // enum | public + CAMERA_AF_AVAILABLE_MODES, // byte[] | public + CAMERA_AF_STATE, // enum | public CAMERA_AF_END, - CAMERA_CONTROL_CAPTUREINTENT = // enum | public - CAMERA_CONTROL_START, - CAMERA_CONTROL_EFFECT_MODE, // enum | public - CAMERA_CONTROL_MODE, // enum | public - CAMERA_CONTROL_SCENE_MODE, // enum | public - CAMERA_CONTROL_VIDEO_STABILIZATION_MODE, // enum | public - CAMERA_CONTROL_AVAILABLE_EFFECTS, // byte[] | public - CAMERA_CONTROL_AVAILABLE_MODES, // byte[] | public - CAMERA_CONTROL_AVAILABLE_SCENE_MODES, // byte[] | public + CAMERA_CONTROL_CAPTUREINTENT = // enum | public + CAMERA_CONTROL_START, + CAMERA_CONTROL_EFFECT_MODE, // enum | public + CAMERA_CONTROL_MODE, // enum | public + CAMERA_CONTROL_SCENE_MODE, // enum | public + CAMERA_CONTROL_VIDEO_STABILIZATION_MODE, // enum | public + CAMERA_CONTROL_AVAILABLE_EFFECTS, // byte[] | public + CAMERA_CONTROL_AVAILABLE_MODES, // byte[] | public + CAMERA_CONTROL_AVAILABLE_SCENE_MODES, // byte[] | public CAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, - // byte[] | public - CAMERA_CONTROL_MAX_REGIONS, // int32[] | hidden - CAMERA_CONTROL_SCENE_MODE_OVERRIDES, // byte[] | system + // byte[] | public + CAMERA_CONTROL_MAX_REGIONS, // int32[] | hidden + CAMERA_CONTROL_SCENE_MODE_OVERRIDES, // byte[] | system CAMERA_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, - // int32[] | hidden + // int32[] | hidden CAMERA_CONTROL_END, - CAMERA_DEMOSAIC_MODE = // enum | system - CAMERA_DEMOSAIC_START, + CAMERA_DEMOSAIC_MODE = // enum | system + CAMERA_DEMOSAIC_START, CAMERA_DEMOSAIC_END, - CAMERA_EDGE_MODE = // enum | public - CAMERA_EDGE_START, - CAMERA_EDGE_STRENGTH, // byte | system - CAMERA_EDGE_AVAILABLE_EDGE_MODES, // byte[] | public + CAMERA_EDGE_MODE = // enum | public + CAMERA_EDGE_START, + CAMERA_EDGE_STRENGTH, // byte | system + CAMERA_EDGE_AVAILABLE_EDGE_MODES, // byte[] | public CAMERA_EDGE_END, - CAMERA_FLASH_FIRING_POWER = // byte | system - 
CAMERA_FLASH_START, - CAMERA_FLASH_FIRING_TIME, // int64 | system - CAMERA_FLASH_MODE, // enum | public - CAMERA_FLASH_COLOR_TEMPERATURE, // byte | system - CAMERA_FLASH_MAX_ENERGY, // byte | system - CAMERA_FLASH_STATE, // enum | public + CAMERA_FLASH_FIRING_POWER = // byte | system + CAMERA_FLASH_START, + CAMERA_FLASH_FIRING_TIME, // int64 | system + CAMERA_FLASH_MODE, // enum | public + CAMERA_FLASH_COLOR_TEMPERATURE, // byte | system + CAMERA_FLASH_MAX_ENERGY, // byte | system + CAMERA_FLASH_STATE, // enum | public CAMERA_FLASH_END, - CAMERA_FLASH_INFO_AVAILABLE = // enum | public - CAMERA_FLASH_INFO_START, - CAMERA_FLASH_INFO_CHARGE_DURATION, // int64 | system + CAMERA_FLASH_INFO_AVAILABLE = // enum | public + CAMERA_FLASH_INFO_START, + CAMERA_FLASH_INFO_CHARGE_DURATION, // int64 | system CAMERA_FLASH_INFO_END, - CAMERA_HOT_PIXEL_MODE = // enum | public - CAMERA_HOT_PIXEL_START, - CAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, // byte[] | public + CAMERA_HOT_PIXEL_MODE = // enum | public + CAMERA_HOT_PIXEL_START, + CAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, // byte[] | public CAMERA_HOT_PIXEL_END, - CAMERA_JPEG_GPS_COORDINATES = // double[] | hidden - CAMERA_JPEG_START, - CAMERA_JPEG_GPS_PROCESSING_METHOD, // byte | hidden - CAMERA_JPEG_GPS_TIMESTAMP, // int64 | hidden - CAMERA_JPEG_ORIENTATION, // int32 | public - CAMERA_JPEG_QUALITY, // byte | public - CAMERA_JPEG_THUMBNAIL_QUALITY, // byte | public - CAMERA_JPEG_THUMBNAIL_SIZE, // int32[] | public - CAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES, // int32[] | public - CAMERA_JPEG_MAX_SIZE, // int32 | system - CAMERA_JPEG_SIZE, // int32 | system + CAMERA_JPEG_GPS_COORDINATES = // double[] | hidden + CAMERA_JPEG_START, + CAMERA_JPEG_GPS_PROCESSING_METHOD, // byte | hidden + CAMERA_JPEG_GPS_TIMESTAMP, // int64 | hidden + CAMERA_JPEG_ORIENTATION, // int32 | public + CAMERA_JPEG_QUALITY, // byte | public + CAMERA_JPEG_THUMBNAIL_QUALITY, // byte | public + CAMERA_JPEG_THUMBNAIL_SIZE, // int32[] | public + 
CAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES, // int32[] | public + CAMERA_JPEG_MAX_SIZE, // int32 | system + CAMERA_JPEG_SIZE, // int32 | system CAMERA_JPEG_END, - CAMERA_LENS_APERTURE = // float | public - CAMERA_LENS_START, - CAMERA_LENS_FILTER_DENSITY, // float | public - CAMERA_LENS_FOCAL_LENGTH, // float | public - CAMERA_LENS_FOCUS_DISTANCE, // float | public - CAMERA_LENS_OPTICAL_STABILIZATION_MODE, // enum | public - CAMERA_LENS_FACING, // enum | public - CAMERA_LENS_FOCUS_RANGE, // float[] | public - CAMERA_LENS_STATE, // enum | public + CAMERA_LENS_APERTURE = // float | public + CAMERA_LENS_START, + CAMERA_LENS_FILTER_DENSITY, // float | public + CAMERA_LENS_FOCAL_LENGTH, // float | public + CAMERA_LENS_FOCUS_DISTANCE, // float | public + CAMERA_LENS_OPTICAL_STABILIZATION_MODE, // enum | public + CAMERA_LENS_FACING, // enum | public + CAMERA_LENS_FOCUS_RANGE, // float[] | public + CAMERA_LENS_STATE, // enum | public CAMERA_LENS_END, - CAMERA_LENS_INFO_AVAILABLE_APERTURES = // float[] | public - CAMERA_LENS_INFO_START, - CAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES, // float[] | public - CAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, // float[] | public - CAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, // byte[] | public - CAMERA_LENS_INFO_HYPERFOCAL_DISTANCE, // float | public - CAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, // float | public - CAMERA_LENS_INFO_SHADING_MAP_SIZE, // int32[] | hidden - CAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, // enum | public + CAMERA_LENS_INFO_AVAILABLE_APERTURES = // float[] | public + CAMERA_LENS_INFO_START, + CAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES, // float[] | public + CAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, // float[] | public + CAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, // byte[] | public + CAMERA_LENS_INFO_HYPERFOCAL_DISTANCE, // float | public + CAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, // float | public + CAMERA_LENS_INFO_SHADING_MAP_SIZE, // int32[] | hidden + CAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, // enum 
| public CAMERA_LENS_INFO_END, - CAMERA_NOISE_REDUCTION_MODE = // enum | public - CAMERA_NOISE_REDUCTION_START, - CAMERA_NOISE_REDUCTION_STRENGTH, // byte | system + CAMERA_NOISE_REDUCTION_MODE = // enum | public + CAMERA_NOISE_REDUCTION_START, + CAMERA_NOISE_REDUCTION_STRENGTH, // byte | system CAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, - // byte[] | public + // byte[] | public CAMERA_NOISE_REDUCTION_END, - CAMERA_REQUEST_ID = // int32 | hidden - CAMERA_REQUEST_START, - CAMERA_REQUEST_METADATA_MODE, // enum | system - CAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS, // int32[] | hidden - CAMERA_REQUEST_MAX_NUM_INPUT_STREAMS, // int32 | hidden - CAMERA_REQUEST_PIPELINE_DEPTH, // byte | public - CAMERA_REQUEST_PIPELINE_MAX_DEPTH, // byte | public - CAMERA_REQUEST_PARTIAL_RESULT_COUNT, // int32 | public - CAMERA_REQUEST_AVAILABLE_CAPABILITIES, // enum[] | public - CAMERA_REQUEST_AVAILABLE_REQUEST_KEYS, // int32[] | hidden - CAMERA_REQUEST_AVAILABLE_RESULT_KEYS, // int32[] | hidden - CAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, // int32[] | hidden + CAMERA_REQUEST_ID = // int32 | hidden + CAMERA_REQUEST_START, + CAMERA_REQUEST_METADATA_MODE, // enum | system + CAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS, // int32[] | hidden + CAMERA_REQUEST_MAX_NUM_INPUT_STREAMS, // int32 | hidden + CAMERA_REQUEST_PIPELINE_DEPTH, // byte | public + CAMERA_REQUEST_PIPELINE_MAX_DEPTH, // byte | public + CAMERA_REQUEST_PARTIAL_RESULT_COUNT, // int32 | public + CAMERA_REQUEST_AVAILABLE_CAPABILITIES, // enum[] | public + CAMERA_REQUEST_AVAILABLE_REQUEST_KEYS, // int32[] | hidden + CAMERA_REQUEST_AVAILABLE_RESULT_KEYS, // int32[] | hidden + CAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, // int32[] | hidden CAMERA_REQUEST_END, - CAMERA_SCALER_CROP_REGION = // int32[] | public - CAMERA_SCALER_START, - CAMERA_SCALER_AVAILABLE_JPEG_SIZES, // int32[] | hidden - CAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, // float | public - CAMERA_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, // int32[] | hidden - 
CAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, // int32[] | hidden - CAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, // int64[] | hidden - CAMERA_SCALER_AVAILABLE_STALL_DURATIONS, // int64[] | hidden - CAMERA_SCALER_CROPPING_TYPE, // enum | public + CAMERA_SCALER_CROP_REGION = // int32[] | public + CAMERA_SCALER_START, + CAMERA_SCALER_AVAILABLE_JPEG_SIZES, // int32[] | hidden + CAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, // float | public + CAMERA_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, // int32[] | hidden + CAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, // int32[] | hidden + CAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, // int64[] | hidden + CAMERA_SCALER_AVAILABLE_STALL_DURATIONS, // int64[] | hidden + CAMERA_SCALER_CROPPING_TYPE, // enum | public CAMERA_SCALER_END, - CAMERA_SENSOR_EXPOSURE_TIME = // int64 | public - CAMERA_SENSOR_START, - CAMERA_SENSOR_FRAME_DURATION, // int64 | public - CAMERA_SENSOR_SENSITIVITY, // int32 | public - CAMERA_SENSOR_REFERENCE_ILLUMINANT1, // enum | public - CAMERA_SENSOR_REFERENCE_ILLUMINANT2, // byte | public - CAMERA_SENSOR_CALIBRATION_TRANSFORM1, // rational[] | public - CAMERA_SENSOR_CALIBRATION_TRANSFORM2, // rational[] | public - CAMERA_SENSOR_COLOR_TRANSFORM1, // rational[] | public - CAMERA_SENSOR_COLOR_TRANSFORM2, // rational[] | public - CAMERA_SENSOR_FORWARD_MATRIX1, // rational[] | public - CAMERA_SENSOR_FORWARD_MATRIX2, // rational[] | public - CAMERA_SENSOR_BASE_GAIN_FACTOR, // rational | system - CAMERA_SENSOR_BLACK_LEVEL_PATTERN, // int32[] | public - CAMERA_SENSOR_MAX_ANALOG_SENSITIVITY, // int32 | public - CAMERA_SENSOR_ORIENTATION, // int32 | public - CAMERA_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS, // int32[] | system - CAMERA_SENSOR_TIMESTAMP, // int64 | public - CAMERA_SENSOR_TEMPERATURE, // float | system - CAMERA_SENSOR_NEUTRAL_COLOR_POINT, // rational[] | public - CAMERA_SENSOR_NOISE_PROFILE, // double[] | public - CAMERA_SENSOR_PROFILE_HUE_SAT_MAP, // float[] | system - CAMERA_SENSOR_PROFILE_TONE_CURVE, // 
float[] | system - CAMERA_SENSOR_GREEN_SPLIT, // float | public - CAMERA_SENSOR_TEST_PATTERN_DATA, // int32[] | public - CAMERA_SENSOR_TEST_PATTERN_MODE, // enum | public - CAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES, // int32[] | public - CAMERA_SENSOR_OPAQUE_RAW_SIZE, // int32[] | system - CAMERA_SENSOR_ROLLING_SHUTTER_SKEW, // int64 | public + CAMERA_SENSOR_EXPOSURE_TIME = // int64 | public + CAMERA_SENSOR_START, + CAMERA_SENSOR_FRAME_DURATION, // int64 | public + CAMERA_SENSOR_SENSITIVITY, // int32 | public + CAMERA_SENSOR_REFERENCE_ILLUMINANT1, // enum | public + CAMERA_SENSOR_REFERENCE_ILLUMINANT2, // byte | public + CAMERA_SENSOR_CALIBRATION_TRANSFORM1, // rational[] | public + CAMERA_SENSOR_CALIBRATION_TRANSFORM2, // rational[] | public + CAMERA_SENSOR_COLOR_TRANSFORM1, // rational[] | public + CAMERA_SENSOR_COLOR_TRANSFORM2, // rational[] | public + CAMERA_SENSOR_FORWARD_MATRIX1, // rational[] | public + CAMERA_SENSOR_FORWARD_MATRIX2, // rational[] | public + CAMERA_SENSOR_BASE_GAIN_FACTOR, // rational | system + CAMERA_SENSOR_BLACK_LEVEL_PATTERN, // int32[] | public + CAMERA_SENSOR_MAX_ANALOG_SENSITIVITY, // int32 | public + CAMERA_SENSOR_ORIENTATION, // int32 | public + CAMERA_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS, // int32[] | system + CAMERA_SENSOR_TIMESTAMP, // int64 | public + CAMERA_SENSOR_TEMPERATURE, // float | system + CAMERA_SENSOR_NEUTRAL_COLOR_POINT, // rational[] | public + CAMERA_SENSOR_NOISE_PROFILE, // double[] | public + CAMERA_SENSOR_PROFILE_HUE_SAT_MAP, // float[] | system + CAMERA_SENSOR_PROFILE_TONE_CURVE, // float[] | system + CAMERA_SENSOR_GREEN_SPLIT, // float | public + CAMERA_SENSOR_TEST_PATTERN_DATA, // int32[] | public + CAMERA_SENSOR_TEST_PATTERN_MODE, // enum | public + CAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES, // int32[] | public + CAMERA_SENSOR_OPAQUE_RAW_SIZE, // int32[] | system + CAMERA_SENSOR_ROLLING_SHUTTER_SKEW, // int64 | public CAMERA_SENSOR_END, - CAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE = // int32[] | public - 
CAMERA_SENSOR_INFO_START, - CAMERA_SENSOR_INFO_SENSITIVITY_RANGE, // int32[] | public - CAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, // enum | public - CAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, // int64[] | public - CAMERA_SENSOR_INFO_MAX_FRAME_DURATION, // int64 | public - CAMERA_SENSOR_INFO_PHYSICAL_SIZE, // float[] | public - CAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, // int32[] | public - CAMERA_SENSOR_INFO_WHITE_LEVEL, // int32 | public - CAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, // enum | public + CAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE = // int32[] | public + CAMERA_SENSOR_INFO_START, + CAMERA_SENSOR_INFO_SENSITIVITY_RANGE, // int32[] | public + CAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, // enum | public + CAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, // int64[] | public + CAMERA_SENSOR_INFO_MAX_FRAME_DURATION, // int64 | public + CAMERA_SENSOR_INFO_PHYSICAL_SIZE, // float[] | public + CAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, // int32[] | public + CAMERA_SENSOR_INFO_WHITE_LEVEL, // int32 | public + CAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, // enum | public CAMERA_SENSOR_INFO_END, - CAMERA_SHADING_MODE = // enum | public - CAMERA_SHADING_START, - CAMERA_SHADING_STRENGTH, // byte | system - CAMERA_SHADING_AVAILABLE_MODES, // byte[] | public + CAMERA_SHADING_MODE = // enum | public + CAMERA_SHADING_START, + CAMERA_SHADING_STRENGTH, // byte | system + CAMERA_SHADING_AVAILABLE_MODES, // byte[] | public CAMERA_SHADING_END, - CAMERA_STATISTICS_FACE_DETECT_MODE = // enum | public - CAMERA_STATISTICS_START, - CAMERA_STATISTICS_HISTOGRAM_MODE, // enum | system - CAMERA_STATISTICS_SHARPNESS_MAP_MODE, // enum | system - CAMERA_STATISTICS_HOT_PIXEL_MAP_MODE, // enum | public - CAMERA_STATISTICS_FACE_IDS, // int32[] | hidden - CAMERA_STATISTICS_FACE_LANDMARKS, // int32[] | hidden - CAMERA_STATISTICS_FACE_RECTANGLES, // int32[] | hidden - CAMERA_STATISTICS_FACE_SCORES, // byte[] | hidden - CAMERA_STATISTICS_HISTOGRAM, // int32[] | system - CAMERA_STATISTICS_SHARPNESS_MAP, // int32[] | system - 
CAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP, // byte | public - CAMERA_STATISTICS_LENS_SHADING_MAP, // float[] | hidden - CAMERA_STATISTICS_PREDICTED_COLOR_GAINS, // float[] | hidden - CAMERA_STATISTICS_PREDICTED_COLOR_TRANSFORM, // rational[] | hidden - CAMERA_STATISTICS_SCENE_FLICKER, // enum | public - CAMERA_STATISTICS_HOT_PIXEL_MAP, // int32[] | public - CAMERA_STATISTICS_LENS_SHADING_MAP_MODE, // enum | public + CAMERA_STATISTICS_FACE_DETECT_MODE = // enum | public + CAMERA_STATISTICS_START, + CAMERA_STATISTICS_HISTOGRAM_MODE, // enum | system + CAMERA_STATISTICS_SHARPNESS_MAP_MODE, // enum | system + CAMERA_STATISTICS_HOT_PIXEL_MAP_MODE, // enum | public + CAMERA_STATISTICS_FACE_IDS, // int32[] | hidden + CAMERA_STATISTICS_FACE_LANDMARKS, // int32[] | hidden + CAMERA_STATISTICS_FACE_RECTANGLES, // int32[] | hidden + CAMERA_STATISTICS_FACE_SCORES, // byte[] | hidden + CAMERA_STATISTICS_HISTOGRAM, // int32[] | system + CAMERA_STATISTICS_SHARPNESS_MAP, // int32[] | system + CAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP, // byte | public + CAMERA_STATISTICS_LENS_SHADING_MAP, // float[] | hidden + CAMERA_STATISTICS_PREDICTED_COLOR_GAINS, // float[] | hidden + CAMERA_STATISTICS_PREDICTED_COLOR_TRANSFORM, // rational[] | hidden + CAMERA_STATISTICS_SCENE_FLICKER, // enum | public + CAMERA_STATISTICS_HOT_PIXEL_MAP, // int32[] | public + CAMERA_STATISTICS_LENS_SHADING_MAP_MODE, // enum | public CAMERA_STATISTICS_END, - CAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = - // byte[] | public - CAMERA_STATISTICS_INFO_START, - CAMERA_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, // int32 | system - CAMERA_STATISTICS_INFO_MAX_FACE_COUNT, // int32 | public - CAMERA_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, // int32 | system - CAMERA_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, // int32 | system - CAMERA_STATISTICS_INFO_SHARPNESS_MAP_SIZE, // int32[] | system + CAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = + // byte[] | public + CAMERA_STATISTICS_INFO_START, + 
CAMERA_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, // int32 | system + CAMERA_STATISTICS_INFO_MAX_FACE_COUNT, // int32 | public + CAMERA_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, // int32 | system + CAMERA_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, // int32 | system + CAMERA_STATISTICS_INFO_SHARPNESS_MAP_SIZE, // int32[] | system CAMERA_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, - // byte[] | public + // byte[] | public CAMERA_STATISTICS_INFO_END, - CAMERA_TONEMAP_CURVE_BLUE = // float[] | public - CAMERA_TONEMAP_START, - CAMERA_TONEMAP_CURVE_GREEN, // float[] | public - CAMERA_TONEMAP_CURVE_RED, // float[] | public - CAMERA_TONEMAP_MODE, // enum | public - CAMERA_TONEMAP_MAX_CURVE_POINTS, // int32 | public - CAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES, // byte[] | public - CAMERA_TONEMAP_GAMMA, // float | public - CAMERA_TONEMAP_PRESET_CURVE, // enum | public + CAMERA_TONEMAP_CURVE_BLUE = // float[] | public + CAMERA_TONEMAP_START, + CAMERA_TONEMAP_CURVE_GREEN, // float[] | public + CAMERA_TONEMAP_CURVE_RED, // float[] | public + CAMERA_TONEMAP_MODE, // enum | public + CAMERA_TONEMAP_MAX_CURVE_POINTS, // int32 | public + CAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES, // byte[] | public + CAMERA_TONEMAP_GAMMA, // float | public + CAMERA_TONEMAP_PRESET_CURVE, // enum | public CAMERA_TONEMAP_END, - CAMERA_LED_TRANSMIT = // enum | hidden - CAMERA_LED_START, - CAMERA_LED_AVAILABLE_LEDS, // enum[] | hidden + CAMERA_LED_TRANSMIT = // enum | hidden + CAMERA_LED_START, + CAMERA_LED_AVAILABLE_LEDS, // enum[] | hidden CAMERA_LED_END, - CAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // enum | public - CAMERA_INFO_START, + CAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // enum | public + CAMERA_INFO_START, CAMERA_INFO_END, - CAMERA_BLACK_LEVEL_LOCK = // enum | public - CAMERA_BLACK_LEVEL_START, + CAMERA_BLACK_LEVEL_LOCK = // enum | public + CAMERA_BLACK_LEVEL_START, CAMERA_BLACK_LEVEL_END, - CAMERA_SYNC_FRAME_NUMBER = // enum | hidden - CAMERA_SYNC_START, - CAMERA_SYNC_MAX_LATENCY, // enum | public + 
CAMERA_SYNC_FRAME_NUMBER = // enum | hidden + CAMERA_SYNC_START, + CAMERA_SYNC_MAX_LATENCY, // enum | public CAMERA_SYNC_END, - CAMERA_REPROCESS_MAX_CAPTURE_STALL = // int32 | public - CAMERA_REPROCESS_START, + CAMERA_REPROCESS_MAX_CAPTURE_STALL = // int32 | public + CAMERA_REPROCESS_START, CAMERA_REPROCESS_END, - INTEL_INFO_AVAILABLE_CONFIGURATIONS = // int32[] | hidden - INTEL_INFO_START, - INTEL_INFO_AVAILABLE_FEATURES, // enum[] | public - INTEL_INFO_AE_EXPOSURE_TIME_RANGE, // int32[] | public - INTEL_INFO_AE_GAIN_RANGE, // int32[] | public - INTEL_INFO_WFOV, // enum | public - INTEL_INFO_SENSOR_MOUNT_TYPE, // enum | public + INTEL_INFO_AVAILABLE_CONFIGURATIONS = // int32[] | hidden + INTEL_INFO_START, + INTEL_INFO_AVAILABLE_FEATURES, // enum[] | public + INTEL_INFO_AE_EXPOSURE_TIME_RANGE, // int32[] | public + INTEL_INFO_AE_GAIN_RANGE, // int32[] | public + INTEL_INFO_WFOV, // enum | public + INTEL_INFO_SENSOR_MOUNT_TYPE, // enum | public INTEL_INFO_END, - INTEL_CONTROL_IMAGE_ENHANCEMENT = // int32 | public - INTEL_CONTROL_START, - INTEL_CONTROL_SENSITIVITY_GAIN, // float | public - INTEL_CONTROL_FRAME_RATE, // float | public - INTEL_CONTROL_AE_CONVERGE_SPEED, // enum | public - INTEL_CONTROL_NR_MODE, // enum | public - INTEL_CONTROL_NR_LEVEL, // int32[] | public - INTEL_CONTROL_IRIS_MODE, // enum | public - INTEL_CONTROL_AE_DISTRIBUTION_PRIORITY, // enum | public - INTEL_CONTROL_IRIS_LEVEL, // int32 | public - INTEL_CONTROL_WDR_MODE, // enum | public - INTEL_CONTROL_WDR_LEVEL, // byte | public - INTEL_CONTROL_BLC_AREA_MODE, // enum | public - INTEL_CONTROL_SCENE_MODE, // enum | public - INTEL_CONTROL_WEIGHT_GRID_MODE, // enum | public - INTEL_CONTROL_AE_CONVERGE_SPEED_MODE, // enum | public - INTEL_CONTROL_DEINTERLACE_MODE, // enum | public - INTEL_CONTROL_MAKERNOTE_DATA, // byte | public - INTEL_CONTROL_CUSTOM_AIC_PARAM, // byte | public - INTEL_CONTROL_MAKERNOTE_MODE, // enum | public - INTEL_CONTROL_YUV_COLOR_RANGE, // enum | public - 
INTEL_CONTROL_SENSITIVITY_GAIN_RANGE, // float[] | public - INTEL_CONTROL_EXPOSURE_TIME_RANGE, // int32[] | public - INTEL_CONTROL_FISHEYE_DEWARPING_MODE, // enum | public - INTEL_CONTROL_LTM_TUNING_DATA, // byte[] | public - INTEL_CONTROL_DIGITAL_ZOOM_RATIO, // float | public - INTEL_CONTROL_LDC_MODE, // enum | public - INTEL_CONTROL_RSC_MODE, // enum | public - INTEL_CONTROL_FLIP_MODE, // enum | public - INTEL_CONTROL_MONO_DOWNSCALE, // enum | public - INTEL_CONTROL_RUN3_A_CADENCE, // int32 | public - INTEL_CONTROL_VIEW_PROJECTION, // byte[] | public - INTEL_CONTROL_VIEW_ROTATION, // byte[] | public - INTEL_CONTROL_VIEW_FINE_ADJUSTMENTS, // byte[] | public - INTEL_CONTROL_CAMERA_ROTATION, // byte[] | public - INTEL_CONTROL_SCALER_CROP_REGION, // int32[] | public + INTEL_CONTROL_IMAGE_ENHANCEMENT = // int32 | public + INTEL_CONTROL_START, + INTEL_CONTROL_SENSITIVITY_GAIN, // float | public + INTEL_CONTROL_FRAME_RATE, // float | public + INTEL_CONTROL_AE_CONVERGE_SPEED, // enum | public + INTEL_CONTROL_NR_MODE, // enum | public + INTEL_CONTROL_NR_LEVEL, // int32[] | public + INTEL_CONTROL_IRIS_MODE, // enum | public + INTEL_CONTROL_AE_DISTRIBUTION_PRIORITY, // enum | public + INTEL_CONTROL_IRIS_LEVEL, // int32 | public + INTEL_CONTROL_WDR_MODE, // enum | public + INTEL_CONTROL_WDR_LEVEL, // byte | public + INTEL_CONTROL_BLC_AREA_MODE, // enum | public + INTEL_CONTROL_SCENE_MODE, // enum | public + INTEL_CONTROL_WEIGHT_GRID_MODE, // enum | public + INTEL_CONTROL_AE_CONVERGE_SPEED_MODE, // enum | public + INTEL_CONTROL_DEINTERLACE_MODE, // enum | public + INTEL_CONTROL_MAKERNOTE_DATA, // byte | public + INTEL_CONTROL_CUSTOM_AIC_PARAM, // byte | public + INTEL_CONTROL_MAKERNOTE_MODE, // enum | public + INTEL_CONTROL_YUV_COLOR_RANGE, // enum | public + INTEL_CONTROL_SENSITIVITY_GAIN_RANGE, // float[] | public + INTEL_CONTROL_EXPOSURE_TIME_RANGE, // int32[] | public + INTEL_CONTROL_FISHEYE_DEWARPING_MODE, // enum | public + INTEL_CONTROL_LTM_TUNING_DATA, // byte[] | 
public + INTEL_CONTROL_DIGITAL_ZOOM_RATIO, // float | public + INTEL_CONTROL_LDC_MODE, // enum | public + INTEL_CONTROL_RSC_MODE, // enum | public + INTEL_CONTROL_FLIP_MODE, // enum | public + INTEL_CONTROL_MONO_DOWNSCALE, // enum | public + INTEL_CONTROL_RUN3_A_CADENCE, // int32 | public + INTEL_CONTROL_VIEW_PROJECTION, // byte[] | public + INTEL_CONTROL_VIEW_ROTATION, // byte[] | public + INTEL_CONTROL_VIEW_FINE_ADJUSTMENTS, // byte[] | public + INTEL_CONTROL_CAMERA_ROTATION, // byte[] | public + INTEL_CONTROL_SCALER_CROP_REGION, // int32[] | public INTEL_CONTROL_END, - INTEL_CONTROL_ISP_SUPPORTED_CTRL_IDS = // int32[] | public - INTEL_CONTROL_ISP_START, - INTEL_CONTROL_ISP_ENABLED_CTRL_IDS, // int32[] | public - INTEL_CONTROL_ISP_WB_GAINS, // byte[] | public - INTEL_CONTROL_ISP_COLOR_CORRECTION_MATRIX, // byte[] | public + INTEL_CONTROL_ISP_SUPPORTED_CTRL_IDS = // int32[] | public + INTEL_CONTROL_ISP_START, + INTEL_CONTROL_ISP_ENABLED_CTRL_IDS, // int32[] | public + INTEL_CONTROL_ISP_WB_GAINS, // byte[] | public + INTEL_CONTROL_ISP_COLOR_CORRECTION_MATRIX, // byte[] | public INTEL_CONTROL_ISP_ADVANCED_COLOR_CORRECTION_MATRIX, - // byte[] | public - INTEL_CONTROL_ISP_BXT_CSC, // byte[] | public - INTEL_CONTROL_ISP_BXT_DEMOSAIC, // byte[] | public - INTEL_CONTROL_ISP_SC_IEFD, // byte[] | public - INTEL_CONTROL_ISP_SEE, // byte[] | public - INTEL_CONTROL_ISP_BNLM, // byte[] | public - INTEL_CONTROL_ISP_TNR5_21, // byte[] | public - INTEL_CONTROL_ISP_XNR_DSS, // byte[] | public - INTEL_CONTROL_ISP_GAMMA_TONE_MAP, // byte[] | public - INTEL_CONTROL_ISP_TNR5_22, // byte[] | public - INTEL_CONTROL_ISP_TNR5_25, // byte[] | public + // byte[] | public + INTEL_CONTROL_ISP_BXT_CSC, // byte[] | public + INTEL_CONTROL_ISP_BXT_DEMOSAIC, // byte[] | public + INTEL_CONTROL_ISP_SC_IEFD, // byte[] | public + INTEL_CONTROL_ISP_SEE, // byte[] | public + INTEL_CONTROL_ISP_BNLM, // byte[] | public + INTEL_CONTROL_ISP_TNR5_21, // byte[] | public + INTEL_CONTROL_ISP_XNR_DSS, // byte[] 
| public + INTEL_CONTROL_ISP_GAMMA_TONE_MAP, // byte[] | public + INTEL_CONTROL_ISP_TNR5_22, // byte[] | public + INTEL_CONTROL_ISP_TNR5_25, // byte[] | public INTEL_CONTROL_ISP_END, } icamera_metadata_tag_t; @@ -604,7 +604,7 @@ typedef enum icamera_metadata_enum_camera_control_mode { // CAMERA_CONTROL_SCENE_MODE typedef enum icamera_metadata_enum_camera_control_scene_mode { - CAMERA_CONTROL_SCENE_MODE_DISABLED = 0, + CAMERA_CONTROL_SCENE_MODE_DISABLED = 0, CAMERA_CONTROL_SCENE_MODE_FACE_PRIORITY, CAMERA_CONTROL_SCENE_MODE_ACTION, CAMERA_CONTROL_SCENE_MODE_PORTRAIT, @@ -733,25 +733,25 @@ typedef enum icamera_metadata_enum_camera_scaler_cropping_type { // CAMERA_SENSOR_REFERENCE_ILLUMINANT1 typedef enum icamera_metadata_enum_camera_sensor_reference_illuminant1 { - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23, - CAMERA_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3, + 
CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23, + CAMERA_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24, } icamera_metadata_enum_camera_sensor_reference_illuminant1_t; // CAMERA_SENSOR_TEST_PATTERN_MODE @@ -761,7 +761,7 @@ typedef enum icamera_metadata_enum_camera_sensor_test_pattern_mode { CAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAMERA_SENSOR_TEST_PATTERN_MODE_PN9, - CAMERA_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256, + CAMERA_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256, } icamera_metadata_enum_camera_sensor_test_pattern_mode_t; // CAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT @@ -865,26 +865,26 @@ typedef enum icamera_metadata_enum_camera_black_level_lock { // CAMERA_SYNC_FRAME_NUMBER typedef enum icamera_metadata_enum_camera_sync_frame_number { - CAMERA_SYNC_FRAME_NUMBER_CONVERGING = -1, - CAMERA_SYNC_FRAME_NUMBER_UNKNOWN = -2, + CAMERA_SYNC_FRAME_NUMBER_CONVERGING = -1, + CAMERA_SYNC_FRAME_NUMBER_UNKNOWN = -2, } icamera_metadata_enum_camera_sync_frame_number_t; // CAMERA_SYNC_MAX_LATENCY typedef enum icamera_metadata_enum_camera_sync_max_latency { - CAMERA_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, - CAMERA_SYNC_MAX_LATENCY_UNKNOWN = -1, + 
CAMERA_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, + CAMERA_SYNC_MAX_LATENCY_UNKNOWN = -1, } icamera_metadata_enum_camera_sync_max_latency_t; // INTEL_INFO_AVAILABLE_FEATURES typedef enum icamera_metadata_enum_intel_info_available_features { - INTEL_INFO_AVAILABLE_FEATURES_MANUAL_EXPOSURE = 0, - INTEL_INFO_AVAILABLE_FEATURES_MANUAL_WHITE_BALANCE = 1, - INTEL_INFO_AVAILABLE_FEATURES_IMAGE_ENHANCEMENT = 2, - INTEL_INFO_AVAILABLE_FEATURES_NOISE_REDUCTION = 3, - INTEL_INFO_AVAILABLE_FEATURES_SCENE_MODE = 4, - INTEL_INFO_AVAILABLE_FEATURES_WEIGHT_GRID_MODE = 5, - INTEL_INFO_AVAILABLE_FEATURES_PER_FRAME_CONTROL = 6, - INTEL_INFO_AVAILABLE_FEATURES_ISP_CONTROL = 7, + INTEL_INFO_AVAILABLE_FEATURES_MANUAL_EXPOSURE = 0, + INTEL_INFO_AVAILABLE_FEATURES_MANUAL_WHITE_BALANCE = 1, + INTEL_INFO_AVAILABLE_FEATURES_IMAGE_ENHANCEMENT = 2, + INTEL_INFO_AVAILABLE_FEATURES_NOISE_REDUCTION = 3, + INTEL_INFO_AVAILABLE_FEATURES_SCENE_MODE = 4, + INTEL_INFO_AVAILABLE_FEATURES_WEIGHT_GRID_MODE = 5, + INTEL_INFO_AVAILABLE_FEATURES_PER_FRAME_CONTROL = 6, + INTEL_INFO_AVAILABLE_FEATURES_ISP_CONTROL = 7, } icamera_metadata_enum_intel_info_available_features_t; // INTEL_INFO_WFOV @@ -1016,4 +1016,3 @@ typedef enum icamera_metadata_enum_intel_control_mono_downscale { INTEL_CONTROL_MONO_DOWNSCALE_OFF, INTEL_CONTROL_MONO_DOWNSCALE_ON, } icamera_metadata_enum_intel_control_mono_downscale_t; - diff --git a/src/platformdata/AiqInitData.cpp b/src/platformdata/AiqInitData.cpp index 7b009b71..8915baa8 100644 --- a/src/platformdata/AiqInitData.cpp +++ b/src/platformdata/AiqInitData.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation + * Copyright (C) 2015-2022 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -93,7 +93,7 @@ void AiqData::loadFile(const std::string& fileName, ia_binary_data* data, int ma CheckWarning(readSize != (size_t)usedFileSize, VOID_VALUE, "Failed to read %s, error %s", fileName.c_str(), strerror(errno)); - mDataPtr = move(dataPtr); + mDataPtr = std::move(dataPtr); data->data = mDataPtr.get(); data->size = usedFileSize; LOG1("%s, file %s, size %d", __func__, fileName.c_str(), data->size); @@ -124,7 +124,7 @@ void AiqData::saveDataToFile(const std::string& fileName, const ia_binary_data* AiqInitData::AiqInitData(const std::string& sensorName, const std::string& camCfgDir, const std::vector& tuningCfg, const std::string& nvmDir, - int maxNvmSize, std::string* camModuleName) + int maxNvmSize, const std::string& camModuleName) : mSensorName(sensorName), mMaxNvmSize(maxNvmSize), mTuningCfg(tuningCfg), @@ -133,23 +133,14 @@ AiqInitData::AiqInitData(const std::string& sensorName, const std::string& camCf std::string aiqbNameFromModuleInfo; if (nvmDir.length() > 0) { - mNvmPath.append(NVM_DATA_PATH); + mNvmPath = nvmDir; - mNvmPath.append(nvmDir); - if (mNvmPath.back() != '/') mNvmPath.append("/"); - - mNvmPath.append("eeprom"); - LOG2("NVM data is located in %s", mNvmPath.c_str()); - - std::string cameraModule; - int ret = getCameraModuleFromEEPROM(mNvmPath, &cameraModule); - if (ret == OK) { - if (camModuleName) *camModuleName = cameraModule; + if (camModuleName.length() > 0) { DIR* dir = opendir(camCfgDir.c_str()); if (dir) { std::string aiqbName("camera_"); std::string postfix(".aiqb"); - aiqbName.append(cameraModule); + aiqbName.append(camModuleName); struct dirent* direntPtr = nullptr; while ((direntPtr = readdir(dir)) != nullptr) { if ((strncmp(direntPtr->d_name, aiqbName.c_str(), aiqbName.length()) == 0) && @@ -197,48 +188,6 @@ AiqInitData::~AiqInitData() { delete mNvm; } -int AiqInitData::getCameraModuleFromEEPROM(const std::string& nvmPath, std::string* cameraModule) { - LOG1("@%s, nvmPath %s", __func__, nvmPath.c_str()); - - 
CheckAndLogError(nvmPath.empty(), NAME_NOT_FOUND, "nvmPath is empty"); - - const int moduleInfoSize = CAMERA_MODULE_INFO_SIZE; - const int moduleInfoOffset = CAMERA_MODULE_INFO_OFFSET; - struct CameraModuleInfo cameraModuleInfo; - CLEAR(cameraModuleInfo); - FILE* eepromFile = fopen(nvmPath.c_str(), "rb"); - CheckAndLogError(!eepromFile, UNKNOWN_ERROR, "Failed to open EEPROM file in %s", - nvmPath.c_str()); - - // file size should be larger than CAMERA_MODULE_INFO_OFFSET - fseek(eepromFile, 0, SEEK_END); - int nvmDataSize = static_cast(ftell(eepromFile)); - if (nvmDataSize < moduleInfoOffset) { - LOGE("EEPROM data is too small"); - fclose(eepromFile); - return NOT_ENOUGH_DATA; - } - - fseek(eepromFile, -1 * moduleInfoOffset, SEEK_END); - int ret = fread(&cameraModuleInfo, moduleInfoSize, 1, eepromFile); - fclose(eepromFile); - CheckAndLogError(!ret, UNKNOWN_ERROR, "Failed to read module info %d", ret); - - if (strncmp(cameraModuleInfo.mOsInfo, NVM_OS, strlen(NVM_OS)) != 0) { - LOG1("NVM OS string doesn't match with module info"); - return NO_ENTRY; - } - - char tmpName[CAMERA_MODULE_INFO_SIZE]; - snprintf(tmpName, CAMERA_MODULE_INFO_SIZE, "%c%c_%04x", cameraModuleInfo.mModuleVendor[0], - cameraModuleInfo.mModuleVendor[1], cameraModuleInfo.mModuleProduct); - - cameraModule->assign(tmpName); - LOG1("%s, aiqb name %s", __func__, cameraModule->c_str()); - - return OK; -} - /** * findConfigFile * diff --git a/src/platformdata/AiqInitData.h b/src/platformdata/AiqInitData.h index 555ac3d8..5af0cd49 100644 --- a/src/platformdata/AiqInitData.h +++ b/src/platformdata/AiqInitData.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,29 +29,6 @@ namespace icamera { -#define NVM_OS "CrOS" -/** - * Camera Module Information - * - * Camera Module Information is gotten from the EEPROM, which needs to be programmed with - * an identification block located in the last 32 bytes of the EEPROM. - */ -struct CameraModuleInfo { - char mOsInfo[4]; - uint16_t mCRC; - uint8_t mVersion; - uint8_t mLengthOfFields; - uint16_t mDataFormat; - uint16_t mModuleProduct; - char mModuleVendor[2]; - char mSensorVendor[2]; - uint16_t mSensorModel; - uint8_t mI2cAddress; - uint8_t mReserved[13]; -}; -#define CAMERA_MODULE_INFO_OFFSET 32 -#define CAMERA_MODULE_INFO_SIZE 32 - class AiqData { public: explicit AiqData(const std::string& fileName, int maxSize = -1); @@ -79,7 +56,7 @@ class AiqInitData { public: AiqInitData(const std::string& sensorName, const std::string& camCfgDir, const std::vector& tuningCfg, const std::string& nvmDir, - int maxNvmSize, std::string* camModuleName); + int maxNvmSize, const std::string& camModuleName); ~AiqInitData(); // cpf @@ -100,7 +77,6 @@ class AiqInitData { void updateMakernoteTimeStamp(int64_t sequence, uint64_t timestamp); void acquireMakernoteData(uint64_t timestamp, Parameters* param); - int getCameraModuleFromEEPROM(const std::string& nvmPath, std::string* cameraModule); std::string getAiqdFileNameWithPath(TuningMode mode); int findConfigFile(const std::string& camCfgDir, std::string* cpfPathName); diff --git a/src/platformdata/CameraParser.cpp b/src/platformdata/CameraParser.cpp index 8932a998..0389902d 100644 --- a/src/platformdata/CameraParser.cpp +++ b/src/platformdata/CameraParser.cpp @@ -227,7 +227,7 @@ void CameraParser::checkField(CameraParser* profiles, const char* name, const ch profiles->pCurrentCam->sensorName = val; } else if (strcmp(key, "description") == 0) { profiles->pCurrentCam->sensorDescription = val; - // VIRTUAL_CHANNEL_S + // VIRTUAL_CHANNEL_S } else if (strcmp(key, "virtualChannel") == 0) { profiles->pCurrentCam->mVirtualChannel = strcmp(val, "true") 
== 0 ? true : false; @@ -237,7 +237,7 @@ void CameraParser::checkField(CameraParser* profiles, const char* name, const ch profiles->pCurrentCam->mVCSeq = strtoul(val, nullptr, 10); } else if (strcmp(key, "vcGroupId") == 0) { profiles->pCurrentCam->mVCGroupId = strtoul(val, nullptr, 10); - // VIRTUAL_CHANNEL_E + // VIRTUAL_CHANNEL_E } idx += 2; } @@ -296,10 +296,12 @@ void CameraParser::handleCommon(CameraParser* profiles, const char* name, const cfg->supportIspTuningUpdate = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "supportHwJpegEncode") == 0) { cfg->supportHwJpegEncode = strcmp(atts[1], "true") == 0; -// ENABLE_EVCP_S + } else if (strcmp(name, "maxIsysTimeoutValue") == 0) { + cfg->maxIsysTimeoutValue = atoi(atts[1]); + // ENABLE_EVCP_S } else if (strcmp(name, "useGpuEvcp") == 0) { cfg->isGpuEvcpEnabled = strcmp(atts[1], "true") == 0; -// ENABLE_EVCP_E + // ENABLE_EVCP_E } } @@ -386,10 +388,10 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const } } else if (strcmp(name, "useCrlModule") == 0) { pCurrentCam->mUseCrlModule = strcmp(atts[1], "true") == 0; - // DOL_FEATURE_S + // DOL_FEATURE_S } else if (strcmp(name, "dolVbpOffset") == 0) { parseXmlConvertStrings(atts[1], pCurrentCam->mDolVbpOffset, atoi); - // DOL_FEATURE_E + // DOL_FEATURE_E } else if (strcmp(name, "skipFrameV4L2Error") == 0) { pCurrentCam->mSkipFrameV4L2Error = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "useSensorDigitalGain") == 0) { @@ -398,10 +400,10 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const pCurrentCam->mUseIspDigitalGain = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "preRegisterBuffer") == 0) { pCurrentCam->mNeedPreRegisterBuffers = strcmp(atts[1], "true") == 0; - // FRAME_SYNC_S + // FRAME_SYNC_S } else if (strcmp(name, "enableFrameSyncCheck") == 0) { pCurrentCam->mFrameSyncCheckEnabled = strcmp(atts[1], "true") == 0; - // FRAME_SYNC_E + // FRAME_SYNC_E } else if (strcmp(name, "lensName") 
== 0) { string vcmName = atts[1]; if (!profiles->mI2CBus.empty()) { @@ -454,7 +456,7 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const pCurrentCam->mDigitalGainLag = atoi(atts[1]); } else if (strcmp(name, "exposureLag") == 0) { pCurrentCam->mExposureLag = atoi(atts[1]); - // HDR_FEATURE_S + // HDR_FEATURE_S } else if (strcmp(name, "hdrExposureType") == 0) { if (strcmp(atts[1], "fix-exposure-ratio") == 0) { pCurrentCam->mSensorExposureType = SENSOR_FIX_EXPOSURE_RATIO; @@ -487,7 +489,7 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const LOGE("unknown sensor gain type %s, set to SENSOR_GAIN_NONE", atts[1]); pCurrentCam->mSensorGainType = SENSOR_GAIN_NONE; } - // HDR_FEATURE_E + // HDR_FEATURE_E } else if (strcmp(name, "graphSettingsFile") == 0) { pCurrentCam->mGraphSettingsFile = atts[1]; } else if (strcmp(name, "graphSettingsType") == 0) { @@ -603,6 +605,17 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const mNvmDeviceInfo.push_back(info); + tablePtr = strtok_r(nullptr, ",", &savePtr); + } + } else if (strcmp(name, "supportModuleNames") == 0) { + int sz = strlen(atts[1]); + char src[sz + 1]; + MEMCPY_S(src, sz, atts[1], sz); + src[sz] = '\0'; + char* savePtr; + char* tablePtr = strtok_r(src, ",", &savePtr); + while (tablePtr) { + pCurrentCam->mSupportModuleNames.push_back(tablePtr); tablePtr = strtok_r(nullptr, ",", &savePtr); } } else if (strcmp(name, "isISYSCompression") == 0) { @@ -611,6 +624,8 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const pCurrentCam->mPSACompression = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "isOFSCompression") == 0) { pCurrentCam->mOFSCompression = strcmp(atts[1], "true") == 0; + } else if (strcmp(name, "schedulerEnabled") == 0) { + pCurrentCam->mSchedulerEnabled = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "faceAeEnabled") == 0) { pCurrentCam->mFaceAeEnabled = strcmp(atts[1], "true") == 0; 
} else if (strcmp(name, "psysAlignWithSof") == 0) { @@ -621,8 +636,7 @@ void CameraParser::handleSensor(CameraParser* profiles, const char* name, const pCurrentCam->mSwProcessingAlignWithIsp = strcmp(atts[1], "true") == 0; } else if (strcmp(name, "faceEngineVendor") == 0) { int val = atoi(atts[1]); - pCurrentCam->mFaceEngineVendor = - val >= 0 ? val : FACE_ENGINE_INTEL_PVL; + pCurrentCam->mFaceEngineVendor = val >= 0 ? val : FACE_ENGINE_INTEL_PVL; } else if (strcmp(name, "faceEngineRunningInterval") == 0) { int val = atoi(atts[1]); pCurrentCam->mFaceEngineRunningInterval = @@ -749,10 +763,10 @@ void CameraParser::parseMediaCtlConfigElement(CameraParser* profiles, const char mc.outputHeight = strtoul(atts[idx + 1], nullptr, 10); } else if (strcmp(key, "format") == 0) { mc.format = CameraUtils::string2PixelCode(atts[idx + 1]); - // DOL_FEATURE_S + // DOL_FEATURE_S } else if (strcmp(key, "vbp") == 0) { mc.vbp = strtoul(atts[idx + 1], nullptr, 10); - // DOL_FEATURE_E + // DOL_FEATURE_E } idx += 2; } @@ -804,11 +818,11 @@ void CameraParser::parseControlElement(CameraParser* profiles, const char* name, } else if (!strcmp(val, "V4L2_CID_MIPI_LANES")) { ctl.ctlCmd = V4L2_CID_MIPI_LANES; #endif - // HDR_FEATURE_S + // HDR_FEATURE_S } else if (!strcmp(val, "V4L2_CID_WDR_MODE")) { ctl.ctlCmd = V4L2_CID_WDR_MODE; - // HDR_FEATURE_E - // CRL_MODULE_S + // HDR_FEATURE_E + // CRL_MODULE_S } else if (!strcmp(val, "V4L2_CID_LINE_LENGTH_PIXELS")) { ctl.ctlCmd = V4L2_CID_LINE_LENGTH_PIXELS; } else if (!strcmp(val, "V4L2_CID_FRAME_LENGTH_LINES")) { @@ -819,7 +833,7 @@ void CameraParser::parseControlElement(CameraParser* profiles, const char* name, ctl.ctlCmd = CRL_CID_EXPOSURE_MODE; } else if (!strcmp(val, "CRL_CID_EXPOSURE_HDR_RATIO")) { ctl.ctlCmd = CRL_CID_EXPOSURE_HDR_RATIO; - // CRL_MODULE_E + // CRL_MODULE_E } else { LOGE("Unknow ioctl command %s", val); ctl.ctlCmd = -1; @@ -1825,7 +1839,7 @@ void CameraParser::handleStaticMetaData(CameraParser* profiles, const char* name } 
mMetadata.update(CAMERA_AE_AVAILABLE_ANTIBANDING_MODES, antibandingModes, supportedAntibandingMode.size()); - // ISP_CONTROL_S + // ISP_CONTROL_S } else if (strcmp(name, "supportedIspControls") == 0) { vector ispCtrlIds; parseSupportedIspControls(atts[1], ispCtrlIds); @@ -1838,7 +1852,7 @@ void CameraParser::handleStaticMetaData(CameraParser* profiles, const char* name } mMetadata.update(INTEL_CONTROL_ISP_SUPPORTED_CTRL_IDS, data, dataCount); } - // ISP_CONTROL_E + // ISP_CONTROL_E } else if (strcmp(name, "sensorMountType") == 0) { uint8_t mountType = WALL_MOUNTED; @@ -2033,15 +2047,33 @@ void CameraParser::endParseElement(void* userData, const char* name) { profiles->mNvmDeviceInfo.clear(); - // Merge the content of mMetadata into mCapability. - ParameterHelper::merge(profiles->mMetadata, &profiles->pCurrentCam->mCapability); - profiles->mMetadata.clear(); + bool isCameraAvailable = true; + // Check if the camera is available + if (!profiles->pCurrentCam->mSupportModuleNames.empty()) { + isCameraAvailable = false; + for (size_t i = 0; i < profiles->pCurrentCam->mSupportModuleNames.size(); i++) { + if (strcmp(pCurrentCam->mSupportModuleNames[i].c_str(), + profiles->pCurrentCam->mCamModuleName.c_str()) == 0) { + isCameraAvailable = true; + break; + } + } + } + + if (isCameraAvailable) { + // Merge the content of mMetadata into mCapability. 
+ ParameterHelper::merge(profiles->mMetadata, &profiles->pCurrentCam->mCapability); - // For non-extended camera, it should be in order by mCurrentSensor - profiles->mStaticCfg->mCameras.insert( - profiles->mStaticCfg->mCameras.begin() + profiles->mCurrentSensor, - *(profiles->pCurrentCam)); + // For non-extended camera, it should be in order by mCurrentSensor + profiles->mStaticCfg->mCameras.insert( + profiles->mStaticCfg->mCameras.begin() + profiles->mCurrentSensor, + *(profiles->pCurrentCam)); + } else { + profiles->mSensorNum--; + if (profiles->mCurrentSensor > 0) profiles->mCurrentSensor--; + } + profiles->mMetadata.clear(); delete profiles->pCurrentCam; profiles->pCurrentCam = nullptr; } @@ -2069,6 +2101,46 @@ void CameraParser::endParseElement(void* userData, const char* name) { if (strcmp(name, "Common") == 0) profiles->mCurrentDataField = FIELD_INVALID; } +int CameraParser::getCameraModuleNameFromEEPROM(const std::string& nvmDir, + std::string* cameraModule) { + const int moduleInfoOffset = CAMERA_MODULE_INFO_OFFSET; + FILE* eepromFile = fopen(nvmDir.c_str(), "rb"); + CheckAndLogError(!eepromFile, UNKNOWN_ERROR, "Failed to open EEPROM file in %s", + nvmDir.c_str()); + + // file size should be larger than CAMERA_MODULE_INFO_OFFSET + fseek(eepromFile, 0, SEEK_END); + int nvmDataSize = static_cast(ftell(eepromFile)); + if (nvmDataSize < moduleInfoOffset) { + LOGE("EEPROM data is too small"); + fclose(eepromFile); + return NOT_ENOUGH_DATA; + } + + fseek(eepromFile, -1 * moduleInfoOffset, SEEK_END); + + const int moduleInfoSize = CAMERA_MODULE_INFO_SIZE; + struct CameraModuleInfo cameraModuleInfo; + CLEAR(cameraModuleInfo); + int ret = fread(&cameraModuleInfo, moduleInfoSize, 1, eepromFile); + fclose(eepromFile); + CheckAndLogError(!ret, UNKNOWN_ERROR, "Failed to read module info %d", ret); + + if (strncmp(cameraModuleInfo.mOsInfo, NVM_OS, strlen(NVM_OS)) != 0) { + LOG1("NVM OS string doesn't match with module info"); + return NO_ENTRY; + } + + char 
tmpName[CAMERA_MODULE_INFO_SIZE]; + snprintf(tmpName, CAMERA_MODULE_INFO_SIZE, "%c%c_%04x", cameraModuleInfo.mModuleVendor[0], + cameraModuleInfo.mModuleVendor[1], cameraModuleInfo.mModuleProduct); + + cameraModule->assign(tmpName); + LOG1("%s, aiqb name %s", __func__, cameraModule->c_str()); + + return OK; +} + /* the path of NVM device is in /sys/bus/i2c/devices/i2c-'adaptorId'/firmware_node/XXXX/path. */ void CameraParser::getNVMDirectory(CameraParser* profiles) { LOG2("@%s", __func__); @@ -2124,15 +2196,28 @@ void CameraParser::getNVMDirectory(CameraParser* profiles) { if (found) break; } closedir(dir); + } else { + LOGE("Failed to open dir %s", nvmPath.c_str()); } for (auto nvm : profiles->mNvmDeviceInfo) { if (!nvm.directory.empty()) { // The first one in list is prioritized and should be selected. - profiles->pCurrentCam->mNvmDirectory = nvm.directory; + std::string nvmPath; + nvmPath.append(NVM_DATA_PATH); + nvmPath.append(nvm.directory); + if (nvmPath.back() != '/') nvmPath.append("/"); + + nvmPath.append("eeprom"); + LOG2("NVM data is located in %s", nvmPath.c_str()); + profiles->pCurrentCam->mNvmDirectory = nvmPath; profiles->pCurrentCam->mMaxNvmDataSize = nvm.dataSize; - LOG2("NVM dir %s", profiles->pCurrentCam->mNvmDirectory.c_str()); + int ret = getCameraModuleNameFromEEPROM(profiles->pCurrentCam->mNvmDirectory, + &profiles->pCurrentCam->mCamModuleName); + LOG2("NVM dir %s, ret %d", profiles->pCurrentCam->mNvmDirectory.c_str(), ret); break; + } else { + LOGE("Failed to find NVM directory"); } } } diff --git a/src/platformdata/CameraParser.h b/src/platformdata/CameraParser.h index fa3b39f8..60ad2792 100644 --- a/src/platformdata/CameraParser.h +++ b/src/platformdata/CameraParser.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015-2021 Intel Corporation. + * Copyright (C) 2015-2022 Intel Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -36,6 +36,30 @@ namespace icamera { +#define NVM_OS "CrOS" +/** + * Camera Module Information + * + * Camera Module Information is gotten from the EEPROM, which needs to be programmed with + * an identification block located in the last 32 bytes of the EEPROM. + */ +struct CameraModuleInfo { + char mOsInfo[4]; + uint16_t mCRC; + uint8_t mVersion; + uint8_t mLengthOfFields; + uint16_t mDataFormat; + uint16_t mModuleProduct; + char mModuleVendor[2]; + char mSensorVendor[2]; + uint16_t mSensorModel; + uint8_t mI2cAddress; + uint8_t mReserved[13]; +}; + +#define CAMERA_MODULE_INFO_OFFSET 32 +#define CAMERA_MODULE_INFO_SIZE 32 + /** * \class CameraParser * @@ -136,6 +160,7 @@ class CameraParser : public ParserBase { std::string replaceStringInXml(CameraParser* profiles, const char* value); void getNVMDirectory(CameraParser* profiles); + int getCameraModuleNameFromEEPROM(const std::string& nvmDir, std::string* cameraModule); private: DISALLOW_COPY_AND_ASSIGN(CameraParser); diff --git a/src/platformdata/CameraTypes.h b/src/platformdata/CameraTypes.h index aad126dc..fede4ce1 100644 --- a/src/platformdata/CameraTypes.h +++ b/src/platformdata/CameraTypes.h @@ -27,13 +27,7 @@ namespace icamera { /** * Use to link buffer producers and consumers */ -enum Port { - MAIN_PORT = 0, - SECOND_PORT, - THIRD_PORT, - FORTH_PORT, - INVALID_PORT -}; +enum Port { MAIN_PORT = 0, SECOND_PORT, THIRD_PORT, FORTH_PORT, INVALID_PORT }; enum { FACING_BACK = 0, @@ -47,26 +41,23 @@ enum { ORIENTATION_270 = 270, }; -enum { - LENS_VCM_HW = 0, - LENS_NONE_HW -}; +enum { LENS_VCM_HW = 0, LENS_NONE_HW }; enum { - SENSOR_EXPOSURE_SINGLE = 0, /* sensor is single exposure */ - SENSOR_FIX_EXPOSURE_RATIO, /* Fix exposure ratio between long and short exposure */ - SENSOR_RELATIVE_MULTI_EXPOSURES, /* AE output exposures are converted to Shutter and - Readout time, then set to sensor driver */ - SENSOR_MULTI_EXPOSURES, /* Multi-exposures are set to sensor driver directly */ - SENSOR_DUAL_EXPOSURES_DCG_AND_VS 
/* Dual-exposure and multiple gains, i.e. DCG + VS */ + SENSOR_EXPOSURE_SINGLE = 0, /* sensor is single exposure */ + SENSOR_FIX_EXPOSURE_RATIO, /* Fix exposure ratio between long and short exposure */ + SENSOR_RELATIVE_MULTI_EXPOSURES, /* AE output exposures are converted to Shutter and + Readout time, then set to sensor driver */ + SENSOR_MULTI_EXPOSURES, /* Multi-exposures are set to sensor driver directly */ + SENSOR_DUAL_EXPOSURES_DCG_AND_VS /* Dual-exposure and multiple gains, i.e. DCG + VS */ }; enum { SENSOR_GAIN_NONE = 0, - SENSOR_MULTI_DG_AND_CONVERTION_AG, /* Multi-DigitalGain and convertion AnalogGain are set - to sensor driver */ - ISP_DG_AND_SENSOR_DIRECT_AG, /* All digital gain is passed to ISP */ - SENSOR_MULTI_DG_AND_DIRECT_AG /* Multi analog and digital gains, i.e. DCG */ + SENSOR_MULTI_DG_AND_CONVERTION_AG, /* Multi-DigitalGain and convertion AnalogGain are set + to sensor driver */ + ISP_DG_AND_SENSOR_DIRECT_AG, /* All digital gain is passed to ISP */ + SENSOR_MULTI_DG_AND_DIRECT_AG /* Multi analog and digital gains, i.e. 
DCG */ }; /** @@ -93,35 +84,32 @@ typedef enum { */ typedef enum { SENSOR_DG_TYPE_NONE, - SENSOR_DG_TYPE_X, //linear relationship, gain = n*value (value: register value, n: ratio) - SENSOR_DG_TYPE_2_X, //exponential relationship, gain = 2 ^ value (value: register value) + SENSOR_DG_TYPE_X, // linear relationship, gain = n*value (value: register value, n: ratio) + SENSOR_DG_TYPE_2_X, // exponential relationship, gain = 2 ^ value (value: register value) } SensorDgType; -typedef enum { - MORPH_TABLE = 0, - IMG_TRANS -} DvsType; +typedef enum { MORPH_TABLE = 0, IMG_TRANS } DvsType; // Imaging algorithms typedef enum { IMAGING_ALGO_NONE = 0, - IMAGING_ALGO_AE = 1, - IMAGING_ALGO_AWB = 1 << 1, - IMAGING_ALGO_AF = 1 << 2, + IMAGING_ALGO_AE = 1, + IMAGING_ALGO_AWB = 1 << 1, + IMAGING_ALGO_AF = 1 << 2, IMAGING_ALGO_GBCE = 1 << 3, - IMAGING_ALGO_PA = 1 << 4, - IMAGING_ALGO_SA = 1 << 5 + IMAGING_ALGO_PA = 1 << 4, + IMAGING_ALGO_SA = 1 << 5 } imaging_algorithm_t; // Note AUTO is not real config mode in the HAL. typedef camera_stream_configuration_mode_t ConfigMode; typedef struct TuningConfig { - ConfigMode configMode; /*!< configMode is internal usage to select AIQ and - Pipeline. AUTO is not real config mode. */ - TuningMode tuningMode; /*!< tuningMode is used to define user cases, - like video or still. */ - std::string aiqbName; /*!< special aiqb name corresponding with TuningMode */ + ConfigMode configMode; /*!< configMode is internal usage to select AIQ and + Pipeline. AUTO is not real config mode. */ + TuningMode tuningMode; /*!< tuningMode is used to define user cases, + like video or still. 
*/ + std::string aiqbName; /*!< special aiqb name corresponding with TuningMode */ } TuningConfig; typedef struct { @@ -148,7 +136,7 @@ typedef struct { typedef struct { unsigned short width; unsigned short height; - unsigned char *table; + unsigned char* table; } WeightGridTable; // CUSTOM_WEIGHT_GRID_E @@ -186,7 +174,10 @@ struct PolicyConfig { std::vector shareReferPairList; // i: producer; i+1: consumer bool enableBundleInSdv; - PolicyConfig() { graphId = -1; enableBundleInSdv = true; } + PolicyConfig() { + graphId = -1; + enableBundleInSdv = true; + } }; #define DEFAULT_VIDEO_STREAM_NUM 2 @@ -203,9 +194,10 @@ struct CommonConfig { int videoStreamNum; bool supportIspTuningUpdate; bool supportHwJpegEncode; -// ENABLE_EVCP_S + int maxIsysTimeoutValue; + // ENABLE_EVCP_S bool isGpuEvcpEnabled; -// ENABLE_EVCP_E + // ENABLE_EVCP_E CommonConfig() { xmlVersion = 1.0; @@ -217,9 +209,10 @@ struct CommonConfig { videoStreamNum = DEFAULT_VIDEO_STREAM_NUM; supportIspTuningUpdate = false; supportHwJpegEncode = true; -// ENABLE_EVCP_S + maxIsysTimeoutValue = 0; + // ENABLE_EVCP_S isGpuEvcpEnabled = false; -// ENABLE_EVCP_E + // ENABLE_EVCP_E } }; @@ -241,7 +234,7 @@ struct ExpRange { /** * Multi exposure range information -*/ + */ struct MultiExpRange { camera_resolution_t Resolution; ExpRange SHS1; diff --git a/src/platformdata/PlatformData.cpp b/src/platformdata/PlatformData.cpp index 358c4e6c..c2b0c5a1 100644 --- a/src/platformdata/PlatformData.cpp +++ b/src/platformdata/PlatformData.cpp @@ -106,7 +106,7 @@ int PlatformData::init() { AiqInitData* aiqInitData = new AiqInitData( staticCfg->mCameras[i].sensorName, getCameraCfgPath(), staticCfg->mCameras[i].mSupportedTuningConfig, staticCfg->mCameras[i].mNvmDirectory, - staticCfg->mCameras[i].mMaxNvmDataSize, &camModuleName); + staticCfg->mCameras[i].mMaxNvmDataSize, staticCfg->mCameras[i].mCamModuleName); getInstance()->mAiqInitData.push_back(aiqInitData); if (!camModuleName.empty() && @@ -262,7 +262,7 @@ bool 
PlatformData::isEnableLtmThread(int cameraId) { } bool PlatformData::isFaceDetectionSupported(int cameraId) { - Parameters *source = &(getInstance()->mStaticCfg.mCameras[cameraId].mCapability); + Parameters* source = &(getInstance()->mStaticCfg.mCameras[cameraId].mCapability); const icamera::CameraMetadata& meta = icamera::ParameterHelper::getMetadata(*source); auto entry = meta.find(CAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); for (size_t i = 0; i < entry.count; i++) { @@ -272,6 +272,10 @@ bool PlatformData::isFaceDetectionSupported(int cameraId) { return false; } +bool PlatformData::isSchedulerEnabled(int cameraId) { + return getInstance()->mStaticCfg.mCameras[cameraId].mSchedulerEnabled; +} + bool PlatformData::isFaceAeEnabled(int cameraId) { return (isFaceDetectionSupported(cameraId) && getInstance()->mStaticCfg.mCameras[cameraId].mFaceAeEnabled); @@ -1046,10 +1050,9 @@ int PlatformData::calculateFrameParams(int cameraId, SensorFrameParams& sensorFr return BAD_VALUE; } if (current.width == 0 || current.height == 0) { - LOGW( - "%s: Invalid XML configuration for TGT_COMPOSE," - "0 value detected in width or height", - __func__); + LOGW("%s: Invalid XML configuration for TGT_COMPOSE," + "0 value detected in width or height", + __func__); return BAD_VALUE; } else { LOG2("%s: Compose width %d/%d, height %d/%d", __func__, width, current.width, @@ -1375,8 +1378,8 @@ camera_resolution_t* PlatformData::getPslOutputForRotation(int width, int height vector& outputMap = getInstance()->mStaticCfg.mCameras[cameraId].mOutputMap; for (auto& map : outputMap) { if (width == map.User.width && height == map.User.height) { - LOG2(" find the psl output resoltion(%d, %d) for %dx%d", cameraId, - map.Psl.width, map.Psl.height, map.User.width, map.User.height); + LOG2(" find the psl output resoltion(%d, %d) for %dx%d", cameraId, map.Psl.width, + map.Psl.height, map.User.width, map.User.height); return &map.Psl; } } @@ -1559,6 +1562,10 @@ bool PlatformData::supportHwJpegEncode() 
{ return getInstance()->mStaticCfg.mCommonConfig.supportHwJpegEncode; } +int PlatformData::getMaxIsysTimeout() { + return getInstance()->mStaticCfg.mCommonConfig.maxIsysTimeoutValue; +} + bool PlatformData::isUsingGpuAlgo() { bool enabled = false; enabled |= isGpuTnrEnabled(); diff --git a/src/platformdata/PlatformData.h b/src/platformdata/PlatformData.h index dfd1b1d6..2bb9d37b 100644 --- a/src/platformdata/PlatformData.h +++ b/src/platformdata/PlatformData.h @@ -183,6 +183,7 @@ class PlatformData { mISYSCompression(false), mPSACompression(false), mOFSCompression(false), + mSchedulerEnabled(false), mFaceAeEnabled(true), mFaceEngineVendor(FACE_ENGINE_INTEL_PVL), mFaceEngineRunningInterval(FACE_ENGINE_DEFAULT_RUNNING_INTERVAL), @@ -296,6 +297,7 @@ class PlatformData { bool mISYSCompression; bool mPSACompression; bool mOFSCompression; + bool mSchedulerEnabled; bool mFaceAeEnabled; int mFaceEngineVendor; int mFaceEngineRunningInterval; @@ -318,6 +320,8 @@ class PlatformData { std::string mNvmDirectory; int mNvmOverwrittenFileSize; std::string mNvmOverwrittenFile; // overwrite NVM data + std::string mCamModuleName; + std::vector mSupportModuleNames; /* key: camera module name, value: camera module info */ std::unordered_map mCameraModuleInfoMap; std::vector mScalerInfo; @@ -735,6 +739,14 @@ class PlatformData { */ static bool isEnableLtmThread(int cameraId); + /** + * Check if H-Scheduler is enabled + * + * \param cameraId: [0, MAX_CAMERA_NUMBER - 1] + * \return if H-Scheduler is enabled + */ + static bool isSchedulerEnabled(int cameraId); + /** * Check face engine is enabled or not * @@ -1525,6 +1537,11 @@ class PlatformData { */ static bool supportHwJpegEncode(); + /** + * get the max ISYS timeout value + */ + static int getMaxIsysTimeout(); + /** * Check should connect gpu algo or not * should connect gpu algo service if any gpu algorithm is used diff --git a/src/platformdata/PnpDebugControl.cpp b/src/platformdata/PnpDebugControl.cpp index 1ae48d9c..e60b3e19 100644 
--- a/src/platformdata/PnpDebugControl.cpp +++ b/src/platformdata/PnpDebugControl.cpp @@ -77,8 +77,8 @@ bool PnpDebugControl::isFaceDisabled() { } bool PnpDebugControl::isFaceAeDisabled() { - return getInstance()->mStaticCfg.isFaceDisabled ? true - : getInstance()->mStaticCfg.isFaceAeDisabled; + return getInstance()->mStaticCfg.isFaceDisabled ? true : + getInstance()->mStaticCfg.isFaceAeDisabled; } bool PnpDebugControl::isBypassFDAlgo() { diff --git a/src/platformdata/gc/GraphConfig.cpp b/src/platformdata/gc/GraphConfig.cpp index eec942ef..2e314f1b 100644 --- a/src/platformdata/gc/GraphConfig.cpp +++ b/src/platformdata/gc/GraphConfig.cpp @@ -118,7 +118,7 @@ int GraphConfig::getTuningModeByStreamId(const int32_t streamId) { CheckAndLogError(mGraphData.tuningModes.empty(), -1, "%s, The tuningModes vector is empty", __func__); - for (auto &mode : mGraphData.tuningModes) { + for (auto& mode : mGraphData.tuningModes) { if (mode.streamId == streamId) return mode.tuningMode; } @@ -197,8 +197,7 @@ status_t GraphConfig::getPgRbmValue(string pgName, IGraphType::StageAttr* stageA } status_t GraphConfig::pipelineGetConnections( - const vector& pgList, - vector* confVector, + const vector& pgList, vector* confVector, std::vector* tnrPortFormat) { CheckAndLogError(!confVector, UNKNOWN_ERROR, "%s, The confVector is nullptr", __func__); diff --git a/src/platformdata/gc/GraphConfigManager.h b/src/platformdata/gc/GraphConfigManager.h index 9b41a200..154b925b 100644 --- a/src/platformdata/gc/GraphConfigManager.h +++ b/src/platformdata/gc/GraphConfigManager.h @@ -61,12 +61,8 @@ class GraphConfigManager : public IGraphConfigManager { virtual status_t queryGraphSettings(const stream_config_t* streamList); virtual status_t configStreams(const stream_config_t* streamList); virtual std::shared_ptr getGraphConfig(ConfigMode configMode); - virtual int getSelectedMcId() { - return mMcId; - } - virtual bool isGcConfigured(void) { - return mGcConfigured; - } + virtual int getSelectedMcId() { 
return mMcId; } + virtual bool isGcConfigured(void) { return mGcConfigured; } private: // Disable copy constructor and assignment operator diff --git a/src/platformdata/gc/GraphUtils.cpp b/src/platformdata/gc/GraphUtils.cpp index fa939fd4..0c467e35 100644 --- a/src/platformdata/gc/GraphUtils.cpp +++ b/src/platformdata/gc/GraphUtils.cpp @@ -31,26 +31,24 @@ void GraphUtils::dumpConnections(const std::vectorinput_width, - curRunKernel.resolution_info->input_height, - curRunKernel.resolution_info->input_crop.left, - curRunKernel.resolution_info->input_crop.top, - curRunKernel.resolution_info->input_crop.right, - curRunKernel.resolution_info->input_crop.bottom, - curRunKernel.resolution_info->output_width, - curRunKernel.resolution_info->output_height, - curRunKernel.resolution_info->output_crop.left, - curRunKernel.resolution_info->output_crop.top, - curRunKernel.resolution_info->output_crop.right, - curRunKernel.resolution_info->output_crop.bottom); + LOG3("Resolution: inputWidth %d, inputHeight %d, inputCrop %d %d %d %d," + "outputWidth %d, outputHeight %d, outputCrop %d %d %d %d,", + curRunKernel.resolution_info->input_width, + curRunKernel.resolution_info->input_height, + curRunKernel.resolution_info->input_crop.left, + curRunKernel.resolution_info->input_crop.top, + curRunKernel.resolution_info->input_crop.right, + curRunKernel.resolution_info->input_crop.bottom, + curRunKernel.resolution_info->output_width, + curRunKernel.resolution_info->output_height, + curRunKernel.resolution_info->output_crop.left, + curRunKernel.resolution_info->output_crop.top, + curRunKernel.resolution_info->output_crop.right, + curRunKernel.resolution_info->output_crop.bottom); } if (programGroup.run_kernels[i].resolution_history) { - LOG3( - "Resolution history: inputWidth %d, inputHeight %d, inputCrop %d %d %d %d," - "outputWidth %d, outputHeight %d, outputCrop %d %d %d %d,", - curRunKernel.resolution_history->input_width, - curRunKernel.resolution_history->input_height, - 
curRunKernel.resolution_history->input_crop.left, - curRunKernel.resolution_history->input_crop.top, - curRunKernel.resolution_history->input_crop.right, - curRunKernel.resolution_history->input_crop.bottom, - curRunKernel.resolution_history->output_width, - curRunKernel.resolution_history->output_height, - curRunKernel.resolution_history->output_crop.left, - curRunKernel.resolution_history->output_crop.top, - curRunKernel.resolution_history->output_crop.right, - curRunKernel.resolution_history->output_crop.bottom); + LOG3("Resolution history: inputWidth %d, inputHeight %d, inputCrop %d %d %d %d," + "outputWidth %d, outputHeight %d, outputCrop %d %d %d %d,", + curRunKernel.resolution_history->input_width, + curRunKernel.resolution_history->input_height, + curRunKernel.resolution_history->input_crop.left, + curRunKernel.resolution_history->input_crop.top, + curRunKernel.resolution_history->input_crop.right, + curRunKernel.resolution_history->input_crop.bottom, + curRunKernel.resolution_history->output_width, + curRunKernel.resolution_history->output_height, + curRunKernel.resolution_history->output_crop.left, + curRunKernel.resolution_history->output_crop.top, + curRunKernel.resolution_history->output_crop.right, + curRunKernel.resolution_history->output_crop.bottom); } LOG3("metadata %d %d %d %d, bppInfo: %d %d, outputCount %d", curRunKernel.metadata[0], diff --git a/src/platformdata/gc/HalStream.h b/src/platformdata/gc/HalStream.h index 97e53445..0393d62a 100644 --- a/src/platformdata/gc/HalStream.h +++ b/src/platformdata/gc/HalStream.h @@ -26,7 +26,7 @@ enum StreamUseCase { USE_CASE_STILL_CAPTURE = 1 << 2, // For HAL_PIXEL_FORMAT_BLOB/HAL_PIXEL_FORMAT_YCbCr_420_888 USE_CASE_RAW = 1 << 3, // For HAL_PIXEL_FORMAT_RAW16/HAL_PIXEL_FORMAT_RAW_OPAQUE USE_CASE_ZSL = 1 << 4, // For ZSL stream - USE_CASE_INPUT = 1 << 5, // For input stream + USE_CASE_INPUT = 1 << 5, // For input stream }; struct streamProps { @@ -37,28 +37,26 @@ struct streamProps { StreamUseCase useCase; 
}; -class HalStream -{ +class HalStream { public: - HalStream(struct streamProps &props, void *priv): - mWidth(props.width), - mHeight(props.height), - mFormat(props.format), - mStreamId(props.streamId), - mUseCase(props.useCase) - { + HalStream(struct streamProps& props, void* priv) + : mWidth(props.width), + mHeight(props.height), + mFormat(props.format), + mStreamId(props.streamId), + mUseCase(props.useCase) { maxBuffers = 0; mPrivate = priv; } - ~HalStream() { } + ~HalStream() {} uint32_t width() const { return mWidth; } uint32_t height() const { return mHeight; } int format() const { return mFormat; } int streamId() const { return mStreamId; } StreamUseCase useCase() const { return mUseCase; } - void *priv() { return mPrivate; } + void* priv() { return mPrivate; } public: uint32_t mWidth; @@ -68,7 +66,7 @@ class HalStream StreamUseCase mUseCase; int maxBuffers; - void *mPrivate; + void* mPrivate; }; } /* namespace icamera */ diff --git a/src/platformdata/gc/IGraphConfig.h b/src/platformdata/gc/IGraphConfig.h index d55d0ffb..78ff6a20 100644 --- a/src/platformdata/gc/IGraphConfig.h +++ b/src/platformdata/gc/IGraphConfig.h @@ -30,10 +30,10 @@ typedef uint32_t ia_uid; namespace GCSS { - class GraphConfigNode; - class GraphQueryManager; - class ItemUID; -} +class GraphConfigNode; +class GraphQueryManager; +class ItemUID; +} // namespace GCSS typedef GCSS::GraphConfigNode Node; typedef std::vector NodesPtrVector; @@ -51,36 +51,33 @@ static const int32_t STILL_TNR_STREAM_ID = 60009; // Stream id associated with still capture. 
static const int32_t STILL_STREAM_ID = 60000; -#define MAX_RBM_STR_SIZE 128 +#define MAX_RBM_STR_SIZE 128 namespace IGraphType { class ConnectionConfig { public: - ConnectionConfig(): mSourceStage(0), - mSourceTerminal(0), - mSourceIteration(0), - mSinkStage(0), - mSinkTerminal(0), - mSinkIteration(0), - mConnectionType(0) {} - - ConnectionConfig(ia_uid sourceStage, - ia_uid sourceTerminal, - ia_uid sourceIteration, - ia_uid sinkStage, - ia_uid sinkTerminal, - ia_uid sinkIteration, - int connectionType): - mSourceStage(sourceStage), - mSourceTerminal(sourceTerminal), - mSourceIteration(sourceIteration), - mSinkStage(sinkStage), - mSinkTerminal(sinkTerminal), - mSinkIteration(sinkIteration), - mConnectionType(connectionType) {} + ConnectionConfig() + : mSourceStage(0), + mSourceTerminal(0), + mSourceIteration(0), + mSinkStage(0), + mSinkTerminal(0), + mSinkIteration(0), + mConnectionType(0) {} + + ConnectionConfig(ia_uid sourceStage, ia_uid sourceTerminal, ia_uid sourceIteration, + ia_uid sinkStage, ia_uid sinkTerminal, ia_uid sinkIteration, + int connectionType) + : mSourceStage(sourceStage), + mSourceTerminal(sourceTerminal), + mSourceIteration(sourceIteration), + mSinkStage(sinkStage), + mSinkTerminal(sinkTerminal), + mSinkIteration(sinkIteration), + mConnectionType(connectionType) {} void dump() { - LOG1("connection src 0x%x (0x%x) sink 0x%x(0x%x)", - mSourceStage, mSourceTerminal, mSinkStage, mSinkTerminal); + LOG1("connection src 0x%x (0x%x) sink 0x%x(0x%x)", mSourceStage, mSourceTerminal, + mSinkStage, mSinkTerminal); } ia_uid mSourceStage; @@ -93,17 +90,17 @@ class ConnectionConfig { }; /** -* \struct PortFormatSettings -* Format settings for a port in the graph -*/ + * \struct PortFormatSettings + * Format settings for a port in the graph + */ struct PortFormatSettings { - int32_t enabled; - uint32_t terminalId; /**< Unique terminal id (is a fourcc code) */ - int32_t width; /**< Width of the frame in pixels */ - int32_t height; /**< Height of the frame in 
lines */ - int32_t fourcc; /**< Frame format */ - int32_t bpl; /**< Bytes per line*/ - int32_t bpp; /**< Bits per pixel */ + int32_t enabled; + uint32_t terminalId; /**< Unique terminal id (is a fourcc code) */ + int32_t width; /**< Width of the frame in pixels */ + int32_t height; /**< Height of the frame in lines */ + int32_t fourcc; /**< Frame format */ + int32_t bpl; /**< Bytes per line*/ + int32_t bpp; /**< Bits per pixel */ }; /** @@ -115,11 +112,11 @@ struct PipelineConnection { PipelineConnection() : stream(nullptr), hasEdgePort(false) { CLEAR(portFormatSettings); } PortFormatSettings portFormatSettings; ConnectionConfig connectionConfig; - HalStream *stream; + HalStream* stream; bool hasEdgePort; }; -struct StageAttr{ +struct StageAttr { char rbm[MAX_RBM_STR_SIZE]; uint32_t rbm_bytes; StageAttr() : rbm_bytes(0) {} @@ -147,15 +144,21 @@ struct DolInfo { // DOL_FEATURE_E struct MbrInfo { - MbrInfo() { streamId = -1; CLEAR(data); } + MbrInfo() { + streamId = -1; + CLEAR(data); + } int streamId; ia_isp_bxt_gdc_limits data; }; struct ProgramGroupInfo { - ProgramGroupInfo() { streamId = -1; pgPtr = nullptr; } + ProgramGroupInfo() { + streamId = -1; + pgPtr = nullptr; + } int streamId; - ia_isp_bxt_program_group *pgPtr; + ia_isp_bxt_program_group* pgPtr; }; struct TuningModeInfo { @@ -168,7 +171,7 @@ struct GraphConfigData { int graphId; uint32_t gdcKernelId; // DOL_FEATURE_S - DolInfo dolInfo; + DolInfo dolInfo; // DOL_FEATURE_E camera_resolution_t csiReso; ia_isp_bxt_resolution_info_t gdcReso; @@ -178,9 +181,7 @@ struct GraphConfigData { std::vector pgNames; std::vector programGroup; std::vector tuningModes; - GraphConfigData() : mcId(-1), - graphId(-1), - gdcKernelId(-1) { + GraphConfigData() : mcId(-1), graphId(-1), gdcKernelId(-1) { CLEAR(csiReso); CLEAR(gdcReso); } @@ -199,30 +200,33 @@ struct PrivPortFormat { } // namespace IGraphType class IGraphConfig { -public: + public: virtual ~IGraphConfig() = default; - virtual void 
getCSIOutputResolution(camera_resolution_t &reso) = 0; - virtual status_t getGdcKernelSetting(uint32_t *kernelId, - ia_isp_bxt_resolution_info_t *resolution) = 0; - virtual status_t graphGetStreamIds(std::vector &streamIds) = 0; + virtual void getCSIOutputResolution(camera_resolution_t& reso) = 0; + virtual status_t getGdcKernelSetting(uint32_t* kernelId, + ia_isp_bxt_resolution_info_t* resolution) = 0; + virtual status_t graphGetStreamIds(std::vector& streamIds) = 0; virtual int getGraphId(void) = 0; virtual int getStreamIdByPgName(std::string pgName) = 0; virtual int getTuningModeByStreamId(const int32_t streamId) = 0; virtual int getPgIdByPgName(std::string pgName) = 0; // DOL_FEATURE_S - virtual int getDolInfo(float &gain, std::string &mode) = 0; + virtual int getDolInfo(float& gain, std::string& mode) = 0; // DOL_FEATURE_E - virtual ia_isp_bxt_program_group *getProgramGroup(int32_t streamId) = 0; - virtual status_t getMBRData(int32_t streamId, ia_isp_bxt_gdc_limits *data) = 0; - virtual status_t getPgRbmValue(std::string pgName, - IGraphType::StageAttr *stageAttr) {return OK;} - virtual status_t getPgIdForKernel(const uint32_t streamIds, - const int32_t kernelId, int32_t *pgId) {return OK;} + virtual ia_isp_bxt_program_group* getProgramGroup(int32_t streamId) = 0; + virtual status_t getMBRData(int32_t streamId, ia_isp_bxt_gdc_limits* data) = 0; + virtual status_t getPgRbmValue(std::string pgName, IGraphType::StageAttr* stageAttr) { + return OK; + } + virtual status_t getPgIdForKernel(const uint32_t streamIds, const int32_t kernelId, + int32_t* pgId) { + return OK; + } virtual status_t getPgNames(std::vector* pgNames) = 0; virtual status_t pipelineGetConnections( - const std::vector &pgList, - std::vector *confVector, - std::vector *tnrPortFormat = nullptr) = 0; + const std::vector& pgList, + std::vector* confVector, + std::vector* tnrPortFormat = nullptr) = 0; }; -} +} // namespace icamera diff --git a/src/platformdata/gc/IGraphConfigManager.h 
b/src/platformdata/gc/IGraphConfigManager.h index e0492be2..05512890 100644 --- a/src/platformdata/gc/IGraphConfigManager.h +++ b/src/platformdata/gc/IGraphConfigManager.h @@ -25,10 +25,10 @@ namespace icamera { class IGraphConfigManager { -public: + public: virtual ~IGraphConfigManager() = default; - virtual int queryGraphSettings(const stream_config_t *streamList) = 0; + virtual int queryGraphSettings(const stream_config_t* streamList) = 0; virtual int configStreams(const stream_config_t* streams) = 0; virtual int getSelectedMcId() = 0; virtual std::shared_ptr getGraphConfig(ConfigMode configMode) = 0; @@ -36,9 +36,9 @@ class IGraphConfigManager { static void releaseInstance(int cameraId); static IGraphConfigManager* getInstance(int cameraId); -private: + private: // Guard for singleton instance creation. static Mutex sLock; static std::map sInstances; }; -} +} // namespace icamera diff --git a/src/platformdata/gc/custom_gcss_keys.h b/src/platformdata/gc/custom_gcss_keys.h index 624d750c..deaa2377 100644 --- a/src/platformdata/gc/custom_gcss_keys.h +++ b/src/platformdata/gc/custom_gcss_keys.h @@ -13,38 +13,38 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -GCSS_KEY (BAYER_ORDER, bayer_order) -GCSS_KEY (GAIN, analogue_gain) -GCSS_KEY (CSI_BE, csi_be) -GCSS_KEY (CSI_BE_SOC, csi_be_soc) -GCSS_KEY (CSI_PORT, csi_port) -GCSS_KEY (EXPOSURE, exposure) -GCSS_KEY (FLL, min_fll) -GCSS_KEY (INTERLACED, interlaced) -GCSS_KEY (HFLIP, hflip) -GCSS_KEY (LINK_FREQ, link_freq) -GCSS_KEY (LLP, min_llp) -GCSS_KEY (SENSOR_MODE, sensor_mode) -GCSS_KEY (PIXEL_ARRAY, pixel_array) -GCSS_KEY (BINNER, binner) -GCSS_KEY (SCALER, scaler) -GCSS_KEY (BINNING_H_FACTOR, h_factor) -GCSS_KEY (BINNING_V_FACTOR, v_factor) -GCSS_KEY (SCALING_FACTOR_NUM, num_factor) -GCSS_KEY (SCALING_FACTOR_DENOM, denom_factor) -GCSS_KEY (PORT_0, port_0) -GCSS_KEY (SCALER_PAD, scaler_pad) -GCSS_KEY (TARGET, target) -GCSS_KEY (SENSOR_NAME, sensor_name) -GCSS_KEY (TPG, tpg) -GCSS_KEY (STILL_POST_GDC, still_post_gdc) -GCSS_KEY (VFLIP, vflip) -GCSS_KEY (VIDEO0, video0) -GCSS_KEY (VIDEO1, video1) -GCSS_KEY (VIDEO2, video2) -GCSS_KEY (STILL0, still0) -GCSS_KEY (STILL1, still1) -GCSS_KEY (STILL2, still2) -GCSS_KEY (RAW, raw) -GCSS_KEY (OP_MODE, op_mode) -GCSS_KEY (MC_ID, mc_id) +GCSS_KEY(BAYER_ORDER, bayer_order) +GCSS_KEY(GAIN, analogue_gain) +GCSS_KEY(CSI_BE, csi_be) +GCSS_KEY(CSI_BE_SOC, csi_be_soc) +GCSS_KEY(CSI_PORT, csi_port) +GCSS_KEY(EXPOSURE, exposure) +GCSS_KEY(FLL, min_fll) +GCSS_KEY(INTERLACED, interlaced) +GCSS_KEY(HFLIP, hflip) +GCSS_KEY(LINK_FREQ, link_freq) +GCSS_KEY(LLP, min_llp) +GCSS_KEY(SENSOR_MODE, sensor_mode) +GCSS_KEY(PIXEL_ARRAY, pixel_array) +GCSS_KEY(BINNER, binner) +GCSS_KEY(SCALER, scaler) +GCSS_KEY(BINNING_H_FACTOR, h_factor) +GCSS_KEY(BINNING_V_FACTOR, v_factor) +GCSS_KEY(SCALING_FACTOR_NUM, num_factor) +GCSS_KEY(SCALING_FACTOR_DENOM, denom_factor) +GCSS_KEY(PORT_0, port_0) +GCSS_KEY(SCALER_PAD, scaler_pad) +GCSS_KEY(TARGET, target) +GCSS_KEY(SENSOR_NAME, sensor_name) +GCSS_KEY(TPG, tpg) +GCSS_KEY(STILL_POST_GDC, still_post_gdc) +GCSS_KEY(VFLIP, vflip) +GCSS_KEY(VIDEO0, video0) +GCSS_KEY(VIDEO1, video1) +GCSS_KEY(VIDEO2, video2) 
+GCSS_KEY(STILL0, still0) +GCSS_KEY(STILL1, still1) +GCSS_KEY(STILL2, still2) +GCSS_KEY(RAW, raw) +GCSS_KEY(OP_MODE, op_mode) +GCSS_KEY(MC_ID, mc_id) diff --git a/src/scheduler/Android.mk b/src/scheduler/Android.mk new file mode 100644 index 00000000..20193b8f --- /dev/null +++ b/src/scheduler/Android.mk @@ -0,0 +1,19 @@ +# +# Copyright (C) 2022 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_SRC_FILES += \ + src/scheduler/CameraScheduler.cpp \ + src/scheduler/CameraSchedulerPolicy.cpp diff --git a/src/scheduler/CMakeLists.txt b/src/scheduler/CMakeLists.txt new file mode 100644 index 00000000..f29d0a0a --- /dev/null +++ b/src/scheduler/CMakeLists.txt @@ -0,0 +1,20 @@ +# +# Copyright (C) 2022 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +set(SCHEDULER_SRCS + ${SCHEDULER_DIR}/CameraScheduler.cpp + ${SCHEDULER_DIR}/CameraSchedulerPolicy.cpp + CACHE INTERNAL "scheduler sources") diff --git a/src/scheduler/CameraScheduler.cpp b/src/scheduler/CameraScheduler.cpp new file mode 100644 index 00000000..7166d199 --- /dev/null +++ b/src/scheduler/CameraScheduler.cpp @@ -0,0 +1,189 @@ +/* + * Copyright (C) 2022 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG Scheduler + +#include "src/scheduler/CameraScheduler.h" + +#include +#include +#include + +#include "iutils/CameraLog.h" +#include "iutils/Errors.h" + +namespace icamera { + +CameraScheduler::CameraScheduler() : mTriggerCount(0) { + mPolicy = CameraSchedulerPolicy::getInstance(); +} + +CameraScheduler::~CameraScheduler() { + destoryExecutors(); +} + +int32_t CameraScheduler::configurate(int32_t graphId) { + int ret = mPolicy->setConfig(graphId); + CheckAndLogError(ret != OK, ret, "configurate %d error", graphId); + + mTriggerCount = 0; + destoryExecutors(); + return createExecutors(); +} + +int32_t CameraScheduler::createExecutors() { + std::map executors; + int32_t exeNumber = mPolicy->getExecutors(&executors); + CheckAndLogError(exeNumber <= 0, UNKNOWN_ERROR, "Can't get Executors' names"); + + std::lock_guard l(mLock); + for (auto& exe : executors) { + ExecutorGroup group; + group.executor = std::shared_ptr(new Executor(exe.first)); + group.triggerSource = exe.second; + if 
(!group.triggerSource.empty()) { + // Check if trigger source is one executor + std::shared_ptr source = findExecutor(group.triggerSource.c_str()); + if (source) source->addListener(group.executor); + } + mPolicy->getNodeList(exe.first, &group.nodeList); + + mExeGroups.push_back(group); + group.executor->run(exe.first, PRIORITY_NORMAL); + } + return OK; +} + +void CameraScheduler::destoryExecutors() { + std::lock_guard l(mLock); + mRegisteredNodes.clear(); + mExeGroups.clear(); +} + +int32_t CameraScheduler::registerNode(ISchedulerNode* node) { + std::lock_guard l(mLock); + + ExecutorGroup* group = nullptr; + for (size_t i = 0; i < mExeGroups.size(); i++) { + for (auto& nodeName : mExeGroups[i].nodeList) { + if (strcmp(nodeName.c_str(), node->getName()) == 0) { + group = &mExeGroups[i]; + break; + } + } + } + CheckWarning(!group, BAD_VALUE, "register node %s fail", node->getName()); + + group->executor->addNode(node); + mRegisteredNodes[node] = group; + return OK; +} + +void CameraScheduler::unregisterNode(ISchedulerNode* node) { + std::lock_guard l(mLock); + if (mRegisteredNodes.find(node) != mRegisteredNodes.end()) { + mRegisteredNodes[node]->executor->removeNode(node); + mRegisteredNodes.erase(node); + } +} + +int32_t CameraScheduler::executeNode(std::string triggerSource, int64_t triggerId) { + mTriggerCount++; + for (auto& group : mExeGroups) { + if (group.triggerSource == triggerSource) + group.executor->trigger(triggerId < 0 ? mTriggerCount : triggerId); + } + return OK; +} + +std::shared_ptr CameraScheduler::findExecutor(const char* exeName) { + if (!exeName) return nullptr; + + for (auto& group : mExeGroups) { + if (strcmp(group.executor->getName(), exeName) == 0) return group.executor; + } + + return nullptr; +} + +CameraScheduler::Executor::Executor(const char* name) + : mName(name ? 
name : "unknown"), + mActive(false), + mTriggerTick(0) {} + +CameraScheduler::Executor::~Executor() { + LOG1("%s: destory", getName()); + requestExit(); +} + +void CameraScheduler::Executor::addNode(ISchedulerNode* node) { + std::lock_guard l(mNodeLock); + mNodes.push_back(node); + LOG1("%s: %s added to %s, pos %d", __func__, node->getName(), getName(), mNodes.size()); +} + +void CameraScheduler::Executor::removeNode(ISchedulerNode* node) { + std::lock_guard l(mNodeLock); + for (size_t i = 0; i < mNodes.size(); i++) { + if (mNodes[i] == node) { + LOG1("%s: %s moved from %s", __func__, node->getName(), getName()); + mNodes.erase(mNodes.begin() + i); + break; + } + } +} + +void CameraScheduler::Executor::trigger(int64_t tick) { + PERF_CAMERA_ATRACE_PARAM1(getName(), tick); + std::lock_guard l(mNodeLock); + mActive = true; + mTriggerTick = tick; + mTriggerSignal.signal(); +} + +void CameraScheduler::Executor::requestExit() { + LOG2("%s: requestExit", getName()); + mActive = false; + icamera::Thread::requestExit(); + std::lock_guard l(mNodeLock); + mTriggerSignal.signal(); +} + +bool CameraScheduler::Executor::threadLoop() { + int64_t tick = -1; + { + ConditionLock lock(mNodeLock); + int ret = mTriggerSignal.waitRelative(lock, kWaitDuration * SLOWLY_MULTIPLIER); + CheckWarning(ret == TIMED_OUT && !mNodes.empty(), true, "%s: wait trigger time out", + getName()); + tick = mTriggerTick; + } + if (!mActive) return false; + + for (auto& node : mNodes) { + LOG2("%s process %d", getName(), tick); + bool ret = node->process(tick); + CheckAndLogError(!ret, true, "%s: node %s process error", getName(), node->getName()); + } + + for (auto& listener : mListeners) { + LOG2("%s: trigger listener %s", getName(), listener->getName()); + listener->trigger(tick); + } + return true; +} + +} // namespace icamera diff --git a/src/scheduler/CameraScheduler.h b/src/scheduler/CameraScheduler.h new file mode 100644 index 00000000..43519f96 --- /dev/null +++ b/src/scheduler/CameraScheduler.h @@ 
-0,0 +1,119 @@ +/* + * Copyright (C) 2022 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include "CameraEvent.h" +#include "CameraSchedulerPolicy.h" +#include "ISchedulerNode.h" + +namespace icamera { + +/** + * \class CameraScheduler + * + * The call sequence is as follows: + * 1. configure(); + * 2. registerNode(); + * 3. loop: executeNode(); + * 4. unregisterNode(); (optional) + */ +class CameraScheduler { + public: + CameraScheduler(); + ~CameraScheduler(); + + int32_t configurate(int32_t graphId); + + int32_t registerNode(ISchedulerNode* node); + void unregisterNode(ISchedulerNode* node); + + /** + * triggerSource: + * empty string: no designated source, will trigger executors WITHOUT trigger sources + * in configuration file. + * triggerId: + * >= 0: will be passed to ISchedulerNode for processing sync. + * others: Provide internal trigger count.
+ */ + int32_t executeNode(std::string triggerSource, int64_t triggerId = -1); + + private: + class Executor : public icamera::Thread { + public: + explicit Executor(const char* name); + ~Executor(); + + virtual bool threadLoop(); + virtual void requestExit(); + + void addNode(ISchedulerNode*); + void removeNode(ISchedulerNode* node); + void addListener(std::shared_ptr executor) { mListeners.push_back(executor); } + void trigger(int64_t tick); + + const char* getName() { return mName.c_str(); } + + private: + static const nsecs_t kWaitDuration = 2000000000; // 2s + + std::string mName; + + std::mutex mNodeLock; + std::vector mNodes; + std::vector> mListeners; + Condition mTriggerSignal; + bool mActive; + int64_t mTriggerTick; + + private: + DISALLOW_COPY_AND_ASSIGN(Executor); + }; + + private: + int32_t createExecutors(); + void destoryExecutors(); + + std::shared_ptr findExecutor(const char* exeName); + + private: + struct ExecutorGroup { + std::shared_ptr executor; + std::string triggerSource; // empty string means no designated source + std::vector nodeList; + }; + + std::mutex mLock; + std::vector mExeGroups; + // Record owner exe of nodes (after policy switch) + std::unordered_map mRegisteredNodes; + + int64_t mTriggerCount; + + private: + CameraSchedulerPolicy* mPolicy; + + private: + DISALLOW_COPY_AND_ASSIGN(CameraScheduler); +}; + +} // namespace icamera diff --git a/src/scheduler/CameraSchedulerPolicy.cpp b/src/scheduler/CameraSchedulerPolicy.cpp new file mode 100644 index 00000000..a64974f3 --- /dev/null +++ b/src/scheduler/CameraSchedulerPolicy.cpp @@ -0,0 +1,191 @@ +/* + * Copyright (C) 2022 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG SchedPolicy
+
+#include "src/scheduler/CameraSchedulerPolicy.h"
+
+// NOTE(review): the <...> include targets were stripped by a tag-removal
+// pass; reconstructed from the symbols used below (strcmp, atoi, and the
+// expat-style start/end element callbacks) - TODO confirm against upstream.
+#include <expat.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "iutils/CameraLog.h"
+#include "iutils/Errors.h"
+
+namespace icamera {
+
+#define SCHEDULER_POLICY_FILE_NAME "pipe_scheduler_profiles.xml"
+
+CameraSchedulerPolicy* CameraSchedulerPolicy::sInstance = nullptr;
+Mutex CameraSchedulerPolicy::sLock;
+
+// Lazily creates the process-wide singleton; guarded by sLock.
+CameraSchedulerPolicy* CameraSchedulerPolicy::getInstance() {
+    AutoMutex lock(sLock);
+    if (!sInstance) {
+        sInstance = new CameraSchedulerPolicy();
+    }
+    return sInstance;
+}
+
+void CameraSchedulerPolicy::releaseInstance() {
+    AutoMutex lock(sLock);
+    if (sInstance) {
+        delete sInstance;
+        sInstance = nullptr;
+    }
+}
+
+CameraSchedulerPolicy::CameraSchedulerPolicy()
+        : mCurrentDataField(FIELD_INVALID),
+          mCurrentConfig(0),
+          mActiveConfig(nullptr) {
+    LOG1("%s", __func__);
+    getDataFromXmlFile(SCHEDULER_POLICY_FILE_NAME);
+    // Default to the first parsed config until setConfig() selects one.
+    if (!mPolicyConfigs.empty()) mActiveConfig = &mPolicyConfigs.front();
+}
+
+CameraSchedulerPolicy::~CameraSchedulerPolicy() {
+    LOG1("%s", __func__);
+}
+
+// Selects the policy configuration that matches graphId.
+int32_t CameraSchedulerPolicy::setConfig(uint32_t graphId) {
+    for (auto& iter : mPolicyConfigs) {
+        if (iter.graphId == graphId) {
+            mActiveConfig = &iter;
+            LOG1("%s: config id %u, graphId %u", __func__, iter.configId, graphId);
+            return OK;
+        }
+    }
+
+    LOGE("%s: no config for graphId %u", __func__, graphId);
+    return BAD_VALUE;
+}
+
+// Fills <executor name, trigger name> pairs; returns the executor count.
+// Template arguments below reconstructed from the c_str() usage.
+int32_t CameraSchedulerPolicy::getExecutors(std::map<const char*, const char*>* executors) const {
+    CheckAndLogError(!executors, 0, "%s: nullptr", __func__);
+    CheckAndLogError(!mActiveConfig, 0, "%s: No config", __func__);
+
+    for (auto& iter : mActiveConfig->exeList) {
+        (*executors)[iter.exeName.c_str()] = iter.triggerName.c_str();
+    }
+    return mActiveConfig->exeList.size();
+}
+
+int32_t CameraSchedulerPolicy::getNodeList(const char* exeName,
+                                           std::vector<std::string>* nodeList) const {
+    CheckAndLogError(!nodeList, BAD_VALUE, "nullptr input");
+    CheckAndLogError(!mActiveConfig, BAD_VALUE, "No config");
+
+    for (auto& exe : mActiveConfig->exeList) {
+        if (strcmp(exe.exeName.c_str(), exeName) == 0) {
+            *nodeList = exe.nodeList;
+            return OK;
+        }
+    }
+    return BAD_VALUE;
+}
+
+void CameraSchedulerPolicy::checkField(CameraSchedulerPolicy* profiles, const char* name,
+                                       const char** atts) {
+    LOG1("@%s, name:%s", __func__, name);
+    if (strcmp(name, "PipeSchedulerPolicy") == 0) {
+        profiles->mCurrentDataField = FIELD_INVALID;
+        return;
+    } else if (strcmp(name, "scheduler") == 0 &&
+               profiles->mCurrentConfig == profiles->mPolicyConfigs.size()) {
+        // Start the new one only when the last one is done
+        PolicyConfigDesc desc;
+        profiles->mPolicyConfigs.push_back(desc);
+
+        int idx = 0;
+        while (atts[idx]) {
+            const char* key = atts[idx];
+            const char* val = atts[idx + 1];
+            LOG2("@%s, name:%s, atts[%d]:%s, atts[%d]:%s", __func__, name, idx, key, idx + 1, val);
+            if (strcmp(key, "id") == 0) {
+                profiles->mPolicyConfigs[profiles->mCurrentConfig].configId = atoi(val);
+            } else if (strcmp(key, "graphId") == 0) {
+                profiles->mPolicyConfigs[profiles->mCurrentConfig].graphId = atoi(val);
+            }
+            idx += 2;
+        }
+        profiles->mCurrentDataField = FIELD_SCHED;
+        return;
+    }
+
+    LOGE("@%s, name:%s, atts[0]:%s, xml format wrong", __func__, name, atts[0]);
+    return;
+}
+
+// Parses one <pipe_executor> element into an ExecutorDesc.
+void CameraSchedulerPolicy::handleExecutor(CameraSchedulerPolicy* profiles, const char* name,
+                                           const char** atts) {
+    int idx = 0;
+    ExecutorDesc desc;
+
+    while (atts[idx]) {
+        const char* key = atts[idx];
+        LOG2("%s: name: %s, value: %s", __func__, atts[idx], atts[idx + 1]);
+        if (strcmp(key, "name") == 0) {
+            desc.exeName = atts[idx + 1];
+        } else if (strcmp(key, "nodes") == 0) {
+            parseXmlConvertStrings(atts[idx + 1], desc.nodeList, convertCharToString);
+        } else if (strcmp(key, "trigger") == 0) {
+            desc.triggerName = atts[idx + 1];
+        } else {
+            LOGW("Invalid policy attribute: %s", key);
+        }
+        idx += 2;
+    }
+
+    LOG2("@%s, name:%s, atts[0]:%s", __func__, name, atts[0]);
+    profiles->mPolicyConfigs[profiles->mCurrentConfig].exeList.push_back(desc);
+}
+
+void CameraSchedulerPolicy::handlePolicyConfig(CameraSchedulerPolicy* profiles, const char* name,
+                                               const char** atts) {
+    LOG2("@%s, name:%s, atts[0]:%s", __func__, name, atts[0]);
+    if (strcmp(name, "pipe_executor") == 0) {
+        handleExecutor(profiles, name, atts);
+    }
+}
+
+// expat start-element callback; the cast target below was stripped by the
+// mangling and is restored from the member accesses that follow.
+void CameraSchedulerPolicy::startParseElement(void* userData, const char* name, const char** atts) {
+    CameraSchedulerPolicy* profiles = reinterpret_cast<CameraSchedulerPolicy*>(userData);
+
+    if (profiles->mCurrentDataField == FIELD_INVALID) {
+        profiles->checkField(profiles, name, atts);
+        return;
+    }
+
+    switch (profiles->mCurrentDataField) {
+        case FIELD_SCHED:
+            profiles->handlePolicyConfig(profiles, name, atts);
+            break;
+        default:
+            LOGE("@%s, line:%d, go to default handling", __func__, __LINE__);
+            break;
+    }
+}
+
+void CameraSchedulerPolicy::endParseElement(void* userData, const char* name) {
+    CameraSchedulerPolicy* profiles = reinterpret_cast<CameraSchedulerPolicy*>(userData);
+    if (strcmp(name, "scheduler") == 0) {
+        profiles->mCurrentDataField = FIELD_INVALID;
+        profiles->mCurrentConfig++;
+    }
+}
+
+} // namespace icamera
diff --git a/src/scheduler/CameraSchedulerPolicy.h b/src/scheduler/CameraSchedulerPolicy.h
new file mode 100644
index 00000000..a3c7d1e6
--- /dev/null
+++ b/src/scheduler/CameraSchedulerPolicy.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// NOTE(review): the <...> include targets were stripped by a tag-removal
+// pass; std::map/std::string/std::vector are used below. The fourth
+// original header could not be recovered - TODO confirm against upstream.
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "ParserBase.h"
+
+namespace icamera {
+
+// XML-driven pipe-scheduler policy: maps a graph id to a set of pipe
+// executors, each with a trigger source and a node list.
+class CameraSchedulerPolicy : public ParserBase {
+ public:
+    static CameraSchedulerPolicy* getInstance();
+    static void releaseInstance();
+
+ private:
+    // Prevent to create multiple instances
+    CameraSchedulerPolicy();
+    ~CameraSchedulerPolicy();
+
+ public:
+    int32_t setConfig(uint32_t graphId);
+    // Return <executor name, trigger name> pairs (count as return value);
+    // template arguments reconstructed to match the .cpp definition.
+    int32_t getExecutors(std::map<const char*, const char*>* executors) const;
+    int32_t getNodeList(const char* exeName, std::vector<std::string>* nodeList) const;
+
+    void startParseElement(void* userData, const char* name, const char** atts);
+    void endParseElement(void* userData, const char* name);
+
+ private:
+    struct ExecutorDesc {
+        std::string exeName;
+        std::string triggerName;
+        std::vector<std::string> nodeList;
+    };
+
+    struct PolicyConfigDesc {
+        // static data
+        uint32_t configId;
+        uint32_t graphId;
+        std::vector<ExecutorDesc> exeList;
+
+        PolicyConfigDesc() {
+            configId = 0;
+            graphId = 0;
+        }
+    };
+
+ private:
+    void checkField(CameraSchedulerPolicy* profiles, const char* name, const char** atts);
+    void handlePolicyConfig(CameraSchedulerPolicy* profiles, const char* name, const char** atts);
+    void handleExecutor(CameraSchedulerPolicy* profiles, const char* name, const char** atts);
+
+ private:
+    enum DataField {
+        FIELD_INVALID = 0,
+        FIELD_SCHED,
+    } mCurrentDataField;
+    uint32_t mCurrentConfig;
+
+ private:
+    static CameraSchedulerPolicy* sInstance;
+    static Mutex sLock;
+
+    std::vector<PolicyConfigDesc> mPolicyConfigs;
+    PolicyConfigDesc* mActiveConfig;
+
+ private:
+    DISALLOW_COPY_AND_ASSIGN(CameraSchedulerPolicy);
+};
+
+} // namespace icamera
diff --git a/src/scheduler/ISchedulerNode.h b/src/scheduler/ISchedulerNode.h
new file mode 100644
index 00000000..6115dd75
--- /dev/null
+++ b/src/scheduler/ISchedulerNode.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2022 Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+
+namespace icamera {
+
+/**
+ * \Interface ISchedulerNode
+ */
+class ISchedulerNode {
+ public:
+    explicit ISchedulerNode(const char* name) : mName(name ?
name : "unknown") {} + virtual ~ISchedulerNode() {} + + virtual bool process(int64_t triggerId) = 0; + + const char* getName() const { return mName.c_str(); } + + private: + std::string mName; +}; + +} // namespace icamera diff --git a/src/v4l2/MediaControl.cpp b/src/v4l2/MediaControl.cpp index 011bfd38..55f960d5 100644 --- a/src/v4l2/MediaControl.cpp +++ b/src/v4l2/MediaControl.cpp @@ -381,8 +381,8 @@ int MediaControl::enumInfo() { media_device_info info; int ret = sc->ioctl(fd, MEDIA_IOC_DEVICE_INFO, &info); if (ret < 0) { - LOGE("Unable to retrieve media device information for device %s (%s)", - mDevName.c_str(), strerror(errno)); + LOGE("Unable to retrieve media device information for device %s (%s)", mDevName.c_str(), + strerror(errno)); goto done; } @@ -652,7 +652,10 @@ const char* MediaControl::entitySubtype2String(unsigned type) { "Unknown", "V4L", "FB", "ALSA", "DVB", }; static const char* subdevTypes[] = { - "Unknown", "Sensor", "Flash", "Lens", + "Unknown", + "Sensor", + "Flash", + "Lens", }; uint32_t subtype = type & MEDIA_ENT_SUBTYPE_MASK; @@ -843,8 +846,8 @@ int MediaControl::mediaCtlSetup(int cameraId, MediaCtlConf* mc, int width, int h /* Set routing */ for (auto& route : mc->routes) { LOG1(" route entity:%s, sinkPad:%d, srcPad:%d, sinkStream:%d, srcStream:%d, flag:%d", - cameraId, route.entityName.c_str(), route.sinkPad, route.srcPad, - route.sinkStream, route.srcStream, route.flag); + cameraId, route.entityName.c_str(), route.sinkPad, route.srcPad, route.sinkStream, + route.srcStream, route.flag); string subDeviceNodeName; CameraUtils::getSubDeviceName(route.entityName.c_str(), subDeviceNodeName); @@ -995,7 +998,8 @@ void MediaControl::dumpTopologyDot() { // to make KW happy. 
if (devname) printf("\tn%08x [label=\"%s\\n%s\", shape=box, style=filled, " - "fillcolor=yellow]\n", info->id, info->name, devname); + "fillcolor=yellow]\n", + info->id, info->name, devname); break; case MEDIA_ENT_T_V4L2_SUBDEV: diff --git a/src/v4l2/NodeInfo.cpp b/src/v4l2/NodeInfo.cpp index f52c1b0a..0fe35eb0 100644 --- a/src/v4l2/NodeInfo.cpp +++ b/src/v4l2/NodeInfo.cpp @@ -17,23 +17,22 @@ #include "src/v4l2/NodeInfo.h" const VideoNodeInfo gVideoNodeInfos[] = { - { VIDEO_GENERIC, "VIDEO_GENERIC", "Generic" }, - { VIDEO_GENERIC_MEDIUM_EXPO, "VIDEO_GENERIC_MEDIUM_EXPO", "GenericMediumExpo" }, - { VIDEO_GENERIC_SHORT_EXPO, "VIDEO_GENERIC_SHORT_EXPO", "GenericShortExpo" }, + {VIDEO_GENERIC, "VIDEO_GENERIC", "Generic"}, + {VIDEO_GENERIC_MEDIUM_EXPO, "VIDEO_GENERIC_MEDIUM_EXPO", "GenericMediumExpo"}, + {VIDEO_GENERIC_SHORT_EXPO, "VIDEO_GENERIC_SHORT_EXPO", "GenericShortExpo"}, // CSI_META_S - { VIDEO_CSI_META, "VIDEO_CSI_META", "CsiMeta" }, + {VIDEO_CSI_META, "VIDEO_CSI_META", "CsiMeta"}, // CSI_META_E - { VIDEO_PIXEL_ARRAY, "VIDEO_PIXEL_ARRAY", "PixelArray" }, - { VIDEO_PIXEL_BINNER, "VIDEO_PIXEL_BINNER", "PixelBinner" }, - { VIDEO_PIXEL_SCALER, "VIDEO_PIXEL_SCALER", "PixelScaler" }, + {VIDEO_PIXEL_ARRAY, "VIDEO_PIXEL_ARRAY", "PixelArray"}, + {VIDEO_PIXEL_BINNER, "VIDEO_PIXEL_BINNER", "PixelBinner"}, + {VIDEO_PIXEL_SCALER, "VIDEO_PIXEL_SCALER", "PixelScaler"}, - { VIDEO_ISYS_RECEIVER, "VIDEO_ISYS_RECEIVER", "ISysReceiver" }, - { VIDEO_ISYS_RECEIVER_BACKEND, "VIDEO_ISYS_RECEIVER_BACKEND", "CsiBE"}, + {VIDEO_ISYS_RECEIVER, "VIDEO_ISYS_RECEIVER", "ISysReceiver"}, + {VIDEO_ISYS_RECEIVER_BACKEND, "VIDEO_ISYS_RECEIVER_BACKEND", "CsiBE"}, }; -const char* GetNodeName(VideoNodeType nodeType) -{ +const char* GetNodeName(VideoNodeType nodeType) { int size = ARRAY_SIZE(gVideoNodeInfos); for (int i = 0; i < size; i++) { if (gVideoNodeInfos[i].type == nodeType) { @@ -43,8 +42,7 @@ const char* GetNodeName(VideoNodeType nodeType) return "InvalidNode"; } -VideoNodeType 
GetNodeType(const char* nodeName) -{ +VideoNodeType GetNodeType(const char* nodeName) { int size = ARRAY_SIZE(gVideoNodeInfos); for (int i = 0; i < size; i++) { if (strcmp(gVideoNodeInfos[i].fullName, nodeName) == 0) {