diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6678f52c..9a5e85f6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -32,6 +32,7 @@ set(IIO_DIR ${SRC_ROOT_DIR}/iio)
set(ISP_CTRL_DIR ${SRC_ROOT_DIR}/isp_control)
# ISP_CONTROL_E
set(IUTILS_DIR ${SRC_ROOT_DIR}/iutils)
+set(SCHEDULER_DIR ${SRC_ROOT_DIR}/scheduler)
set(METADATA_DIR ${SRC_ROOT_DIR}/metadata)
set(PLATFORMDATA_DIR ${SRC_ROOT_DIR}/platformdata)
set(V4L2_DIR ${SRC_ROOT_DIR}/v4l2)
@@ -144,7 +145,7 @@ include_directories(include
src/v4l2 src/core src/metadata
src/platformdata src/platformdata/gc
src/3a src/3a/intel3a src/3a/external
- src/fd
+ src/fd src/scheduler
src/core/psysprocessor
src/image_process
)
@@ -216,6 +217,7 @@ set(LIBCAMHAL_SRCS
${IUTILS_SRCS}
${METADATA_SRCS}
${PLATFORMDATA_SRCS}
+ ${SCHEDULER_SRCS}
${V4L2_SRCS}
${ALGOWRAPPER_SRCS}
${IMAGE_PROCESS_SRCS}
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..53d86be8
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,59 @@
+Apache License
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work
+To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
diff --git a/config/linux/ipu6ep/sensors/lt6911uxc.xml b/config/linux/ipu6ep/sensors/lt6911uxc.xml
index 2214db94..0921aeec 100644
--- a/config/linux/ipu6ep/sensors/lt6911uxc.xml
+++ b/config/linux/ipu6ep/sensors/lt6911uxc.xml
@@ -19,10 +19,10 @@
-
+
-
-
+
+
@@ -32,10 +32,10 @@
-
+
-
-
+
+
@@ -45,10 +45,10 @@
-
+
-
-
+
+
@@ -58,10 +58,10 @@
-
+
-
-
+
+
@@ -71,10 +71,10 @@
-
+
-
-
+
+
@@ -84,11 +84,10 @@
-
+
-
-
-
+
+
@@ -116,12 +115,12 @@
-
+
-
-
+
+
-
+
@@ -129,12 +128,12 @@
-
+
-
-
+
+
-
+
@@ -142,12 +141,12 @@
-
+
-
-
+
+
-
+
@@ -155,12 +154,12 @@
-
+
-
-
+
+
-
+
@@ -168,12 +167,12 @@
-
+
-
-
+
+
-
+
diff --git a/include/api/ICamera.h b/include/api/ICamera.h
index 956a95a2..105b585b 100644
--- a/include/api/ICamera.h
+++ b/include/api/ICamera.h
@@ -50,7 +50,8 @@
*******************************************************************************
* Version 0.44 Change output parameter of get_frame_size
*******************************************************************************
- * Version 0.45 Add two parameters(number of buffers, parameters) for camera_stream_qbuf
+ * Version 0.45 Add two parameters(number of buffers, parameters)
+ for camera_stream_qbuf
Add one parameter(parameters) for camera_stream_dqbuf
*******************************************************************************
* Version 0.46 Add virtual channel camera number for camera_device_open
@@ -75,7 +76,7 @@
#include "Parameters.h"
-#include // For including definition of NULL
+#include // For including definition of NULL
extern "C" {
namespace icamera {
@@ -85,9 +86,10 @@ namespace icamera {
* \struct vc_info_t: Define the virtual channel information for the device
*/
typedef struct {
- int total_num; /**< the total camera number of virtual channel. 0: the virtual channel is disabled */
- int sequence; /**< the current camera's sequence in all the virtual channel cameras */
- int group; /**< the virtual channel group id */
+ int total_num; /**< the total camera number of virtual channel. 0: the virtual channel is
+ disabled */
+ int sequence; /**< the current camera's sequence in all the virtual channel cameras */
+ int group; /**< the virtual channel group id */
} vc_info_t;
// VIRTUAL_CHANNEL_E
@@ -98,16 +100,16 @@ typedef struct {
int facing;
int orientation;
int device_version;
- const char* name; /**< Sensor name */
- const char* description; /**< Sensor description */
- const Parameters *capability; /**< camera capability */
+ const char* name; /**< Sensor name */
+ const char* description; /**< Sensor description */
+ const Parameters* capability; /**< camera capability */
// VIRTUAL_CHANNEL_S
- vc_info_t vc; /**< Virtual Channel information */
+ vc_info_t vc; /**< Virtual Channel information */
/** The following three field are replaced by the vc_info_t, please don't use them */
int vc_total_num; /**deprecated */
- int vc_sequence; /**deprecated */
- int vc_group; /** deprecated */
+ int vc_sequence; /**deprecated */
+ int vc_group; /** deprecated */
// VIRTUAL_CHANNEL_E
} camera_info_t;
@@ -204,7 +206,7 @@ int camera_hal_deinit();
* camera_callback_ops_t *callback: callback handle
*
**/
-void camera_callback_register(int camera_id, const camera_callback_ops_t *callback);
+void camera_callback_register(int camera_id, const camera_callback_ops_t* callback);
/**
* \brief
@@ -282,7 +284,7 @@ void camera_device_close(int camera_id);
* ret = camera_device_config_sensor_input(camera_id, &input_config);
* \endcode
**/
-int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig);
+int camera_device_config_sensor_input(int camera_id, const stream_t* inputConfig);
/**
* \brief
@@ -295,7 +297,8 @@ int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig
* \param[in]
* int camera_id: ID of the camera
* \param[in]
- * stream_config_t stream_list: stream configuration list, if success, stream id is filled in streams[]
+ * stream_config_t stream_list: stream configuration list, if success, stream id is filled in
+ * streams[]
*
* \return
* 0 succeed to configure streams
@@ -317,7 +320,7 @@ int camera_device_config_sensor_input(int camera_id, const stream_t *inputConfig
* ret = camera_device_config_streams(camera_id, &stream_list);
* \endcode
**/
-int camera_device_config_streams(int camera_id, stream_config_t *stream_list);
+int camera_device_config_streams(int camera_id, stream_config_t* stream_list);
/**
* \brief
@@ -396,7 +399,7 @@ int camera_device_stop(int camera_id);
* \endcode
*
*/
-int camera_device_allocate_memory(int camera_id, camera_buffer_t *buffer);
+int camera_device_allocate_memory(int camera_id, camera_buffer_t* buffer);
/**
* \brief
@@ -425,8 +428,8 @@ int camera_device_allocate_memory(int camera_id, camera_buffer_t *buffer);
*
* \see camera_stream_qbuf();
**/
-int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer,
- int num_buffers = 1, const Parameters* settings = NULL);
+int camera_stream_qbuf(int camera_id, camera_buffer_t** buffer, int num_buffers = 1,
+ const Parameters* settings = NULL);
/**
* \brief
@@ -454,13 +457,15 @@ int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer,
* \code
* const int buffer_count = 8;
* int bpp = 0;
- * int buffer_size = get_frame_size(camera_id, V4L2_PIX_FMT_SGRBG8, 1920, 1080, V4L2_FIELD_ANY, &bpp);
+ * int buffer_size = get_frame_size(camera_id, V4L2_PIX_FMT_SGRBG8, 1920, 1080, V4L2_FIELD_ANY,
+ * &bpp);
* camera_buffer_t buffers[buffer_count];
* camera_buffer_t *buf = nullptr;
* for (int i = 0; i < buffer_count; i++) {
* buf = &buffers[i];
* posix_memalign(&buf->addr, getpagesize(), buffer_size);
- * buf->s = stream; // stream here comes from parameter and result of camera_device_config_streams.
+ * // stream here comes from parameter and result of camera_device_config_streams.
+ * buf->s = stream;
* }
*
* for (int i = 0; i < buffer_count; i++) {
@@ -477,7 +482,7 @@ int camera_stream_qbuf(int camera_id, camera_buffer_t **buffer,
* \endcode
*
**/
-int camera_stream_dqbuf(int camera_id, int stream_id, camera_buffer_t **buffer,
+int camera_stream_dqbuf(int camera_id, int stream_id, camera_buffer_t** buffer,
Parameters* settings = NULL);
/**
@@ -574,7 +579,7 @@ int camera_get_parameters(int camera_id, Parameters& param, int64_t sequence = -
* \return
* frame size.
**/
-int get_frame_size(int camera_id, int format, int width, int height, int field, int *bpp);
+int get_frame_size(int camera_id, int format, int width, int height, int field, int* bpp);
-} // namespace icamera
-} // extern "C"
+} // namespace icamera
+} // extern "C"
diff --git a/include/api/IspControl.h b/include/api/IspControl.h
index c863d7c1..1a22a2de 100644
--- a/include/api/IspControl.h
+++ b/include/api/IspControl.h
@@ -25,8 +25,7 @@
namespace icamera {
-typedef enum
-{
+typedef enum {
camera_control_isp_ctrl_id_wb_gains = 34710,
camera_control_isp_ctrl_id_color_correction_matrix = 30009,
camera_control_isp_ctrl_id_advanced_color_correction_matrix = 51448,
@@ -45,8 +44,7 @@ typedef enum
/**
* \struct camera_control_isp_wb_gains_t
*/
-struct camera_control_isp_wb_gains_t
-{
+struct camera_control_isp_wb_gains_t {
/*!< gr Gr gain.*/
float gr;
/*!< r R gain.*/
@@ -55,26 +53,23 @@ struct camera_control_isp_wb_gains_t
float b;
/*!< gb Gb gain.*/
float gb;
-
};
/**
* \struct camera_control_isp_color_correction_matrix_t
*/
-struct camera_control_isp_color_correction_matrix_t
-{
- /*!< ccm_gains[9] Color correction matrix from sensor RGB to sRGB/target color space. Row-major order.*/
+struct camera_control_isp_color_correction_matrix_t {
+ /*!< ccm_gains[9] Color correction matrix from sensor RGB to sRGB/target color space. Row-major
+ * order.*/
float ccm_gains[9];
/*!< ccm_gains_media_format colorspace the ccm matrix was calibrated to*/
int32_t ccm_gains_media_format;
-
};
/**
* \struct camera_control_isp_advanced_color_correction_matrix_t
*/
-struct camera_control_isp_advanced_color_correction_matrix_t
-{
+struct camera_control_isp_advanced_color_correction_matrix_t {
/*!< bypass bypass*/
int32_t bypass;
/*!< number_of_sectors number of sectors (CCM matrices)*/
@@ -85,24 +80,20 @@ struct camera_control_isp_advanced_color_correction_matrix_t
float hue_of_sectors[24];
/*!< ccm_matrices_media_format colorspace the ccm matrices where calibrated to*/
int32_t ccm_matrices_media_format;
-
};
/**
* \struct camera_control_isp_bxt_csc_t
*/
-struct camera_control_isp_bxt_csc_t
-{
+struct camera_control_isp_bxt_csc_t {
/*!< rgb2yuv_coef[9] Matrix for RGB to YUV conversion*/
int32_t rgb2yuv_coef[9];
-
};
/**
* \struct camera_control_isp_bxt_demosaic_t
*/
-struct camera_control_isp_bxt_demosaic_t
-{
+struct camera_control_isp_bxt_demosaic_t {
/*!< high_frequency_denoise_enable High frequency denoise enbale flag*/
int32_t high_frequency_denoise_enable;
/*!< false_color_correction_enable False color correction enable flag*/
@@ -117,14 +108,12 @@ struct camera_control_isp_bxt_demosaic_t
int32_t high_frequency_denoise_power;
/*!< checkers_removal_w Checkers removal homogeneity weight*/
int32_t checkers_removal_w;
-
};
/**
* \struct camera_control_isp_sc_iefd_t
*/
-struct camera_control_isp_sc_iefd_t
-{
+struct camera_control_isp_sc_iefd_t {
/*!< sharpening_power[2] base power of sharpening*/
int32_t sharpening_power[2];
/*!< sharp_power_edge shapening power for direct edge*/
@@ -183,14 +172,12 @@ struct camera_control_isp_sc_iefd_t
int32_t vssnlm_y2;
/*!< vssnlm_y3 Edge denoising LUT y2*/
int32_t vssnlm_y3;
-
};
/**
* \struct camera_control_isp_see_t
*/
-struct camera_control_isp_see_t
-{
+struct camera_control_isp_see_t {
/*!< bypass bypass*/
int32_t bypass;
/*!< edge_max max edge value for clamping*/
@@ -205,47 +192,58 @@ struct camera_control_isp_see_t
int32_t alpha_width;
/*!< alpha_bias */
int32_t alpha_bias;
-
};
/**
* \struct camera_control_isp_bnlm_t
*/
-struct camera_control_isp_bnlm_t
-{
+struct camera_control_isp_bnlm_t {
/*!< nm_offset NR level for noise model adaptation*/
int32_t nm_offset;
/*!< nm_th Threshold for noise model adaptation*/
int32_t nm_th;
/*!< bypass bypass for the filter*/
int32_t bypass;
- /*!< detailix_x_range[2] detailIx noise model - noise input range - tunned automatically from data - set QNN fit range - [Q14.0] [0..16383]*/
+ /*!< detailix_x_range[2] detailIx noise model - noise input range - tunned automatically from
+ * data - set QNN fit range - [Q14.0] [0..16383]*/
int32_t detailix_x_range[2];
/*!< detailix_radgain detailIx Lens shading radial compensation power - [Q8.16], [0..256%]*/
int32_t detailix_radgain;
/*!< detailix_coeffs[3] detailIx SAD noise model - NoiseModel=SQRT(C1*mu^2+C2*mu+C3)*/
int32_t detailix_coeffs[3];
- /*!< sad_spatialrad[4] Neighbors spatial (radial) weight - filter radial bias - allowing reduction of effective filter size [Q3.5] [1:0.25:4]*/
+ /*!< sad_spatialrad[4] Neighbors spatial (radial) weight - filter radial bias - allowing
+ * reduction of effective filter size [Q3.5] [1:0.25:4]*/
int32_t sad_spatialrad[4];
- /*!< sad_mu_x_range[2] detailIx noise model - noise input range - tunned automatically from data - set QNN fit range [Q14.0] [0..16383]*/
+ /*!< sad_mu_x_range[2] detailIx noise model - noise input range - tunned automatically from data
+ * - set QNN fit range [Q14.0] [0..16383]*/
int32_t sad_mu_x_range[2];
/*!< sad_radgain SAD Lens shading radial compensation power - [Q8.16], [0..256%]*/
int32_t sad_radgain;
/*!< sad_mu_coeffs[3] SAD noise model - NoiseModel=SQRT(C1*mu^2+C2*mu+C3)*/
int32_t sad_mu_coeffs[3];
- /*!< detailth[3] detailTH - 3 THs classifying content(detail) type to {flat/weak texture/texture/edge} based on detailIx - [14.0] [0..16383]*/
+ /*!< detailth[3] detailTH - 3 THs classifying content(detail) type to {flat/weak
+ * texture/texture/edge} based on detailIx - [14.0] [0..16383]*/
int32_t detailth[3];
- /*!< sad_detailixlutx[4] Continuos LUT aligning SAD from different detailIx levels to a common scale before appling exponent scaling - texture to flat & edge discrimination, X axis is currently fixed 0:8:1023 - [14.0] [0..16383]*/
+ /*!< sad_detailixlutx[4] Continuos LUT aligning SAD from different detailIx levels to a common
+ * scale before appling exponent scaling - texture to flat & edge discrimination, X axis is
+ * currently fixed 0:8:1023 - [14.0] [0..16383]*/
int32_t sad_detailixlutx[4];
- /*!< sad_detailixluty[4] Continuos LUT aligning SAD from different detailIx levels to a common scale before appling exponent scaling - texture to flat & edge discrimination - Yaxis is continuos description of (2^10-1)/QNN_o - [14.0] [0..16383]*/
+ /*!< sad_detailixluty[4] Continuos LUT aligning SAD from different detailIx levels to a common
+ * scale before appling exponent scaling - texture to flat & edge discrimination - Yaxis is
+ * continuos description of (2^10-1)/QNN_o - [14.0] [0..16383]*/
int32_t sad_detailixluty[4];
- /*!< numcandforavg[4] max num neighbors to average for smoothing per detail type - [4.0] [1..16]*/
+ /*!< numcandforavg[4] max num neighbors to average for smoothing per detail type - [4.0]
+ * [1..16]*/
int32_t numcandforavg[4];
- /*!< blend_power[4] blend alpha(alpha0) - Increase central pixel effect to enhance detail preservation vs smoothing - [8.24], [0..256%]*/
+ /*!< blend_power[4] blend alpha(alpha0) - Increase central pixel effect to enhance detail
+ * preservation vs smoothing - [8.24], [0..256%]*/
int32_t blend_power[4];
- /*!< blend_th[4] blend alpha(alpha0) - preserve details based on texture classification of the form alpha0>TH - [0:2^10-1]*/
+ /*!< blend_th[4] blend alpha(alpha0) - preserve details based on texture classification of the
+ * form alpha0>TH - [0:2^10-1]*/
int32_t blend_th[4];
- /*!< blend_texturegain[4] blend alpha(alpha0) - define steepness of blent_th effect for preserve details - alphaRes=2^10-1; gainScale=255; outScale=0.01; curSlope = outScale*tan(pi/2*((fixGain/(gainScale-1))^2))*alphaRes/(alphaRes-curTH) - [0..256]*/
+ /*!< blend_texturegain[4] blend alpha(alpha0) - define steepness of blent_th effect for
+ * preserve details - alphaRes=2^10-1; gainScale=255; outScale=0.01; curSlope =
+ * outScale*tan(pi/2*((fixGain/(gainScale-1))^2))*alphaRes/(alphaRes-curTH) - [0..256]*/
int32_t blend_texturegain[4];
/*!< blend_radgain blend radial weigh - [8.16], [0..256%]*/
int32_t blend_radgain;
@@ -257,14 +255,12 @@ struct camera_control_isp_bnlm_t
int32_t wmaxminth;
/*!< rad_enable Radial LSC correction*/
int32_t rad_enable;
-
};
/**
* \struct camera_control_isp_tnr5_21_t
*/
-struct camera_control_isp_tnr5_21_t
-{
+struct camera_control_isp_tnr5_21_t {
/*!< bypass bypass filter*/
int32_t bypass;
/*!< nm_yy_xcu_b[64] base y noise model - y dependency*/
@@ -333,14 +329,12 @@ struct camera_control_isp_tnr5_21_t
int32_t bypass_g_mv;
/*!< bypass_NS Bypass Noise Stream*/
int32_t bypass_NS;
-
};
/**
* \struct camera_control_isp_xnr_dss_t
*/
-struct camera_control_isp_xnr_dss_t
-{
+struct camera_control_isp_xnr_dss_t {
/*!< rad_enable */
int32_t rad_enable;
/*!< bypass Bypass all XNR4*/
@@ -413,14 +407,12 @@ struct camera_control_isp_xnr_dss_t
int32_t blnd_hf_power_y;
/*!< blnd_hf_power_c New in DSS_XNR*/
int32_t blnd_hf_power_c;
-
};
/**
* \struct camera_control_isp_gamma_tone_map_t
*/
-struct camera_control_isp_gamma_tone_map_t
-{
+struct camera_control_isp_gamma_tone_map_t {
/*!< gamma[2048] Gamma table for all channels*/
float gamma[2048];
/*!< gamma_lut_size Gamma LUT size*/
@@ -429,14 +421,12 @@ struct camera_control_isp_gamma_tone_map_t
float tone_map[2048];
/*!< tone_map_lut_size Tone map LUT size*/
uint32_t tone_map_lut_size;
-
};
/**
* \struct camera_control_isp_tnr5_22_t
*/
-struct camera_control_isp_tnr5_22_t
-{
+struct camera_control_isp_tnr5_22_t {
/*!< bypass bypass filter*/
int32_t bypass;
/*!< nm_yy_xcu_b[64] base y noise model - y dependency*/
@@ -505,14 +495,12 @@ struct camera_control_isp_tnr5_22_t
int32_t bypass_g_mv;
/*!< bypass_NS Bypass Noise Stream*/
int32_t bypass_NS;
-
};
/**
* \struct camera_control_isp_tnr5_25_t
*/
-struct camera_control_isp_tnr5_25_t
-{
+struct camera_control_isp_tnr5_25_t {
/*!< bypass bypass filter*/
int32_t bypass;
/*!< nm_yy_xcu_b[64] base y noise model - y dependency*/
@@ -555,7 +543,6 @@ struct camera_control_isp_tnr5_25_t
int32_t tbd_sim_gain;
/*!< bypass_g_mv Bypass Global Motion Vector*/
int32_t bypass_g_mv;
-
};
-} // end of icamera
+} // namespace icamera
diff --git a/include/api/Parameters.h b/include/api/Parameters.h
index 29384503..876a8b78 100644
--- a/include/api/Parameters.h
+++ b/include/api/Parameters.h
@@ -76,6 +76,7 @@
#include
#include
#include
+#include
namespace icamera {
@@ -96,94 +97,97 @@ typedef struct {
* MUST use int if new member added.
*/
typedef struct {
- int format; /**< stream format refer to v4l2 definition https://linuxtv.org/downloads/v4l-dvb-apis/pixfmt.html */
- int width; /**< image width */
- int height; /**< image height */
- int field; /**< refer to v4l2 definition https://linuxtv.org/downloads/v4l-dvb-apis/field-order.html#v4l2-field */
+ int format; /**< stream format refer to v4l2 definition
+ https://linuxtv.org/downloads/v4l-dvb-apis/pixfmt.html */
+ int width; /**< image width */
+ int height; /**< image height */
+ int field; /**< refer to v4l2 definition
+ https://linuxtv.org/downloads/v4l-dvb-apis/field-order.html#v4l2-field */
-/*
-* The buffer geometry introduction.
-* The YUV image is formed with Y:Luma and UV:Chroma. And there are
-* two kinds of styles for YUV format: planar and packed.
-*
-* YUV420:NV12
-*
-* YUV420(720x480) sampling
-*
-* |<----width+padding=alignedBpl----->|
-* Y *-------*-------*-------*-------*....-----
-* | | : ^
-* | # UV # | : |
-* | | : |
-* *-------*-------*-------*-------*.... |
-* | | : |
-* | # # | : |
-* | | : |
-* *-------*-------*-------*-------*.... (height * 3 / 2)
-* | | : |
-* | # # | : |
-* | | : |
-* *-------*-------*-------*-------*.... |
-* | | : |
-* | # # | : |
-* | | : v
-* *-------*-------*-------*-------*....-----
-*
-* The data stored in memory
-* ____________w___________ .....
-* |Y0|Y1 | :
-* | | :
-* h h :
-* | | :
-* | | :
-* |________________________|....:
-* |U|V|U|V | :
-* h/2 h/2 :
-* |____________w___________|....:
-*
-* bpp = 12
-* bpl = width;
-* stride = align64(bpl):
-*
-* YUV422:YUY2
-*
-* YUV422(720x480) sampling
-*
-* |<--(width*2)+padding=alignedBpl-->|
-* YUV *#----*#-----*#-----*#-----*#....-----
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... (height)
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#.... |
-* *#----*#-----*#-----*#-----*#....-----
-*
-* The data stored in memory
-* ____________w___________ .....
-* |Y0|Cb|Y1|Cr | :
-* | | :
-* | | :
-* | | :
-* h h :
-* | | :
-* | | :
-* | | :
-* |____________w___________|....:
-*
-* bpp = 16
-* bpl = width * bpp / 8 = width * 2;
-* stride = align64(bpl):
-*
-* Note: The stride defined in HAL is same as aligned bytes per line.
-*/
- int stride; /**< stride = aligned bytes per line */
- int size; /**< real buffer size */
-
- int id; /**< Id that is filled by HAL. */
- int memType; /**< buffer memory type filled by app, refer to https://linuxtv.org/downloads/v4l-dvb-apis/io.html */
+ /*
+ * The buffer geometry introduction.
+ * The YUV image is formed with Y:Luma and UV:Chroma. And there are
+ * two kinds of styles for YUV format: planar and packed.
+ *
+ * YUV420:NV12
+ *
+ * YUV420(720x480) sampling
+ *
+ * |<----width+padding=alignedBpl----->|
+ * Y *-------*-------*-------*-------*....-----
+ * | | : ^
+ * | # UV # | : |
+ * | | : |
+ * *-------*-------*-------*-------*.... |
+ * | | : |
+ * | # # | : |
+ * | | : |
+ * *-------*-------*-------*-------*.... (height * 3 / 2)
+ * | | : |
+ * | # # | : |
+ * | | : |
+ * *-------*-------*-------*-------*.... |
+ * | | : |
+ * | # # | : |
+ * | | : v
+ * *-------*-------*-------*-------*....-----
+ *
+ * The data stored in memory
+ * ____________w___________ .....
+ * |Y0|Y1 | :
+ * | | :
+ * h h :
+ * | | :
+ * | | :
+ * |________________________|....:
+ * |U|V|U|V | :
+ * h/2 h/2 :
+ * |____________w___________|....:
+ *
+ * bpp = 12
+ * bpl = width;
+ * stride = align64(bpl):
+ *
+ * YUV422:YUY2
+ *
+ * YUV422(720x480) sampling
+ *
+ * |<--(width*2)+padding=alignedBpl-->|
+ * YUV *#----*#-----*#-----*#-----*#....-----
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... (height)
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#.... |
+ * *#----*#-----*#-----*#-----*#....-----
+ *
+ * The data stored in memory
+ * ____________w___________ .....
+ * |Y0|Cb|Y1|Cr | :
+ * | | :
+ * | | :
+ * | | :
+ * h h :
+ * | | :
+ * | | :
+ * | | :
+ * |____________w___________|....:
+ *
+ * bpp = 16
+ * bpl = width * bpp / 8 = width * 2;
+ * stride = align64(bpl):
+ *
+ * Note: The stride defined in HAL is same as aligned bytes per line.
+ */
+ int stride; /**< stride = aligned bytes per line */
+ int size; /**< real buffer size */
+
+ int id; /**< Id that is filled by HAL. */
+ int memType; /**< buffer memory type filled by app, refer to
+ https://linuxtv.org/downloads/v4l-dvb-apis/io.html */
/**
* The maximum number of buffers the HAL device may need to have dequeued at
@@ -192,7 +196,7 @@ typedef struct {
*/
uint32_t max_buffers;
- int usage; /** stream_array_t;
* Contains all streams info in this configuration.
*/
typedef struct {
- int num_streams; /**< number of streams in this configuration */
- stream_t *streams; /**< streams list */
+ int num_streams; /**< number of streams in this configuration */
+ stream_t* streams; /**< streams list */
/**
* The operation mode of the streams in this configuration. It should be one of the value
* defined in camera_stream_configuration_mode_t.
@@ -224,10 +228,10 @@ typedef struct {
* The buffer's properties can be one of them or combined with some of them.
*/
typedef enum {
- BUFFER_FLAG_DMA_EXPORT = 1<<0,
- BUFFER_FLAG_INTERNAL = 1<<1,
- BUFFER_FLAG_SW_READ = 1<<2,
- BUFFER_FLAG_SW_WRITE = 1<<3,
+ BUFFER_FLAG_DMA_EXPORT = 1 << 0,
+ BUFFER_FLAG_INTERNAL = 1 << 1,
+ BUFFER_FLAG_SW_READ = 1 << 2,
+ BUFFER_FLAG_SW_WRITE = 1 << 3,
} camera_buffer_flags_t;
/**
@@ -237,15 +241,17 @@ typedef enum {
* according to memory type to allocate memory and queue to device.
*/
typedef struct {
- stream_t s; /**< stream info */
- void *addr; /**< buffer addr for userptr and mmap memory mode */
- int index; /**< buffer index, filled by HAL. it is used for qbuf and dqbuf in order */
- int64_t sequence; /**< buffer sequence, filled by HAL, to record buffer dqueue sequence from device */
- int dmafd; /**< buffer dmafd for DMA import and export mode */
- int flags; /**< buffer flags, its type is camera_buffer_flags_t, used to specify buffer properties */
+ stream_t s; /**< stream info */
+ void* addr; /**< buffer addr for userptr and mmap memory mode */
+ int index; /**< buffer index, filled by HAL. it is used for qbuf and dqbuf in order */
+ int64_t sequence; /**< buffer sequence, filled by HAL, to record buffer dqueue sequence from
+ device */
+ int dmafd; /**< buffer dmafd for DMA import and export mode */
+ int flags; /**< buffer flags, its type is camera_buffer_flags_t, used to specify buffer
+ properties */
uint64_t timestamp; /**< buffer timestamp, it's a time reference measured in nanosecond */
uint32_t requestId; /**< buffer requestId, it's a request id of buffer */
- int reserved; /**< reserved for future */
+ int reserved; /**< reserved for future */
} camera_buffer_t;
/**
@@ -464,14 +470,14 @@ typedef enum {
* \enum camera_features: camera supported features.
*/
typedef enum {
- MANUAL_EXPOSURE, /**< Allow user to control exposure time and ISO manually */
- MANUAL_WHITE_BALANCE, /**< Allow user to control AWB mode, cct range, and gain */
- IMAGE_ENHANCEMENT, /**< Sharpness, Brightness, Contrast, Hue, Saturation */
- NOISE_REDUCTION, /**< Allow user to control NR mode and NR level */
- SCENE_MODE, /**< Allow user to control scene mode */
- WEIGHT_GRID_MODE, /**< Allow user to control custom weight grid mode */
- PER_FRAME_CONTROL, /**< Allow user to control most of parameters for each frame */
- ISP_CONTROL, /**< Allow user to control low level ISP features */
+ MANUAL_EXPOSURE, /**< Allow user to control exposure time and ISO manually */
+ MANUAL_WHITE_BALANCE, /**< Allow user to control AWB mode, cct range, and gain */
+ IMAGE_ENHANCEMENT, /**< Sharpness, Brightness, Contrast, Hue, Saturation */
+ NOISE_REDUCTION, /**< Allow user to control NR mode and NR level */
+ SCENE_MODE, /**< Allow user to control scene mode */
+ WEIGHT_GRID_MODE, /**< Allow user to control custom weight grid mode */
+ PER_FRAME_CONTROL, /**< Allow user to control most of parameters for each frame */
+ ISP_CONTROL, /**< Allow user to control low level ISP features */
INVALID_FEATURE
} camera_features;
typedef std::vector camera_features_list_t;
@@ -494,10 +500,7 @@ typedef enum {
AE_MODE_MAX /**< Invalid AE mode, any new mode should be added before this */
} camera_ae_mode_t;
-typedef enum {
- AE_STATE_NOT_CONVERGED,
- AE_STATE_CONVERGED
-} camera_ae_state_t;
+typedef enum { AE_STATE_NOT_CONVERGED, AE_STATE_CONVERGED } camera_ae_state_t;
/**
* \enum camera_antibanding_mode_t: Used to control antibanding mode.
@@ -528,7 +531,8 @@ typedef enum {
} camera_scene_mode_t;
/**
- * \struct camera_ae_exposure_time_range_t: Provide supported exposure time range info per scene mode.
+ * \struct camera_ae_exposure_time_range_t: Provide supported exposure time range info per scene
+ * mode.
*/
typedef struct {
camera_scene_mode_t scene_mode;
@@ -565,8 +569,8 @@ typedef enum {
* \enum camera_yuv_color_range_mode_t: Specify which YUV color range will be used.
*/
typedef enum {
- CAMERA_FULL_MODE_YUV_COLOR_RANGE, /*!< Full range (0 - 255) YUV data. */
- CAMERA_REDUCED_MODE_YUV_COLOR_RANGE /*!< Reduced range aka. BT.601 (16-235) YUV data range. */
+ CAMERA_FULL_MODE_YUV_COLOR_RANGE, /*!< Full range (0 - 255) YUV data. */
+ CAMERA_REDUCED_MODE_YUV_COLOR_RANGE /*!< Reduced range aka. BT.601 (16-235) YUV data range. */
} camera_yuv_color_range_mode_t;
/**
@@ -588,10 +592,7 @@ typedef enum {
AWB_MODE_MAX
} camera_awb_mode_t;
-typedef enum {
- AWB_STATE_NOT_CONVERGED,
- AWB_STATE_CONVERGED
-} camera_awb_state_t;
+typedef enum { AWB_STATE_NOT_CONVERGED, AWB_STATE_CONVERGED } camera_awb_state_t;
/**
* \enum camera_af_mode_t: Used to control af working mode.
@@ -653,11 +654,11 @@ typedef enum {
* \enum camera_af_state_t: Used to return af state.
*/
typedef enum {
- AF_STATE_IDLE, /*!< Focus is idle */
- AF_STATE_LOCAL_SEARCH, /*!< Focus is in local search state */
- AF_STATE_EXTENDED_SEARCH, /*!< Focus is in extended search state */
- AF_STATE_SUCCESS, /*!< Focus has succeeded */
- AF_STATE_FAIL /*!< Focus has failed */
+ AF_STATE_IDLE, /*!< Focus is idle */
+ AF_STATE_LOCAL_SEARCH, /*!< Focus is in local search state */
+ AF_STATE_EXTENDED_SEARCH, /*!< Focus is in extended search state */
+ AF_STATE_SUCCESS, /*!< Focus has succeeded */
+ AF_STATE_FAIL /*!< Focus has failed */
} camera_af_state_t;
/**
@@ -782,7 +783,7 @@ typedef struct {
* \struct camera_callback_ops_t
*/
typedef struct camera_callback_ops {
- void (*notify)(const camera_callback_ops* cb, const camera_msg_data_t &data);
+ void (*notify)(const camera_callback_ops* cb, const camera_msg_data_t& data);
} camera_callback_ops_t;
/**
@@ -930,12 +931,7 @@ typedef struct {
/**
* \enum camera_converge_speed_t: Used to control AE/AWB converge speed.
*/
-typedef enum {
- CONVERGE_NORMAL,
- CONVERGE_MID,
- CONVERGE_LOW,
- CONVERGE_MAX
-} camera_converge_speed_t;
+typedef enum { CONVERGE_NORMAL, CONVERGE_MID, CONVERGE_LOW, CONVERGE_MAX } camera_converge_speed_t;
/**
* \enum camera_converge_speed_mode_t: Used to control AE/AWB converge speed mode.
@@ -984,18 +980,12 @@ typedef enum {
/**
* \enum camera_ldc_mode_t: Used to toggle lens distortion correction.
*/
-typedef enum {
- LDC_MODE_OFF,
- LDC_MODE_ON
-} camera_ldc_mode_t;
+typedef enum { LDC_MODE_OFF, LDC_MODE_ON } camera_ldc_mode_t;
/**
* \enum camera_rsc_mode_t: Used to toggle rolling shutter correction.
*/
-typedef enum {
- RSC_MODE_OFF,
- RSC_MODE_ON
-} camera_rsc_mode_t;
+typedef enum { RSC_MODE_OFF, RSC_MODE_ON } camera_rsc_mode_t;
/**
* \enum camera_flip_mode_t: Used to set output slip.
@@ -1010,10 +1000,7 @@ typedef enum {
/**
* \enum camera_mono_downscale_mode_t: Used to enable/disable MONO Downscale.
*/
-typedef enum {
- MONO_DS_MODE_OFF,
- MONO_DS_MODE_ON
-} camera_mono_downscale_mode_t;
+typedef enum { MONO_DS_MODE_OFF, MONO_DS_MODE_ON } camera_mono_downscale_mode_t;
/**
* \enum camera_video_stabilization_mode_t: Used to control the video stabilization mode.
@@ -1033,8 +1020,8 @@ typedef enum {
} camera_mount_type_t;
/**
-* \enum camera_shading_mode_t: camera shading mode type
-*/
+ * \enum camera_shading_mode_t: camera shading mode type
+ */
typedef enum {
SHADING_MODE_OFF,
SHADING_MODE_FAST,
@@ -1042,8 +1029,8 @@ typedef enum {
} camera_shading_mode_t;
/**
-* \enum camera_lens_shading_map_mode_type_t: camera lens shading map mode type
-*/
+ * \enum camera_lens_shading_map_mode_type_t: camera lens shading map mode type
+ */
typedef enum {
LENS_SHADING_MAP_MODE_OFF,
LENS_SHADING_MAP_MODE_ON
@@ -1073,7 +1060,7 @@ typedef struct {
*
*/
class Parameters {
-public:
+ public:
Parameters();
Parameters(const Parameters& other);
Parameters& operator=(const Parameters& other);
@@ -1132,7 +1119,8 @@ class Parameters {
* Camera application MUST check if the feature is supported before trying to enable it.
* Otherwise the behavior is undefined currently, HAL may just ignore the request.
*
- * \param[out] camera_features_list_t& features: All supported feature will be filled in "features"
+ * \param[out] camera_features_list_t& features: All supported feature will be filled in
+ * "features"
*
* \return: If no feature supported, features will be empty
*/
@@ -1145,7 +1133,8 @@ class Parameters {
* Camera application MUST check if the feature is supported before trying to enable it.
* Otherwise the behavior is undefined currently, HAL may just ignore the request.
*
- * \param[out] vector& controls: All supported ISP control features will be filled in it.
+ * \param[out] vector& controls: All supported ISP control features will be filled in
+ * it.
*
* \return: If no ISP control supported, the controls will be empty
*/
@@ -1157,7 +1146,8 @@ class Parameters {
*
* \param[out] camera_range_t& evRange
*
- * \return 0 if ae compensation supported, non-0 or evRange equals [0, 0] means ae compensation not supported.
+ * \return 0 if ae compensation supported, non-0 or evRange equals [0, 0] means ae compensation
+ * not supported.
*/
int getAeCompensationRange(camera_range_t& evRange) const;
@@ -1190,7 +1180,8 @@ class Parameters {
*
* \return 0 if exposure time range is filled by HAL.
*/
- int getSupportedAeExposureTimeRange(std::vector& etRanges) const;
+ int getSupportedAeExposureTimeRange(
+ std::vector& etRanges) const;
/**
* \brief Get supported manual sensor gain range
@@ -1272,11 +1263,12 @@ class Parameters {
* Camera application MUST check if the video stabilization mode is supported before trying
* to enable it. Otherwise one error occurring, HAL may just ignore the request.
*
- * \param[out] supportedModes: All supported video stabilization mode will be filled in "supportedModes"
+ * \param[out] supportedModes: All supported video stabilization mode will be filled in
+ * "supportedModes"
*
* \return: If no mode supported, supportedModes will be empty
*/
- int getSupportedVideoStabilizationMode(camera_video_stabilization_list_t &supportedModes) const;
+ int getSupportedVideoStabilizationMode(camera_video_stabilization_list_t& supportedModes) const;
/**
* \brief Get supported ae mode
@@ -1288,7 +1280,7 @@ class Parameters {
*
* \return: If no ae mode supported, supportedAeModes will be empty
*/
- int getSupportedAeMode(std::vector &supportedAeModes) const;
+ int getSupportedAeMode(std::vector& supportedAeModes) const;
/**
* \brief Get supported awb mode
@@ -1300,7 +1292,7 @@ class Parameters {
*
* \return: If no awb mode supported, supportedAwbModes will be empty
*/
- int getSupportedAwbMode(std::vector &supportedAwbModes) const;
+ int getSupportedAwbMode(std::vector& supportedAwbModes) const;
/**
* \brief Get supported af mode
@@ -1312,7 +1304,7 @@ class Parameters {
*
* \return: If no af mode supported, supportedAfModes will be empty
*/
- int getSupportedAfMode(std::vector &supportedAfModes) const;
+ int getSupportedAfMode(std::vector& supportedAfModes) const;
/**
* \brief Get supported scene mode
@@ -1320,23 +1312,26 @@ class Parameters {
* Camera application MUST check if the scene mode is supported before trying to enable it.
* Otherwise one error occurring, HAL may just ignore the request.
*
- * \param[out] supportedSceneModes: All supported scene mode will be filled in "supportedSceneModes"
+ * \param[out] supportedSceneModes: All supported scene mode will be filled in
+ * "supportedSceneModes"
*
* \return: If no scene mode supported, supportedSceneModes will be empty
*/
- int getSupportedSceneMode(std::vector &supportedSceneModes) const;
+ int getSupportedSceneMode(std::vector& supportedSceneModes) const;
/**
* \brief Get supported antibanding mode
*
- * Camera application MUST check if the antibanding mode is supported before trying to enable it.
- * Otherwise one error occurring, HAL may just ignore the request.
+ * Camera application MUST check if the antibanding mode is supported before trying to enable
+ * it. Otherwise one error occurring, HAL may just ignore the request.
*
- * \param[out] supportedAntibindingModes: All supported scene mode will be filled in "supportedAntibindingModes"
+ * \param[out] supportedAntibindingModes: All supported antibanding modes will be filled in
+ * "supportedAntibindingModes"
*
* \return: If no antibanding mode supported, supportedAntibindingModes will be empty
*/
- int getSupportedAntibandingMode(std::vector &supportedAntibindingModes) const;
+ int getSupportedAntibandingMode(
+ std::vector& supportedAntibindingModes) const;
/**
* \brief Get if ae lock is available
@@ -1504,7 +1499,8 @@ class Parameters {
/**
* \brief Set AE distribution priority.
*
- * \param[in] camera_ae_distribution_priority_t priority: the AE distribution priority to be set.
+ * \param[in] camera_ae_distribution_priority_t priority: the AE distribution priority to be
+ * set.
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
@@ -1598,8 +1594,8 @@ class Parameters {
/**
* \brief Set white balance mode
*
- * White balance mode could be one of totally auto, preset cct range, customized cct range, customized
- * white area, customize gains.
+ * White balance mode could be one of totally auto, preset cct range, customized cct range,
+ * customized white area, customized gains.
*
* \param[in] camera_awb_mode_t awbMode
*
@@ -1701,7 +1697,8 @@ class Parameters {
*
* The range of each gain shift is (0, 255).
*
- * \param[in] camera_awb_gains_t awb gain shift, which specify r,g,b gains for updating awb result.
+ * \param[in] camera_awb_gains_t awb gain shift, which specify r,g,b gains for updating awb
+ * result.
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
@@ -1725,7 +1722,7 @@ class Parameters {
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
- int setAwbResult(void *data);
+ int setAwbResult(void* data);
/**
* \brief Get awb result currently used.
@@ -1736,7 +1733,7 @@ class Parameters {
*
* \return 0 if get successfully, otherwise non-0 value is returned.
*/
- int getAwbResult(void *data) const;
+ int getAwbResult(void* data) const;
/**
* \brief Set manual white point coordinate.
@@ -1762,7 +1759,8 @@ class Parameters {
/**
* \brief Set customized color transform which is a 3x3 matrix.
*
- * Manual color transform only takes effect when awb mode set to AWB_MODE_MANUAL_COLOR_TRANSFORM.
+ * Manual color transform only takes effect when awb mode set to
+ * AWB_MODE_MANUAL_COLOR_TRANSFORM.
*
* \param[in] camera_color_transform_t colorTransform: a 3x3 matrix for color convertion.
*
@@ -1782,7 +1780,8 @@ class Parameters {
/**
* \brief Set customized color correction gains which is a 4 array.
*
- * Manual color correction gains only takes effect when awb mode set to AWB_MODE_MANUAL_COLOR_TRANSFORM.
+ * Manual color correction gains only takes effect when awb mode set to
+ * AWB_MODE_MANUAL_COLOR_TRANSFORM.
*
* \param[in] camera_color_gains_t colorGains: a 4 array for color correction gains.
*
@@ -1916,7 +1915,7 @@ class Parameters {
*
* \return 0 if get successfully, otherwise non-0 value is returned.
*/
- int getYuvColorRangeMode(camera_yuv_color_range_mode_t & colorRange) const;
+ int getYuvColorRangeMode(camera_yuv_color_range_mode_t& colorRange) const;
/**
* \brief Set customized effects.
@@ -1946,7 +1945,7 @@ class Parameters {
int setIrisLevel(int level);
int getIrisLevel(int& level);
-// HDR_FEATURE_S
+ // HDR_FEATURE_S
/**
* \brief Set WDR mode
*
@@ -1964,7 +1963,7 @@ class Parameters {
* \return 0 if awb mode was set, non-0 means no awb mode was set.
*/
int getWdrMode(camera_wdr_mode_t& wdrMode) const;
-// HDR_FEATURE_E
+ // HDR_FEATURE_E
/**
* \brief Set WDR Level
@@ -2039,7 +2038,7 @@ class Parameters {
*
* \return 0 if deinterlace mode was set, non-0 means no deinterlace mode was set.
*/
- int getDeinterlaceMode(camera_deinterlace_mode_t &deinterlaceMode) const;
+ int getDeinterlaceMode(camera_deinterlace_mode_t& deinterlaceMode) const;
/**
* \brief Set Makernote Data
@@ -2097,7 +2096,7 @@ class Parameters {
*
* \return 0 if makernote mode was set, otherwise return non-0 value.
*/
- int getMakernoteMode(camera_makernote_mode_t &mode) const;
+ int getMakernoteMode(camera_makernote_mode_t& mode) const;
// ISP_CONTROL_S
/**
@@ -2187,7 +2186,7 @@ class Parameters {
*
* \return 0 if find the corresponding data, otherwise non-0 value is returned.
*/
- int getLdcMode(camera_ldc_mode_t &mode) const;
+ int getLdcMode(camera_ldc_mode_t& mode) const;
/**
* \brief Set rolling shutter correction mode
@@ -2205,7 +2204,7 @@ class Parameters {
*
* \return 0 if find the corresponding data, otherwise non-0 value is returned.
*/
- int getRscMode(camera_rsc_mode_t &mode) const;
+ int getRscMode(camera_rsc_mode_t& mode) const;
/**
* \brief flip mode
@@ -2223,7 +2222,7 @@ class Parameters {
*
* \return 0 if find the corresponding data, otherwise non-0 value is returned.
*/
- int getFlipMode(camera_flip_mode_t &mode) const;
+ int getFlipMode(camera_flip_mode_t& mode) const;
/**
* \brief set frame interval to run 3A
@@ -2241,7 +2240,7 @@ class Parameters {
*
* \return 0 if find the corresponding data, otherwise non-0 value is returned.
*/
- int getRun3ACadence(int &cadence) const;
+ int getRun3ACadence(int& cadence) const;
/**
* \brief mono downscale mode
@@ -2259,7 +2258,7 @@ class Parameters {
*
* \return 0 if find the corresponding data, otherwise non-0 value is returned.
*/
- int getMonoDsMode(camera_mono_downscale_mode_t &mode) const;
+ int getMonoDsMode(camera_mono_downscale_mode_t& mode) const;
/**
* \brief Set Fisheye Dewarping Mode
@@ -2280,42 +2279,42 @@ class Parameters {
*
* \return 0 if dewarping mode was set, non-0 means no dewarping mode was set.
*/
- int getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t &dewarpingMode) const;
+ int getFisheyeDewarpingMode(camera_fisheye_dewarping_mode_t& dewarpingMode) const;
// Belows are Jpeg related parameters operations
- int getJpegQuality(uint8_t *quality) const;
+ int getJpegQuality(uint8_t* quality) const;
int setJpegQuality(uint8_t quality);
- int getJpegThumbnailQuality(uint8_t *quality) const;
+ int getJpegThumbnailQuality(uint8_t* quality) const;
int setJpegThumbnailQuality(uint8_t quality);
int setJpegThumbnailSize(const camera_resolution_t& res);
int getJpegThumbnailSize(camera_resolution_t& res) const;
- int getJpegRotation(int &rotation) const;
- int setJpegRotation(int rotation);
+ int getJpegRotation(int& rotation) const;
+ int setJpegRotation(int rotation);
- int setJpegGpsCoordinates(const double *coordinates);
- int getJpegGpsLatitude(double &latitude) const;
- int getJpegGpsLongitude(double &longitude) const;
- int getJpegGpsAltitude(double &altiude) const;
+ int setJpegGpsCoordinates(const double* coordinates);
+ int getJpegGpsLatitude(double& latitude) const;
+ int getJpegGpsLongitude(double& longitude) const;
+ int getJpegGpsAltitude(double& altitude) const;
- int getJpegGpsTimeStamp(int64_t ×tamp) const;
- int setJpegGpsTimeStamp(int64_t timestamp);
+ int getJpegGpsTimeStamp(int64_t& timestamp) const;
+ int setJpegGpsTimeStamp(int64_t timestamp);
- int getJpegGpsProcessingMethod(int &processMethod) const;
- int setJpegGpsProcessingMethod(int processMethod);
+ int getJpegGpsProcessingMethod(int& processMethod) const;
+ int setJpegGpsProcessingMethod(int processMethod);
int getJpegGpsProcessingMethod(int size, char* processMethod) const;
int setJpegGpsProcessingMethod(const char* processMethod);
- int getImageEffect(camera_effect_mode_t &effect) const;
- int setImageEffect(camera_effect_mode_t effect);
+ int getImageEffect(camera_effect_mode_t& effect) const;
+ int setImageEffect(camera_effect_mode_t effect);
- int getVideoStabilizationMode(camera_video_stabilization_mode_t &mode) const;
+ int getVideoStabilizationMode(camera_video_stabilization_mode_t& mode) const;
int setVideoStabilizationMode(camera_video_stabilization_mode_t mode);
- int getFocalLength(float &focal) const;
+ int getFocalLength(float& focal) const;
int setFocalLength(float focal);
/**
@@ -2325,7 +2324,7 @@ class Parameters {
*
* \return 0 if aperture was set, non=0 means no aperture was set
*/
- int getAperture(float &aperture) const;
+ int getAperture(float& aperture) const;
/**
* \brief Set aperture value
*
@@ -2342,7 +2341,7 @@ class Parameters {
*
* \return 0 if distance was set, non-0 means no focus distance was set
*/
- int getFocusDistance(float &distance) const;
+ int getFocusDistance(float& distance) const;
/**
* \brief Set focus distance value
*
@@ -2448,7 +2447,7 @@ class Parameters {
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
- int getLensAperture(float &aperture) const;
+ int getLensAperture(float& aperture) const;
/**
* \brief Get lens filter density.
@@ -2457,7 +2456,7 @@ class Parameters {
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
- int getLensFilterDensity(float &filterDensity) const;
+ int getLensFilterDensity(float& filterDensity) const;
/**
* \brief Get lens min focus distance.
@@ -2466,7 +2465,7 @@ class Parameters {
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
- int getLensMinFocusDistance(float &minFocusDistance) const;
+ int getLensMinFocusDistance(float& minFocusDistance) const;
/**
* \brief Get lens hyperfocal distance.
@@ -2475,7 +2474,7 @@ class Parameters {
*
* \return 0 if set successfully, otherwise non-0 value is returned.
*/
- int getLensHyperfocalDistance(float &hyperfocalDistance) const;
+ int getLensHyperfocalDistance(float& hyperfocalDistance) const;
/**
* \brief Set af region
@@ -2543,40 +2542,40 @@ class Parameters {
int getCropRegion(camera_crop_region_t& cropRegion) const;
/**
- * \brief Set control scene mode
- *
- * \param[in] sceneModeValue the control scene mode related parameters
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set control scene mode
+ *
+ * \param[in] sceneModeValue the control scene mode related parameters
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setControlSceneMode(uint8_t sceneModeValue);
/**
- * \brief Set face detect mode
- *
- * \param[in] faceDetectMode the face detect mode related parameters
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set face detect mode
+ *
+ * \param[in] faceDetectMode the face detect mode related parameters
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setFaceDetectMode(uint8_t faceDetectMode);
/**
- * \brief Get face detect mode
- *
- * \param[out] faceDetectMode the face detect mode related parameters, 0:OFF 1:SIMPLE 2:FULL
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get face detect mode
+ *
+ * \param[out] faceDetectMode the face detect mode related parameters, 0:OFF 1:SIMPLE 2:FULL
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getFaceDetectMode(uint8_t& faceDetectMode) const;
/**
- * \brief Set face id
- *
- * \param[in] int *faceIds, int faceNum
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
- int setFaceIds(int *faceIds, int faceNum);
+ * \brief Set face id
+ *
+ * \param[in] int *faceIds, int faceNum
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
+ int setFaceIds(int* faceIds, int faceNum);
/**
* Get sensor active array size
@@ -2587,158 +2586,158 @@ class Parameters {
int getSensorActiveArraySize(camera_coordinate_system_t& arraySize) const;
/**
- * \brief Set shading mode
- *
- * \param[in] shadingMode the shading mode related parameters
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set shading mode
+ *
+ * \param[in] shadingMode the shading mode related parameters
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setShadingMode(camera_shading_mode_t shadingMode);
/**
- * \brief Get shading mode
- *
- * \param[out] shadingMode the shading mode related parameters, 0:OFF 1:FAST 2:HIGH_QUALITY
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get shading mode
+ *
+ * \param[out] shadingMode the shading mode related parameters, 0:OFF 1:FAST 2:HIGH_QUALITY
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getShadingMode(camera_shading_mode_t& shadingMode) const;
/**
- * \brief Set statistics lens shading map mode
- *
- * \param[in] lensShadingMapMode the lens shading map mode related parameters
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set statistics lens shading map mode
+ *
+ * \param[in] lensShadingMapMode the lens shading map mode related parameters
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setLensShadingMapMode(camera_lens_shading_map_mode_type_t lensShadingMapMode);
/**
- * \brief Get statistics lens shading map mode
- *
- * \param[out] lensShadingMapMode the lens shading map mode related parameters, 0:OFF 1:ON
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
- int getLensShadingMapMode(camera_lens_shading_map_mode_type_t &lensShadingMapMode) const;
+ * \brief Get statistics lens shading map mode
+ *
+ * \param[out] lensShadingMapMode the lens shading map mode related parameters, 0:OFF 1:ON
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
+ int getLensShadingMapMode(camera_lens_shading_map_mode_type_t& lensShadingMapMode) const;
/**
- * \brief Set lens shading map
- *
- * \param[in] lensShadingMap the lens shading map
- * \param[in] lensShadingMapSize lensShadingMap's size
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
- int setLensShadingMap(const float *lensShadingMap, size_t lensShadingMapSize);
+ * \brief Set lens shading map
+ *
+ * \param[in] lensShadingMap the lens shading map
+ * \param[in] lensShadingMapSize lensShadingMap's size
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
+ int setLensShadingMap(const float* lensShadingMap, size_t lensShadingMapSize);
/**
- * \brief Get lens shading map
- *
- * \param[out] lensShadingMap the lens shading map
- * \param[out] lensShadingMapSize the lens shading map's size
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
- int getLensShadingMap(float **lensShadingMap, size_t &lensShadingMapSize) const;
+ * \brief Get lens shading map
+ *
+ * \param[out] lensShadingMap the lens shading map
+ * \param[out] lensShadingMapSize the lens shading map's size
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
+ int getLensShadingMap(float** lensShadingMap, size_t& lensShadingMapSize) const;
/**
- * \brief Get lens shading map size
- *
- * \param[out] arraySize the lens shading map size related parameters
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
- int getLensInfoShadingMapSize(camera_coordinate_t &shadingMapSize) const;
+ * \brief Get lens shading map size
+ *
+ * \param[out] arraySize the lens shading map size related parameters
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
+ int getLensInfoShadingMapSize(camera_coordinate_t& shadingMapSize) const;
/*
- * \brief Set tonemap mode
- *
- * \param[in] camera_tonemap_mode_t& mode
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set tonemap mode
+ *
+ * \param[in] camera_tonemap_mode_t& mode
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setTonemapMode(camera_tonemap_mode_t mode);
/**
- * \brief Get tonemap mode
- *
- * \param[out] camera_tonemap_mode_t& mode
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get tonemap mode
+ *
+ * \param[out] camera_tonemap_mode_t& mode
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getTonemapMode(camera_tonemap_mode_t& mode) const;
/**
- * \brief Get supported tonemap modes
- *
- * \param[out] vector& tonemapModes
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get supported tonemap modes
+ *
+ * \param[out] vector& tonemapModes
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getSupportedTonemapMode(std::vector& tonemapModes) const;
/**
- * \brief Set the type of tonemap preset curve
- *
- * \param[in] camera_tonemap_preset_curve_t type
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set the type of tonemap preset curve
+ *
+ * \param[in] camera_tonemap_preset_curve_t type
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setTonemapPresetCurve(camera_tonemap_preset_curve_t type);
/**
- * \brief Get tonemap gamma
- *
- * \param[out] camera_tonemap_preset_curve_t& type
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get tonemap gamma
+ *
+ * \param[out] camera_tonemap_preset_curve_t& type
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getTonemapPresetCurve(camera_tonemap_preset_curve_t& type) const;
/**
- * \brief Set tonemap gamma
- *
- * \param[in] float gamma
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set tonemap gamma
+ *
+ * \param[in] float gamma
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setTonemapGamma(float gamma);
/**
- * \brief Get tonemap gamma
- *
- * \param[out] float& gamma
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get tonemap gamma
+ *
+ * \param[out] float& gamma
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getTonemapGamma(float& gamma) const;
/**
- * \brief Get number of tonemap curve points
- *
- * \param[out] int32_t& number
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get number of tonemap curve points
+ *
+ * \param[out] int32_t& number
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getTonemapMaxCurvePoints(int32_t& number) const;
/**
- * \brief Set tonemap curves
- *
- * \param[in] const camera_tonemap_curves_t& curve
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Set tonemap curves
+ *
+ * \param[in] const camera_tonemap_curves_t& curve
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int setTonemapCurves(const camera_tonemap_curves_t& curves);
/**
- * \brief Get tonemap curves
- *
- * \param[out] camera_tonemap_curves_t& curve
- *
- * \return 0 if successfully, otherwise non-0 value is returned.
- */
+ * \brief Get tonemap curves
+ *
+ * \param[out] camera_tonemap_curves_t& curve
+ *
+ * \return 0 if successfully, otherwise non-0 value is returned.
+ */
int getTonemapCurves(camera_tonemap_curves_t& curves) const;
/**
@@ -2757,7 +2756,7 @@ class Parameters {
*
* \return 0 if power mode was set, otherwise non-0 value is returned.
*/
- int getPowerMode(camera_power_mode_t &mode) const;
+ int getPowerMode(camera_power_mode_t& mode) const;
/**
* \brief Set raw data output mode.
@@ -2775,7 +2774,7 @@ class Parameters {
*
* \return 0 if raw data output mode was set, otherwise non-0 value is returned.
*/
- int getRawDataOutput(raw_data_output_t &mode) const;
+ int getRawDataOutput(raw_data_output_t& mode) const;
/**
* \brief Set total exposure target
@@ -2793,7 +2792,7 @@ class Parameters {
*
* \return 0 if total exposure target was set, otherwise non-0 value is returned.
*/
- int getTotalExposureTarget(int64_t &totalExposureTarget) const;
+ int getTotalExposureTarget(int64_t& totalExposureTarget) const;
/**
* \brief Set user request id
@@ -2847,7 +2846,7 @@ class Parameters {
*
* \return 0 if flag was set, otherwise non-0 value is returned.
*/
- int getCallbackRgbs(bool *enabled) const;
+ int getCallbackRgbs(bool* enabled) const;
/**
* \brief Set callback tonemap curve flags
@@ -2865,9 +2864,9 @@ class Parameters {
*
* \return 0 if flag was set, otherwise non-0 value is returned.
*/
- int getCallbackTmCurve(bool *enabled) const;
+ int getCallbackTmCurve(bool* enabled) const;
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_S
/**
* \brief Set EVCP ECC status
*
@@ -2941,7 +2940,7 @@ class Parameters {
* \return 0 if flag was set, otherwise non-0 value is returned.
*/
int getEvcpFFMode(uint8_t* mode) const;
-// ENABLE_EVCP_E
+ // ENABLE_EVCP_E
/**
* \brief Set scale & crop region
@@ -2961,10 +2960,10 @@ class Parameters {
*/
int getZoomRegion(camera_zoom_region_t* region) const;
-private:
+ private:
friend class ParameterHelper;
- void* mData; // The internal data to save the all of the parameters.
-}; // class Parameters
+ void* mData; // The internal data to save the all of the parameters.
+}; // class Parameters
/*******************End of Camera Parameters Definition**********************/
} // namespace icamera
diff --git a/include/api/intel_vendor_metadata_tags.h b/include/api/intel_vendor_metadata_tags.h
index f0f1fc9e..7ef269e6 100644
--- a/include/api/intel_vendor_metadata_tags.h
+++ b/include/api/intel_vendor_metadata_tags.h
@@ -37,7 +37,7 @@ typedef enum vendor_metadata_section {
* Hierarchy positions in enum space.
*/
typedef enum vendor_metadata_section_start {
- INTEL_VENDOR_CAMERA_START = uint32_t(INTEL_VENDOR_CAMERA << 16),
+ INTEL_VENDOR_CAMERA_START = uint32_t(INTEL_VENDOR_CAMERA << 16),
} vendor_metadata_section_start_t;
/**
@@ -47,32 +47,32 @@ typedef enum vendor_metadata_section_start {
* src/metadata/vendor_metadata_tag_info.c
*/
typedef enum vendor_metadata_tag {
- INTEL_VENDOR_CAMERA_CALLBACK_RGBS = // enum | public
- INTEL_VENDOR_CAMERA_START,
+ INTEL_VENDOR_CAMERA_CALLBACK_RGBS = // enum | public
+ INTEL_VENDOR_CAMERA_START,
INTEL_VENDOR_CAMERA_RGBS_GRID_SIZE, // int32[] | public
INTEL_VENDOR_CAMERA_SHADING_CORRECTION, // enum | public
INTEL_VENDOR_CAMERA_RGBS_STATS_BLOCKS, // byte[] | public
INTEL_VENDOR_CAMERA_CALLBACK_TM_CURVE, // enum | public
INTEL_VENDOR_CAMERA_TONE_MAP_CURVE, // float[] | public
INTEL_VENDOR_CAMERA_POWER_MODE, // enum | public
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_CAPS, // int32 | public
-// ENABLE_EVCP_E
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_E
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_PEACE_FEATURES, // int32[] | public
-// ENABLE_EVCP_E
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_E
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_ECC_MODE, // enum | public
-// ENABLE_EVCP_E
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_E
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_BC_MODE, // enum | public
-// ENABLE_EVCP_E
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_E
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_BR_PARAMETERS, // int32[] | public
-// ENABLE_EVCP_E
-// ENABLE_EVCP_S
+ // ENABLE_EVCP_E
+ // ENABLE_EVCP_S
INTEL_VENDOR_CAMERA_IC_FF_MODE, // enum | public
-// ENABLE_EVCP_E
+ // ENABLE_EVCP_E
INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET, // int64 | public
INTEL_VENDOR_CAMERA_TOTAL_EXPOSURE_TARGET_RANGE, // int64[] | public
INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT, // enum | public
@@ -137,4 +137,3 @@ typedef enum vendor_metadata_enum_intel_vendor_camera_raw_data_output {
INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT_OFF,
INTEL_VENDOR_CAMERA_RAW_DATA_OUTPUT_ON,
} vendor_metadata_enum_intel_vendor_camera_raw_data_output_t;
-
diff --git a/include/utils/ScopedAtrace.h b/include/utils/ScopedAtrace.h
index ea58c307..312d9692 100644
--- a/include/utils/ScopedAtrace.h
+++ b/include/utils/ScopedAtrace.h
@@ -33,44 +33,41 @@ namespace icamera {
* is enabled.
*/
class ScopedAtrace {
- public:
- ScopedAtrace(const int level, const char* func, const char* tag,
- const char* note = NULL, long value = -1,
- const char* note2 = NULL, int value2 = -1,
- const char* note3 = NULL, int value3 = -1);
- ~ScopedAtrace();
- static void setTraceLevel(int);
- private:
- bool mEnableAtraceEnd;
+ public:
+ ScopedAtrace(const int level, const char* func, const char* tag, const char* note = NULL,
+ long value = -1, const char* note2 = NULL, int value2 = -1,
+ const char* note3 = NULL, int value3 = -1);
+ ~ScopedAtrace();
+ static void setTraceLevel(int);
+
+ private:
+ bool mEnableAtraceEnd;
};
-#define CAMERA_DEBUG_LOG_ATRACE_OS (1<<4)
-#define CAMERA_DEBUG_LOG_ATRACE_IMAGING (1<<7)
+#define CAMERA_DEBUG_LOG_ATRACE_OS (1 << 4)
+#define CAMERA_DEBUG_LOG_ATRACE_IMAGING (1 << 7)
-#define PERF_CAMERA_ATRACE() ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, \
- __func__, PERF_LOG_TAG_STR(LOG_TAG));
-#define PERF_CAMERA_ATRACE_PARAM1(note, value) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, \
- PERF_LOG_TAG_STR(LOG_TAG), note, value);
-#define PERF_CAMERA_ATRACE_PARAM2(note, value, note2, value2) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), \
- note, value, note2, value2);
-#define PERF_CAMERA_ATRACE_PARAM3(note, value, note2, value2, note3, value3) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), \
- note, value, note2, value2, note3, value3);
+#define PERF_CAMERA_ATRACE() \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG));
+#define PERF_CAMERA_ATRACE_PARAM1(note, value) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \
+ value);
+#define PERF_CAMERA_ATRACE_PARAM2(note, value, note2, value2) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \
+ value, note2, value2);
+#define PERF_CAMERA_ATRACE_PARAM3(note, value, note2, value2, note3, value3) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_OS, __func__, PERF_LOG_TAG_STR(LOG_TAG), note, \
+ value, note2, value2, note3, value3);
#define PERF_CAMERA_ATRACE_IMAGING() \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \
- PERF_LOG_TAG_STR(LOG_TAG));
-#define PERF_CAMERA_ATRACE_PARAM1_IMAGING(note, value) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \
- PERF_LOG_TAG_STR(LOG_TAG), note, value);
-#define PERF_CAMERA_ATRACE_PARAM2_IMAGING(note, value, note2, value2) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \
- PERF_LOG_TAG_STR(LOG_TAG), note, value, note2, value2);
-#define PERF_CAMERA_ATRACE_PARAM3_IMAGING(note, value, note2, value2, note3, \
- value3) \
- ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, \
- PERF_LOG_TAG_STR(LOG_TAG), note, value, note2, value2, note3, \
- value3);
-} // namespace icamera
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG));
+#define PERF_CAMERA_ATRACE_PARAM1_IMAGING(note, value) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \
+ note, value);
+#define PERF_CAMERA_ATRACE_PARAM2_IMAGING(note, value, note2, value2) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \
+ note, value, note2, value2);
+#define PERF_CAMERA_ATRACE_PARAM3_IMAGING(note, value, note2, value2, note3, value3) \
+ ScopedAtrace atrace(CAMERA_DEBUG_LOG_ATRACE_IMAGING, __func__, PERF_LOG_TAG_STR(LOG_TAG), \
+ note, value, note2, value2, note3, value3);
+} // namespace icamera
diff --git a/modules/algowrapper/IntelCca.cpp b/modules/algowrapper/IntelCca.cpp
index 9e4a2bb6..5b907f9f 100644
--- a/modules/algowrapper/IntelCca.cpp
+++ b/modules/algowrapper/IntelCca.cpp
@@ -33,7 +33,7 @@ IntelCca* IntelCca::getInstance(int cameraId, TuningMode mode) {
sCcaInstance.size());
AutoMutex lock(sLock);
- for (auto &it : sCcaInstance) {
+ for (auto& it : sCcaInstance) {
if (cameraId == it.cameraId) {
if (it.ccaHandle.find(mode) == it.ccaHandle.end()) {
it.ccaHandle[mode] = new IntelCca(cameraId, mode);
@@ -54,9 +54,9 @@ void IntelCca::releaseInstance(int cameraId, TuningMode mode) {
LOG2("@%s, tuningMode:%d", cameraId, __func__, mode);
AutoMutex lock(sLock);
- for (auto &it : sCcaInstance) {
+ for (auto& it : sCcaInstance) {
if (cameraId == it.cameraId && it.ccaHandle.find(mode) != it.ccaHandle.end()) {
- IntelCca *cca = it.ccaHandle[mode];
+ IntelCca* cca = it.ccaHandle[mode];
it.ccaHandle.erase(mode);
delete cca;
}
@@ -66,8 +66,8 @@ void IntelCca::releaseInstance(int cameraId, TuningMode mode) {
void IntelCca::releaseAllInstances() {
AutoMutex lock(sLock);
LOG2("@%s, cca instance size:%zu", __func__, sCcaInstance.size());
- for (auto &it : sCcaInstance) {
- for (auto &oneCcaHandle : it.ccaHandle) {
+ for (auto& it : sCcaInstance) {
+ for (auto& oneCcaHandle : it.ccaHandle) {
IntelCca* intelCca = oneCcaHandle.second;
delete intelCca;
}
@@ -75,9 +75,7 @@ void IntelCca::releaseAllInstances() {
}
}
-IntelCca::IntelCca(int cameraId, TuningMode mode) :
- mCameraId(cameraId),
- mTuningMode(mode) {
+IntelCca::IntelCca(int cameraId, TuningMode mode) : mCameraId(cameraId), mTuningMode(mode) {
mIntelCCA = nullptr;
}
@@ -140,15 +138,15 @@ ia_err IntelCca::runLTM(uint64_t frameId, const cca::cca_ltm_input_params& param
return ret;
}
-ia_err IntelCca::updateZoom(const cca::cca_dvs_zoom& params) {
- ia_err ret = getIntelCCA()->updateZoom(params);
+ia_err IntelCca::updateZoom(uint32_t streamId, const cca::cca_dvs_zoom& params) {
+ ia_err ret = getIntelCCA()->updateZoom(streamId, params);
LOG2("@%s, ret:%d", __func__, ret);
return ret;
}
-ia_err IntelCca::runDVS(uint64_t frameId) {
- ia_err ret = getIntelCCA()->runDVS(frameId);
+ia_err IntelCca::runDVS(uint32_t streamId, uint64_t frameId) {
+ ia_err ret = getIntelCCA()->runDVS(streamId, frameId);
LOG2("@%s, ret:%d", __func__, ret);
return ret;
@@ -213,7 +211,7 @@ bool IntelCca::allocStatsDataMem(unsigned int size) {
for (int i = 0; i < kMaxQueueSize; i++) {
void* p = malloc(size);
CheckAndLogError(!p, false, "failed to malloc stats buffer");
- StatsBufInfo info = { size, p, 0 };
+ StatsBufInfo info = {size, p, 0};
int64_t index = i * (-1) - 1; // default index list: -1, -2, -3, ...
mMemStatsInfoMap[index] = info;
@@ -243,8 +241,8 @@ void* IntelCca::getStatsDataBuffer() {
}
void IntelCca::decodeHwStatsDone(int64_t sequence, unsigned int byteUsed) {
- LOG2("@%s, tuningMode:%d, sequence:%ld, byteUsed:%d", mCameraId, __func__,
- mTuningMode, sequence, byteUsed);
+ LOG2("@%s, tuningMode:%d, sequence:%ld, byteUsed:%d", mCameraId, __func__, mTuningMode,
+ sequence, byteUsed);
AutoMutex l(mMemStatsMLock);
if (mMemStatsInfoMap.empty()) return;
@@ -296,8 +294,8 @@ uint32_t IntelCca::getPalDataSize(const cca::cca_program_group& programGroup) {
}
void* IntelCca::allocMem(int streamId, const std::string& name, int index, int size) {
- LOG1("@%s, name:%s, index: %d, streamId: %d, size: %d", __func__,
- name.c_str(), index, streamId, size);
+ LOG1("@%s, name:%s, index: %d, streamId: %d, size: %d", __func__, name.c_str(), index, streamId,
+ size);
return calloc(1, size);
}
diff --git a/modules/algowrapper/IntelCca.h b/modules/algowrapper/IntelCca.h
index 642bcde5..8bc56c00 100644
--- a/modules/algowrapper/IntelCca.h
+++ b/modules/algowrapper/IntelCca.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2020-2021 Intel Corporation.
+ * Copyright (C) 2020-2022 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -48,9 +48,9 @@ class IntelCca {
ia_err runLTM(uint64_t frameId, const cca::cca_ltm_input_params& params);
- ia_err updateZoom(const cca::cca_dvs_zoom& params);
+ ia_err updateZoom(uint32_t streamId, const cca::cca_dvs_zoom& params);
- ia_err runDVS(uint64_t frameId);
+ ia_err runDVS(uint32_t streamId, uint64_t frameId);
ia_err runAIC(uint64_t frameId, const cca::cca_pal_input_params* params, ia_binary_data* pal);
@@ -81,8 +81,8 @@ class IntelCca {
void freeStatsDataMem();
private:
- int mCameraId;
- TuningMode mTuningMode;
+ int mCameraId;
+ TuningMode mTuningMode;
// Only 3 buffers will be held in AiqResultStorage (kAiqResultStorageSize is 3),
// So it is safe to use other 3 buffers.
diff --git a/modules/algowrapper/IntelEvcp.cpp b/modules/algowrapper/IntelEvcp.cpp
index ecf5963d..bcc3d5f2 100644
--- a/modules/algowrapper/IntelEvcp.cpp
+++ b/modules/algowrapper/IntelEvcp.cpp
@@ -42,16 +42,16 @@ bool IntelEvcp::runEvcpFrame(void* inBufAddr, int size) {
const int CACHE_LINE_SIZE_FOR_ADL = 64;
const int CACHE_LINE_MASK_FOR_ADL = CACHE_LINE_SIZE_FOR_ADL - 1;
- char* p = reinterpret_cast(reinterpret_cast(start) &
- ~CACHE_LINE_MASK_FOR_ADL);
+ char* p =
+ reinterpret_cast(reinterpret_cast(start) & ~CACHE_LINE_MASK_FOR_ADL);
char* end = reinterpret_cast(start) + sz;
- asm volatile("mfence" :::"memory");
+ asm volatile("mfence" ::: "memory");
while (p < end) {
asm volatile("clflush (%0)" ::"r"(p));
p += CACHE_LINE_SIZE_FOR_ADL;
}
- asm volatile("mfence" :::"memory");
+ asm volatile("mfence" ::: "memory");
return true;
};
diff --git a/modules/algowrapper/IntelFaceDetection.cpp b/modules/algowrapper/IntelFaceDetection.cpp
index 5175bef3..7a60eb8c 100644
--- a/modules/algowrapper/IntelFaceDetection.cpp
+++ b/modules/algowrapper/IntelFaceDetection.cpp
@@ -26,11 +26,9 @@
#include "iutils/CameraLog.h"
namespace icamera {
-IntelFaceDetection::IntelFaceDetection() : mFDHandle(nullptr), mMaxFacesNum(0) {
-}
+IntelFaceDetection::IntelFaceDetection() : mFDHandle(nullptr), mMaxFacesNum(0) {}
-IntelFaceDetection::~IntelFaceDetection() {
-}
+IntelFaceDetection::~IntelFaceDetection() {}
status_t IntelFaceDetection::init(FaceDetectionInitParams* pData, int dataSize) {
CheckAndLogError(!pData, UNKNOWN_ERROR, "pData is nullptr");
@@ -67,7 +65,7 @@ status_t IntelFaceDetection::init(FaceDetectionInitParams* pData, int dataSize)
status_t IntelFaceDetection::deinit(FaceDetectionDeinitParams* pData, int dataSize) {
CheckAndLogError(!pData, UNKNOWN_ERROR, "pData is nullptr");
CheckAndLogError(dataSize < static_cast(sizeof(FaceDetectionDeinitParams)), UNKNOWN_ERROR,
- "buffer is small");
+ "buffer is small");
LOG1("@%s", pData->cameraId, __func__);
if (mFDHandle) {
diff --git a/modules/algowrapper/IntelPGParam.cpp b/modules/algowrapper/IntelPGParam.cpp
index 3e47ce86..7c3f375a 100644
--- a/modules/algowrapper/IntelPGParam.cpp
+++ b/modules/algowrapper/IntelPGParam.cpp
@@ -109,8 +109,7 @@ int IntelPGParam::getFragmentDescriptors(int descCount, ia_p2p_fragment_desc* de
CheckAndLogError(!terminal, BAD_VALUE, "terminal is nullptr");
int termIdx = terminal->tm_index;
- if (!IS_DATA_TERMINAL(mPgReqs.terminals[termIdx].type))
- continue;
+ if (!IS_DATA_TERMINAL(mPgReqs.terminals[termIdx].type)) continue;
if (mFragmentConfig) {
int kernelId = kernel_id_ffs(mPgReqs.terminals[termIdx].kernelBitmap);
@@ -244,19 +243,18 @@ int IntelPGParam::prepare(const ia_binary_data* ipuParameters, const ia_css_rbm_
ret = ia_p2p_get_kernel_terminal_requirements(mP2pHandle, mPgId, (uint32_t)kernelId,
&mKernel.mSections[kernelId]);
CheckAndLogError(ret != ia_err_none, ret,
- "%s: failed to get requirements for pg %d kernel %d", __func__,
- mPgId, kernelId);
+ "%s: failed to get requirements for pg %d kernel %d", __func__, mPgId,
+ kernelId);
/* Get payload descriptor */
- ret = ia_p2p_get_kernel_payload_desc(
- mP2pHandle, mPgId, (uint32_t)kernelId,
+ ret = ia_p2p_get_kernel_payload_desc(mP2pHandle, mPgId, (uint32_t)kernelId,
#if defined(IPU_SYSVER_IPU6) && defined(UNIFIED_PROG_TERM_FRAG_DESC)
- 1,
+ 1,
#else
- mFragmentCount,
+ mFragmentCount,
#endif
- mFragmentConfig->pixel_fragment_descs[kernelId],
- &mKernel.mPayloads[kernelId]);
+ mFragmentConfig->pixel_fragment_descs[kernelId],
+ &mKernel.mPayloads[kernelId]);
CheckAndLogError(ret != ia_err_none, ret,
"%s: failed to get payload for pg %d kernel %d, ret %d", __func__, mPgId,
kernelId, ret);
@@ -456,8 +454,7 @@ int IntelPGParam::allocatePayloads(int payloadCount, ia_binary_data* payloads) {
for (int idx = 0; idx < payloadCount; idx++) {
ia_binary_data payload = {nullptr, payloads[idx].size};
if (payload.size) {
- payload.data = CIPR::mallocAlignedMemory(PAGE_ALIGN(payload.size),
- CIPR::getPageSize());
+ payload.data = CIPR::mallocAlignedMemory(PAGE_ALIGN(payload.size), CIPR::getPageSize());
CheckAndLogError(!payload.data, BAD_VALUE, "no memory for payload size %d!",
payload.size);
mAllocatedPayloads.push_back(payload);
@@ -469,8 +466,7 @@ int IntelPGParam::allocatePayloads(int payloadCount, ia_binary_data* payloads) {
void IntelPGParam::destroyPayloads() {
while (!mAllocatedPayloads.empty()) {
- if (mAllocatedPayloads.back().data)
- CIPR::freeMemory(mAllocatedPayloads.back().data);
+ if (mAllocatedPayloads.back().data) CIPR::freeMemory(mAllocatedPayloads.back().data);
mAllocatedPayloads.pop_back();
}
}
@@ -533,7 +529,7 @@ int IntelPGParam::encodeTerminal(ia_css_terminal_t* terminal, ia_binary_data pay
mP2pHandle, mPgId, mFragmentCount, mFragmentConfig,
mPgReqs.terminals[terminalIndex].userParamAddress.get());
CheckAndLogError(ret != ia_err_none, ret,
- "Failed to call ia_p2p_get_kernel_user_parameters_v2.");
+ "Failed to call ia_p2p_get_kernel_user_parameters_v2.");
ia_css_kernel_user_param_t* userParam = reinterpret_cast(
mPgReqs.terminals[terminalIndex].userParamAddress.get());
@@ -697,9 +693,10 @@ int IntelPGParam::decodeTerminal(ia_css_terminal_t* terminal, ia_binary_data pay
/* Use specific ordering of kernels when available */
if (mPgReqs.terminals[terminalIndex].kernelOrder) {
kernelId = mPgReqs.terminals[terminalIndex].kernelOrder[kernelIndex++].id;
- CheckAndLogError(kernelId >= PSYS_MAX_KERNELS_PER_PG, css_err_internal,
- "%s: Kernel bitmap for terminal %d covers more kernels than in manifest",
- __func__, terminalIndex);
+ CheckAndLogError(
+ kernelId >= PSYS_MAX_KERNELS_PER_PG, css_err_internal,
+ "%s: Kernel bitmap for terminal %d covers more kernels than in manifest", __func__,
+ terminalIndex);
} else {
kernelId = getKernelIdByBitmap(kernelBitmap);
}
@@ -1057,26 +1054,24 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren
ia_p2p_payload_desc init = mKernel.mPayloads[kernelId];
/* calculate again the memory requirements for each kernel
* and compare it with what we stored at init time. */
- ia_err ia_ret = ia_p2p_get_kernel_payload_desc(
- mP2pHandle, mPgId, kernelId,
+ ia_err ia_ret =
+ ia_p2p_get_kernel_payload_desc(mP2pHandle, mPgId, kernelId,
#if defined(IPU_SYSVER_IPU6) && defined(UNIFIED_PROG_TERM_FRAG_DESC)
- 1,
+ 1,
#else
- mFragmentCount,
+ mFragmentCount,
#endif
- mFragmentConfig->pixel_fragment_descs[kernelId],
- current);
+ mFragmentConfig->pixel_fragment_descs[kernelId], current);
CheckAndLogError(ia_ret != ia_err_none, css_err_internal,
"Failed to get payload description during sanity check (kernel %d)", kernelId);
switch (mPgReqs.terminals[terminalIndex].type) {
case IA_CSS_TERMINAL_TYPE_PARAM_CACHED_IN:
if (current->param_in_payload_size > init.param_in_payload_size) {
- LOGW(
- "%s: param-in section size mismatch in pg[%d] kernel[%d]"
- " p2p size %d pg_die size %d",
- __func__, mPgId, kernelId, current->param_in_payload_size,
- init.param_in_payload_size);
+ LOGW("%s: param-in section size mismatch in pg[%d] kernel[%d]"
+ " p2p size %d pg_die size %d",
+ __func__, mPgId, kernelId, current->param_in_payload_size,
+ init.param_in_payload_size);
} else {
current->param_in_payload_size = init.param_in_payload_size;
}
@@ -1084,11 +1079,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren
break;
case IA_CSS_TERMINAL_TYPE_PARAM_CACHED_OUT:
if (current->param_out_payload_size > init.param_out_payload_size) {
- LOGW(
- "%s: param-out section size mismatch in pg[%d] kernel[%d]"
- " p2p size %d pg_die size %d",
- __func__, mPgId, kernelId, current->param_out_payload_size,
- init.param_out_payload_size);
+ LOGW("%s: param-out section size mismatch in pg[%d] kernel[%d]"
+ " p2p size %d pg_die size %d",
+ __func__, mPgId, kernelId, current->param_out_payload_size,
+ init.param_out_payload_size);
} else {
current->param_out_payload_size = init.param_out_payload_size;
}
@@ -1096,11 +1090,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren
break;
case IA_CSS_TERMINAL_TYPE_PROGRAM:
if (current->program_payload_size > init.program_payload_size) {
- LOG1(
- "%s: program section size mismatch in pg[%d] kernel[%d]"
- " p2p size %d pg_die size %d",
- __func__, mPgId, kernelId, current->program_payload_size,
- init.program_payload_size);
+ LOG1("%s: program section size mismatch in pg[%d] kernel[%d]"
+ " p2p size %d pg_die size %d",
+ __func__, mPgId, kernelId, current->program_payload_size,
+ init.program_payload_size);
} else {
current->program_payload_size = init.program_payload_size;
}
@@ -1108,11 +1101,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren
break;
case IA_CSS_TERMINAL_TYPE_PARAM_SPATIAL_IN:
if (current->spatial_param_in_payload_size > init.spatial_param_in_payload_size) {
- LOGW(
- "%s: spatial-in section size mismatch in pg[%d] kernel[%d]"
- " p2p size %d pg_die size %d",
- __func__, mPgId, kernelId, current->spatial_param_in_payload_size,
- init.spatial_param_in_payload_size);
+ LOGW("%s: spatial-in section size mismatch in pg[%d] kernel[%d]"
+ " p2p size %d pg_die size %d",
+ __func__, mPgId, kernelId, current->spatial_param_in_payload_size,
+ init.spatial_param_in_payload_size);
} else {
current->spatial_param_in_payload_size = init.spatial_param_in_payload_size;
}
@@ -1120,11 +1112,10 @@ css_err_t IntelPGParam::payloadSectionSizeSanityTest(ia_p2p_payload_desc* curren
break;
case IA_CSS_TERMINAL_TYPE_PARAM_SPATIAL_OUT:
if (current->spatial_param_out_payload_size > init.spatial_param_out_payload_size) {
- LOGW(
- "%s: spatial-out section size mismatch in pg[%d] kernel[%d]"
- " p2p size %d pg_die size %d",
- __func__, mPgId, kernelId, current->spatial_param_out_payload_size,
- init.spatial_param_out_payload_size);
+ LOGW("%s: spatial-out section size mismatch in pg[%d] kernel[%d]"
+ " p2p size %d pg_die size %d",
+ __func__, mPgId, kernelId, current->spatial_param_out_payload_size,
+ init.spatial_param_out_payload_size);
} else {
current->spatial_param_out_payload_size = init.spatial_param_out_payload_size;
}
diff --git a/modules/algowrapper/IntelTNR7US.cpp b/modules/algowrapper/IntelTNR7US.cpp
index 17998bd2..404d7a8e 100644
--- a/modules/algowrapper/IntelTNR7US.cpp
+++ b/modules/algowrapper/IntelTNR7US.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2020-2021 Intel Corporation
+ * Copyright (C) 2020-2022 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -120,9 +120,9 @@ int IntelTNR7US::runTnrFrame(const void* inBufAddr, void* outBufAddr, uint32_t i
struct timespec beginTime = {};
if (Log::isLogTagEnabled(ST_GPU_TNR)) clock_gettime(CLOCK_MONOTONIC, &beginTime);
/* call Tnr api to run tnr for the inSurface and store the result in outSurface */
- int ret = run_tnr7us_frame(mWidth, CM_SURFACE_ALIGN_HEIGHT(mHeight), mWidth, inSurface,
- outSurface, &tnrParam->scale, &tnrParam->ims, &tnrParam->bc,
- &tnrParam->blend, syncUpdate, mTnrType);
+ int ret =
+ run_tnr7us_frame(mWidth, mHeight, mWidth, inSurface, outSurface, &tnrParam->scale,
+ &tnrParam->ims, &tnrParam->bc, &tnrParam->blend, syncUpdate, mTnrType);
if (fd >= 0) {
destroyCMSurface(outSurface);
}
@@ -185,8 +185,7 @@ CmSurface2DUP* IntelTNR7US::getBufferCMSurface(void* bufAddr) {
CmSurface2DUP* IntelTNR7US::createCMSurface(void* bufAddr) {
PERF_CAMERA_ATRACE();
CmSurface2DUP* cmSurface = nullptr;
- int32_t ret = createCmSurface2DUP(mWidth, CM_SURFACE_ALIGN_HEIGHT(mHeight),
- CM_SURFACE_FORMAT_NV12, bufAddr, cmSurface);
+ int32_t ret = createCmSurface2DUP(mWidth, mHeight, CM_SURFACE_FORMAT_NV12, bufAddr, cmSurface);
CheckAndLogError(ret != 0, nullptr, "failed to create CmSurface2DUP object");
return cmSurface;
}
diff --git a/modules/algowrapper/graph/GraphConfigImpl.cpp b/modules/algowrapper/graph/GraphConfigImpl.cpp
index 6d250077..5a037ddb 100644
--- a/modules/algowrapper/graph/GraphConfigImpl.cpp
+++ b/modules/algowrapper/graph/GraphConfigImpl.cpp
@@ -93,7 +93,7 @@ void GraphConfigImpl::addCustomKeyMap() {
*/
#define GCSS_KEY(key, str) std::make_pair(#str, GCSS_KEY_##key),
map CUSTOM_GRAPH_KEYS = {
- #include "custom_gcss_keys.h"
+#include "custom_gcss_keys.h"
};
#undef GCSS_KEY
@@ -360,9 +360,9 @@ status_t GraphConfigImpl::getRawInputSize(GCSS::IGraphConfig* query, camera_reso
return UNKNOWN_ERROR;
}
-status_t GraphConfigImpl::queryAllMatchedResults(const std::vector& activeStreams,
- bool dummyStillSink,
- std::map> *queryResults) {
+status_t GraphConfigImpl::queryAllMatchedResults(
+ const std::vector& activeStreams, bool dummyStillSink,
+ std::map>* queryResults) {
CheckAndLogError(!queryResults, UNKNOWN_ERROR, "%s, The queryResults is nullptr", __func__);
status_t ret = createQueryRule(activeStreams, dummyStillSink);
@@ -395,7 +395,7 @@ status_t GraphConfigImpl::queryAllMatchedResults(const std::vector&
}
bool GraphConfigImpl::queryGraphSettings(const std::vector& activeStreams) {
- std::map > useCaseToQueryResults;
+ std::map> useCaseToQueryResults;
status_t ret = queryAllMatchedResults(activeStreams, false, &useCaseToQueryResults);
return ret == OK ? true : false;
}
@@ -407,7 +407,7 @@ status_t GraphConfigImpl::configStreams(const vector& activeStreams,
bool dummyStillSink) {
HAL_TRACE_CALL(CAMERA_DEBUG_LOG_LEVEL1);
- map > useCaseToQueryResults;
+ map> useCaseToQueryResults;
status_t ret = queryAllMatchedResults(activeStreams, dummyStillSink, &useCaseToQueryResults);
CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Faild to queryAllMatchedResults", __func__);
// Filter the results with same isys output if there are
@@ -434,7 +434,7 @@ status_t GraphConfigImpl::configStreams(const vector& activeStreams,
camera_resolution_t stillReso;
ret = getRawInputSize(still, &stillReso);
CheckAndLogError(ret != OK, UNKNOWN_ERROR,
- "%s, Failed to get csi ouput resolution for still pipe", __func__);
+ "%s, Failed to get csi ouput resolution for still pipe", __func__);
LOG2("Isys output resolution for still pipe: %dx%d", stillReso.width,
stillReso.height);
@@ -559,7 +559,7 @@ string GraphConfigImpl::format2GraphBpp(int format) {
* Do the secondary filter: configMode and stream format.
*/
status_t GraphConfigImpl::selectSetting(
- int useCase, std::map >* queryResults) {
+ int useCase, std::map>* queryResults) {
CheckAndLogError(!queryResults, UNKNOWN_ERROR, "%s, The queryResults is nullptr", __func__);
string opMode;
vector internalQueryResults;
@@ -603,9 +603,8 @@ status_t GraphConfigImpl::selectSetting(
string bpp = format2GraphBpp(s->format());
queryItem[bppKey] = bpp;
- LOG2("The stream: %dx%d, format: %s, graphFmt: %s, bpp: %s",
- s->width(), s->height(), CameraUtils::format2string(s->format()).c_str(),
- fmt.c_str(), bpp.c_str());
+ LOG2("The stream: %dx%d, format: %s, graphFmt: %s, bpp: %s", s->width(), s->height(),
+ CameraUtils::format2string(s->format()).c_str(), fmt.c_str(), bpp.c_str());
}
LOG1("dumpQuery with format condition");
@@ -833,11 +832,11 @@ status_t GraphConfigImpl::pipelineGetConnections(
std::vector stillScalerInfo, videoScalerInfo;
std::vector stillTnrPortFmt, videoTnrPortFmt;
- int ret = videoGraphPipe->pipelineGetConnections(pgList, &videoScalerInfo,
- &videoConnVector, &videoTnrPortFmt);
+ int ret = videoGraphPipe->pipelineGetConnections(pgList, &videoScalerInfo, &videoConnVector,
+ &videoTnrPortFmt);
CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the connetction from video pipe");
- ret = stillGraphPipe->pipelineGetConnections(pgList, &stillScalerInfo,
- &stillConnVector, &stillTnrPortFmt);
+ ret = stillGraphPipe->pipelineGetConnections(pgList, &stillScalerInfo, &stillConnVector,
+ &stillTnrPortFmt);
CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the connetction from still pipe");
LOG2("The connetction in video: %zu, in still: %zu; the scalera in video: %zu, in still: %zu",
diff --git a/modules/algowrapper/graph/GraphConfigImpl.h b/modules/algowrapper/graph/GraphConfigImpl.h
index daee690c..224e65f4 100644
--- a/modules/algowrapper/graph/GraphConfigImpl.h
+++ b/modules/algowrapper/graph/GraphConfigImpl.h
@@ -44,7 +44,7 @@ namespace icamera {
#define GCSS_KEY(key, str) GCSS_KEY_##key,
enum AndroidGraphConfigKey {
GCSS_ANDROID_KEY_START = GCSS_KEY_START_CUSTOM_KEYS,
- #include "custom_gcss_keys.h"
+#include "custom_gcss_keys.h"
};
#undef GCSS_KEY
@@ -112,13 +112,14 @@ class GraphConfigImpl {
status_t prepareGraphConfig();
bool isVideoStream(HalStream* stream);
status_t selectSetting(int useCase,
- std::map >* queryResults);
+ std::map>* queryResults);
status_t queryGraphs(const std::vector& activeStreams, bool dummyStillSink);
status_t createQueryRule(const std::vector& activeStreams, bool dummyStillSink);
status_t getRawInputSize(GCSS::IGraphConfig* query, camera_resolution_t* reso);
status_t queryAllMatchedResults(const std::vector& activeStreams,
- bool dummyStillSink, std::map> *queryResults);
+ bool dummyStillSink,
+ std::map>* queryResults);
status_t getGdcKernelSetting(uint32_t* kernelId, ia_isp_bxt_resolution_info_t* resolution);
status_t graphGetStreamIds(std::vector* streamIds);
int getStreamIdByPgName(std::string pgName);
@@ -152,7 +153,7 @@ class GraphConfigImpl {
* - The first item of mQuery is stream useCase(VIDEO or STILL),
* - and the second is an query rule map(GCSS_KEY_, VALUE).
*/
- std::map > mQuery;
+ std::map> mQuery;
/**
* Map to get the virtual sink id from a client stream pointer.
@@ -163,7 +164,7 @@ class GraphConfigImpl {
* - The first item is streams useCase(VIDEO or STILL)
* - and the second is the stream to virtual sink map
*/
- std::map > mStreamToSinkIdMap;
+ std::map> mStreamToSinkIdMap;
/*
* This vector is used to store the first query result.
@@ -176,7 +177,7 @@ class GraphConfigImpl {
std::map mQueryResult;
// The stream useCase to GraphConfigPipe map
- std::map > mGraphConfigPipe;
+ std::map> mGraphConfigPipe;
ConfigMode mConfigMode;
GraphSettingType mType;
diff --git a/modules/algowrapper/graph/GraphConfigPipe.cpp b/modules/algowrapper/graph/GraphConfigPipe.cpp
index 2c9845f0..96330cd4 100644
--- a/modules/algowrapper/graph/GraphConfigPipe.cpp
+++ b/modules/algowrapper/graph/GraphConfigPipe.cpp
@@ -61,9 +61,7 @@ uint32_t pppKernel[PPP_KERNEL_SIZE] = {ia_pal_uuid_isp_sc_outputscaler_ppp,
ia_pal_uuid_isp_sc_outputscaler_ppp_1_1};
uint32_t dsKernel[DS_KERNEL_SIZE] = {ia_pal_uuid_isp_b2i_ds_1_0_0, ia_pal_uuid_isp_b2i_ds_1_0_1};
-GraphConfigPipe::GraphConfigPipe(int pipeUseCase)
- : mSettings(nullptr),
- mPipeUseCase(pipeUseCase) {
+GraphConfigPipe::GraphConfigPipe(int pipeUseCase) : mSettings(nullptr), mPipeUseCase(pipeUseCase) {
mCsiOutput = {0, 0};
}
@@ -216,7 +214,7 @@ status_t GraphConfigPipe::getActiveOutputPorts(const StreamToSinkMap& streamToSi
ret = sink->getValue(GCSS_KEY_STREAM_ID, streamId);
CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to get stream id", __func__);
- Node *outputPort = getOutputPortForSink(sinkName);
+ Node* outputPort = getOutputPortForSink(sinkName);
CheckAndLogError(!outputPort, BAD_VALUE, "%s, No output port found for sink", __func__);
LOG2("%s, sink name: %s, stream id: %d, output port name: %s", __func__, sinkName.c_str(),
@@ -369,12 +367,12 @@ status_t GraphConfigPipe::getPgRbmValue(string pgName, IGraphType::StageAttr* st
if (ret != css_err_none) return NAME_NOT_FOUND;
GCSS::GraphCameraUtil mGCSSCameraUtil;
- void *rbmAddr = mGCSSCameraUtil.numString2binary(rbmString, &stageAttr->rbm_bytes);
+ void* rbmAddr = mGCSSCameraUtil.numString2binary(rbmString, &stageAttr->rbm_bytes);
CheckAndLogError(!rbmAddr, NO_MEMORY, "%s get rbm value: %s", __func__, rbmString.c_str());
if (stageAttr->rbm_bytes > MAX_RBM_STR_SIZE) {
- LOGE("%s, memory is too small to save rbm value: %d, %d", __func__,
- stageAttr->rbm_bytes, MAX_RBM_STR_SIZE);
+ LOGE("%s, memory is too small to save rbm value: %d, %d", __func__, stageAttr->rbm_bytes,
+ MAX_RBM_STR_SIZE);
stageAttr->rbm_bytes = 0;
return NO_MEMORY;
}
@@ -392,7 +390,7 @@ status_t GraphConfigPipe::getScalerKernelResolutionRatio(uint32_t* kenerArray, u
const ia_isp_bxt_resolution_info_t* resolutionInfo;
resolutionInfo = getScalerKernelResolutionInfo(kenerArray, sizeArray);
- if (!resolutionInfo) return OK; // no scaling in current setting
+ if (!resolutionInfo) return OK; // no scaling in current setting
*widthRatio = 1.0;
*heightRatio = 1.0;
@@ -586,8 +584,8 @@ status_t GraphConfigPipe::getPgIdForKernel(const uint32_t streamId, const int32_
if (ret != css_err_none) continue;
ret = ndVec->getValue(GCSS_KEY_PG_ID, *pgId);
- CheckAndLogError(ret != css_err_none, BAD_VALUE,
- "Couldn't get pg id for kernel: %d", kernelId);
+ CheckAndLogError(ret != css_err_none, BAD_VALUE, "Couldn't get pg id for kernel: %d",
+ kernelId);
LOG2("got the pgid:%d for kernel id:%d in stream:%d", *pgId, kernelId, streamId);
return OK;
@@ -825,8 +823,8 @@ status_t GraphConfigPipe::getPrivatePortFormat(Node* port,
}
ia_uid stageId;
- status_t status = GCSS::GraphCameraUtil::portGetFourCCInfo(port, stageId,
- format.formatSetting.terminalId);
+ status_t status =
+ GCSS::GraphCameraUtil::portGetFourCCInfo(port, stageId, format.formatSetting.terminalId);
CheckAndLogError(status != OK, INVALID_OPERATION, "Failed to get port uid", __func__);
ret = port->getValue(GCSS_KEY_WIDTH, format.formatSetting.width);
CheckAndLogError(ret != css_err_none, BAD_VALUE, "Failed to get port width", __func__);
@@ -838,8 +836,8 @@ status_t GraphConfigPipe::getPrivatePortFormat(Node* port,
CheckAndLogError(ret != css_err_none, BAD_VALUE, "Failed to find port fourcc", __func__);
format.formatSetting.fourcc = CameraUtils::string2IaFourccCode(fourccFormat.c_str());
- format.formatSetting.bpl = CameraUtils::getBpl(format.formatSetting.fourcc,
- format.formatSetting.width);
+ format.formatSetting.bpl =
+ CameraUtils::getBpl(format.formatSetting.fourcc, format.formatSetting.width);
format.formatSetting.bpp = CameraUtils::getBpp(format.formatSetting.fourcc);
LOG2("%s, Tnr ref out: streamId: %d, %dx%d, terminalId: %d, fmt: %s, bpp: %d, bpl: %d",
@@ -1112,7 +1110,7 @@ int32_t GraphConfigPipe::getTuningMode(const int32_t streamId) {
ret = result->getValue(GCSS_KEY_STREAM_ID, graphStreamId);
if (ret == css_err_none && graphStreamId == streamId && graphStreamId != -1) {
- GraphConfigNode *tuningModeNode = nullptr;
+ GraphConfigNode* tuningModeNode = nullptr;
ret = result->getDescendant(GCSS_KEY_TUNING_MODE, &tuningModeNode);
if (ret == css_err_none && tuningModeNode) {
string tuningModeStr;
@@ -1215,8 +1213,8 @@ status_t GraphConfigPipe::portGetPeer(Node* port, Node** peer) {
CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to get peer attribute", __func__);
ret = mSettings->getDescendantByString(peerName, peer);
- CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to find peer by name %s",
- __func__, peerName.c_str());
+ CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to find peer by name %s", __func__,
+ peerName.c_str());
return OK;
}
@@ -1263,8 +1261,8 @@ status_t GraphConfigPipe::portGetConnection(Node* port,
// input port is the sink in a connection
status = GCSS::GraphCameraUtil::portGetFourCCInfo(port, connectionInfo->mSinkStage,
connectionInfo->mSinkTerminal);
- CheckAndLogError(status != OK, BAD_VALUE,
- "%s, Failed to create fourcc info for sink port", __func__);
+ CheckAndLogError(status != OK, BAD_VALUE, "%s, Failed to create fourcc info for sink port",
+ __func__);
if (*peerPort != nullptr && !portIsVirtual(*peerPort)) {
status = GCSS::GraphCameraUtil::portGetFourCCInfo(
@@ -1279,8 +1277,8 @@ status_t GraphConfigPipe::portGetConnection(Node* port,
// output port is the source in a connection
status = GCSS::GraphCameraUtil::portGetFourCCInfo(port, connectionInfo->mSourceStage,
connectionInfo->mSourceTerminal);
- CheckAndLogError(status != OK, BAD_VALUE,
- "%s, Failed to create fourcc info for sink port", __func__);
+ CheckAndLogError(status != OK, BAD_VALUE, "%s, Failed to create fourcc info for sink port",
+ __func__);
if (*peerPort != nullptr && !portIsVirtual(*peerPort)) {
status = GCSS::GraphCameraUtil::portGetFourCCInfo(*peerPort, connectionInfo->mSinkStage,
@@ -1412,15 +1410,15 @@ int32_t GraphConfigPipe::portGetDirection(Node* port) {
* \return BAD_VALUE if any of the graph queries failed.
*/
status_t GraphConfigPipe::portGetFullName(Node* port, string* fullName) {
- CheckAndLogError(!fullName || !port, UNKNOWN_ERROR,
- "%s, the fullName or port is nullptr", __func__);
+ CheckAndLogError(!fullName || !port, UNKNOWN_ERROR, "%s, the fullName or port is nullptr",
+ __func__);
string portName, ancestorName;
Node* ancestor;
css_err_t ret = css_err_none;
ret = port->getAncestor(&ancestor);
- CheckAndLogError(ret != css_err_none, BAD_VALUE,
- "%s, Failed to retrieve port ancestor", __func__);
+ CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to retrieve port ancestor",
+ __func__);
ret = ancestor->getValue(GCSS_KEY_NAME, ancestorName);
if (ret != css_err_none) {
@@ -1430,8 +1428,7 @@ status_t GraphConfigPipe::portGetFullName(Node* port, string* fullName) {
}
ret = port->getValue(GCSS_KEY_NAME, portName);
- CheckAndLogError(ret != css_err_none, BAD_VALUE,
- "%s, Failed to retrieve port name", __func__);
+ CheckAndLogError(ret != css_err_none, BAD_VALUE, "%s, Failed to retrieve port name", __func__);
*fullName = ancestorName + ":" + portName;
return OK;
@@ -1493,8 +1490,8 @@ bool GraphConfigPipe::portIsVirtual(Node* port) {
*/
status_t GraphConfigPipe::portGetClientStream(Node* port, HalStream** stream) {
CheckAndLogError(!port || !stream, BAD_VALUE, "%s, Invalid parameters", __func__);
- CheckAndLogError(!portIsVirtual(port), INVALID_OPERATION,
- "%s, port is not a virtual port", __func__);
+ CheckAndLogError(!portIsVirtual(port), INVALID_OPERATION, "%s, port is not a virtual port",
+ __func__);
string portName;
css_err_t ret = port->getValue(GCSS_KEY_NAME, portName);
@@ -1532,8 +1529,8 @@ bool GraphConfigPipe::portIsEdgePort(Node* port) {
LOG2("port is disabled, so it is an edge port");
return true;
}
- CheckAndLogError(status != OK, false,
- "%s, Failed to create fourcc info for source port", __func__);
+ CheckAndLogError(status != OK, false, "%s, Failed to create fourcc info for source port",
+ __func__);
streamId = portGetStreamId(port);
if (streamId < 0) return false;
diff --git a/modules/ia_cipr/include/ipu-psys.h b/modules/ia_cipr/include/ipu-psys.h
index 657027a5..60fbc241 100644
--- a/modules/ia_cipr/include/ipu-psys.h
+++ b/modules/ia_cipr/include/ipu-psys.h
@@ -21,40 +21,40 @@
#define _UAPI_IPU_PSYS_H
#include
struct ipu_psys_capability {
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t version;
- uint8_t driver[20];
- uint32_t pg_count;
- uint8_t dev_model[32];
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t reserved[17];
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t version;
+ uint8_t driver[20];
+ uint32_t pg_count;
+ uint8_t dev_model[32];
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t reserved[17];
} __attribute__((packed));
struct ipu_psys_event {
- uint32_t type;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint64_t user_token;
- uint64_t issue_id;
- uint32_t buffer_idx;
- uint32_t error;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- int32_t reserved[2];
+ uint32_t type;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint64_t user_token;
+ uint64_t issue_id;
+ uint32_t buffer_idx;
+ uint32_t error;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int32_t reserved[2];
} __attribute__((packed));
#define IPU_PSYS_EVENT_TYPE_CMD_COMPLETE 1
#define IPU_PSYS_EVENT_TYPE_BUFFER_COMPLETE 2
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
struct ipu_psys_buffer {
- uint64_t len;
- union {
- int fd;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- void* userptr;
- uint64_t reserved;
- } base;
- uint32_t data_offset;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t bytes_used;
- uint32_t flags;
- uint32_t reserved[2];
+ uint64_t len;
+ union {
+ int fd;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ void* userptr;
+ uint64_t reserved;
+ } base;
+ uint32_t data_offset;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t bytes_used;
+ uint32_t flags;
+ uint32_t reserved[2];
} __attribute__((packed));
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define IPU_BUFFER_FLAG_INPUT (1 << 0)
@@ -70,33 +70,33 @@ struct ipu_psys_buffer {
#define IPU_PSYS_CMD_PRIORITY_LOW 2
#define IPU_PSYS_CMD_PRIORITY_NUM 3
struct ipu_psys_command {
- uint64_t issue_id;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint64_t user_token;
- uint32_t priority;
- void* pg_manifest;
- struct ipu_psys_buffer* buffers;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- int pg;
- uint32_t pg_manifest_size;
- uint32_t bufcount;
- uint32_t min_psys_freq;
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t frame_counter;
- uint32_t kernel_enable_bitmap[4];
- uint32_t terminal_enable_bitmap[4];
- uint32_t routing_enable_bitmap[4];
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t rbm[5];
- uint32_t reserved[2];
+ uint64_t issue_id;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint64_t user_token;
+ uint32_t priority;
+ void* pg_manifest;
+ struct ipu_psys_buffer* buffers;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int pg;
+ uint32_t pg_manifest_size;
+ uint32_t bufcount;
+ uint32_t min_psys_freq;
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t frame_counter;
+ uint32_t kernel_enable_bitmap[4];
+ uint32_t terminal_enable_bitmap[4];
+ uint32_t routing_enable_bitmap[4];
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t rbm[5];
+ uint32_t reserved[2];
} __attribute__((packed));
struct ipu_psys_manifest {
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
- uint32_t index;
- uint32_t size;
- void* manifest;
- uint32_t reserved[5];
-/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t index;
+ uint32_t size;
+ void* manifest;
+ uint32_t reserved[5];
+ /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
} __attribute__((packed));
#define IPU_IOC_QUERYCAP _IOR('A', 1, struct ipu_psys_capability)
#define IPU_IOC_MAPBUF _IOWR('A', 2, int)
diff --git a/modules/ia_cipr/src/Buffer.cpp b/modules/ia_cipr/src/Buffer.cpp
index d331dfac..611f0dfb 100644
--- a/modules/ia_cipr/src/Buffer.cpp
+++ b/modules/ia_cipr/src/Buffer.cpp
@@ -22,8 +22,8 @@
#include "iutils/Utils.h"
#include "modules/ia_cipr/include/Context.h"
-using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_ERR;
+using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_WARNING;
namespace icamera {
@@ -98,8 +98,7 @@ Result Buffer::validateBuffer(const MemoryDesc* memory) {
valid &= false;
}
- bool haveMemory = mem->flags & MemoryFlag::Allocated ||
- mem->flags & MemoryFlag::MemoryFromUser;
+ bool haveMemory = mem->flags & MemoryFlag::Allocated || mem->flags & MemoryFlag::MemoryFromUser;
if (!haveMemory &&
((mem->flags & MemoryFlag::MemoryHandle) || (mem->flags & MemoryFlag::CpuPtr))) {
valid &= false;
diff --git a/modules/ia_cipr/src/Command.cpp b/modules/ia_cipr/src/Command.cpp
index 52f1db3f..1a17ea0e 100644
--- a/modules/ia_cipr/src/Command.cpp
+++ b/modules/ia_cipr/src/Command.cpp
@@ -23,8 +23,8 @@
#include "iutils/CameraLog.h"
#include "iutils/Utils.h"
-using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_ERR;
+using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_WARNING;
namespace icamera {
@@ -79,19 +79,19 @@ Result Command::updateKernel(const PSysCommandConfig& cfg, const MemoryDesc& mem
ProcessGroupCommand* ppg_command_ext = reinterpret_cast(memory.cpuPtr);
CheckAndLogError(ppg_command_ext->header.size != memory.size ||
- ppg_command_ext->header.offset != sizeof(PSysCmdExtHeader) ||
- (ppg_command_ext->header.version != psys_command_ext_ppg_0 &&
- ppg_command_ext->header.version != psys_command_ext_ppg_1),
+ ppg_command_ext->header.offset != sizeof(PSysCmdExtHeader) ||
+ (ppg_command_ext->header.version != psys_command_ext_ppg_0 &&
+ ppg_command_ext->header.version != psys_command_ext_ppg_1),
Result::InvaildArg, "Invalid command extension buffer received! (%p)",
cfg.extBuf);
if (ppg_command_ext->header.version == psys_command_ext_ppg_1) {
CheckAndLogError(sizeof(mCmd->iocCmd.kernel_enable_bitmap) !=
- sizeof(ppg_command_ext->dynamicKernelBitmap), Result::DataError,
- "Invalid bitmap buffer size");
- MEMCPY_S(
- &(mCmd->iocCmd.kernel_enable_bitmap), sizeof(mCmd->iocCmd.kernel_enable_bitmap),
- ppg_command_ext->dynamicKernelBitmap, sizeof(ppg_command_ext->dynamicKernelBitmap));
+ sizeof(ppg_command_ext->dynamicKernelBitmap),
+ Result::DataError, "Invalid bitmap buffer size");
+ MEMCPY_S(&(mCmd->iocCmd.kernel_enable_bitmap), sizeof(mCmd->iocCmd.kernel_enable_bitmap),
+ ppg_command_ext->dynamicKernelBitmap,
+ sizeof(ppg_command_ext->dynamicKernelBitmap));
}
mCmd->iocCmd.frame_counter = static_cast(ppg_command_ext->frameCounter);
diff --git a/modules/ia_cipr/src/Context.cpp b/modules/ia_cipr/src/Context.cpp
index 85af000e..1896491c 100644
--- a/modules/ia_cipr/src/Context.cpp
+++ b/modules/ia_cipr/src/Context.cpp
@@ -29,8 +29,8 @@
#include "iutils/CameraLog.h"
#include "iutils/Utils.h"
-using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_ERR;
+using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_WARNING;
#include "modules/ia_cipr/include/Context.h"
@@ -120,13 +120,13 @@ Result Context::getCapabilities(PSYSCapability* cap) {
cap->version = psys_capability.version;
CheckAndLogError(sizeof(cap->driver) != sizeof(psys_capability.driver), Result::DataError,
"the driver array size wasn't matching");
- MEMCPY_S(cap->driver, sizeof(cap->driver),
- psys_capability.driver, sizeof(psys_capability.driver));
+ MEMCPY_S(cap->driver, sizeof(cap->driver), psys_capability.driver,
+ sizeof(psys_capability.driver));
CheckAndLogError(sizeof(cap->devModel) != sizeof(psys_capability.dev_model), Result::DataError,
"the dev model array size wasn't matching");
- MEMCPY_S(cap->devModel, sizeof(cap->devModel),
- psys_capability.dev_model, sizeof(psys_capability.dev_model));
+ MEMCPY_S(cap->devModel, sizeof(cap->devModel), psys_capability.dev_model,
+ sizeof(psys_capability.dev_model));
cap->programGroupCount = psys_capability.pg_count;
diff --git a/modules/ia_cipr/src/Event.cpp b/modules/ia_cipr/src/Event.cpp
index d14bce35..6e6a08de 100644
--- a/modules/ia_cipr/src/Event.cpp
+++ b/modules/ia_cipr/src/Event.cpp
@@ -31,8 +31,8 @@
#include "iutils/CameraLog.h"
#include "iutils/Utils.h"
-using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_ERR;
+using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_WARNING;
#include "modules/ia_cipr/include/Context.h"
diff --git a/modules/ia_cipr/src/Utils.cpp b/modules/ia_cipr/src/Utils.cpp
index c5c43d22..003f9a84 100644
--- a/modules/ia_cipr/src/Utils.cpp
+++ b/modules/ia_cipr/src/Utils.cpp
@@ -23,8 +23,8 @@
#include "iutils/CameraLog.h"
#include "iutils/Utils.h"
-using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_ERR;
+using icamera::CAMERA_DEBUG_LOG_INFO;
using icamera::CAMERA_DEBUG_LOG_WARNING;
namespace icamera {
diff --git a/src/3a/AiqCore.cpp b/src/3a/AiqCore.cpp
index 438d138e..7e825ca5 100644
--- a/src/3a/AiqCore.cpp
+++ b/src/3a/AiqCore.cpp
@@ -20,6 +20,7 @@
#include
+#include
#include
#include
@@ -101,8 +102,7 @@ int AiqCore::initAiqPlusParams() {
}
mGbceParams.gbce_on = (tonemapMaxCurvePoints > 0) ? true : false;
mGbceParams.athena_mode = PlatformData::getPLCEnable(mCameraId);
- LOG1("%s, gbce_on: %d, plc enable: %d", __func__, mGbceParams.gbce_on,
- mGbceParams.athena_mode);
+ LOG1("%s, gbce_on: %d, plc enable: %d", __func__, mGbceParams.gbce_on, mGbceParams.athena_mode);
// HDR_FEATURE_S
if (PlatformData::getSensorAeEnable(mCameraId)) {
@@ -308,7 +308,6 @@ int AiqCore::runAe(long requestId, AiqResult* aiqResult) {
CheckAndLogError(!aiqResult, BAD_VALUE, "@%s, aiqResult is nullptr", __func__);
LOG2("@%s, aiqResult %p", requestId, __func__, aiqResult);
-
// run AE
return runAEC(requestId, &aiqResult->mAeResults);
}
@@ -316,7 +315,6 @@ int AiqCore::runAe(long requestId, AiqResult* aiqResult) {
int AiqCore::runAiq(long requestId, AiqResult* aiqResult) {
CheckAndLogError(!aiqResult, BAD_VALUE, "@%s, aiqResult is nullptr", __func__);
-
int aaaRunType = IMAGING_ALGO_AWB | IMAGING_ALGO_GBCE | IMAGING_ALGO_PA;
if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) {
aaaRunType |= IMAGING_ALGO_AF;
@@ -696,7 +694,8 @@ bool AiqCore::bypassAe(const aiq_parameter_t& param) {
// run AE if manual AE or total exposure target is set
if (param.aeMode != AE_MODE_AUTO || param.powerMode != CAMERA_LOW_POWER ||
- param.totalExposureTarget > 0) return false;
+ param.totalExposureTarget > 0)
+ return false;
bool converged = mLastAeResult.exposures[0].converged;
diff --git a/src/3a/AiqEngine.cpp b/src/3a/AiqEngine.cpp
index d39d736e..3944a6fc 100644
--- a/src/3a/AiqEngine.cpp
+++ b/src/3a/AiqEngine.cpp
@@ -116,8 +116,8 @@ int AiqEngine::run3A(long requestId, int64_t applyingSeq, int64_t* effectSeq) {
AutoMutex l(mEngineLock);
AiqStatistics* aiqStats =
- mFirstAiqRunning ? nullptr
- : const_cast(mAiqResultStorage->getAndLockAiqStatistics());
+ mFirstAiqRunning ? nullptr :
+ const_cast(mAiqResultStorage->getAndLockAiqStatistics());
AiqState state = AIQ_STATE_IDLE;
AiqResult* aiqResult = mAiqResultStorage->acquireAiqResult();
@@ -155,7 +155,6 @@ int AiqEngine::run3A(long requestId, int64_t applyingSeq, int64_t* effectSeq) {
mAiqRunningHistory.statsSequnce);
}
-
PlatformData::saveMakernoteData(mCameraId, aiqResult->mAiqParam.makernoteMode,
mAiqResultStorage->getAiqResult()->mSequence,
aiqResult->mTuningMode);
@@ -174,8 +173,8 @@ void AiqEngine::handleEvent(EventData eventData) {
mLensManager->handleSofEvent(eventData);
}
-int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams,
- AiqStatistics* aiqStatistics, AiqResult* aiqResult) {
+int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams, AiqStatistics* aiqStatistics,
+ AiqResult* aiqResult) {
LOG2("%s, sequence %ld", __func__, aiqStatistics->mSequence);
// update face detection related parameters
@@ -218,7 +217,7 @@ int AiqEngine::prepareStatsParams(cca::cca_stats_params* statsParams,
if (PlatformData::isDvsSupported(mCameraId) &&
PlatformData::getGraphConfigNodes(mCameraId)) {
std::shared_ptr gc = nullptr;
- IGraphConfigManager *GCM = IGraphConfigManager::getInstance(mCameraId);
+ IGraphConfigManager* GCM = IGraphConfigManager::getInstance(mCameraId);
if (GCM) {
gc = GCM->getGraphConfig(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL);
}
@@ -399,9 +398,9 @@ AiqEngine::AiqState AiqEngine::handleAiqResult(AiqResult* aiqResult) {
LOG2("%s: aiqResult->mTuningMode = %d", __func__, aiqResult->mTuningMode);
// HDR_FEATURE_S
- aec_scene_t aecScene = (aiqResult->mAeResults.multiframe == ia_aiq_bracket_mode_ull)
- ? AEC_SCENE_ULL
- : AEC_SCENE_HDR;
+ aec_scene_t aecScene = (aiqResult->mAeResults.multiframe == ia_aiq_bracket_mode_ull) ?
+ AEC_SCENE_ULL :
+ AEC_SCENE_HDR;
mAiqSetting->updateTuningMode(aecScene);
// HDR_FEATURE_E
@@ -433,8 +432,8 @@ int AiqEngine::applyManualTonemaps(AiqResult* aiqResult) {
aiqResult->mAiqParam.tonemapMode == TONEMAP_MODE_HIGH_QUALITY) {
aiqResult->mGbceResults.have_manual_settings = false;
- if (aiqResult->mAiqParam.aeMode != AE_MODE_AUTO && aiqResult->mAiqParam.manualIso != 0
- && aiqResult->mAiqParam.manualExpTimeUs != 0) {
+ if (aiqResult->mAiqParam.aeMode != AE_MODE_AUTO && aiqResult->mAiqParam.manualIso != 0 &&
+ aiqResult->mAiqParam.manualExpTimeUs != 0) {
aiqResult->mGbceResults.have_manual_settings = true;
}
}
diff --git a/src/3a/AiqResult.cpp b/src/3a/AiqResult.cpp
index a7c19013..48a56bbe 100644
--- a/src/3a/AiqResult.cpp
+++ b/src/3a/AiqResult.cpp
@@ -23,18 +23,18 @@
namespace icamera {
-AiqResult::AiqResult(int cameraId) :
- mCameraId(cameraId),
- mTimestamp(0),
- mSequence(-1),
- mFrameId(-1),
- mTuningMode(TUNING_MODE_VIDEO),
- mAfDistanceDiopters(0.0f),
- mSkip(false),
- mLensPosition(0),
- mSceneMode(SCENE_MODE_AUTO),
- mFrameDuration(0),
- mRollingShutter(0) {
+AiqResult::AiqResult(int cameraId)
+ : mCameraId(cameraId),
+ mTimestamp(0),
+ mSequence(-1),
+ mFrameId(-1),
+ mTuningMode(TUNING_MODE_VIDEO),
+ mAfDistanceDiopters(0.0f),
+ mSkip(false),
+ mLensPosition(0),
+ mSceneMode(SCENE_MODE_AUTO),
+ mFrameDuration(0),
+ mRollingShutter(0) {
CLEAR(mCustomControls);
CLEAR(mCustomControlsParams);
CLEAR(mAwbResults);
@@ -74,7 +74,7 @@ int AiqResult::deinit() {
return OK;
}
-AiqResult &AiqResult::operator=(const AiqResult &other) {
+AiqResult& AiqResult::operator=(const AiqResult& other) {
mCameraId = other.mCameraId;
mSequence = other.mSequence;
mFrameId = other.mFrameId;
@@ -98,8 +98,8 @@ AiqResult &AiqResult::operator=(const AiqResult &other) {
for (int i = 0; i < mCustomControls.count; i++) {
mCustomControlsParams[i] = other.mCustomControlsParams[i];
}
- MEMCPY_S(mLensShadingMap, sizeof(mLensShadingMap),
- other.mLensShadingMap, sizeof(other.mLensShadingMap));
+ MEMCPY_S(mLensShadingMap, sizeof(mLensShadingMap), other.mLensShadingMap,
+ sizeof(other.mLensShadingMap));
mAiqParam = other.mAiqParam;
mFrameDuration = other.mFrameDuration;
diff --git a/src/3a/AiqResult.h b/src/3a/AiqResult.h
index 715bfe89..3fb75d9f 100644
--- a/src/3a/AiqResult.h
+++ b/src/3a/AiqResult.h
@@ -34,8 +34,7 @@ namespace icamera {
* Then we can do deep copy of the results
*/
class AiqResult {
-
-public:
+ public:
AiqResult(int cameraId);
~AiqResult();
@@ -44,7 +43,7 @@ class AiqResult {
AiqResult& operator=(const AiqResult& other);
-public:
+ public:
int mCameraId;
unsigned long long mTimestamp;
int64_t mSequence;
@@ -72,10 +71,9 @@ class AiqResult {
int64_t mFrameDuration; // us
int64_t mRollingShutter; // us
-private:
+ private:
/*!< ia_isp_custom_controls pointer content */
float mCustomControlsParams[MAX_CUSTOM_CONTROLS_PARAM_SIZE];
-
};
} /* namespace icamera */
diff --git a/src/3a/AiqResultStorage.cpp b/src/3a/AiqResultStorage.cpp
index cb4f8984..6e55a96e 100644
--- a/src/3a/AiqResultStorage.cpp
+++ b/src/3a/AiqResultStorage.cpp
@@ -36,8 +36,7 @@ void AiqResultStorage::releaseAiqResultStorage(int cameraId) {
delete storage;
}
-AiqResultStorage::AiqResultStorage(int cameraId) :
- mCameraId(cameraId) {
+AiqResultStorage::AiqResultStorage(int cameraId) : mCameraId(cameraId) {
for (int i = 0; i < kStorageSize; i++) {
mAiqResults[i] = new AiqResult(mCameraId);
mAiqResults[i]->init();
@@ -82,11 +81,10 @@ void AiqResultStorage::resetAiqStatistics() {
const AiqStatistics* AiqResultStorage::getAndLockAiqStatistics() {
AutoRMutex rlock(mDataLock);
- if (mCurrentAiqStatsIndex == -1)
- return nullptr;
+ if (mCurrentAiqStatsIndex == -1) return nullptr;
- CheckAndLogError(mAiqStatistics[mCurrentAiqStatsIndex].mSequence == -1,
- nullptr, "Invalid sequence id -1 of stored aiq statistics");
+ CheckAndLogError(mAiqStatistics[mCurrentAiqStatsIndex].mSequence == -1, nullptr,
+ "Invalid sequence id -1 of stored aiq statistics");
mAiqStatistics[mCurrentAiqStatsIndex].mInUse = true;
return &mAiqStatistics[mCurrentAiqStatsIndex];
@@ -150,5 +148,4 @@ AiqResultStorage* AiqResultStorage::getInstanceLocked(int cameraId) {
return sInstances[cameraId];
}
-} //namespace icamera
-
+} // namespace icamera
diff --git a/src/3a/AiqResultStorage.h b/src/3a/AiqResultStorage.h
index 9a2f94d3..70c0af4c 100644
--- a/src/3a/AiqResultStorage.h
+++ b/src/3a/AiqResultStorage.h
@@ -43,7 +43,7 @@ namespace icamera {
* its static methods getInstance and releaseAiqResultStorage.
*/
class AiqResultStorage {
-public:
+ public:
/**
* \brief Get internal instance for cameraId.
*
@@ -84,7 +84,8 @@ class AiqResultStorage {
* param[in] int64_t sequence: specify which aiq result is needed.
*
* return 1. when sequence id is -1 or not provided, the lastest result will be returned.
- * 2. when sequence id is larger than -1, the result with gaven sequence id will be returned.
+ * 2. when sequence id is larger than -1, the result with gaven sequence id will be
+ * returned.
* 3. if cannot find in result storage, it means either sequence id is too old and its
* result was overrided, or the sequence id is too new, and its result has not been
* saved into storage yet. For both cases, nullptr will be returned.
@@ -125,27 +126,27 @@ class AiqResultStorage {
*/
void resetAiqStatistics();
-private:
+ private:
AiqResultStorage(int cameraId);
~AiqResultStorage();
static AiqResultStorage* getInstanceLocked(int cameraId);
-private:
+ private:
static std::map sInstances;
// Guard for singleton creation.
static Mutex sLock;
int mCameraId;
- RWLock mDataLock; // lock for all the data storage below
+ RWLock mDataLock; // lock for all the data storage below
- static const int kStorageSize = MAX_SETTING_COUNT; // Should > MAX_BUFFER_COUNT + sensorLag
+ static const int kStorageSize = MAX_SETTING_COUNT; // Should > MAX_BUFFER_COUNT + sensorLag
int mCurrentIndex = -1;
AiqResult* mAiqResults[kStorageSize];
- static const int kAiqStatsStorageSize = 3; // Always use the latest, but may hold for long time
+ static const int kAiqStatsStorageSize = 3; // Always use the latest, but may hold for long time
int mCurrentAiqStatsIndex = -1;
AiqStatistics mAiqStatistics[kAiqStatsStorageSize];
};
-} //namespace icamera
+} // namespace icamera
diff --git a/src/3a/AiqSetting.cpp b/src/3a/AiqSetting.cpp
index 12e8159e..0b6d1bef 100644
--- a/src/3a/AiqSetting.cpp
+++ b/src/3a/AiqSetting.cpp
@@ -27,12 +27,9 @@
namespace icamera {
-AiqSetting::AiqSetting(int cameraId) :
- mCameraId(cameraId) {
-}
+AiqSetting::AiqSetting(int cameraId) : mCameraId(cameraId) {}
-AiqSetting::~AiqSetting() {
-}
+AiqSetting::~AiqSetting() {}
int AiqSetting::init(void) {
AutoWMutex wlock(mParamLock);
@@ -53,7 +50,7 @@ int AiqSetting::deinit(void) {
return OK;
}
-int AiqSetting::configure(const stream_config_t *streamList) {
+int AiqSetting::configure(const stream_config_t* streamList) {
AutoWMutex wlock(mParamLock);
camera_resolution_t resolution = {streamList->streams[0].width, streamList->streams[0].height};
@@ -90,21 +87,21 @@ int AiqSetting::configure(const stream_config_t *streamList) {
mAiqParam.tuningMode = mTuningModes[0];
}
LOG1("%s, tuningMode %d, configMode %x, fame usage %d, res %dx%d", __func__,
- mAiqParam.tuningMode, configModes[0], mAiqParam.frameUsage,
- mAiqParam.resolution.width, mAiqParam.resolution.height);
+ mAiqParam.tuningMode, configModes[0], mAiqParam.frameUsage, mAiqParam.resolution.width,
+ mAiqParam.resolution.height);
return OK;
}
-void AiqSetting::updateFrameUsage(const stream_config_t *streamList) {
+void AiqSetting::updateFrameUsage(const stream_config_t* streamList) {
bool preview = false, still = false, video = false;
for (int i = 0; i < streamList->num_streams; i++) {
if (streamList->streams[i].usage == CAMERA_STREAM_VIDEO_CAPTURE) {
video = true;
} else if (streamList->streams[i].usage == CAMERA_STREAM_STILL_CAPTURE) {
still = true;
- } else if (streamList->streams[i].usage == CAMERA_STREAM_PREVIEW
- || streamList->streams[i].usage == CAMERA_STREAM_APP) {
+ } else if (streamList->streams[i].usage == CAMERA_STREAM_PREVIEW ||
+ streamList->streams[i].usage == CAMERA_STREAM_APP) {
preview = true;
}
}
@@ -146,8 +143,8 @@ int AiqSetting::setParameters(const Parameters& params) {
mAiqParam.evShift = 0.0;
} else {
ev = CLIP(ev, mAiqParam.evRange.max, mAiqParam.evRange.min);
- mAiqParam.evShift = static_cast(ev) *
- mAiqParam.evStep.numerator / mAiqParam.evStep.denominator;
+ mAiqParam.evShift =
+ static_cast(ev) * mAiqParam.evStep.numerator / mAiqParam.evStep.denominator;
}
params.getFrameRate(mAiqParam.fps);
@@ -221,8 +218,8 @@ int AiqSetting::setParameters(const Parameters& params) {
CheckWarningNoReturn(curves.bSize > DEFAULT_TONEMAP_CURVE_POINT_NUM,
"user v curve size is too big %d", curves.bSize);
int curveSize = sizeof(float) * DEFAULT_TONEMAP_CURVE_POINT_NUM;
- MEMCPY_S(&mAiqParam.tonemapCurveMem[0], curveSize,
- curves.rCurve, sizeof(float) * curves.rSize);
+ MEMCPY_S(&mAiqParam.tonemapCurveMem[0], curveSize, curves.rCurve,
+ sizeof(float) * curves.rSize);
MEMCPY_S(&mAiqParam.tonemapCurveMem[DEFAULT_TONEMAP_CURVE_POINT_NUM], curveSize,
curves.gCurve, sizeof(float) * curves.gSize);
MEMCPY_S(&mAiqParam.tonemapCurveMem[DEFAULT_TONEMAP_CURVE_POINT_NUM * 2], curveSize,
@@ -240,19 +237,19 @@ int AiqSetting::setParameters(const Parameters& params) {
uint8_t captureIntent = 0;
if (params.getCaptureIntent(captureIntent) == OK) {
switch (captureIntent) {
- case CAMERA_CONTROL_CAPTUREINTENT_STILL_CAPTURE:
- mAiqParam.frameUsage = FRAME_USAGE_STILL;
- break;
- case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_RECORD:
- case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_SNAPSHOT:
- mAiqParam.frameUsage = FRAME_USAGE_VIDEO;
- break;
- case CAMERA_CONTROL_CAPTUREINTENT_PREVIEW:
- mAiqParam.frameUsage = FRAME_USAGE_PREVIEW;
- break;
- default:
- mAiqParam.frameUsage = FRAME_USAGE_CONTINUOUS;
- break;
+ case CAMERA_CONTROL_CAPTUREINTENT_STILL_CAPTURE:
+ mAiqParam.frameUsage = FRAME_USAGE_STILL;
+ break;
+ case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_RECORD:
+ case CAMERA_CONTROL_CAPTUREINTENT_VIDEO_SNAPSHOT:
+ mAiqParam.frameUsage = FRAME_USAGE_VIDEO;
+ break;
+ case CAMERA_CONTROL_CAPTUREINTENT_PREVIEW:
+ mAiqParam.frameUsage = FRAME_USAGE_PREVIEW;
+ break;
+ default:
+ mAiqParam.frameUsage = FRAME_USAGE_CONTINUOUS;
+ break;
}
}
@@ -264,7 +261,7 @@ int AiqSetting::setParameters(const Parameters& params) {
return OK;
}
-int AiqSetting::getAiqParameter(aiq_parameter_t ¶m) {
+int AiqSetting::getAiqParameter(aiq_parameter_t& param) {
AutoRMutex rlock(mParamLock);
param = mAiqParam;
@@ -276,9 +273,8 @@ int AiqSetting::getAiqParameter(aiq_parameter_t ¶m) {
based on AE result. Current it only has HDR and ULL mode switching case,
this maybe changed if more cases are supported. */
void AiqSetting::updateTuningMode(aec_scene_t aecScene) {
- if (!PlatformData::isEnableHDR(mCameraId)
- || mTuningModes.size() <= 1
- || mAiqParam.aeMode != AE_MODE_AUTO) {
+ if (!PlatformData::isEnableHDR(mCameraId) || mTuningModes.size() <= 1 ||
+ mAiqParam.aeMode != AE_MODE_AUTO) {
return;
}
@@ -290,7 +286,7 @@ void AiqSetting::updateTuningMode(aec_scene_t aecScene) {
}
bool found = false;
- for (auto &tMode : mTuningModes) {
+ for (auto& tMode : mTuningModes) {
// Check tuningMode if support or not
if (tMode == tuningMode) {
found = true;
@@ -320,12 +316,12 @@ void aiq_parameter_t::reset() {
evStep = {1, 3};
evRange = {-6, 6};
fps = 0;
- aeFpsRange = { 0.0, 0.0 };
+ aeFpsRange = {0.0, 0.0};
antibandingMode = ANTIBANDING_MODE_AUTO;
- cctRange = { 0, 0 };
- whitePoint = { 0, 0 };
- awbManualGain = { 0, 0, 0 };
- awbGainShift = { 0, 0, 0 };
+ cctRange = {0, 0};
+ whitePoint = {0, 0};
+ awbManualGain = {0, 0, 0};
+ awbGainShift = {0, 0, 0};
CLEAR(manualColorMatrix);
CLEAR(manualColorGains);
aeRegions.clear();
@@ -388,38 +384,36 @@ void aiq_parameter_t::dump() {
LOG3("converge speed mode: ae %d, awb %d", aeConvergeSpeedMode, awbConvergeSpeedMode);
LOG3("converge speed: ae %d, awb %d", aeConvergeSpeed, awbConvergeSpeed);
- LOG3("EV:%f, range (%f-%f), step %d/%d", evShift, evRange.min, evRange.max,
- evStep.numerator, evStep.denominator);
- LOG3("manualExpTimeUs:%ld, time range (%f-%f)", manualExpTimeUs,
- exposureTimeRange.min, exposureTimeRange.max);
+ LOG3("EV:%f, range (%f-%f), step %d/%d", evShift, evRange.min, evRange.max, evStep.numerator,
+ evStep.denominator);
+ LOG3("manualExpTimeUs:%ld, time range (%f-%f)", manualExpTimeUs, exposureTimeRange.min,
+ exposureTimeRange.max);
LOG3("manualGain %f, manualIso %d, gain range (%f-%f)", manualGain, manualIso,
sensitivityGainRange.min, sensitivityGainRange.max);
LOG3("FPS %f, range (%f-%f)", fps, aeFpsRange.min, aeFpsRange.max);
- for (auto &region : aeRegions) {
- LOG3("ae region (%d, %d, %d, %d, %d)",
- region.left, region.top, region.right, region.bottom, region.weight);
+ for (auto& region : aeRegions) {
+ LOG3("ae region (%d, %d, %d, %d, %d)", region.left, region.top, region.right, region.bottom,
+ region.weight);
}
LOG3("Antibanding mode:%d", antibandingMode);
LOG3("AE Distribution Priority:%d", aeDistributionPriority);
LOG3("cctRange:(%f-%f)", cctRange.min, cctRange.max);
LOG3("manual awb: white point:(%d,%d)", whitePoint.x, whitePoint.y);
- LOG3("manual awb gain:(%d,%d,%d), gain shift:(%d,%d,%d)",
- awbManualGain.r_gain, awbManualGain.g_gain, awbManualGain.b_gain,
- awbGainShift.r_gain, awbGainShift.g_gain, awbGainShift.b_gain);
+ LOG3("manual awb gain:(%d,%d,%d), gain shift:(%d,%d,%d)", awbManualGain.r_gain,
+ awbManualGain.g_gain, awbManualGain.b_gain, awbGainShift.r_gain, awbGainShift.g_gain,
+ awbGainShift.b_gain);
for (int i = 0; i < 3; i++) {
- LOG3("manual color matrix: [%.3f %.3f %.3f]",
- manualColorMatrix.color_transform[i][0],
- manualColorMatrix.color_transform[i][1],
- manualColorMatrix.color_transform[i][2]);
+ LOG3("manual color matrix: [%.3f %.3f %.3f]", manualColorMatrix.color_transform[i][0],
+ manualColorMatrix.color_transform[i][1], manualColorMatrix.color_transform[i][2]);
}
- LOG3("manual color gains in rggb:(%.3f,%.3f,%.3f,%.3f)",
- manualColorGains.color_gains_rggb[0], manualColorGains.color_gains_rggb[1],
- manualColorGains.color_gains_rggb[2], manualColorGains.color_gains_rggb[3]);
+ LOG3("manual color gains in rggb:(%.3f,%.3f,%.3f,%.3f)", manualColorGains.color_gains_rggb[0],
+ manualColorGains.color_gains_rggb[1], manualColorGains.color_gains_rggb[2],
+ manualColorGains.color_gains_rggb[3]);
- for (auto &region : afRegions) {
- LOG3("af region (%d, %d, %d, %d, %d)",
- region.left, region.top, region.right, region.bottom, region.weight);
+ for (auto& region : afRegions) {
+ LOG3("af region (%d, %d, %d, %d, %d)", region.left, region.top, region.right, region.bottom,
+ region.weight);
}
LOG3("manual focus distance: %f, min focus distance: %f", focusDistance, minFocusDistance);
LOG3("Focus position %d, start timestamp %llu", lensPosition, lensMovementStartTimestamp);
@@ -440,14 +434,14 @@ void aiq_parameter_t::dump() {
LOG3("DVS mode %d", videoStabilizationMode);
LOG3("makernoteMode %d", makernoteMode);
- LOG3("shadingMode %d, lensShadingMapMode %d, size %dx%d", shadingMode,
- lensShadingMapMode, lensShadingMapSize.x, lensShadingMapSize.y);
+ LOG3("shadingMode %d, lensShadingMapMode %d, size %dx%d", shadingMode, lensShadingMapMode,
+ lensShadingMapSize.x, lensShadingMapSize.y);
LOG3("ldcMode %d, rscMode %d, flipMode %d", ldcMode, ldcMode, flipMode);
LOG3("run3ACadence %d", run3ACadence);
- LOG3("tonemap mode %d, preset curve %d, gamma %f, curve points %d",
- tonemapMode, tonemapPresetCurve, tonemapGamma, tonemapCurves.gSize);
+ LOG3("tonemap mode %d, preset curve %d, gamma %f, curve points %d", tonemapMode,
+ tonemapPresetCurve, tonemapGamma, tonemapCurves.gSize);
LOG3("testPatternMode %d", testPatternMode);
LOG3("power mode %d", powerMode);
LOG3("totalExposureTarget %ld", totalExposureTarget);
diff --git a/src/3a/AiqSetting.h b/src/3a/AiqSetting.h
index 6034151a..015f0791 100644
--- a/src/3a/AiqSetting.h
+++ b/src/3a/AiqSetting.h
@@ -28,11 +28,7 @@ namespace icamera {
#define DEFAULT_TONEMAP_CURVE_POINT_NUM 2048
// HDR_FEATURE_S
-typedef enum {
- AEC_SCENE_NONE,
- AEC_SCENE_HDR,
- AEC_SCENE_ULL
-} aec_scene_t;
+typedef enum { AEC_SCENE_NONE, AEC_SCENE_HDR, AEC_SCENE_ULL } aec_scene_t;
// HDR_FEATURE_E
typedef struct {
@@ -129,30 +125,29 @@ struct aiq_parameter_t {
* and return some useful status of aiq results
*/
class AiqSetting {
-
-public:
+ public:
AiqSetting(int cameraId);
~AiqSetting();
int init(void);
int deinit(void);
- int configure(const stream_config_t *streamList);
+ int configure(const stream_config_t* streamList);
int setParameters(const Parameters& params);
- int getAiqParameter(aiq_parameter_t &param);
+ int getAiqParameter(aiq_parameter_t& param);
// HDR_FEATURE_S
void updateTuningMode(aec_scene_t aecScene);
// HDR_FEATURE_E
-private:
- void updateFrameUsage(const stream_config_t *streamList);
+ private:
+ void updateFrameUsage(const stream_config_t* streamList);
-public:
+ public:
int mCameraId;
-private:
+ private:
std::vector<TuningMode> mTuningModes;
aiq_parameter_t mAiqParam;
diff --git a/src/3a/AiqStatistics.h b/src/3a/AiqStatistics.h
index 50295597..6518e0aa 100644
--- a/src/3a/AiqStatistics.h
+++ b/src/3a/AiqStatistics.h
@@ -27,11 +27,11 @@ struct AiqStatistics {
bool mInUse;
bool mPendingDecode;
- AiqStatistics() : mSequence(-1),
- mTimestamp(0),
- mTuningMode(TUNING_MODE_MAX),
- mInUse(false),
- mPendingDecode(false) {}
+ AiqStatistics()
+ : mSequence(-1),
+ mTimestamp(0),
+ mTuningMode(TUNING_MODE_MAX),
+ mInUse(false),
+ mPendingDecode(false) {}
};
} /* namespace icamera */
-
diff --git a/src/3a/AiqUnit.cpp b/src/3a/AiqUnit.cpp
index 7180a553..26c59576 100644
--- a/src/3a/AiqUnit.cpp
+++ b/src/3a/AiqUnit.cpp
@@ -28,17 +28,17 @@
namespace icamera {
-AiqUnit::AiqUnit(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw) :
- mCameraId(cameraId),
- // LOCAL_TONEMAP_S
- mLtm(nullptr),
- // LOCAL_TONEMAP_E
- mAiqUnitState(AIQ_UNIT_NOT_INIT),
- // INTEL_DVS_S
- mDvs(nullptr),
- // INTEL_DVS_S
- mCcaInitialized(false),
- mActiveStreamCount(0) {
+AiqUnit::AiqUnit(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw)
+ : mCameraId(cameraId),
+ // LOCAL_TONEMAP_S
+ mLtm(nullptr),
+ // LOCAL_TONEMAP_E
+ mAiqUnitState(AIQ_UNIT_NOT_INIT),
+ // INTEL_DVS_S
+ mDvs(nullptr),
+ // INTEL_DVS_S
+ mCcaInitialized(false),
+ mActiveStreamCount(0) {
mAiqSetting = new AiqSetting(cameraId);
mAiqEngine = new AiqEngine(cameraId, sensorHw, lensHw, mAiqSetting);
@@ -122,7 +122,7 @@ int AiqUnit::deinit() {
return OK;
}
-int AiqUnit::configure(const stream_config_t *streamList) {
+int AiqUnit::configure(const stream_config_t* streamList) {
CheckAndLogError(streamList == nullptr, BAD_VALUE, "streamList is nullptr");
AutoMutex l(mAiqUnitLock);
@@ -135,8 +135,7 @@ int AiqUnit::configure(const stream_config_t *streamList) {
}
std::vector<ConfigMode> configModes;
- PlatformData::getConfigModesByOperationMode(mCameraId, streamList->operation_mode,
- configModes);
+ PlatformData::getConfigModesByOperationMode(mCameraId, streamList->operation_mode, configModes);
int ret = initIntelCcaHandle(configModes);
CheckAndLogError(ret < 0, BAD_VALUE, "@%s failed to create intel cca handle", __func__);
@@ -146,18 +145,11 @@ int AiqUnit::configure(const stream_config_t *streamList) {
ret = mAiqEngine->configure();
CheckAndLogError(ret != OK, ret, "configure AIQ engine error: %d", ret);
- // LOCAL_TONEMAP_S
- if (mLtm) {
- ret = mLtm->configure(configModes);
- CheckAndLogError(ret != OK, ret, "configure LTM engine error: %d", ret);
- }
- // LOCAL_TONEMAP_E
-
mAiqUnitState = AIQ_UNIT_CONFIGURED;
return OK;
}
-int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
+int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode>& configModes) {
if (PlatformData::supportUpdateTuning() && !configModes.empty()) {
std::shared_ptr<IGraphConfig> graphConfig =
IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(configModes[0]);
@@ -178,7 +170,7 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
LOG1("<id%d>@%s", mCameraId, __func__);
mTuningModes.clear();
- for (auto &cfg : configModes) {
+ for (auto& cfg : configModes) {
TuningMode tuningMode;
int ret = PlatformData::getTuningModeByConfigMode(mCameraId, cfg, tuningMode);
CheckAndLogError(ret != OK, ret, "%s: Failed to get tuningMode, cfg: %d", __func__, cfg);
@@ -191,8 +183,8 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
ret = PlatformData::getCpf(mCameraId, tuningMode, &cpfData);
if (ret == OK && cpfData.data) {
CheckAndLogError(cpfData.size > cca::MAX_CPF_LEN, UNKNOWN_ERROR,
- "%s, AIQB buffer is too small cpfData:%d > MAX_CPF_LEN:%d",
- __func__, cpfData.size, cca::MAX_CPF_LEN);
+ "%s, AIQB buffer is too small cpfData:%d > MAX_CPF_LEN:%d", __func__,
+ cpfData.size, cca::MAX_CPF_LEN);
MEMCPY_S(params.aiq_cpf.buf, cca::MAX_CPF_LEN, cpfData.data, cpfData.size);
params.aiq_cpf.size = cpfData.size;
}
@@ -200,9 +192,9 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
// Initialize cca_nvm data
ia_binary_data* nvmData = PlatformData::getNvm(mCameraId);
if (nvmData) {
- CheckAndLogError(nvmData->size > cca::MAX_NVM_LEN, UNKNOWN_ERROR,
- "%s, NVM buffer is too small: nvmData:%d MAX_NVM_LEN:%d",
- __func__, nvmData->size, cca::MAX_NVM_LEN);
+ CheckAndLogError(nvmData->size > cca::MAX_NVM_LEN, UNKNOWN_ERROR,
+ "%s, NVM buffer is too small: nvmData:%d MAX_NVM_LEN:%d", __func__,
+ nvmData->size, cca::MAX_NVM_LEN);
MEMCPY_S(params.aiq_nvm.buf, cca::MAX_NVM_LEN, nvmData->data, nvmData->size);
params.aiq_nvm.size = nvmData->size;
}
@@ -210,9 +202,9 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
// Initialize cca_aiqd data
ia_binary_data* aiqdData = PlatformData::getAiqd(mCameraId, tuningMode);
if (aiqdData) {
- CheckAndLogError(aiqdData->size > cca::MAX_AIQD_LEN, UNKNOWN_ERROR,
- "%s, AIQD buffer is too small aiqdData:%d > MAX_AIQD_LEN:%d",
- __func__, aiqdData->size, cca::MAX_AIQD_LEN);
+ CheckAndLogError(aiqdData->size > cca::MAX_AIQD_LEN, UNKNOWN_ERROR,
+ "%s, AIQD buffer is too small aiqdData:%d > MAX_AIQD_LEN:%d", __func__,
+ aiqdData->size, cca::MAX_AIQD_LEN);
MEMCPY_S(params.aiq_aiqd.buf, cca::MAX_AIQD_LEN, aiqdData->data, aiqdData->size);
params.aiq_aiqd.size = aiqdData->size;
}
@@ -228,13 +220,15 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
params.aecFrameDelay = 0;
// Initialize functions which need to be started
- params.bitmap = cca::CCA_MODULE_AE | cca::CCA_MODULE_AWB |
- cca::CCA_MODULE_PA | cca::CCA_MODULE_SA | cca::CCA_MODULE_GBCE |
- cca::CCA_MODULE_LARD;
+ params.bitmap = cca::CCA_MODULE_AE | cca::CCA_MODULE_AWB | cca::CCA_MODULE_PA |
+ cca::CCA_MODULE_SA | cca::CCA_MODULE_GBCE | cca::CCA_MODULE_LARD;
if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) {
params.bitmap |= cca::CCA_MODULE_AF;
}
+ std::shared_ptr<IGraphConfig> graphConfig =
+ IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg);
+
// LOCAL_TONEMAP_S
bool hasLtm = PlatformData::isLtmEnabled(mCameraId);
// HDR_FEATURE_S
@@ -245,17 +239,26 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
// HDR_FEATURE_E
// DOL_FEATURE_S
- hasLtm |= (PlatformData::isDolShortEnabled(mCameraId)
- || PlatformData::isDolMediumEnabled(mCameraId));
+ hasLtm |= (PlatformData::isDolShortEnabled(mCameraId) ||
+ PlatformData::isDolMediumEnabled(mCameraId));
// DOL_FEATURE_E
-
- if (hasLtm) {
+ if (hasLtm && mLtm) {
params.bitmap |= cca::CCA_MODULE_LTM;
+ ret = mLtm->configure(configModes, graphConfig, VIDEO_STREAM_ID);
+ CheckAndLogError(ret != OK, ret, "configure LTM engine error: %d", ret);
}
// LOCAL_TONEMAP_E
// INTEL_DVS_S
if (mDvs) {
+ std::vector<int32_t> streamIds;
+ if (graphConfig != nullptr) {
+ graphConfig->graphGetStreamIds(streamIds);
+ }
+ params.dvs_ids.count = streamIds.size();
+ for (size_t i = 0; i < streamIds.size(); ++i) {
+ params.dvs_ids.ids[i] = streamIds[i];
+ }
ret = mDvs->configure(cfg, &params);
CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, configure DVS error", __func__);
params.bitmap |= cca::CCA_MODULE_DVS;
@@ -265,10 +268,8 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
// DOL_FEATURE_S
// Initialize Bcomp params
if (PlatformData::isDolShortEnabled(mCameraId) ||
- PlatformData::isDolMediumEnabled(mCameraId)) {
+ PlatformData::isDolMediumEnabled(mCameraId)) {
// Parse the DOL mode and CG ratio from sensor mode config
- std::shared_ptr<IGraphConfig> graphConfig =
- IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg);
if (graphConfig != nullptr) {
std::string dol_mode_name;
graphConfig->getDolInfo(params.conversionGainRatio, dol_mode_name);
@@ -281,35 +282,33 @@ int AiqUnit::initIntelCcaHandle(const std::vector<ConfigMode> &configModes) {
params.dolMode = dolModeNameMap[dol_mode_name];
}
}
- LOG2("conversionGainRatio: %f, dolMode: %d",
- params.conversionGainRatio, params.dolMode);
+ LOG2("conversionGainRatio: %f, dolMode: %d", params.conversionGainRatio,
+ params.dolMode);
params.bitmap = params.bitmap | cca::CCA_MODULE_BCOM;
} else if (PlatformData::getSensorAeEnable(mCameraId)) {
params.conversionGainRatio = 1;
params.dolMode = ia_bcomp_linear_hdr_mode;
- LOG2("WA: conversionGainRatio: %f, dolMode: %d",
- params.conversionGainRatio, params.dolMode);
+ LOG2("WA: conversionGainRatio: %f, dolMode: %d", params.conversionGainRatio,
+ params.dolMode);
params.bitmap = params.bitmap | cca::CCA_MODULE_BCOM;
}
// DOL_FEATURE_E
if (PlatformData::supportUpdateTuning()) {
- std::shared_ptr<IGraphConfig> graphConfig =
- IGraphConfigManager::getInstance(mCameraId)->getGraphConfig(cfg);
if (graphConfig != nullptr) {
std::vector<int32_t> streamIds;
graphConfig->graphGetStreamIds(streamIds);
params.aic_stream_ids.count = streamIds.size();
CheckAndLogError(streamIds.size() > cca::MAX_STREAM_NUM, UNKNOWN_ERROR,
- "%s, Too many streams: %zu in graph", __func__, streamIds.size());
+ "%s, Too many streams: %zu in graph", __func__, streamIds.size());
for (size_t i = 0; i < streamIds.size(); ++i) {
params.aic_stream_ids.ids[i] = streamIds[i];
}
}
}
- IntelCca *intelCca = IntelCca::getInstance(mCameraId, tuningMode);
- CheckAndLogError(!intelCca, UNKNOWN_ERROR,
- "Failed to get cca. mode:%d cameraId:%d", tuningMode, mCameraId);
+ IntelCca* intelCca = IntelCca::getInstance(mCameraId, tuningMode);
+ CheckAndLogError(!intelCca, UNKNOWN_ERROR, "Failed to get cca. mode:%d cameraId:%d",
+ tuningMode, mCameraId);
ia_err iaErr = intelCca->init(params);
if (iaErr == ia_err_none) {
mTuningModes.push_back(tuningMode);
@@ -332,8 +331,8 @@ void AiqUnit::deinitIntelCcaHandle() {
if (!mCcaInitialized) return;
LOG1("<id%d>@%s", mCameraId, __func__);
- for (auto &mode : mTuningModes) {
- IntelCca *intelCca = IntelCca::getInstance(mCameraId, mode);
+ for (auto& mode : mTuningModes) {
+ IntelCca* intelCca = IntelCca::getInstance(mCameraId, mode);
CheckAndLogError(!intelCca, VOID_VALUE, "%s, Failed to get cca: mode(%d), cameraId(%d)",
__func__, mode, mCameraId);
@@ -431,15 +430,20 @@ std::vector<EventListener*> AiqUnit::getStatsEventListener() {
eventListenerList.push_back(mLtm);
}
// LOCAL_TONEMAP_E
- // INTEL_DVS_S
- if (mDvs) {
- eventListenerList.push_back(mDvs);
- }
- // INTEL_DVS_E
+
+ return eventListenerList;
+}
+
+// INTEL_DVS_S
+std::vector<EventListener*> AiqUnit::getDVSEventListener() {
+ AutoMutex l(mAiqUnitLock);
+ std::vector<EventListener*> eventListenerList;
+ if (mDvs) eventListenerList.push_back(mDvs);
return eventListenerList;
}
+// INTEL_DVS_E
-int AiqUnit::setParameters(const Parameters &params) {
+int AiqUnit::setParameters(const Parameters& params) {
AutoMutex l(mAiqUnitLock);
if (mDvs) {
mDvs->setParameter(params);
diff --git a/src/3a/AiqUnit.h b/src/3a/AiqUnit.h
index b2b5a217..13c45d07 100644
--- a/src/3a/AiqUnit.h
+++ b/src/3a/AiqUnit.h
@@ -43,41 +43,42 @@ class LensHw;
* This class is used for upper layer to control 3a engine.
*/
-class AiqUnitBase{
-
-public:
+class AiqUnitBase {
+ public:
AiqUnitBase() {}
virtual ~AiqUnitBase() {}
virtual int init() { return OK; }
virtual int deinit() { return OK; }
- virtual int configure(const stream_config_t * /*streamList*/) { return OK; }
+ virtual int configure(const stream_config_t* /*streamList*/) { return OK; }
virtual int start() { return OK; }
virtual int stop() { return OK; }
- virtual int run3A(long request, int64_t applyingSeq, int64_t * /*effectSeq*/) { return OK; }
+ virtual int run3A(long request, int64_t applyingSeq, int64_t* /*effectSeq*/) { return OK; }
- virtual std::vector<EventListener*> getSofEventListener()
- {
+ virtual std::vector<EventListener*> getSofEventListener() {
std::vector<EventListener*> eventListenerList;
return eventListenerList;
}
- virtual std::vector<EventListener*> getStatsEventListener()
- {
+ virtual std::vector<EventListener*> getStatsEventListener() {
std::vector<EventListener*> eventListenerList;
return eventListenerList;
}
+ // INTEL_DVS_S
+ virtual std::vector<EventListener*> getDVSEventListener() {
+ std::vector<EventListener*> eventListenerList;
+ return eventListenerList;
+ }
+ // INTEL_DVS_E
- virtual int setParameters(const Parameters & /*params*/) { return OK; }
+ virtual int setParameters(const Parameters& /*params*/) { return OK; }
-private:
+ private:
DISALLOW_COPY_AND_ASSIGN(AiqUnitBase);
-
};
class AiqUnit : public AiqUnitBase {
-
-public:
- AiqUnit(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw);
+ public:
+ AiqUnit(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw);
~AiqUnit();
/**
@@ -93,7 +94,7 @@ class AiqUnit : public AiqUnitBase {
/**
* \brief configure 3a engine with stream configuration
*/
- int configure(const stream_config_t *streamList);
+ int configure(const stream_config_t* streamList);
/**
* \brief Start 3a Engine
@@ -128,25 +129,32 @@ class AiqUnit : public AiqUnitBase {
*/
std::vector<EventListener*> getStatsEventListener();
+ // INTEL_DVS_S
+ /**
+ * \brief Get DVS EventListener
+ */
+ std::vector<EventListener*> getDVSEventListener();
+ // INTEL_DVS_E
+
/**
* \brief Set 3A Parameters
*
* \param params: the Parameters update to 3A
*/
- int setParameters(const Parameters &params);
+ int setParameters(const Parameters& params);
-private:
+ private:
DISALLOW_COPY_AND_ASSIGN(AiqUnit);
-private:
- int initIntelCcaHandle(const std::vector<ConfigMode> &configModes);
+ private:
+ int initIntelCcaHandle(const std::vector<ConfigMode>& configModes);
void deinitIntelCcaHandle();
void dumpCcaInitParam(const cca::cca_init_params params);
-private:
+ private:
int mCameraId;
// LOCAL_TONEMAP_S
- Ltm *mLtm;
+ Ltm* mLtm;
// LOCAL_TONEMAP_E
enum AiqUnitState {
AIQ_UNIT_NOT_INIT = 0,
@@ -158,10 +166,10 @@ class AiqUnit : public AiqUnitBase {
} mAiqUnitState;
// INTEL_DVS_S
- Dvs *mDvs;
+ Dvs* mDvs;
// INTEL_DVS_E
- AiqEngine *mAiqEngine;
- AiqSetting *mAiqSetting;
+ AiqEngine* mAiqEngine;
+ AiqSetting* mAiqSetting;
// Guard for AiqUnit public API.
Mutex mAiqUnitLock;
@@ -172,4 +180,3 @@ class AiqUnit : public AiqUnitBase {
};
} /* namespace icamera */
-
diff --git a/src/3a/AiqUtils.cpp b/src/3a/AiqUtils.cpp
index ca369f28..e0f3dbc0 100644
--- a/src/3a/AiqUtils.cpp
+++ b/src/3a/AiqUtils.cpp
@@ -46,7 +46,7 @@ void AiqUtils::dumpAeResults(const cca::cca_ae_results& aeResult) {
aeResult.exposures[i].exposure[0].aperture_fn,
aeResult.exposures[i].exposure[0].exposure_time_us,
aeResult.exposures[i].exposure[0].total_target_exposure,
- aeResult.exposures[i].exposure[0].nd_filter_enabled? "YES": "NO",
+ aeResult.exposures[i].exposure[0].nd_filter_enabled ? "YES" : "NO",
aeResult.exposures[i].exposure[0].iso,
aeResult.exposures[i].exposure[0].low_limit_total_exposure,
aeResult.exposures[i].exposure[0].up_limit_total_exposure);
@@ -62,13 +62,13 @@ void AiqUtils::dumpAeResults(const cca::cca_ae_results& aeResult) {
if (wg.width != 0 && wg.height != 0) {
LOG3("AE weight grid [%dx%d]", wg.width, wg.height);
for (int i = 0; i < 5 && i < wg.height; i++) {
- LOG3("AE weight_grid[%d] = %d ", wg.width/2, wg.weights[wg.width/2]);
+ LOG3("AE weight_grid[%d] = %d ", wg.width / 2, wg.weights[wg.width / 2]);
}
}
const ia_aiq_aperture_control& ac = aeResult.aperture_control;
- LOG3("AE aperture fn = %f, iris command = %d, code = %d",
- ac.aperture_fn, ac.dc_iris_command, ac.code);
+ LOG3("AE aperture fn = %f, iris command = %d, code = %d", ac.aperture_fn, ac.dc_iris_command,
+ ac.code);
}
void AiqUtils::dumpAfResults(const cca::cca_af_results& afResult) {
@@ -76,24 +76,24 @@ void AiqUtils::dumpAfResults(const cca::cca_af_results& afResult) {
LOG3("AF results: current/next dis %d/%d, next pos %d, final_position_reached %s, status %d",
afResult.current_focus_distance, afResult.next_focus_distance, afResult.next_lens_position,
- afResult.final_lens_position_reached ? "TRUE":"FALSE", afResult.status);
+ afResult.final_lens_position_reached ? "TRUE" : "FALSE", afResult.status);
switch (afResult.status) {
- case ia_aiq_af_status_local_search:
- LOG3("AF result state _local_search");
- break;
- case ia_aiq_af_status_extended_search:
- LOG3("AF result state extended_search");
- break;
- case ia_aiq_af_status_success:
- LOG3("AF state success");
- break;
- case ia_aiq_af_status_fail:
- LOG3("AF state fail");
- break;
- case ia_aiq_af_status_idle:
- default:
- LOG3("AF state idle");
+ case ia_aiq_af_status_local_search:
+ LOG3("AF result state _local_search");
+ break;
+ case ia_aiq_af_status_extended_search:
+ LOG3("AF result state extended_search");
+ break;
+ case ia_aiq_af_status_success:
+ LOG3("AF state success");
+ break;
+ case ia_aiq_af_status_fail:
+ LOG3("AF state fail");
+ break;
+ case ia_aiq_af_status_idle:
+ default:
+ LOG3("AF state idle");
}
}
@@ -108,22 +108,22 @@ void AiqUtils::dumpAwbResults(const cca::cca_awb_results& awbResult) {
void AiqUtils::dumpGbceResults(const cca::cca_gbce_params& gbceResult) {
if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return;
- LOG3("gamma_lut_size: %u, tone_map_lut_size: %u",
- gbceResult.gamma_lut_size, gbceResult.tone_map_lut_size);
+ LOG3("gamma_lut_size: %u, tone_map_lut_size: %u", gbceResult.gamma_lut_size,
+ gbceResult.tone_map_lut_size);
if (gbceResult.gamma_lut_size <= 0 || gbceResult.tone_map_lut_size <= 0) return;
LOG3("gamma table: R: 0(%f), %u(%f), %u(%f)", gbceResult.r_gamma_lut[0],
(gbceResult.gamma_lut_size / 2), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size / 2],
- (gbceResult.gamma_lut_size - 1), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size - 1]);
+ (gbceResult.gamma_lut_size - 1), gbceResult.r_gamma_lut[gbceResult.gamma_lut_size - 1]);
LOG3("gamma table: G: 0(%f), %u(%f), %u(%f)", gbceResult.g_gamma_lut[0],
(gbceResult.gamma_lut_size / 2), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size / 2],
- (gbceResult.gamma_lut_size - 1), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size - 1]);
+ (gbceResult.gamma_lut_size - 1), gbceResult.g_gamma_lut[gbceResult.gamma_lut_size - 1]);
LOG3("gamma table: B: 0(%f), %u(%f), %u(%f)", gbceResult.b_gamma_lut[0],
(gbceResult.gamma_lut_size / 2), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size / 2],
- (gbceResult.gamma_lut_size - 1), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size - 1]);
+ (gbceResult.gamma_lut_size - 1), gbceResult.b_gamma_lut[gbceResult.gamma_lut_size - 1]);
LOG3("tonemap table: 0(%f), %u(%f), %u(%f)", gbceResult.tone_map_lut[0],
(gbceResult.tone_map_lut_size / 2),
@@ -136,47 +136,44 @@ void AiqUtils::dumpPaResults(const cca::cca_pa_params& paResult) {
if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return;
for (int i = 0; i < 3; i++) {
- LOG3("color_conversion_matrix [%.4f %.4f %.4f] ",
- paResult.color_conversion_matrix[i][0],
- paResult.color_conversion_matrix[i][1],
- paResult.color_conversion_matrix[i][2]);
+ LOG3("color_conversion_matrix [%.4f %.4f %.4f] ", paResult.color_conversion_matrix[i][0],
+ paResult.color_conversion_matrix[i][1], paResult.color_conversion_matrix[i][2]);
}
- LOG3("color_gains, gr:%f, r:%f, b:%f, gb:%f",
- paResult.color_gains.gr, paResult.color_gains.r,
+ LOG3("color_gains, gr:%f, r:%f, b:%f, gb:%f", paResult.color_gains.gr, paResult.color_gains.r,
paResult.color_gains.b, paResult.color_gains.gb);
}
void AiqUtils::dumpSaResults(const cca::cca_sa_results& saResult) {
if (!Log::isLogTagEnabled(GET_FILE_SHIFT(AiqUtils), CAMERA_DEBUG_LOG_LEVEL3)) return;
- LOG3("SA results color_order %d size %dx%d",
- saResult.color_order, saResult.width, saResult.height);
+ LOG3("SA results color_order %d size %dx%d", saResult.color_order, saResult.width,
+ saResult.height);
}
int AiqUtils::convertError(ia_err iaErr) {
switch (iaErr) {
- case ia_err_none:
- return OK;
- case ia_err_general:
- return UNKNOWN_ERROR;
- case ia_err_nomemory:
- return NO_MEMORY;
- case ia_err_data:
- return BAD_VALUE;
- case ia_err_internal:
- return INVALID_OPERATION;
- case ia_err_argument:
- return BAD_VALUE;
- default:
- return UNKNOWN_ERROR;
+ case ia_err_none:
+ return OK;
+ case ia_err_general:
+ return UNKNOWN_ERROR;
+ case ia_err_nomemory:
+ return NO_MEMORY;
+ case ia_err_data:
+ return BAD_VALUE;
+ case ia_err_internal:
+ return INVALID_OPERATION;
+ case ia_err_argument:
+ return BAD_VALUE;
+ default:
+ return UNKNOWN_ERROR;
}
}
/**
* Convert SensorFrameParams defined in PlatformData to ia_aiq_frame_params in aiq
*/
-void AiqUtils::convertToAiqFrameParam(const SensorFrameParams &sensor, ia_aiq_frame_params &aiq) {
+void AiqUtils::convertToAiqFrameParam(const SensorFrameParams& sensor, ia_aiq_frame_params& aiq) {
aiq.cropped_image_height = sensor.cropped_image_height;
aiq.cropped_image_width = sensor.cropped_image_width;
aiq.horizontal_crop_offset = sensor.horizontal_crop_offset;
@@ -214,17 +211,17 @@ camera_window_t AiqUtils::convertToIaWindow(const camera_coordinate_system_t& sr
const camera_window_t& srcWindow) {
camera_coordinate_t leftTop;
camera_coordinate_t rightBottom;
- leftTop.x = srcWindow.left;
- leftTop.y = srcWindow.top;
+ leftTop.x = srcWindow.left;
+ leftTop.y = srcWindow.top;
rightBottom.x = srcWindow.right;
rightBottom.y = srcWindow.bottom;
- leftTop = convertToIaCoordinate(srcSystem, leftTop);
- rightBottom = convertToIaCoordinate(srcSystem, rightBottom);
+ leftTop = convertToIaCoordinate(srcSystem, leftTop);
+ rightBottom = convertToIaCoordinate(srcSystem, rightBottom);
camera_window_t result;
- result.left = leftTop.x;
- result.top = leftTop.y;
- result.right = rightBottom.x;
+ result.left = leftTop.x;
+ result.top = leftTop.y;
+ result.right = rightBottom.x;
result.bottom = rightBottom.y;
result.weight = srcWindow.weight;
return result;
@@ -235,14 +232,14 @@ camera_window_t AiqUtils::convertToIaWindow(const camera_coordinate_system_t& sr
*/
float AiqUtils::normalizeAwbGain(int gain) {
gain = CLIP(gain, AWB_GAIN_MAX, AWB_GAIN_MIN);
- return AWB_GAIN_NORMALIZED_START + (float)(gain - AWB_GAIN_MIN) * \
- AWB_GAIN_RANGE_NORMALIZED / AWB_GAIN_RANGE_USER;
+ return AWB_GAIN_NORMALIZED_START + static_cast<float>(gain - AWB_GAIN_MIN) *
+ AWB_GAIN_RANGE_NORMALIZED / AWB_GAIN_RANGE_USER;
}
int AiqUtils::convertToUserAwbGain(float normalizedGain) {
normalizedGain = CLIP(normalizedGain, AWB_GAIN_NORMALIZED_START, AWB_GAIN_NORMALIZED_END);
- return AWB_GAIN_MIN + (normalizedGain - AWB_GAIN_NORMALIZED_START) * \
- AWB_GAIN_RANGE_USER / AWB_GAIN_RANGE_NORMALIZED;
+ return AWB_GAIN_MIN + (normalizedGain - AWB_GAIN_NORMALIZED_START) * AWB_GAIN_RANGE_USER /
+ AWB_GAIN_RANGE_NORMALIZED;
}
float AiqUtils::convertSpeedModeToTime(camera_converge_speed_t mode) {
@@ -317,10 +314,10 @@ void AiqUtils::applyTonemapGamma(float gamma, cca::cca_gbce_params* results) {
results->g_gamma_lut[i] = pow(i / static_cast<float>(lutSize), 1 / gamma);
}
- MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
- MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
+ MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
+ MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
}
void AiqUtils::applyTonemapSRGB(cca::cca_gbce_params* results) {
@@ -330,17 +327,17 @@ void AiqUtils::applyTonemapSRGB(cca::cca_gbce_params* results) {
CheckAndLogError(lutSize < MIN_TONEMAP_POINTS, VOID_VALUE,
"Bad gamma lut size (%d) in gbce results", lutSize);
for (int i = 0; i < lutSize; i++) {
- if (i / (lutSize - 1) < 0.0031308)
+ if (i / (lutSize - 1) < 0.0031308)
results->g_gamma_lut[i] = 12.92 * (i / (lutSize - 1));
else
results->g_gamma_lut[i] =
- 1.055 * pow(i / static_cast<float>(lutSize - 1), 1 / 2.4) - 0.055;
+ 1.055 * pow(i / static_cast<float>(lutSize - 1), 1 / 2.4) - 0.055;
}
- MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
- MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
+ MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
+ MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
}
void AiqUtils::applyTonemapREC709(cca::cca_gbce_params* results) {
@@ -354,13 +351,13 @@ void AiqUtils::applyTonemapREC709(cca::cca_gbce_params* results) {
results->g_gamma_lut[i] = 4.5 * (i / (lutSize - 1));
else
results->g_gamma_lut[i] =
- 1.099 * pow(i / static_cast<float>(lutSize - 1), 0.45) - 0.099;
+ 1.099 * pow(i / static_cast<float>(lutSize - 1), 0.45) - 0.099;
}
- MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
- MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float),
- results->g_gamma_lut, lutSize * sizeof(float));
+ MEMCPY_S(results->b_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
+ MEMCPY_S(results->r_gamma_lut, lutSize * sizeof(float), results->g_gamma_lut,
+ lutSize * sizeof(float));
}
void AiqUtils::applyTonemapCurve(const camera_tonemap_curves_t& curves,
@@ -383,12 +380,12 @@ void AiqUtils::applyTonemapCurve(const camera_tonemap_curves_t& curves,
results->g_gamma_lut[i] = curves.gCurve[left];
results->b_gamma_lut[i] = curves.bCurve[left];
} else {
- results->r_gamma_lut[i] = curves.rCurve[left]
- + ratio * (curves.rCurve[right] - curves.rCurve[left]);
- results->g_gamma_lut[i] = curves.gCurve[left]
- + ratio * (curves.gCurve[right] - curves.gCurve[left]);
- results->b_gamma_lut[i] = curves.bCurve[left]
- + ratio * (curves.bCurve[right] - curves.bCurve[left]);
+ results->r_gamma_lut[i] =
+ curves.rCurve[left] + ratio * (curves.rCurve[right] - curves.rCurve[left]);
+ results->g_gamma_lut[i] =
+ curves.gCurve[left] + ratio * (curves.gCurve[right] - curves.gCurve[left]);
+ results->b_gamma_lut[i] =
+ curves.bCurve[left] + ratio * (curves.bCurve[right] - curves.bCurve[left]);
}
}
}
@@ -419,16 +416,16 @@ void AiqUtils::applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves
float maxAverage = std::max(averageR, averageG);
maxAverage = std::max(maxAverage, averageB);
if (maxAverage - minAverage > EPSILON) {
- averageR = AWB_GAIN_NORMALIZED_START + (averageR - minAverage) * \
- AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
- averageG = AWB_GAIN_NORMALIZED_START + (averageG - minAverage) * \
- AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
- averageB = AWB_GAIN_NORMALIZED_START + (averageB - minAverage) * \
- AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
+ averageR = AWB_GAIN_NORMALIZED_START +
+ (averageR - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
+ averageG = AWB_GAIN_NORMALIZED_START +
+ (averageG - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
+ averageB = AWB_GAIN_NORMALIZED_START +
+ (averageB - minAverage) * AWB_GAIN_RANGE_NORMALIZED / (maxAverage - minAverage);
results->accurate_r_per_g = averageR / averageG;
results->accurate_b_per_g = averageB / averageG;
- LOG2("%s: overwrite awb gain %f %f", __func__,
- results->accurate_r_per_g, results->accurate_b_per_g);
+ LOG2("%s: overwrite awb gain %f %f", __func__, results->accurate_r_per_g,
+ results->accurate_b_per_g);
}
}
@@ -454,7 +451,7 @@ void AiqUtils::applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves
* avoid division by 0. If any of the required CMC items is missing
* it will return the default value 5m
*/
-float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc &cmc) {
+float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc& cmc) {
const float DEFAULT_HYPERFOCAL_DISTANCE = 5000.0f;
// Pixel size is stored in CMC in hundreds of micrometers
@@ -473,8 +470,8 @@ float AiqUtils::calculateHyperfocalDistance(const cca::cca_cmc &cmc) {
// assuming square pixel
const int CIRCLE_OF_CONFUSION_IN_PIXELS = 2;
float cocMicros = pixelSizeMicro * CIRCLE_OF_CONFUSION_IN_PIXELS;
- float hyperfocalDistanceMillis = 1000 * (focalLengthMillis * focalLengthMillis) /
- (fNumber * cocMicros);
+ float hyperfocalDistanceMillis =
+ 1000 * (focalLengthMillis * focalLengthMillis) / (fNumber * cocMicros);
return (hyperfocalDistanceMillis == 0.0f) ? DEFAULT_HYPERFOCAL_DISTANCE :
hyperfocalDistanceMillis;
diff --git a/src/3a/AiqUtils.h b/src/3a/AiqUtils.h
index ffb57105..44810048 100644
--- a/src/3a/AiqUtils.h
+++ b/src/3a/AiqUtils.h
@@ -121,13 +121,12 @@ void applyAwbGainForTonemapCurve(const camera_tonemap_curves_t& curves,
* in a_dst_w width of the output array
* in a_dst_h height of the output array
*/
-template int resize2dArray(
- const T* a_src, int a_src_w, int a_src_h,
- T* a_dst, int a_dst_w, int a_dst_h) {
+template
+int resize2dArray(const T* a_src, int a_src_w, int a_src_h, T* a_dst, int a_dst_w, int a_dst_h) {
int i, j, step_size_w, step_size_h, rounding_term;
if (a_src_w < 2 || a_dst_w < 2 || a_src_h < 2 || a_dst_h < 2) {
- return -1;
+ return -1;
}
nsecs_t startTime = CameraUtils::systemTime();
step_size_w = ((a_src_w - 1) << FRAC_BITS_CURR_LOC) / (a_dst_w - 1);
@@ -145,19 +144,20 @@ template int resize2dArray(
curr_loc_lower_w = (curr_loc_w > 0) ? (curr_loc_w - 1) >> FRAC_BITS_CURR_LOC : 0;
a_dst[a_dst_w * j + i] =
- (a_src[curr_loc_lower_w + curr_loc_lower_h * a_src_w] *
- (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) *
- (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) +
- a_src[curr_loc_lower_w + 1 + curr_loc_lower_h * a_src_w] *
- (curr_loc_w-((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) *
- (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) +
- a_src[curr_loc_lower_w + (curr_loc_lower_h + 1) * a_src_w] *
- (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) *
- (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) +
- a_src[curr_loc_lower_w + 1 + (curr_loc_lower_h + 1) * a_src_w] *
- (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) *
- (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC))
- + rounding_term) / (FRAC_BASE * FRAC_BASE);
+ (a_src[curr_loc_lower_w + curr_loc_lower_h * a_src_w] *
+ (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) *
+ (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) +
+ a_src[curr_loc_lower_w + 1 + curr_loc_lower_h * a_src_w] *
+ (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) *
+ (((curr_loc_lower_h + 1) << FRAC_BITS_CURR_LOC) - curr_loc_h) +
+ a_src[curr_loc_lower_w + (curr_loc_lower_h + 1) * a_src_w] *
+ (((curr_loc_lower_w + 1) << FRAC_BITS_CURR_LOC) - curr_loc_w) *
+ (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) +
+ a_src[curr_loc_lower_w + 1 + (curr_loc_lower_h + 1) * a_src_w] *
+ (curr_loc_w - ((curr_loc_lower_w) << FRAC_BITS_CURR_LOC)) *
+ (curr_loc_h - ((curr_loc_lower_h) << FRAC_BITS_CURR_LOC)) +
+ rounding_term) /
+ (FRAC_BASE * FRAC_BASE);
}
}
LOG2("resize the 2D array cost %dus",
@@ -166,16 +166,13 @@ template int resize2dArray(
return 0;
}
-template int resize2dArray(
- const float* a_src, int a_src_w, int a_src_h,
- float* a_dst, int a_dst_w, int a_dst_h);
-template int resize2dArray(
- const unsigned short* a_src, int a_src_w, int a_src_h,
- unsigned short* a_dst, int a_dst_w, int a_dst_h);
-template int resize2dArray(
- const int* a_src, int a_src_w, int a_src_h,
- int* a_dst, int a_dst_w, int a_dst_h);
-
-float calculateHyperfocalDistance(const cca::cca_cmc &cmc);
+template int resize2dArray(const float* a_src, int a_src_w, int a_src_h, float* a_dst,
+ int a_dst_w, int a_dst_h);
+template int resize2dArray(const unsigned short* a_src, int a_src_w, int a_src_h,
+ unsigned short* a_dst, int a_dst_w, int a_dst_h);
+template int resize2dArray(const int* a_src, int a_src_w, int a_src_h, int* a_dst, int a_dst_w,
+ int a_dst_h);
+
+float calculateHyperfocalDistance(const cca::cca_cmc& cmc);
} // namespace AiqUtils
} // namespace icamera
diff --git a/src/3a/Dvs.cpp b/src/3a/Dvs.cpp
index 74680dfe..1ac14ecb 100644
--- a/src/3a/Dvs.cpp
+++ b/src/3a/Dvs.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2017-2021 Intel Corporation.
+ * Copyright (C) 2017-2022 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -39,17 +39,14 @@ const int DVS_OXDIM_UV = 64;
const int DVS_OYDIM_UV = 16;
const int DVS_MIN_ENVELOPE = 12;
-Dvs::Dvs(int cameraId)
- : mCameraId(cameraId),
- mTuningMode(TUNING_MODE_VIDEO) {
+Dvs::Dvs(int cameraId) : mCameraId(cameraId), mTuningMode(TUNING_MODE_VIDEO) {
CLEAR(mPtzRegion);
CLEAR(mGDCRegion);
}
-Dvs::~Dvs() {
-}
+Dvs::~Dvs() {}
-int Dvs::configure(const ConfigMode configMode, cca::cca_init_params *params) {
+int Dvs::configure(const ConfigMode configMode, cca::cca_init_params* params) {
LOG2("@%s", __func__);
int ret = configCcaDvsData(configMode, params);
@@ -64,11 +61,11 @@ int Dvs::configure(const ConfigMode configMode, cca::cca_init_params *params) {
return OK;
}
-int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *params) {
+int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params* params) {
// update GC
std::shared_ptr gc = nullptr;
if (PlatformData::getGraphConfigNodes(mCameraId)) {
- IGraphConfigManager *GCM = IGraphConfigManager::getInstance(mCameraId);
+ IGraphConfigManager* GCM = IGraphConfigManager::getInstance(mCameraId);
if (GCM) {
gc = GCM->getGraphConfig(configMode);
}
@@ -80,11 +77,11 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par
int status = gc->getGdcKernelSetting(&gdcKernelId, &resolution);
CheckWarning(status != OK, UNKNOWN_ERROR, "Failed to get GDC kernel setting, DVS disabled");
- LOG2("%s, GDC kernel setting: id: %u, resolution:src: %dx%d, dst: %dx%d", __func__,
- gdcKernelId, resolution.input_width, resolution.input_height, resolution.output_width,
+ LOG2("%s, GDC kernel setting: id: %u, resolution:src: %dx%d, dst: %dx%d", __func__, gdcKernelId,
+ resolution.input_width, resolution.input_height, resolution.output_width,
resolution.output_height);
- cca::cca_gdc_configuration *gdcConfig = ¶ms->gdcConfig;
+ cca::cca_gdc_configuration* gdcConfig = ¶ms->gdcConfig;
CLEAR(*gdcConfig);
gdcConfig->gdc_filter_width = DVS_MIN_ENVELOPE / 2;
gdcConfig->gdc_filter_height = DVS_MIN_ENVELOPE / 2;
@@ -102,7 +99,7 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par
gdcConfig->splitMetadata[0] = DVS_OYDIM_UV;
gdcConfig->splitMetadata[1] = DVS_OXDIM_UV;
gdcConfig->splitMetadata[2] = DVS_OYDIM_Y;
- gdcConfig->splitMetadata[3] = DVS_OXDIM_Y/2;
+ gdcConfig->splitMetadata[3] = DVS_OXDIM_Y / 2;
}
camera_resolution_t envelopeResolution;
@@ -117,17 +114,16 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par
const float Max_Ratio = 1.45f;
int bq_max_width =
- static_cast(Max_Ratio * static_cast(resolution.output_width / 2));
+ static_cast(Max_Ratio * static_cast(resolution.output_width / 2));
int bq_max_height =
- static_cast(Max_Ratio * static_cast(resolution.output_height / 2));
- if (resolution.input_width / 2 - envelope_bq.width -
- gdcConfig->gdc_filter_width > bq_max_width)
+ static_cast(Max_Ratio * static_cast(resolution.output_height / 2));
+ if (resolution.input_width / 2 - envelope_bq.width - gdcConfig->gdc_filter_width > bq_max_width)
envelope_bq.width = resolution.input_width / 2 - gdcConfig->gdc_filter_width - bq_max_width;
- if (resolution.input_height / 2 - envelope_bq.height -
- gdcConfig->gdc_filter_height > bq_max_height)
- envelope_bq.height = resolution.input_height / 2 -
- gdcConfig->gdc_filter_height - bq_max_height;
+ if (resolution.input_height / 2 - envelope_bq.height - gdcConfig->gdc_filter_height >
+ bq_max_height)
+ envelope_bq.height =
+ resolution.input_height / 2 - gdcConfig->gdc_filter_height - bq_max_height;
float zoomHRatio = resolution.input_width / (resolution.input_width - envelope_bq.width * 2);
float zoomVRatio = resolution.input_height / (resolution.input_height - envelope_bq.height * 2);
@@ -144,7 +140,7 @@ int Dvs::configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *par
mGDCRegion.left = 0;
mGDCRegion.top = 0;
- mGDCRegion.right = resolution.input_width / 2;;
+ mGDCRegion.right = resolution.input_width / 2;
mGDCRegion.bottom = resolution.input_height / 2;
dumpDvsConfiguration(*params);
@@ -158,7 +154,8 @@ void Dvs::setParameter(const Parameters& p) {
void Dvs::handleEvent(EventData eventData) {
LOG2("@%s: eventData.type:%d", __func__, eventData.type);
- if (eventData.type != EVENT_PSYS_STATS_BUF_READY) return;
+ if (eventData.type != EVENT_DVS_READY) return;
+ int streamId = eventData.data.dvsRunReady.streamId;
IntelCca* intelCcaHandle = IntelCca::getInstance(mCameraId, mTuningMode);
CheckAndLogError(!intelCcaHandle, VOID_VALUE, "@%s, Failed to get IntelCca instance", __func__);
@@ -171,19 +168,49 @@ void Dvs::handleEvent(EventData eventData) {
zp.digital_zoom_ratio = 1.0f;
zp.digital_zoom_factor = 1.0f;
zp.zoom_mode = ia_dvs_zoom_mode_region;
- if (!mPtzRegion.left && !mPtzRegion.top && !mPtzRegion.right && !mPtzRegion.bottom)
+ if (!mPtzRegion.left && !mPtzRegion.top && !mPtzRegion.right && !mPtzRegion.bottom) {
zp.zoom_region = {mGDCRegion.left, mGDCRegion.top, mGDCRegion.right, mGDCRegion.bottom};
- else
- zp.zoom_region = { mPtzRegion.left, mPtzRegion.top, mPtzRegion.right, mPtzRegion.bottom };
- intelCcaHandle->updateZoom(zp);
+ } else {
+ /*
+          SCALER_CROP_REGION may be adjusted to a smaller crop region if the aspect
+          ratio of the active pixel array differs from the crop region's aspect ratio.
+          Cropping is applied either horizontally or vertically, but never both.
+          If the active pixel array's aspect ratio is wider than the crop region's,
+          the region should be further cropped vertically.
+ */
+ auto coord = PlatformData::getActivePixelArray(mCameraId);
+ int wpa = coord.right - coord.left;
+ int hpa = coord.bottom - coord.top;
+
+ int width = mPtzRegion.right - mPtzRegion.left;
+ int height = mPtzRegion.bottom - mPtzRegion.top;
+
+ float aspect0 = static_cast(wpa) / hpa;
+ float aspect1 = static_cast(width) / height;
+
+ if (std::fabs(aspect0 - aspect1) < 0.00001) {
+ zp.zoom_region = {mPtzRegion.left, mPtzRegion.top, mPtzRegion.right, mPtzRegion.bottom};
+ } else if (aspect0 > aspect1) {
+ auto croppedHeight = width / aspect0;
+ int diff = std::abs(height - croppedHeight) / 2;
+ zp.zoom_region = {mPtzRegion.left, mPtzRegion.top + diff, mPtzRegion.right,
+ mPtzRegion.bottom - diff};
+ } else {
+ auto croppedWidth = height * aspect0;
+ int diff = std::abs(width - croppedWidth) / 2;
+ zp.zoom_region = {mPtzRegion.left + diff, mPtzRegion.top, mPtzRegion.right - diff,
+ mPtzRegion.bottom};
+ }
+ }
+ intelCcaHandle->updateZoom(streamId, zp);
- ia_err iaErr = intelCcaHandle->runDVS(eventData.data.statsReady.sequence);
+ ia_err iaErr = intelCcaHandle->runDVS(streamId, eventData.data.statsReady.sequence);
int ret = AiqUtils::convertError(iaErr);
CheckAndLogError(ret != OK, VOID_VALUE, "Error running DVS: %d", ret);
return;
}
-void Dvs::dumpDvsConfiguration(const cca::cca_init_params &config) {
+void Dvs::dumpDvsConfiguration(const cca::cca_init_params& config) {
if (!Log::isLogTagEnabled(GET_FILE_SHIFT(Dvs), CAMERA_DEBUG_LOG_LEVEL3)) return;
LOG3("config.dvsOutputType %d", config.dvsOutputType);
diff --git a/src/3a/Dvs.h b/src/3a/Dvs.h
index 0e9fdabc..a509e50c 100644
--- a/src/3a/Dvs.h
+++ b/src/3a/Dvs.h
@@ -41,13 +41,13 @@ class Dvs : public EventListener {
explicit Dvs(int cameraId);
~Dvs();
- int configure(const ConfigMode configMode, cca::cca_init_params *params);
+ int configure(const ConfigMode configMode, cca::cca_init_params* params);
void handleEvent(EventData eventData);
void setParameter(const Parameters& p);
private:
- int configCcaDvsData(const ConfigMode configMode, cca::cca_init_params *params);
- void dumpDvsConfiguration(const cca::cca_init_params &config);
+ int configCcaDvsData(const ConfigMode configMode, cca::cca_init_params* params);
+ void dumpDvsConfiguration(const cca::cca_init_params& config);
private:
int mCameraId;
diff --git a/src/3a/I3AControlFactory.cpp b/src/3a/I3AControlFactory.cpp
index 120d2ca2..f2c4b8a9 100644
--- a/src/3a/I3AControlFactory.cpp
+++ b/src/3a/I3AControlFactory.cpp
@@ -23,9 +23,8 @@
namespace icamera {
-AiqUnitBase *I3AControlFactory::createI3AControl(int cameraId, SensorHwCtrl *sensorHw,
- LensHw *lensHw)
-{
+AiqUnitBase* I3AControlFactory::createI3AControl(int cameraId, SensorHwCtrl* sensorHw,
+ LensHw* lensHw) {
LOG1("@%s", cameraId, __func__);
if (PlatformData::isEnableAIQ(cameraId)) {
return new AiqUnit(cameraId, sensorHw, lensHw);
diff --git a/src/3a/I3AControlFactory.h b/src/3a/I3AControlFactory.h
index 940e2710..9e0e9ca9 100644
--- a/src/3a/I3AControlFactory.h
+++ b/src/3a/I3AControlFactory.h
@@ -28,7 +28,7 @@ namespace icamera {
* automatically based on 3a enabled status
*/
class I3AControlFactory {
-public:
+ public:
/**
* \brief Select the AIQ unit according to config file and compiling option
*
@@ -39,7 +39,7 @@ class I3AControlFactory {
*
* \return the AIQ unit base class
*/
- static AiqUnitBase *createI3AControl(int cameraId, SensorHwCtrl *sensorHw, LensHw *lensHw);
+ static AiqUnitBase* createI3AControl(int cameraId, SensorHwCtrl* sensorHw, LensHw* lensHw);
};
} /* namespace icamera */
diff --git a/src/3a/LensManager.cpp b/src/3a/LensManager.cpp
index 6f22be2c..c2247688 100644
--- a/src/3a/LensManager.cpp
+++ b/src/3a/LensManager.cpp
@@ -24,16 +24,14 @@
namespace icamera {
-LensManager::LensManager(int cameraId, LensHw *lensHw) :
- mCameraId(cameraId),
- mLensHw(lensHw),
- mDcIrisCommand(ia_aiq_aperture_control_dc_iris_close),
- mFocusPosition(-1),
- mLastSofSequence(-1) {
-}
+LensManager::LensManager(int cameraId, LensHw* lensHw)
+ : mCameraId(cameraId),
+ mLensHw(lensHw),
+ mDcIrisCommand(ia_aiq_aperture_control_dc_iris_close),
+ mFocusPosition(-1),
+ mLastSofSequence(-1) {}
-LensManager::~LensManager() {
-}
+LensManager::~LensManager() {}
int LensManager::start() {
AutoMutex l(mLock);
@@ -76,8 +74,8 @@ void LensManager::handleSofEvent(EventData eventData) {
}
}
-int LensManager::setLensResult(const cca::cca_af_results &afResults,
- int64_t sequence, const aiq_parameter_t &aiqParam) {
+int LensManager::setLensResult(const cca::cca_af_results& afResults, int64_t sequence,
+ const aiq_parameter_t& aiqParam) {
AutoMutex l(mLock);
if (!mLensHw->isLensSubdevAvailable() || afResults.next_lens_position == 0) {
@@ -87,7 +85,7 @@ int LensManager::setLensResult(const cca::cca_af_results &afResults,
int ret = OK;
int lensHwType = PlatformData::getLensHwType(mCameraId);
- switch(lensHwType) {
+ switch (lensHwType) {
case LENS_VCM_HW:
if (aiqParam.afMode == AF_MODE_OFF && aiqParam.focusDistance > 0.0f) {
// The manual focus setting requires perframe control
@@ -117,7 +115,7 @@ void LensManager::setFocusPosition(int focusPosition) {
}
}
-void LensManager::getLensInfo(aiq_parameter_t &aiqParam) {
+void LensManager::getLensInfo(aiq_parameter_t& aiqParam) {
if (PlatformData::getLensHwType(mCameraId) == LENS_VCM_HW) {
mLensHw->getLatestPosition(aiqParam.lensPosition, aiqParam.lensMovementStartTimestamp);
}
diff --git a/src/3a/LensManager.h b/src/3a/LensManager.h
index aa6a48e0..4c431dc6 100644
--- a/src/3a/LensManager.h
+++ b/src/3a/LensManager.h
@@ -31,9 +31,8 @@ namespace icamera {
* This class is used to control focus and aperture related controls.
*/
class LensManager {
-
-public:
- LensManager(int cameraId, LensHw *lensHw);
+ public:
+ LensManager(int cameraId, LensHw* lensHw);
~LensManager();
/**
@@ -60,24 +59,24 @@ class LensManager {
*
* \return OK if set successfully.
*/
- int setLensResult(const cca::cca_af_results &afResults,
- int64_t sequence, const aiq_parameter_t &aiqParam);
+ int setLensResult(const cca::cca_af_results& afResults, int64_t sequence,
+ const aiq_parameter_t& aiqParam);
/**
* \brief Get Lens info
*
* \param[out] aiqParam: updating lens related parameters.
*
*/
- void getLensInfo(aiq_parameter_t &aiqParam);
+ void getLensInfo(aiq_parameter_t& aiqParam);
-private:
+ private:
DISALLOW_COPY_AND_ASSIGN(LensManager);
void setFocusPosition(int focusPostion);
-private:
+ private:
int mCameraId;
- LensHw *mLensHw;
+ LensHw* mLensHw;
ia_aiq_aperture_control_dc_iris_command mDcIrisCommand;
int mFocusPosition;
diff --git a/src/3a/Ltm.cpp b/src/3a/Ltm.cpp
index 2c7266f4..ad1c61fb 100644
--- a/src/3a/Ltm.cpp
+++ b/src/3a/Ltm.cpp
@@ -29,6 +29,9 @@
#include "iutils/Errors.h"
#include "iutils/Utils.h"
+#include "ia_pal_types_isp_ids_autogen.h"
+#include "ia_pal_types_isp.h"
+
namespace icamera {
Ltm::Ltm(int cameraId)
@@ -38,6 +41,7 @@ Ltm::Ltm(int cameraId)
mThreadRunning(false),
mInputParamIndex(-1) {
CLEAR(mLtmParams);
+ CLEAR(mFrameResolution);
if (PlatformData::isEnableLtmThread(mCameraId)) {
mLtmThread = new LtmThread(this);
@@ -81,7 +85,28 @@ int Ltm::deinit() {
return OK;
}
-int Ltm::configure(const std::vector& configModes) {
+int Ltm::getPixelCropperResolution(std::shared_ptr graphConfig, int32_t streamId,
+ camera_resolution_t* resolution) {
+ ia_isp_bxt_program_group* pgPtr = graphConfig->getProgramGroup(streamId);
+ for (unsigned int i = 0; i < pgPtr->kernel_count; i++) {
+ // The kernel value is for cca_ltm_input_params::frame_width and frame_height.
+ if (pgPtr->run_kernels[i].kernel_uuid == ia_pal_uuid_isp_pxl_crop_yuv_a) {
+ if (pgPtr->run_kernels[i].resolution_info) {
+ resolution->width = pgPtr->run_kernels[i].resolution_info->output_width;
+ resolution->height = pgPtr->run_kernels[i].resolution_info->output_height;
+ return OK;
+ } else {
+ resolution->width = pgPtr->run_kernels[i].resolution_history->output_width;
+ resolution->height = pgPtr->run_kernels[i].resolution_history->output_height;
+ }
+ }
+ }
+
+ return UNKNOWN_ERROR;
+}
+
+int Ltm::configure(const std::vector& configModes,
+ std::shared_ptr graphConfig, int32_t streamId) {
TuningMode tMode = TUNING_MODE_MAX;
for (auto cfg : configModes) {
// Only support the 1st tuning mode if multiple config mode is configured.
@@ -102,6 +127,11 @@ int Ltm::configure(const std::vector& configModes) {
}
}
+ if (graphConfig) {
+ int ret = getPixelCropperResolution(graphConfig, streamId, &mFrameResolution);
+ CheckAndLogError(ret != OK, ret, "failed to get sis output resolution");
+ }
+
if (tMode == TUNING_MODE_MAX) {
return OK;
}
@@ -144,8 +174,8 @@ void Ltm::stop() {
}
void Ltm::handleEvent(EventData eventData) {
- if ((eventData.type != EVENT_PSYS_STATS_SIS_BUF_READY) &&
- (eventData.pipeType != STILL_STREAM_ID))
+ if ((eventData.type != EVENT_PSYS_STATS_SIS_BUF_READY) ||
+ (eventData.pipeType != VIDEO_STREAM_ID))
return;
LOG2("%s: handle EVENT_PSYS_STATS_SIS_BUF_READY", __func__);
@@ -188,8 +218,8 @@ int Ltm::handleSisLtm(const std::shared_ptr& cameraBuffer) {
AiqResult* feedback = getAiqResult(sequence);
mLtmParams[mInputParamIndex]->ltmParams.ev_shift = feedback->mAiqParam.evShift;
mLtmParams[mInputParamIndex]->ltmParams.ltm_strength_manual = feedback->mAiqParam.ltmStrength;
- mLtmParams[mInputParamIndex]->ltmParams.frame_width = feedback->mAiqParam.resolution.width;
- mLtmParams[mInputParamIndex]->ltmParams.frame_height = feedback->mAiqParam.resolution.height;
+ mLtmParams[mInputParamIndex]->ltmParams.frame_width = mFrameResolution.width;
+ mLtmParams[mInputParamIndex]->ltmParams.frame_height = mFrameResolution.height;
ia_image_full_info* imageInfo = &mLtmParams[mInputParamIndex]->ltmParams.sis.image_info;
CLEAR(*imageInfo);
@@ -207,6 +237,14 @@ int Ltm::handleSisLtm(const std::shared_ptr& cameraBuffer) {
cca::cca_ltm_statistics* sis = &mLtmParams[mInputParamIndex]->ltmParams.sis;
MEMCPY_S(sis->data, sizeof(sis->data), data, size);
sis->size = sizeof(sis->data) > size ? size : sizeof(sis->data);
+ LOG3(
+ "LTM data_format %d, bayer_order %d, data_format_bpp %d, data_bpp %d, frame_width and "
+ "height(%d, %d), SIS_image_width & height and right padder(%d, %d, %d), image data size %d",
+ imageInfo->raw_image.data_format, imageInfo->raw_image.bayer_order,
+ imageInfo->raw_image.data_format_bpp, imageInfo->raw_image.data_bpp,
+ mLtmParams[mInputParamIndex]->ltmParams.frame_width,
+ mLtmParams[mInputParamIndex]->ltmParams.frame_height, imageInfo->raw_image.width_cols,
+ imageInfo->raw_image.height_lines, imageInfo->extra_cols_right, sis->size);
if ((!PlatformData::isEnableLtmThread(mCameraId)) || sequence == 0) {
runLtm(*mLtmParams[mInputParamIndex]);
diff --git a/src/3a/Ltm.h b/src/3a/Ltm.h
index 29e6f8c5..55f584b8 100644
--- a/src/3a/Ltm.h
+++ b/src/3a/Ltm.h
@@ -57,7 +57,8 @@ class Ltm : public EventListener {
int start();
void stop();
- int configure(const std::vector& configModes);
+ int configure(const std::vector& configModes,
+ std::shared_ptr graphConfig, int32_t streamId);
/**
* \brief handle statistics event
@@ -68,6 +69,9 @@ class Ltm : public EventListener {
private:
DISALLOW_COPY_AND_ASSIGN(Ltm);
+ int getPixelCropperResolution(std::shared_ptr graphConfig, int32_t streamId,
+ camera_resolution_t* resolution);
+
int runLtmAsync();
int runLtm(const LtmInputParams& ltmInputParams);
@@ -100,11 +104,13 @@ class Ltm : public EventListener {
LtmThread* mLtmThread;
bool mThreadRunning;
Condition mParamAvailableSignal;
- static const int kMaxLtmParamsNum = 2; // 2 ltm input params
+ static const int kMaxLtmParamsNum = 2; // 2 ltm input params
int mInputParamIndex;
LtmInputParams* mLtmParams[kMaxLtmParamsNum];
std::queue mLtmParamsQ;
+
+ camera_resolution_t mFrameResolution;
};
} /* namespace icamera */
diff --git a/src/3a/MakerNote.cpp b/src/3a/MakerNote.cpp
index c318b189..0547efe0 100644
--- a/src/3a/MakerNote.cpp
+++ b/src/3a/MakerNote.cpp
@@ -27,12 +27,9 @@
namespace icamera {
-MakerNote::MakerNote() :
- mMknState(UNINIT) {
-}
+MakerNote::MakerNote() : mMknState(UNINIT) {}
-MakerNote::~MakerNote() {
-}
+MakerNote::~MakerNote() {}
int MakerNote::init(int cameraId, TuningMode tuningMode) {
LOG1("@%s, tuningMode:%d", cameraId, __func__, tuningMode);
@@ -89,8 +86,8 @@ int MakerNote::saveMakernoteData(int cameraId, camera_makernote_mode_t makernote
AutoMutex lock(mMknLock);
CheckAndLogError(mMknState != INIT, NO_INIT, "@%s, mkn isn't initialized", __func__);
- ia_mkn_trg mknTrg = ((makernoteMode == MAKERNOTE_MODE_JPEG) || dump
- ? ia_mkn_trg_section_1 : ia_mkn_trg_section_2);
+ ia_mkn_trg mknTrg = ((makernoteMode == MAKERNOTE_MODE_JPEG) || dump ? ia_mkn_trg_section_1 :
+ ia_mkn_trg_section_2);
MakernoteData data = mMakernoteDataList.front();
IntelCca* intelCca = IntelCca::getInstance(cameraId, tuningMode);
diff --git a/src/3a/SensorManager.cpp b/src/3a/SensorManager.cpp
index 00fb6844..be82ee6f 100644
--- a/src/3a/SensorManager.cpp
+++ b/src/3a/SensorManager.cpp
@@ -27,33 +27,32 @@ using std::vector;
namespace icamera {
-SensorManager::SensorManager(int cameraId, SensorHwCtrl *sensorHw) :
- mCameraId(cameraId),
- mSensorHwCtrl(sensorHw),
- // HDR_FEATURE_S
- mModeSwitched(false),
- // HDR_FEATURE_E
- mLastSofSequence(-1),
- mAnalogGainDelay(0),
- mDigitalGainDelay(0) {
+SensorManager::SensorManager(int cameraId, SensorHwCtrl* sensorHw)
+ : mCameraId(cameraId),
+ mSensorHwCtrl(sensorHw),
+ // HDR_FEATURE_S
+ mModeSwitched(false),
+ // HDR_FEATURE_E
+ mLastSofSequence(-1),
+ mAnalogGainDelay(0),
+ mDigitalGainDelay(0) {
// HDR_FEATURE_S
CLEAR(mWdrModeSetting);
// HDR_FEATURE_E
if (PlatformData::getAnalogGainLag(mCameraId) > 0) {
- mAnalogGainDelay = PlatformData::getExposureLag(mCameraId)
- - PlatformData::getAnalogGainLag(mCameraId);
+ mAnalogGainDelay =
+ PlatformData::getExposureLag(mCameraId) - PlatformData::getAnalogGainLag(mCameraId);
mDigitalGainDelay = mAnalogGainDelay;
}
if (PlatformData::getDigitalGainLag(mCameraId) >= 0) {
- mDigitalGainDelay = PlatformData::getExposureLag(mCameraId)
- - PlatformData::getDigitalGainLag(mCameraId);
+ mDigitalGainDelay =
+ PlatformData::getExposureLag(mCameraId) - PlatformData::getDigitalGainLag(mCameraId);
}
}
-SensorManager::~SensorManager() {
-}
+SensorManager::~SensorManager() {}
void SensorManager::reset() {
LOG1("@%s", mCameraId, __func__);
@@ -87,8 +86,8 @@ void SensorManager::handleSofEvent(EventData eventData) {
SofEventInfo info;
info.sequence = eventData.data.sync.sequence;
- info.timestamp = ((long)eventData.data.sync.timestamp.tv_sec) * 1000000
- + eventData.data.sync.timestamp.tv_usec;
+ info.timestamp = ((long)eventData.data.sync.timestamp.tv_sec) * 1000000 +
+ eventData.data.sync.timestamp.tv_usec;
if (mSofEventInfo.size() >= kMaxSofEventInfo) {
mSofEventInfo.erase(mSofEventInfo.begin());
}
@@ -109,7 +108,8 @@ uint64_t SensorManager::getSofTimestamp(int64_t sequence) {
// HDR_FEATURE_S
int SensorManager::convertTuningModeToWdrMode(TuningMode tuningMode) {
- return ((tuningMode == TUNING_MODE_VIDEO_HDR) || (tuningMode == TUNING_MODE_VIDEO_HDR2)) ? 1 : 0;
+ return ((tuningMode == TUNING_MODE_VIDEO_HDR) || (tuningMode == TUNING_MODE_VIDEO_HDR2)) ? 1 :
+ 0;
}
void SensorManager::handleSensorModeSwitch(int64_t sequence) {
@@ -147,7 +147,7 @@ int SensorManager::setWdrMode(TuningMode tuningMode, int64_t sequence) {
if (mWdrModeSetting.tuningMode != tuningMode) {
// Save WDR mode and update this mode to driver in SOF event handler.
- //So we know which frame is corrupted and we can skip the corrupted frames.
+ // So we know which frame is corrupted and we can skip the corrupted frames.
LOG2("@%s, tuningMode %d", sequence, __func__, tuningMode);
mWdrModeSetting.tuningMode = tuningMode;
mWdrModeSetting.sequence = sequence;
@@ -195,8 +195,8 @@ int SensorManager::getCurrentExposureAppliedDelay() {
uint32_t SensorManager::updateSensorExposure(SensorExpGroup sensorExposures, int64_t applyingSeq) {
AutoMutex l(mLock);
- int64_t effectSeq = mLastSofSequence < 0 ? 0 : \
- mLastSofSequence + PlatformData::getExposureLag(mCameraId);
+ int64_t effectSeq =
+ mLastSofSequence < 0 ? 0 : mLastSofSequence + PlatformData::getExposureLag(mCameraId);
if (sensorExposures.empty()) {
LOGW("%s: No exposure parameter", __func__);
@@ -250,19 +250,18 @@ uint32_t SensorManager::updateSensorExposure(SensorExpGroup sensorExposures, int
mSensorHwCtrl->setDigitalGains(digitalGains);
}
- LOG2("@%s: effectSeq %ld, applyingSeq %ld", mLastSofSequence, __func__,
- effectSeq, applyingSeq);
+ LOG2("@%s: effectSeq %ld, applyingSeq %ld", mLastSofSequence, __func__, effectSeq,
+ applyingSeq);
return ((uint32_t)effectSeq);
}
// CRL_MODULE_S
-int SensorManager::setFrameRate(float fps)
-{
+int SensorManager::setFrameRate(float fps) {
return mSensorHwCtrl->setFrameRate(fps);
}
// CRL_MODULE_E
-int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams,
- ia_aiq_exposure_sensor_descriptor &sensorDescriptor) {
+int SensorManager::getSensorInfo(ia_aiq_frame_params& frameParams,
+ ia_aiq_exposure_sensor_descriptor& sensorDescriptor) {
SensorFrameParams sensorFrameParams;
CLEAR(sensorFrameParams);
@@ -272,37 +271,36 @@ int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams,
}
if (!PlatformData::isIsysEnabled(mCameraId)) {
- vector res;
+ vector res;
PlatformData::getSupportedISysSizes(mCameraId, res);
CheckAndLogError(res.empty(), BAD_VALUE, "Supported ISYS resolutions are not configured.");
// In none-ISYS cases, only take 30 fps into account.
int fps = 30;
float freq = res[0].width * res[0].height * fps / 1000000;
- sensorDescriptor = {freq, static_cast(res[0].width),
- static_cast(res[0].height), 24, 0,
- static_cast(res[0].width), 6, 0};
+ sensorDescriptor = {freq,
+ static_cast(res[0].width),
+ static_cast(res[0].height),
+ 24,
+ 0,
+ static_cast(res[0].width),
+ 6,
+ 0};
LOG2("freq %f, width %d, height %d", freq, res[0].width, res[0].height);
return OK;
}
ret |= getSensorModeData(sensorDescriptor);
- LOG3("ia_aiq_frame_params=[%d, %d, %d, %d, %d, %d, %d, %d]",
- frameParams.horizontal_crop_offset,
- frameParams.vertical_crop_offset,
- frameParams.cropped_image_height,
- frameParams.cropped_image_width,
- frameParams.horizontal_scaling_numerator,
- frameParams.horizontal_scaling_denominator,
- frameParams.vertical_scaling_numerator,
+ LOG3("ia_aiq_frame_params=[%d, %d, %d, %d, %d, %d, %d, %d]", frameParams.horizontal_crop_offset,
+ frameParams.vertical_crop_offset, frameParams.cropped_image_height,
+ frameParams.cropped_image_width, frameParams.horizontal_scaling_numerator,
+ frameParams.horizontal_scaling_denominator, frameParams.vertical_scaling_numerator,
frameParams.vertical_scaling_denominator);
LOG3("ia_aiq_exposure_sensor_descriptor=[%f, %d, %d, %d, %d, %d, %d, %d]",
- sensorDescriptor.pixel_clock_freq_mhz,
- sensorDescriptor.pixel_periods_per_line,
- sensorDescriptor.line_periods_per_field,
- sensorDescriptor.line_periods_vertical_blanking,
+ sensorDescriptor.pixel_clock_freq_mhz, sensorDescriptor.pixel_periods_per_line,
+ sensorDescriptor.line_periods_per_field, sensorDescriptor.line_periods_vertical_blanking,
sensorDescriptor.coarse_integration_time_min,
sensorDescriptor.coarse_integration_time_max_margin,
sensorDescriptor.fine_integration_time_min,
@@ -318,7 +316,7 @@ int SensorManager::getSensorInfo(ia_aiq_frame_params &frameParams,
*/
int SensorManager::getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorData) {
int pixel = 0;
- int status = mSensorHwCtrl->getPixelRate(pixel);
+ int status = mSensorHwCtrl->getPixelRate(pixel);
CheckAndLogError(status != OK, status, "Failed to get pixel clock ret:%d", status);
sensorData.pixel_clock_freq_mhz = (float)pixel / 1000000;
@@ -334,7 +332,8 @@ int SensorManager::getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorDa
sensorData.line_periods_per_field = CLIP(line_periods_per_field, USHRT_MAX, 0);
int coarse_int_time_min, integration_step = 0, integration_max = 0;
- status = mSensorHwCtrl->getExposureRange(coarse_int_time_min, integration_max, integration_step);
+ status =
+ mSensorHwCtrl->getExposureRange(coarse_int_time_min, integration_max, integration_step);
CheckAndLogError(status != OK, status, "Failed to get Exposure Range ret:%d", status);
sensorData.coarse_integration_time_min = CLIP(coarse_int_time_min, USHRT_MAX, 0);
diff --git a/src/3a/SensorManager.h b/src/3a/SensorManager.h
index 01d9b984..8f9d55b2 100644
--- a/src/3a/SensorManager.h
+++ b/src/3a/SensorManager.h
@@ -46,7 +46,7 @@ typedef struct {
uint64_t timestamp;
} SofEventInfo;
-typedef std::vector SensorExpGroup;
+typedef std::vector SensorExpGroup;
/*
* \class SensorManager
*
@@ -54,17 +54,16 @@ typedef std::vector SensorExpGroup;
* and get some sensor info.
*/
class SensorManager {
-
-public:
- SensorManager(int cameraId, SensorHwCtrl *sensorHw);
+ public:
+ SensorManager(int cameraId, SensorHwCtrl* sensorHw);
~SensorManager();
void reset();
void handleSofEvent(EventData eventData);
/* sensorExposures are exposure results, applyingSeq is the sequence to apply results */
uint32_t updateSensorExposure(SensorExpGroup sensorExposures, int64_t applyingSeq);
- int getSensorInfo(ia_aiq_frame_params &frameParams,
- ia_aiq_exposure_sensor_descriptor &sensorDescriptor);
+ int getSensorInfo(ia_aiq_frame_params& frameParams,
+ ia_aiq_exposure_sensor_descriptor& sensorDescriptor);
// HDR_FEATURE_S
int setWdrMode(TuningMode tuningMode, int64_t sequence);
@@ -76,7 +75,8 @@ class SensorManager {
// CRL_MODULE_E
int getCurrentExposureAppliedDelay();
uint64_t getSofTimestamp(int64_t sequence);
-private:
+
+ private:
DISALLOW_COPY_AND_ASSIGN(SensorManager);
void handleSensorExposure();
@@ -86,15 +86,15 @@ class SensorManager {
// HDR_FEATURE_E
int getSensorModeData(ia_aiq_exposure_sensor_descriptor& sensorData);
-private:
+ private:
static const int kMaxSensorExposures = 10;
static const int kMaxSofEventInfo = 10;
int mCameraId;
- SensorHwCtrl *mSensorHwCtrl;
+ SensorHwCtrl* mSensorHwCtrl;
// HDR_FEATURE_S
- bool mModeSwitched; // Whether the TuningMode get updated
+ bool mModeSwitched; // Whether the TuningMode get updated
WdrModeSetting mWdrModeSetting;
// HDR_FEATURE_E
@@ -103,7 +103,7 @@ class SensorManager {
// Guard for SensorManager public API.
Mutex mLock;
- int mAnalogGainDelay; // Analog gain delay comparing exposure
+ int mAnalogGainDelay; // Analog gain delay comparing exposure
int mDigitalGainDelay; // Digital gain delay comparing exposure
// fisrt: sequence id, second: analog gain vector
std::map<int64_t, std::vector<int>> mAnalogGainMap;
diff --git a/src/3a/intel3a/Intel3AParameter.cpp b/src/3a/intel3a/Intel3AParameter.cpp
index 7f8ffa7d..e370b554 100644
--- a/src/3a/intel3a/Intel3AParameter.cpp
+++ b/src/3a/intel3a/Intel3AParameter.cpp
@@ -429,7 +429,7 @@ void Intel3AParameter::updateAeParameter(const aiq_parameter_t& param) {
CLEAR(mAeParams.manual_total_target_exposure);
// Ignore TET in manual exposure case
if (param.totalExposureTarget > 0 && param.manualExpTimeUs <= 0 && param.manualIso <= 0) {
- camera_range_t range = { -1, -1 };
+ camera_range_t range = {-1, -1};
int ret = PlatformData::getSupportAeExposureTimeRange(mCameraId, param.sceneMode, range);
int64_t tet = param.totalExposureTarget;
if (ret == OK && mCMC.base_iso > 0) {
@@ -451,10 +451,10 @@ void Intel3AParameter::updatePaResult(cca::cca_pa_params* paResult) {
if (!mUseManualColorMatrix) return;
if (VALID_COLOR_GAINS(mColorGains.color_gains_rggb)) {
- paResult->color_gains.r = mColorGains.color_gains_rggb[0];
+ paResult->color_gains.r = mColorGains.color_gains_rggb[0];
paResult->color_gains.gr = mColorGains.color_gains_rggb[1];
paResult->color_gains.gb = mColorGains.color_gains_rggb[2];
- paResult->color_gains.b = mColorGains.color_gains_rggb[3];
+ paResult->color_gains.b = mColorGains.color_gains_rggb[3];
}
// Override color_conversion_matrix and color_gains
@@ -636,6 +636,7 @@ void Intel3AParameter::updateAfParameter(const aiq_parameter_t& param) {
// Region
mAfParams.focus_rect = {};
+ mAfParams.focus_metering_mode = ia_aiq_af_metering_mode_auto;
if (!param.afRegions.empty()) {
// Current only one AF metering window is supported, so use the latest one
camera_window_t window = param.afRegions.back();
@@ -643,6 +644,7 @@ void Intel3AParameter::updateAfParameter(const aiq_parameter_t& param) {
camera_coordinate_system_t frameCoord = {0, 0, param.resolution.width,
param.resolution.height};
window = AiqUtils::convertToIaWindow(frameCoord, window);
+ mAfParams.focus_metering_mode = ia_aiq_af_metering_mode_touch;
mAfParams.focus_rect = {window.left, window.top, window.right, window.bottom};
}
}
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 7969db98..ae13e2ee 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -21,6 +21,7 @@ add_subdirectory(hal)
add_subdirectory(isp_control)
# ISP_CONTROL_E
add_subdirectory(iutils)
+add_subdirectory(scheduler)
add_subdirectory(metadata)
add_subdirectory(platformdata)
add_subdirectory(v4l2)
diff --git a/src/core/BufferQueue.cpp b/src/core/BufferQueue.cpp
index 0951ded1..5d514df3 100644
--- a/src/core/BufferQueue.cpp
+++ b/src/core/BufferQueue.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2021 Intel Corporation.
+ * Copyright (C) 2015-2022 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -151,6 +151,24 @@ void BufferQueue::getFrameInfo(std::map& inputInfo,
outputInfo = mOutputFrameInfo;
}
+bool BufferQueue::waitBufferQueue(ConditionLock& lock, std::map<Port, CameraBufQ>& queue,
+ int64_t timeout) {
+ LOG2("@%s waiting buffers", __func__);
+ for (auto& bufQ : queue) {
+ if (bufQ.second.empty() && timeout > 0) {
+ // Thread was stopped during wait
+ if (!mThreadRunning) {
+ LOG1("@%s: inactive while waiting for buffers", __func__);
+ return false;
+ }
+ mFrameAvailableSignal.waitRelative(lock, timeout * SLOWLY_MULTIPLIER);
+ }
+ if (bufQ.second.empty()) return false;
+ }
+
+ return true;
+}
+
int BufferQueue::waitFreeBuffersInQueue(ConditionLock& lock,
std::map<Port, std::shared_ptr<CameraBuffer> >& cInBuffer,
std::map<Port, std::shared_ptr<CameraBuffer> >& cOutBuffer,
diff --git a/src/core/BufferQueue.h b/src/core/BufferQueue.h
index 4d499138..66dd492b 100644
--- a/src/core/BufferQueue.h
+++ b/src/core/BufferQueue.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2021 Intel Corporation.
+ * Copyright (C) 2015-2022 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -144,6 +144,14 @@ class BufferQueue : public BufferConsumer, public BufferProducer, public EventLi
* \brief Clear and initialize input and output buffer queues.
*/
void clearBufferQueues();
+ /**
+ * \brief Wait and check if queue is not empty until time out.
+ *
+ * No waiting if the timeout value is not positive
+ */
+ bool waitBufferQueue(ConditionLock& lock,
+ std::map<Port, CameraBufQ>& queue,
+ int64_t timeout);
/**
* \brief Wait for available input and output buffers.
*
diff --git a/src/core/CameraDevice.cpp b/src/core/CameraDevice.cpp
index 1f693a2a..d5496d92 100644
--- a/src/core/CameraDevice.cpp
+++ b/src/core/CameraDevice.cpp
@@ -68,8 +68,7 @@ CameraDevice::CameraDevice(int cameraId)
mLensCtrl = new LensHw(mCameraId);
mSensorCtrl = SensorHwCtrl::createSensorCtrl(mCameraId);
- m3AControl =
- I3AControlFactory::createI3AControl(mCameraId, mSensorCtrl, mLensCtrl);
+ m3AControl = I3AControlFactory::createI3AControl(mCameraId, mSensorCtrl, mLensCtrl);
mRequestThread = new RequestThread(mCameraId, m3AControl, mParamGenerator);
mRequestThread->registerListener(EVENT_PROCESS_REQUEST, this);
@@ -250,6 +249,7 @@ void CameraDevice::bindListeners() {
}
}
+ mProducer->registerListener(EVENT_ISYS_ERROR, this);
if (mPerframeControlSupport || !PlatformData::isIsysEnabled(mCameraId)) {
mProcessors.back()->registerListener(EVENT_PSYS_FRAME, mRequestThread);
} else {
@@ -268,6 +268,12 @@ void CameraDevice::bindListeners() {
mProducer->registerListener(EVENT_ISYS_SOF, mRequestThread);
}
// FILE_SOURCE_E
+
+ // INTEL_DVS_S
+ auto dvsListener = m3AControl->getDVSEventListener();
+ for (auto lis : dvsListener)
+ for (auto& item : mProcessors) item->registerListener(EVENT_DVS_READY, lis);
+ // INTEL_DVS_E
}
void CameraDevice::unbindListeners() {
@@ -316,6 +322,7 @@ void CameraDevice::unbindListeners() {
mProcessors.front()->removeListener(EVENT_REQUEST_METADATA_READY, this);
}
+ mProducer->removeListener(EVENT_ISYS_ERROR, this);
if (mPerframeControlSupport || !PlatformData::isIsysEnabled(mCameraId)) {
mProcessors.back()->removeListener(EVENT_PSYS_FRAME, mRequestThread);
} else {
@@ -329,6 +336,12 @@ void CameraDevice::unbindListeners() {
mProducer->removeListener(EVENT_ISYS_SOF, mRequestThread);
}
// FILE_SOURCE_E
+
+ // INTEL_DVS_S
+ auto dvsListener = m3AControl->getDVSEventListener();
+ for (auto lis : dvsListener)
+ for (auto& item : mProcessors) item->removeListener(EVENT_DVS_READY, lis);
+ // INTEL_DVS_E
}
int CameraDevice::configureInput(const stream_t* inputConfig) {
@@ -877,12 +890,22 @@ int CameraDevice::getParameters(Parameters& param, int64_t sequence) {
LOG2("@%s", mCameraId, sequence, __func__);
AutoMutex m(mDeviceLock);
+#ifdef CAL_BUILD
if (sequence >= 0 && mState != DEVICE_STOP) {
// fetch target parameter and results
return mParamGenerator->getParameters(sequence, ¶m);
}
param = mParameter;
+#else
+ param = mParameter;
+ Parameters nParam;
+ if (mState != DEVICE_STOP) {
+ // fetch target parameter and results
+ mParamGenerator->getParameters(sequence, &nParam, false);
+ }
+ param.merge(nParam);
+#endif
for (auto& item : mProcessors) {
item->getParameters(param);
@@ -1019,14 +1042,9 @@ void CameraDevice::handleEvent(EventData eventData) {
case EVENT_PSYS_REQUEST_BUF_READY: {
if (mCallback) {
camera_msg_data_t data = {CAMERA_ISP_BUF_READY, {}};
- int32_t userRequestId = 0;
- int ret = mParamGenerator->getUserRequestId(eventData.data.requestReady.sequence,
- userRequestId);
- CheckAndLogError(ret != OK, VOID_VALUE, "failed to find request id, seq %ld",
- eventData.data.requestReady.sequence);
data.data.buffer_ready.timestamp = eventData.data.requestReady.timestamp;
- data.data.buffer_ready.frameNumber = static_cast<uint32_t>(userRequestId);
+ data.data.buffer_ready.frameNumber = eventData.data.requestReady.requestId;
mCallback->notify(mCallback, data);
PlatformData::updateMakernoteTimeStamp(mCameraId,
eventData.data.requestReady.sequence,
@@ -1038,14 +1056,16 @@ void CameraDevice::handleEvent(EventData eventData) {
case EVENT_REQUEST_METADATA_READY: {
if (mCallback) {
camera_msg_data_t data = {CAMERA_METADATA_READY, {}};
- int32_t userRequestId = 0;
- int ret = mParamGenerator->getUserRequestId(eventData.data.requestReady.sequence,
- userRequestId);
- CheckAndLogError(ret != OK, VOID_VALUE, "failed to find request id, seq %ld",
- eventData.data.requestReady.sequence);
data.data.metadata_ready.sequence = eventData.data.requestReady.sequence;
- data.data.metadata_ready.frameNumber = static_cast<uint32_t>(userRequestId);
+ data.data.metadata_ready.frameNumber = eventData.data.requestReady.requestId;
+ mCallback->notify(mCallback, data);
+ }
+ break;
+ }
+ case EVENT_ISYS_ERROR: {
+ if (mCallback) {
+ camera_msg_data_t data = {CAMERA_DEVICE_ERROR, {}};
mCallback->notify(mCallback, data);
}
break;
diff --git a/src/core/CameraEventType.h b/src/core/CameraEventType.h
index a803df9f..240cb530 100644
--- a/src/core/CameraEventType.h
+++ b/src/core/CameraEventType.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2021 Intel Corporation.
+ * Copyright (C) 2015-2022 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -38,6 +38,10 @@ enum EventType {
EVENT_FRAME_AVAILABLE,
EVENT_PSYS_REQUEST_BUF_READY,
EVENT_REQUEST_METADATA_READY,
+ // INTEL_DVS_S
+ EVENT_DVS_READY,
+ // INTEL_DVS_E
+ EVENT_ISYS_ERROR,
};
struct EventDataStatsReady {
@@ -82,8 +86,15 @@ struct EventFrameAvailable {
struct EventRequestReady {
int64_t timestamp;
int64_t sequence;
+ uint32_t requestId;
};
+// INTEL_DVS_S
+struct EventDVSRunReady {
+ int streamId;
+};
+// INTEL_DVS_E
+
struct EventData {
EventData() : type(EVENT_ISYS_SOF), pipeType(-1) { CLEAR(data); }
@@ -98,7 +109,11 @@ struct EventData {
EventRequestData request;
EventConfigData config;
EventFrameAvailable frameDone;
- EventRequestReady requestReady; // use for returning metadata and shutter event
+ // use for returning metadata and shutter event
+ EventRequestReady requestReady;
+ // INTEL_DVS_S
+ EventDVSRunReady dvsRunReady;
+ // INTEL_DVS_E
} data;
};
diff --git a/src/core/CameraStream.cpp b/src/core/CameraStream.cpp
index 74ee71ca..8e982db4 100644
--- a/src/core/CameraStream.cpp
+++ b/src/core/CameraStream.cpp
@@ -118,8 +118,8 @@ int CameraStream::qbuf(camera_buffer_t* ubuffer, int64_t sequence) {
shared_ptr camBuffer = userBufferToCameraBuffer(ubuffer);
if (camBuffer) {
camBuffer->setSettingSequence(sequence);
- LOG2("@%s, mStreamId:%d, CameraBuffer:%p for port:%d, ubuffer:%p, addr:%p",
- mCameraId, __func__, mStreamId, camBuffer.get(), mPort, ubuffer, ubuffer->addr);
+ LOG2("@%s, mStreamId:%d, CameraBuffer:%p for port:%d, ubuffer:%p, addr:%p", mCameraId,
+ __func__, mStreamId, camBuffer.get(), mPort, ubuffer, ubuffer->addr);
}
int ret = BAD_VALUE;
diff --git a/src/core/CaptureUnit.cpp b/src/core/CaptureUnit.cpp
index fdef76cc..5240977d 100644
--- a/src/core/CaptureUnit.cpp
+++ b/src/core/CaptureUnit.cpp
@@ -379,13 +379,15 @@ int CaptureUnit::poll() {
PERF_CAMERA_ATRACE();
int ret = 0;
const int poll_timeout_count = 10;
- const int poll_timeout = gSlowlyRunRatio ? (gSlowlyRunRatio * 1000000) : 1000;
+ // Normally set the timeout threshold to 1s
+ const int poll_timeout = gSlowlyRunRatio ? (gSlowlyRunRatio * 100000) : 1000;
LOG2("%s", mCameraId, __func__);
CheckAndLogError((mState != CAPTURE_CONFIGURE && mState != CAPTURE_START), INVALID_OPERATION,
"@%s: poll buffer in wrong state %d", __func__, mState);
- int timeOutCount = poll_timeout_count;
+ int timeOutCount = (PlatformData::getMaxIsysTimeout() > 0) ? PlatformData::getMaxIsysTimeout() :
+ poll_timeout_count;
std::vector pollDevs, readyDevices;
for (const auto& device : mDevices) {
pollDevs.push_back(device->getV4l2Device());
@@ -403,8 +405,6 @@ int CaptureUnit::poll() {
V4L2DevicePoller poller{pollDevs, mFlushFd[0]};
ret = poller.Poll(poll_timeout, POLLPRI | POLLIN | POLLOUT | POLLERR, &readyDevices);
-
- LOG2("@%s: automation checkpoint: flag: poll_buffer, ret:%d", __func__, ret);
}
// In case poll error after stream off
@@ -415,7 +415,20 @@ int CaptureUnit::poll() {
}
CheckAndLogError(ret < 0, UNKNOWN_ERROR, "%s: Poll error, ret:%d", __func__, ret);
if (ret == 0) {
- LOG1("%s, timeout happens, wait recovery", mCameraId, __func__);
+#ifdef CAL_BUILD
+ LOGI("%s, timeout happens, buffer in device: %d. wait recovery", mCameraId, __func__,
+ mDevices.front()->getBufferNumInDevice());
+#else
+ LOG1("%s, timeout happens, buffer in device: %d. wait recovery", mCameraId, __func__,
+ mDevices.front()->getBufferNumInDevice());
+#endif
+ if (PlatformData::getMaxIsysTimeout() > 0 && mDevices.front()->getBufferNumInDevice() > 0) {
+ EventData errorData;
+ errorData.type = EVENT_ISYS_ERROR;
+ errorData.buffer = nullptr;
+ notifyListeners(errorData);
+ }
+
return OK;
}
@@ -461,11 +474,13 @@ void CaptureUnit::registerListener(EventType eventType, EventListener* eventList
for (auto device : mDevices) {
device->registerListener(eventType, eventListener);
}
+ if (eventType == EVENT_ISYS_ERROR) EventSource::registerListener(eventType, eventListener);
}
void CaptureUnit::removeListener(EventType eventType, EventListener* eventListener) {
for (auto device : mDevices) {
device->removeListener(eventType, eventListener);
}
+ if (eventType == EVENT_ISYS_ERROR) EventSource::removeListener(eventType, eventListener);
}
} // namespace icamera
diff --git a/src/core/DeviceBase.cpp b/src/core/DeviceBase.cpp
index d0c88b2e..c7808757 100644
--- a/src/core/DeviceBase.cpp
+++ b/src/core/DeviceBase.cpp
@@ -389,9 +389,8 @@ int MainDevice::onDequeueBuffer(shared_ptr buffer) {
if (mNeedSkipFrame) return OK;
- LOG2("@%s, field:%d, timestamp: sec=%ld, usec=%ld", buffer->getSequence(),
- __func__, buffer->getField(), buffer->getTimestamp().tv_sec,
- buffer->getTimestamp().tv_usec);
+ LOG2("@%s, field:%d, timestamp: sec=%ld, usec=%ld", buffer->getSequence(), __func__,
+ buffer->getField(), buffer->getTimestamp().tv_sec, buffer->getTimestamp().tv_usec);
for (auto& consumer : mConsumers) {
consumer->onFrameAvailable(mPort, buffer);
diff --git a/src/core/DeviceBase.h b/src/core/DeviceBase.h
index 052ce3fe..e1aae848 100644
--- a/src/core/DeviceBase.h
+++ b/src/core/DeviceBase.h
@@ -93,8 +93,9 @@ class DeviceBase : public EventSource {
/**
* Pre-process the buffer which to be queued to the device.
*/
- virtual int onQueueBuffer(int64_t sequence,
- std::shared_ptr<CameraBuffer>& buffer) { return OK; }
+ virtual int onQueueBuffer(int64_t sequence, std::shared_ptr<CameraBuffer>& buffer) {
+ return OK;
+ }
/**
* Post-process the buffer after it's dequeued from the device.
@@ -127,10 +128,10 @@ class DeviceBase : public EventSource {
VideoNodeType mNodeType;
VideoNodeDirection mNodeDirection;
const char* mName;
- V4L2VideoNode* mDevice; // The device used to queue/dequeue buffers.
- int64_t mLatestSequence; // Track the latest bufffer sequence from driver.
- bool mNeedSkipFrame; // True if the frame/buffer needs to be skipped.
- int mFrameSkipNum; // How many frames need to be skipped after stream on.
+ V4L2VideoNode* mDevice; // The device used to queue/dequeue buffers.
+ int64_t mLatestSequence; // Track the latest buffer sequence from driver.
+ bool mNeedSkipFrame; // True if the frame/buffer needs to be skipped.
+ int mFrameSkipNum; // How many frames need to be skipped after stream on.
DeviceCallback* mDeviceCB;
std::set mConsumers;
diff --git a/src/core/FileSource.cpp b/src/core/FileSource.cpp
index 3b6b10a5..4b58d7b0 100644
--- a/src/core/FileSource.cpp
+++ b/src/core/FileSource.cpp
@@ -263,7 +263,6 @@ void FileSource::fillFrameBuffer(string fileName, shared_ptr& buff
void FileSource::fillFrameBuffer(shared_ptr& buffer) {
string fileName;
-
if (mInjectionWay == USING_CONFIG_FILE) {
FileSourceProfile profile(mInjectedFile);
fileName = profile.getFrameFile(mCameraId, mSequence);
diff --git a/src/core/IspParamAdaptor.cpp b/src/core/IspParamAdaptor.cpp
index d4163ac4..7008a004 100644
--- a/src/core/IspParamAdaptor.cpp
+++ b/src/core/IspParamAdaptor.cpp
@@ -40,32 +40,28 @@
#include "ia_pal_types_isp_parameters_autogen.h"
#include "ia_pal_types_isp.h"
-
namespace icamera {
-IspParamAdaptor::IspParamAdaptor(int cameraId) :
- mIspAdaptorState(ISP_ADAPTOR_NOT_INIT),
- mCameraId(cameraId),
- mTuningMode(TUNING_MODE_VIDEO),
- mIpuOutputFormat(V4L2_PIX_FMT_NV12),
- mGraphConfig(nullptr),
- mIntelCca(nullptr),
- mGammaTmOffset(-1) {
+IspParamAdaptor::IspParamAdaptor(int cameraId)
+ : mIspAdaptorState(ISP_ADAPTOR_NOT_INIT),
+ mCameraId(cameraId),
+ mTuningMode(TUNING_MODE_VIDEO),
+ mIpuOutputFormat(V4L2_PIX_FMT_NV12),
+ mGraphConfig(nullptr),
+ mIntelCca(nullptr),
+ mGammaTmOffset(-1) {
LOG1("@%s", mCameraId, __func__);
CLEAR(mLastPalDataForVideoPipe);
- PalRecord palRecordArray[] = {
- { ia_pal_uuid_isp_call_info, -1 },
- { ia_pal_uuid_isp_bnlm_3_2, -1 },
- { ia_pal_uuid_isp_lsc_1_1, -1 }
- };
+ PalRecord palRecordArray[] = {{ia_pal_uuid_isp_call_info, -1},
+ {ia_pal_uuid_isp_bnlm_3_2, -1},
+ {ia_pal_uuid_isp_lsc_1_1, -1}};
for (uint32_t i = 0; i < sizeof(palRecordArray) / sizeof(PalRecord); i++) {
mPalRecords.push_back(palRecordArray[i]);
}
}
-IspParamAdaptor::~IspParamAdaptor() {
-}
+IspParamAdaptor::~IspParamAdaptor() {}
int IspParamAdaptor::init() {
PERF_CAMERA_ATRACE();
@@ -95,15 +91,15 @@ int IspParamAdaptor::deinit() {
return OK;
}
-int IspParamAdaptor::deepCopyProgramGroup(const ia_isp_bxt_program_group *pgPtr,
- cca::cca_program_group *programGroup) {
+int IspParamAdaptor::deepCopyProgramGroup(const ia_isp_bxt_program_group* pgPtr,
+ cca::cca_program_group* programGroup) {
CheckAndLogError(!programGroup, UNKNOWN_ERROR, "%s, the programGroup is nullptr", __func__);
CheckAndLogError(pgPtr->kernel_count > cca::MAX_KERNEL_NUMBERS_IN_PIPE, NO_MEMORY,
"%s, memory for program group is too small, kernel count: %d", __func__,
pgPtr->kernel_count);
programGroup->base = *pgPtr;
- uint32_t &kernelCnt = programGroup->base.kernel_count;
+ uint32_t& kernelCnt = programGroup->base.kernel_count;
kernelCnt = 0;
for (unsigned int i = 0; i < pgPtr->kernel_count; ++i) {
@@ -159,18 +155,18 @@ int IspParamAdaptor::getDataFromProgramGroup() {
CheckAndLogError(ret != OK, UNKNOWN_ERROR, "Failed to get the PG streamIds");
for (auto id : streamIds) {
- ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(id);
- CheckAndLogError(!pgPtr, UNKNOWN_ERROR, "%s, Failed to get the programGroup for streamId: %d",
- __func__, id);
+ ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(id);
+ CheckAndLogError(!pgPtr, UNKNOWN_ERROR,
+ "%s, Failed to get the programGroup for streamId: %d", __func__, id);
cca::cca_program_group programGroup = {};
ret = deepCopyProgramGroup(pgPtr, &programGroup);
- CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to convert cca programGroup. streamId %d",
- __func__, id);
+ CheckAndLogError(ret != OK, UNKNOWN_ERROR,
+ "%s, Failed to convert cca programGroup. streamId %d", __func__, id);
mStreamIdToPGOutSizeMap[id] = mIntelCca->getPalDataSize(programGroup);
ia_isp_bxt_gdc_limits mbrData;
- ret = mGraphConfig->getMBRData(id, &mbrData);
+ ret = mGraphConfig->getMBRData(id, &mbrData);
if (ret == OK) {
mStreamIdToMbrDataMap[id] = mbrData;
LOG2("get mbr data for stream:%d:%f,%f,%f,%f", id, mbrData.rectilinear.zoom,
@@ -181,7 +177,7 @@ int IspParamAdaptor::getDataFromProgramGroup() {
return OK;
}
-void IspParamAdaptor::initInputParams(cca::cca_pal_input_params *params) {
+void IspParamAdaptor::initInputParams(cca::cca_pal_input_params* params) {
CheckAndLogError(params == nullptr, VOID_VALUE, "NULL input parameter");
params->ee_setting.feature_level = ia_isp_feature_level_low;
@@ -202,8 +198,8 @@ void IspParamAdaptor::initInputParams(cca::cca_pal_input_params *params) {
* \return OK: everything went ok.
* \return UNKNOWN_ERROR: First run of ISP adaptation failed.
*/
-int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode,
- TuningMode tuningMode, int ipuOutputFormat) {
+int IspParamAdaptor::configure(const stream_t& stream, ConfigMode configMode, TuningMode tuningMode,
+ int ipuOutputFormat) {
HAL_TRACE_CALL(CAMERA_DEBUG_LOG_LEVEL1);
int ret = OK;
@@ -223,10 +219,10 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode,
mGammaTmOffset = -1;
mIntelCca = IntelCca::getInstance(mCameraId, tuningMode);
- CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr, tuningMode:%d",
- __func__, mTuningMode);
+ CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr, tuningMode:%d", __func__,
+ mTuningMode);
- IGraphConfigManager *gcm = IGraphConfigManager::getInstance(mCameraId);
+ IGraphConfigManager* gcm = IGraphConfigManager::getInstance(mCameraId);
CheckAndLogError(!gcm, UNKNOWN_ERROR, "%s, Failed to get graph config manager for cameraId: %d",
__func__, mCameraId);
CheckAndLogError(!gcm->isGcConfigured(), UNKNOWN_ERROR, "%s, graph isn't configured", __func__);
@@ -258,13 +254,13 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode,
lardParam.isp_mode_index = ispTuningIndex;
cca::cca_nvm tmpNvm = {};
- ia_err iaErr = mIntelCca->updateTuning(lardTag, lardParam, tmpNvm,
- ispParamIt.first);
+ ia_err iaErr =
+ mIntelCca->updateTuning(lardTag, lardParam, tmpNvm, ispParamIt.first);
CheckAndLogError(iaErr != ia_err_none, UNKNOWN_ERROR,
- "%s, Failed to update isp tuning data. tuning_mode %d",
- __func__, ispTuningIndex);
- LOG2("%s, Update isp tuning data. tuning_mode:%d, streamId: %d,",
- __func__, ispTuningIndex, ispParamIt.first);
+ "%s, Failed to update isp tuning data. tuning_mode %d", __func__,
+ ispTuningIndex);
+ LOG2("%s, Update isp tuning data. tuning_mode:%d, streamId: %d,", __func__,
+ ispTuningIndex, ispParamIt.first);
}
}
}
@@ -279,14 +275,15 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode,
initInputParams(inputParams);
inputParams->stream_id = ispParamIt.first;
- ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(ispParamIt.first);
+ ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(ispParamIt.first);
CheckAndLogError(!pgPtr, UNKNOWN_ERROR,
- "%s, Failed to get the programGroup for streamId: %d",
- __func__, ispParamIt.first);
+ "%s, Failed to get the programGroup for streamId: %d", __func__,
+ ispParamIt.first);
ret = deepCopyProgramGroup(pgPtr, &(inputParams->program_group));
- CheckAndLogError(ret != OK, UNKNOWN_ERROR, "%s, Failed to convert cca programGroup. streamId %d",
- __func__, ispParamIt.first);
+ CheckAndLogError(ret != OK, UNKNOWN_ERROR,
+ "%s, Failed to convert cca programGroup. streamId %d", __func__,
+ ispParamIt.first);
dumpProgramGroup(&inputParams->program_group.base);
{
@@ -314,8 +311,8 @@ int IspParamAdaptor::configure(const stream_t &stream, ConfigMode configMode,
int IspParamAdaptor::decodeStatsData(TuningMode tuningMode,
std::shared_ptr statsBuffer,
std::shared_ptr graphConfig) {
- CheckAndLogError(mIspAdaptorState != ISP_ADAPTOR_CONFIGURED,
- INVALID_OPERATION, "%s, wrong state %d", __func__, mIspAdaptorState);
+ CheckAndLogError(mIspAdaptorState != ISP_ADAPTOR_CONFIGURED, INVALID_OPERATION,
+ "%s, wrong state %d", __func__, mIspAdaptorState);
CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__);
int64_t sequence = statsBuffer->getSequence();
@@ -330,8 +327,8 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode,
outStats->get_rgbs_stats = true;
}
- AiqResultStorage *aiqResultStorage = AiqResultStorage::getInstance(mCameraId);
- AiqStatistics *aiqStatistics = aiqResultStorage->acquireAiqStatistics();
+ AiqResultStorage* aiqResultStorage = AiqResultStorage::getInstance(mCameraId);
+ AiqStatistics* aiqStatistics = aiqResultStorage->acquireAiqStatistics();
aiqStatistics->mSequence = sequence;
aiqStatistics->mTimestamp = TIMEVAL2USECS(statsBuffer->getTimestamp());
aiqStatistics->mTuningMode = tuningMode;
@@ -344,7 +341,7 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode,
// Pend stats decoding to running 3A
if (aiqStatistics->mPendingDecode) return OK;
- ia_binary_data *hwStatsData = (ia_binary_data *)(statsBuffer->getBufferAddr());
+ ia_binary_data* hwStatsData = (ia_binary_data*)(statsBuffer->getBufferAddr());
if (CameraDump::isDumpTypeEnable(DUMP_PSYS_DECODED_STAT) && hwStatsData != nullptr) {
BinParam_t bParam;
bParam.bType = BIN_TYPE_GENERAL;
@@ -369,29 +366,29 @@ int IspParamAdaptor::decodeStatsData(TuningMode tuningMode,
return OK;
}
-void IspParamAdaptor::updateKernelToggles(cca::cca_program_group *programGroup) {
-
+void IspParamAdaptor::updateKernelToggles(cca::cca_program_group* programGroup) {
if (!Log::isDebugLevelEnable(CAMERA_DEBUG_LOG_KERNEL_TOGGLE)) return;
const char* ENABLED_KERNELS = "/tmp/enabledKernels";
const char* DISABLED_KERNELS = "/tmp/disabledKernels";
const int FLIE_CONT_MAX_LENGTH = 1024;
- char enabledKernels[FLIE_CONT_MAX_LENGTH] = { 0 };
- char disabledKernels[FLIE_CONT_MAX_LENGTH] = { 0 };
+ char enabledKernels[FLIE_CONT_MAX_LENGTH] = {0};
+ char disabledKernels[FLIE_CONT_MAX_LENGTH] = {0};
- int enLen = CameraUtils::getFileContent(ENABLED_KERNELS, enabledKernels, FLIE_CONT_MAX_LENGTH - 1);
- int disLen = CameraUtils::getFileContent(DISABLED_KERNELS, disabledKernels, FLIE_CONT_MAX_LENGTH - 1);
+ int enLen =
+ CameraUtils::getFileContent(ENABLED_KERNELS, enabledKernels, FLIE_CONT_MAX_LENGTH - 1);
+ int disLen =
+ CameraUtils::getFileContent(DISABLED_KERNELS, disabledKernels, FLIE_CONT_MAX_LENGTH - 1);
if (enLen == 0 && disLen == 0) {
LOG2("%s: no explicit kernel toggle.", __func__);
return;
}
- LOG2("%s: enabled kernels: %s, disabled kernels %s", __func__,
- enabledKernels, disabledKernels);
+ LOG2("%s: enabled kernels: %s, disabled kernels %s", __func__, enabledKernels, disabledKernels);
for (unsigned int i = 0; i < programGroup->base.kernel_count; i++) {
- ia_isp_bxt_run_kernels_t *curKernel = &(programGroup->base.run_kernels[i]);
+ ia_isp_bxt_run_kernels_t* curKernel = &(programGroup->base.run_kernels[i]);
std::string curKernelUUID = std::to_string(curKernel->kernel_uuid);
if (strstr(enabledKernels, curKernelUUID.c_str()) != nullptr) {
@@ -409,18 +406,17 @@ void IspParamAdaptor::updateKernelToggles(cca::cca_program_group *programGroup)
* So temporarily copy latest PAL data into PAL output buffer.
*/
void IspParamAdaptor::updatePalDataForVideoPipe(ia_binary_data dest) {
- if (mLastPalDataForVideoPipe.data == nullptr || mLastPalDataForVideoPipe.size == 0)
- return;
+ if (mLastPalDataForVideoPipe.data == nullptr || mLastPalDataForVideoPipe.size == 0) return;
if (mPalRecords.empty()) return;
- ia_pal_record_header *header = nullptr;
+ ia_pal_record_header* header = nullptr;
char* src = static_cast(mLastPalDataForVideoPipe.data);
// find uuid offset in saved PAL buffer
if (mPalRecords[0].offset < 0) {
uint32_t offset = 0;
while (offset < mLastPalDataForVideoPipe.size) {
- ia_pal_record_header *header = reinterpret_cast<ia_pal_record_header *>(src + offset);
+ ia_pal_record_header* header = reinterpret_cast<ia_pal_record_header*>(src + offset);
// check if header is valid or not
CheckWarning(header->uuid == 0 || header->size == 0, VOID_VALUE,
"%s, source header info isn't correct", __func__);
@@ -436,7 +432,7 @@ void IspParamAdaptor::updatePalDataForVideoPipe(ia_binary_data dest) {
}
char* destData = static_cast(dest.data);
- ia_pal_record_header *headerSrc = nullptr;
+ ia_pal_record_header* headerSrc = nullptr;
for (uint32_t i = 0; i < mPalRecords.size(); i++) {
if (mPalRecords[i].offset >= 0) {
// find source record header
@@ -490,7 +486,7 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting
if (streamId != -1 && it.first != streamId) continue;
ia_binary_data binaryData = {};
- IspParameter *ispParam = &(it.second);
+ IspParameter* ispParam = &(it.second);
auto dataIt = ispParam->mSequenceToDataMap.end();
{
@@ -507,8 +503,8 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting
"No PAL buf!");
binaryData = dataIt->second;
- LOG2("@%s, Pal data buffer seq: %ld", settingSequence,
- it.first, __func__, dataIt->first);
+ LOG2("@%s, Pal data buffer seq: %ld", settingSequence, it.first,
+ __func__, dataIt->first);
}
ia_isp_bxt_gdc_limits* mbrData = nullptr;
@@ -521,7 +517,7 @@ int IspParamAdaptor::runIspAdapt(const IspSettings* ispSettings, int64_t setting
updatePalDataForVideoPipe(binaryData);
}
- ia_isp_bxt_program_group *pgPtr = mGraphConfig->getProgramGroup(it.first);
+ ia_isp_bxt_program_group* pgPtr = mGraphConfig->getProgramGroup(it.first);
CheckAndLogError(!pgPtr, UNKNOWN_ERROR,
"%s, Failed to get the programGroup for streamId: %d", __func__, it.first);
@@ -575,11 +571,10 @@ ia_binary_data* IspParamAdaptor::getIpuParameter(int64_t sequence, int streamId)
}
}
} else {
- auto seqIt =ispParam.mSequenceToDataId.find(sequence);
+ auto seqIt = ispParam.mSequenceToDataId.find(sequence);
if (seqIt != ispParam.mSequenceToDataId.end()) {
auto dataIt = ispParam.mSequenceToDataMap.find(seqIt->second);
- if (dataIt != ispParam.mSequenceToDataMap.end())
- binaryData = &(dataIt->second);
+ if (dataIt != ispParam.mSequenceToDataMap.end()) binaryData = &(dataIt->second);
}
}
@@ -590,7 +585,7 @@ ia_binary_data* IspParamAdaptor::getIpuParameter(int64_t sequence, int streamId)
return binaryData;
}
-int IspParamAdaptor::getPalOutputDataSize(const ia_isp_bxt_program_group *programGroup) {
+int IspParamAdaptor::getPalOutputDataSize(const ia_isp_bxt_program_group* programGroup) {
CheckAndLogError(programGroup == nullptr, 0, "Request programGroup is nullptr");
CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__);
@@ -607,22 +602,21 @@ int IspParamAdaptor::allocateIspParamBuffers() {
releaseIspParamBuffers();
for (int i = 0; i < ISP_PARAM_QUEUE_SIZE; i++) {
- for (auto & pgMap : mStreamIdToPGOutSizeMap) {
+ for (auto& pgMap : mStreamIdToPGOutSizeMap) {
ia_binary_data binaryData = {};
int size = pgMap.second;
binaryData.size = size;
binaryData.data = mIntelCca->allocMem(pgMap.first, "palData", i, size);
CheckAndLogError(binaryData.data == nullptr, NO_MEMORY, "Faile to calloc PAL data");
- int64_t index = i * (-1) - 2; // default index list: -2, -3, -4, ...
+ int64_t index = i * (-1) - 2; // default index list: -2, -3, -4, ...
std::pair p(index, binaryData);
mStreamIdToIspParameterMap[pgMap.first].mSequenceToDataMap.insert(p);
}
}
for (auto& pgMap : mStreamIdToPGOutSizeMap) {
- cca::cca_pal_input_params* p = static_cast<cca::cca_pal_input_params*>(
- mIntelCca->allocMem(pgMap.first, "palData", ISP_PARAM_QUEUE_SIZE,
- sizeof(cca::cca_pal_input_params)));
+ cca::cca_pal_input_params* p = static_cast<cca::cca_pal_input_params*>(mIntelCca->allocMem(
+ pgMap.first, "palData", ISP_PARAM_QUEUE_SIZE, sizeof(cca::cca_pal_input_params)));
CheckAndLogError(p == nullptr, NO_MEMORY, "Cannot alloc memory for cca_pal_input_params!");
CLEAR(*p);
mStreamIdToPalInputParamsMap[pgMap.first] = p;
@@ -649,8 +643,8 @@ void IspParamAdaptor::releaseIspParamBuffers() {
mStreamIdToPalInputParamsMap.clear();
}
-void IspParamAdaptor::applyMediaFormat(const AiqResult* aiqResult,
- ia_media_format* mediaFormat, bool* useLinearGamma) {
+void IspParamAdaptor::applyMediaFormat(const AiqResult* aiqResult, ia_media_format* mediaFormat,
+ bool* useLinearGamma) {
CheckAndLogError(!mediaFormat || !aiqResult, VOID_VALUE, "mediaFormat or aiqResult is nullptr");
*mediaFormat = media_format_legacy;
@@ -701,16 +695,18 @@ void IspParamAdaptor::applyCscMatrix(ia_isp_bxt_csc* cscMatrix) {
}
}
-int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gdc_limits *mbrData,
+int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group* pgPtr, ia_isp_bxt_gdc_limits* mbrData,
const IspSettings* ispSettings, int64_t settingSequence,
- ia_binary_data *binaryData, int streamId) {
+ ia_binary_data* binaryData, int streamId) {
PERF_CAMERA_ATRACE();
CheckAndLogError(!mIntelCca, UNKNOWN_ERROR, "%s, mIntelCca is nullptr", __func__);
- AiqResult* aiqResults = const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult(settingSequence));
+ AiqResult* aiqResults = const_cast(
+ AiqResultStorage::getInstance(mCameraId)->getAiqResult(settingSequence));
if (aiqResults == nullptr) {
LOGW("@%s: no result! use the latest instead", settingSequence, __func__);
- aiqResults = const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult());
+ aiqResults =
+ const_cast(AiqResultStorage::getInstance(mCameraId)->getAiqResult());
CheckAndLogError((aiqResults == nullptr), INVALID_OPERATION,
"Cannot find available aiq result.");
}
@@ -722,8 +718,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
bool useLinearGamma = false;
applyMediaFormat(aiqResults, &inputParams->media_format, &useLinearGamma);
- LOG2("%s, media format: 0x%x, gamma lut size: %d", __func__,
- inputParams->media_format, aiqResults->mGbceResults.gamma_lut_size);
+ LOG2("%s, media format: 0x%x, gamma lut size: %d", __func__, inputParams->media_format,
+ aiqResults->mGbceResults.gamma_lut_size);
if (inputParams->media_format == media_format_custom) {
applyCscMatrix(&inputParams->csc_matrix);
@@ -748,8 +744,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
case ia_pal_uuid_isp_tnr5_22:
case ia_pal_uuid_isp_tnr5_25:
inputParams->program_group.base.run_kernels[i].metadata[0] = aiqResults->mSequence;
- LOG2("%s, ia_pal_uuid_isp_tnr5_2x frame count = %d",
- __func__, inputParams->program_group.base.run_kernels[i].metadata[0]);
+ LOG2("%s, ia_pal_uuid_isp_tnr5_2x frame count = %d", __func__,
+ inputParams->program_group.base.run_kernels[i].metadata[0]);
break;
case ia_pal_uuid_isp_ofa_2_mp:
case ia_pal_uuid_isp_ofa_2_dp:
@@ -778,6 +774,7 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
break;
case ia_pal_uuid_isp_bxt_blc:
case ia_pal_uuid_isp_b2i_sie_1_1:
+ case ia_pal_uuid_isp_gammatm_v3:
if (aiqResults->mAiqParam.testPatternMode != TEST_PATTERN_OFF) {
LOG2("%s: disable kernel(%d) in test pattern mode", __func__,
inputParams->program_group.base.run_kernels[i].kernel_uuid);
@@ -828,9 +825,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
inputParams->custom_controls.count = aiqResults->mCustomControls.count;
uint32_t cnt = static_cast(inputParams->custom_controls.count);
if (cnt > 0) {
- CheckAndLogError(cnt > cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE,
- UNKNOWN_ERROR, "%s, buffer for custom control[%d] is too small",
- __func__, cnt);
+ CheckAndLogError(cnt > cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE, UNKNOWN_ERROR,
+ "%s, buffer for custom control[%d] is too small", __func__, cnt);
MEMCPY_S(inputParams->custom_controls.parameters, cnt,
aiqResults->mCustomControls.parameters, cca::MAX_CUSTOM_CONTROLS_PARAM_SIZE);
@@ -853,8 +849,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
// Fine-tune DG passed to ISP if partial ISP DG is needed.
if (PlatformData::isUsingIspDigitalGain(mCameraId)) {
- inputParams->manual_digital_gain = PlatformData::getIspDigitalGain(mCameraId,
- aiqResults->mAeResults.exposures[0].exposure[0].digital_gain);
+ inputParams->manual_digital_gain = PlatformData::getIspDigitalGain(
+ mCameraId, aiqResults->mAeResults.exposures[0].exposure[0].digital_gain);
}
LOG2("%s: set digital gain for ULL pipe: %f", __func__, inputParams->manual_digital_gain);
@@ -863,8 +859,8 @@ int IspParamAdaptor::runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gd
inputParams->manual_digital_gain =
aiqResults->mAeResults.exposures[0].exposure[0].digital_gain;
- LOG2("%s: all digital gain is passed to ISP, DG(%ld): %f", __func__,
- aiqResults->mSequence, aiqResults->mAeResults.exposures[0].exposure[0].digital_gain);
+ LOG2("%s: all digital gain is passed to ISP, DG(%ld): %f", __func__, aiqResults->mSequence,
+ aiqResults->mAeResults.exposures[0].exposure[0].digital_gain);
}
ia_err iaErr = ia_err_none;
@@ -900,14 +896,13 @@ void IspParamAdaptor::updateResultFromAlgo(ia_binary_data* binaryData, int64_t s
// update tone map result from pal algo
if (aiqResults->mAiqParam.callbackTmCurve &&
aiqResults->mGbceResults.have_manual_settings == false) {
-
char* src = static_cast(binaryData->data);
if (mGammaTmOffset < 0) {
uint32_t offset = 0;
bool foundRes = false;
while (offset < binaryData->size) {
- ia_pal_record_header *header =
+ ia_pal_record_header* header =
reinterpret_cast(src + offset);
if (header->uuid == ia_pal_uuid_isp_gammatm_v3) {
LOG2("src uuid %d, offset %d, size %d", header->uuid, offset, header->size);
@@ -920,27 +915,27 @@ void IspParamAdaptor::updateResultFromAlgo(ia_binary_data* binaryData, int64_t s
mGammaTmOffset = offset;
}
- ia_pal_isp_gammatm_v3_t *TM =
- reinterpret_cast(src + mGammaTmOffset +
- ALIGN_8(sizeof(ia_pal_record_header)));
- uint32_t tmSize = (reinterpret_cast(&(TM->prog_shift)) -
- reinterpret_cast(TM->tm_lut_gen_lut)) / sizeof(int32_t);
+ ia_pal_isp_gammatm_v3_t* TM = reinterpret_cast(
+ src + mGammaTmOffset + ALIGN_8(sizeof(ia_pal_record_header)));
+ uint32_t tmSize = (reinterpret_cast(&(TM->prog_shift)) -
+ reinterpret_cast(TM->tm_lut_gen_lut)) /
+ sizeof(int32_t);
if (aiqResults->mGbceResults.tone_map_lut_size == 0) {
LOG2("%s, gbce running in bypass mode, reset to max value", __func__);
aiqResults->mGbceResults.tone_map_lut_size = cca::MAX_TONE_MAP_LUT_SIZE;
}
- CheckAndLogError(tmSize < aiqResults->mGbceResults.tone_map_lut_size,
- VOID_VALUE, "memory is mismatch to store tone map from algo");
+ CheckAndLogError(tmSize < aiqResults->mGbceResults.tone_map_lut_size, VOID_VALUE,
+ "memory is mismatch to store tone map from algo");
- LOG2("%s, Tonemap Curve. enable: %d, prog_shift: %d, table size: %zu",
- __func__, TM->enable, TM->prog_shift, tmSize);
+ LOG2("%s, Tonemap Curve. enable: %d, prog_shift: %d, table size: %zu", __func__, TM->enable,
+ TM->prog_shift, tmSize);
const int shiftBase = 1 << TM->prog_shift;
for (uint32_t i = 0; i < aiqResults->mGbceResults.tone_map_lut_size; i++) {
- aiqResults->mGbceResults.tone_map_lut[i] = static_cast(TM->tm_lut_gen_lut[i]) /
- shiftBase;
+ aiqResults->mGbceResults.tone_map_lut[i] =
+ static_cast(TM->tm_lut_gen_lut[i]) / shiftBase;
}
}
}
@@ -949,14 +944,14 @@ void IspParamAdaptor::dumpIspParameter(int streamId, int64_t sequence, ia_binary
if (!CameraDump::isDumpTypeEnable(DUMP_PSYS_PAL)) return;
BinParam_t bParam;
- bParam.bType = BIN_TYPE_GENERAL;
- bParam.mType = M_PSYS;
+ bParam.bType = BIN_TYPE_GENERAL;
+ bParam.mType = M_PSYS;
bParam.sequence = sequence;
- bParam.gParam.appendix = ("pal_" + std::to_string(streamId)).c_str();;
+ bParam.gParam.appendix = ("pal_" + std::to_string(streamId)).c_str();
CameraDump::dumpBinary(mCameraId, binaryData.data, binaryData.size, &bParam);
}
-void IspParamAdaptor::dumpProgramGroup(ia_isp_bxt_program_group *pgPtr) {
+void IspParamAdaptor::dumpProgramGroup(ia_isp_bxt_program_group* pgPtr) {
if (!Log::isLogTagEnabled(GET_FILE_SHIFT(IspParamAdaptor), CAMERA_DEBUG_LOG_LEVEL3)) return;
LOG3("the kernel count: %d, run_kernels: %p", pgPtr->kernel_count, pgPtr->run_kernels);
@@ -1000,37 +995,36 @@ void IspParamAdaptor::dumpCscMatrix(const ia_isp_bxt_csc* cscMatrix) {
* 1, copy pal.bin file to local directory;
* 2, define pal uuid in palRecordArray which are expected to be replaced.
*/
-void IspParamAdaptor::loadPalBinFile(ia_binary_data *binaryData) {
+void IspParamAdaptor::loadPalBinFile(ia_binary_data* binaryData) {
// Get file size
struct stat fileStat;
CLEAR(fileStat);
- const char *fileName = "./pal.bin";
+ const char* fileName = "./pal.bin";
int ret = stat(fileName, &fileStat);
CheckWarning(ret != 0, VOID_VALUE, "no pal bin %s", fileName);
FILE* fp = fopen(fileName, "rb");
- CheckWarning(fp == nullptr, VOID_VALUE, "Failed to open %s, err %s", fileName,
- strerror(errno));
+ CheckWarning(fp == nullptr, VOID_VALUE, "Failed to open %s, err %s", fileName, strerror(errno));
std::unique_ptr dataPtr(new char[fileStat.st_size]);
size_t readSize = fread(dataPtr.get(), sizeof(char), fileStat.st_size, fp);
fclose(fp);
- CheckWarning(readSize != (size_t)fileStat.st_size, VOID_VALUE,
- "Failed to read %s, err %s", fileName, strerror(errno));
+ CheckWarning(readSize != (size_t)fileStat.st_size, VOID_VALUE, "Failed to read %s, err %s",
+ fileName, strerror(errno));
static PalRecord palRecordArray[] = {
- { ia_pal_uuid_isp_bnlm_3_2, -1 },
- { ia_pal_uuid_isp_tnr_6_0, -1 },
+ {ia_pal_uuid_isp_bnlm_3_2, -1},
+ {ia_pal_uuid_isp_tnr_6_0, -1},
};
- ia_pal_record_header *header = nullptr;
+ ia_pal_record_header* header = nullptr;
char* src = static_cast(dataPtr.get());
// find uuid offset in PAL bin
if (palRecordArray[0].offset < 0) {
uint32_t offset = 0;
while (offset < readSize) {
- ia_pal_record_header *header = reinterpret_cast(src + offset);
+ ia_pal_record_header* header = reinterpret_cast(src + offset);
for (uint32_t i = 0; i < sizeof(palRecordArray) / sizeof(PalRecord); i++) {
if (palRecordArray[i].offset < 0 && palRecordArray[i].uuid == header->uuid) {
palRecordArray[i].offset = offset;
@@ -1043,7 +1037,7 @@ void IspParamAdaptor::loadPalBinFile(ia_binary_data *binaryData) {
}
char* dest = static_cast(binaryData->data);
- ia_pal_record_header *headerSrc = nullptr;
+ ia_pal_record_header* headerSrc = nullptr;
for (uint32_t i = 0; i < sizeof(palRecordArray) / sizeof(PalRecord); i++) {
if (palRecordArray[i].offset >= 0) {
// find source record header
@@ -1085,4 +1079,4 @@ uint32_t IspParamAdaptor::getRequestedStats() {
return bitmap;
}
-} // namespace icamera
+} // namespace icamera
diff --git a/src/core/IspParamAdaptor.h b/src/core/IspParamAdaptor.h
index f155fea9..22fde02f 100644
--- a/src/core/IspParamAdaptor.h
+++ b/src/core/IspParamAdaptor.h
@@ -65,32 +65,32 @@ namespace icamera {
* 2. Run isp config
*/
class IspParamAdaptor {
-public:
+ public:
explicit IspParamAdaptor(int cameraId);
virtual ~IspParamAdaptor();
int init();
int deinit();
- int configure(const stream_t &stream, ConfigMode configMode, TuningMode tuningMode,
+ int configure(const stream_t& stream, ConfigMode configMode, TuningMode tuningMode,
int ipuOutputFormat = -1);
- int decodeStatsData(TuningMode tuningMode,
- std::shared_ptr statsBuffer,
+ int decodeStatsData(TuningMode tuningMode, std::shared_ptr statsBuffer,
std::shared_ptr graphConfig = nullptr);
- int runIspAdapt(const IspSettings* ispSettings, int64_t settingSequence = -1, int32_t streamId = -1);
- //Get ISP param from mult-stream ISP param adaptation
+ int runIspAdapt(const IspSettings* ispSettings, int64_t settingSequence = -1,
+ int32_t streamId = -1);
+ // Get ISP param from mult-stream ISP param adaptation
ia_binary_data* getIpuParameter(int64_t sequence = -1, int streamId = -1);
int getPalOutputDataSize(const ia_isp_bxt_program_group* programGroup);
-private:
+ private:
DISALLOW_COPY_AND_ASSIGN(IspParamAdaptor);
- int deepCopyProgramGroup(const ia_isp_bxt_program_group *pgPtr,
- cca::cca_program_group *programGroup);
+ int deepCopyProgramGroup(const ia_isp_bxt_program_group* pgPtr,
+ cca::cca_program_group* programGroup);
int getDataFromProgramGroup();
int initProgramGroupForAllStreams(ConfigMode configMode);
- void initInputParams(cca::cca_pal_input_params *params);
+ void initInputParams(cca::cca_pal_input_params* params);
void updatePalDataForVideoPipe(ia_binary_data dest);
@@ -104,27 +104,27 @@ class IspParamAdaptor {
// map from sequence to ia_binary_data
std::multimap mSequenceToDataMap;
};
- void updateIspParameterMap(IspParameter* ispParam, int64_t dataSeq,
- int64_t settingSeq, ia_binary_data curIpuParam);
- int runIspAdaptL(ia_isp_bxt_program_group *pgPtr, ia_isp_bxt_gdc_limits* mbrData,
+ void updateIspParameterMap(IspParameter* ispParam, int64_t dataSeq, int64_t settingSeq,
+ ia_binary_data curIpuParam);
+ int runIspAdaptL(ia_isp_bxt_program_group* pgPtr, ia_isp_bxt_gdc_limits* mbrData,
const IspSettings* ispSettings, int64_t settingSequence,
- ia_binary_data *binaryData, int32_t streamId = -1);
+ ia_binary_data* binaryData, int32_t streamId = -1);
- //Allocate memory for mIspParameters
+ // Allocate memory for mIspParameters
int allocateIspParamBuffers();
- //Release memory for mIspParameters
+ // Release memory for mIspParameters
void releaseIspParamBuffers();
// Dumping methods for debugging purposes.
void dumpIspParameter(int streamId, int64_t sequence, ia_binary_data binaryData);
// Enable or disable kernels according to environment variables for debug purpose.
- void updateKernelToggles(cca::cca_program_group *programGroup);
- void dumpProgramGroup(ia_isp_bxt_program_group *pgPtr);
- void applyMediaFormat(const AiqResult* aiqResult,
- ia_media_format* mediaFormat, bool* useLinearGamma);
+ void updateKernelToggles(cca::cca_program_group* programGroup);
+ void dumpProgramGroup(ia_isp_bxt_program_group* pgPtr);
+ void applyMediaFormat(const AiqResult* aiqResult, ia_media_format* mediaFormat,
+ bool* useLinearGamma);
void dumpCscMatrix(const ia_isp_bxt_csc* cscMatrix);
#ifdef PAL_DEBUG
- void loadPalBinFile(ia_binary_data *binaryData);
+ void loadPalBinFile(ia_binary_data* binaryData);
#endif
void applyCscMatrix(ia_isp_bxt_csc* cscMatrix);
void updateResultFromAlgo(ia_binary_data* binaryData, int64_t sequence);
@@ -141,18 +141,18 @@ class IspParamAdaptor {
TuningMode mTuningMode;
int mIpuOutputFormat;
- //Guard for IspParamAdaptor public API
+ // Guard for IspParamAdaptor public API
Mutex mIspAdaptorLock;
std::map mStreamIdToPGOutSizeMap;
std::map mStreamIdToMbrDataMap;
static const int ISP_PARAM_QUEUE_SIZE = MAX_SETTING_COUNT;
- std::map mStreamIdToIspParameterMap; // map from stream id to IspParameter
+ std::map mStreamIdToIspParameterMap; // map from stream id to IspParameter
ia_binary_data mLastPalDataForVideoPipe;
- //Guard lock for ipu parameter
+ // Guard lock for ipu parameter
Mutex mIpuParamLock;
std::unordered_map mStreamIdToPalInputParamsMap;
std::shared_ptr mGraphConfig;
- IntelCca *mIntelCca;
+ IntelCca* mIntelCca;
int mGammaTmOffset;
struct PalRecord {
@@ -161,4 +161,4 @@ class IspParamAdaptor {
};
std::vector mPalRecords; // Save PAL offset info for overwriting PAL
};
-} // namespace icamera
+} // namespace icamera
diff --git a/src/core/IspSettings.h b/src/core/IspSettings.h
index 8049243b..f9ebc649 100644
--- a/src/core/IspSettings.h
+++ b/src/core/IspSettings.h
@@ -22,7 +22,7 @@
namespace icamera {
-struct IspImageEnhancement{
+struct IspImageEnhancement {
char manualSharpness;
char manualBrightness;
char manualContrast;
@@ -38,12 +38,14 @@ struct IspSettings {
IspImageEnhancement manualSettings;
ia_binary_data* palOverride;
// DOL_FEATURE_S
- short vbp; // Used for DOL camera
+ short vbp; // Used for DOL camera
// DOL_FEATURE_E
float zoom;
camera_mount_type_t sensorMountType;
- IspSettings() { CLEAR(*this); zoom = 1.0f; }
+ IspSettings() {
+ CLEAR(*this);
+ zoom = 1.0f;
+ }
};
-} // namespace icamera
-
+} // namespace icamera
diff --git a/src/core/LensHw.cpp b/src/core/LensHw.cpp
index 47a33a21..7d21bedc 100644
--- a/src/core/LensHw.cpp
+++ b/src/core/LensHw.cpp
@@ -23,15 +23,13 @@
namespace icamera {
-LensHw::LensHw(int cameraId):
- mCameraId(cameraId),
- mLensSubdev(nullptr),
- mLastLensPosition(0),
- mLensMovementStartTime(0) {
-}
+LensHw::LensHw(int cameraId)
+ : mCameraId(cameraId),
+ mLensSubdev(nullptr),
+ mLastLensPosition(0),
+ mLensMovementStartTime(0) {}
-LensHw::~LensHw() {
-}
+LensHw::~LensHw() {}
int LensHw::init() {
std::string lensName = PlatformData::getLensName(mCameraId);
@@ -45,7 +43,7 @@ int LensHw::init() {
CameraUtils::getSubDeviceName(lensName.c_str(), subDevName);
if (!subDevName.empty()) {
mLensSubdev = V4l2DeviceFactory::getSubDev(mCameraId, subDevName);
- mLensName=lensName;
+ mLensName = lensName;
return OK;
}
@@ -77,7 +75,7 @@ int LensHw::setFocusStep(int steps) {
return mLensSubdev->SetControl(V4L2_CID_FOCUS_RELATIVE, steps);
}
-int LensHw::getFocusPosition(int &position) {
+int LensHw::getFocusPosition(int& position) {
CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__);
return mLensSubdev->GetControl(V4L2_CID_FOCUS_ABSOLUTE, &position);
}
@@ -92,10 +90,9 @@ int LensHw::stopAutoFocus(void) {
return mLensSubdev->SetControl(V4L2_CID_AUTO_FOCUS_STOP, 0);
}
-int LensHw::getAutoFocusStatus(int &status) {
+int LensHw::getAutoFocusStatus(int& status) {
CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__);
- return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_STATUS,
- reinterpret_cast(&status));
+ return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_STATUS, reinterpret_cast(&status));
}
int LensHw::setAutoFocusRange(int value) {
@@ -103,7 +100,7 @@ int LensHw::setAutoFocusRange(int value) {
return mLensSubdev->SetControl(V4L2_CID_AUTO_FOCUS_RANGE, value);
}
-int LensHw::getAutoFocusRange(int &value) {
+int LensHw::getAutoFocusRange(int& value) {
CheckAndLogError(!mLensSubdev, NO_INIT, "%s: No Lens device inited.", __func__);
return mLensSubdev->GetControl(V4L2_CID_AUTO_FOCUS_RANGE, &value);
}
@@ -127,4 +124,4 @@ int LensHw::getLatestPosition(int& lensPosition, unsigned long long& time) {
time = mLensMovementStartTime;
return OK;
}
-} // namespace icamera
+} // namespace icamera
diff --git a/src/core/LensHw.h b/src/core/LensHw.h
index b327f23f..eab0c166 100644
--- a/src/core/LensHw.h
+++ b/src/core/LensHw.h
@@ -35,8 +35,7 @@ typedef ::cros::V4L2Subdevice V4L2Subdevice;
*
*/
class LensHw {
-
-public:
+ public:
LensHw(int cameraId);
~LensHw();
@@ -46,21 +45,22 @@ class LensHw {
int setFocusPosition(int position);
int setFocusStep(int steps);
- int getFocusPosition(int &position);
+ int getFocusPosition(int& position);
int startAutoFocus(void);
int stopAutoFocus(void);
- int getAutoFocusStatus(int &status);
+ int getAutoFocusStatus(int& status);
int setAutoFocusRange(int value);
- int getAutoFocusRange(int &value);
+ int getAutoFocusRange(int& value);
int getLatestPosition(int& lensPosition, unsigned long long& time);
bool isLensSubdevAvailable() { return (mLensSubdev != nullptr); }
-private:
+ private:
int mCameraId;
V4L2Subdevice* mLensSubdev;
std::string mLensName;
int mLastLensPosition;
- unsigned long long mLensMovementStartTime; /*!< In microseconds */
+ // In microseconds
+ unsigned long long mLensMovementStartTime;
}; // class LensHW
-} // namespace icamera
+} // namespace icamera
diff --git a/src/core/PSysProcessor.cpp b/src/core/PSysProcessor.cpp
index 2a6e69b9..f2520b2a 100644
--- a/src/core/PSysProcessor.cpp
+++ b/src/core/PSysProcessor.cpp
@@ -40,7 +40,7 @@
* it's based on sensor vblank, psys iterating time
* and thread scheduling
*/
-#define SOF_EVENT_MARGIN (5000000) // 5ms
+#define SOF_EVENT_MARGIN (5000000) // 5ms
#define SOF_EVENT_MAX_MARGIN (60000000) // 60ms
#define EXTREME_STRENGTH_LEVEL4 (-120)
@@ -52,42 +52,50 @@ using std::shared_ptr;
using std::unique_ptr;
namespace icamera {
-PSysProcessor::PSysProcessor(int cameraId, ParameterGenerator *pGenerator) :
- mCameraId(cameraId),
- mParameterGenerator(pGenerator),
- // ISP_CONTROL_S
- mUpdatedIspIndex(-1),
- mUsedIspIndex(-1),
- // ISP_CONTROL_E
- mCurConfigMode(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL),
- mTuningMode(TUNING_MODE_MAX),
- mRawPort(INVALID_PORT),
- mSofSequence(-1),
- mOpaqueRawPort(INVALID_PORT),
- mHoldRawBuffers(false),
- mLastStillTnrSequence(-1),
- mStatus(PIPELINE_UNCREATED) {
+PSysProcessor::PSysProcessor(int cameraId, ParameterGenerator* pGenerator)
+ : mCameraId(cameraId),
+ mParameterGenerator(pGenerator),
+ mScheduler(nullptr),
+ // ISP_CONTROL_S
+ mUpdatedIspIndex(-1),
+ mUsedIspIndex(-1),
+ // ISP_CONTROL_E
+ mCurConfigMode(CAMERA_STREAM_CONFIGURATION_MODE_NORMAL),
+ mTuningMode(TUNING_MODE_MAX),
+ mRawPort(INVALID_PORT),
+ mSofSequence(-1),
+ mOpaqueRawPort(INVALID_PORT),
+ mHoldRawBuffers(false),
+ mLastStillTnrSequence(-1),
+ mStatus(PIPELINE_UNCREATED) {
mProcessThread = new ProcessThread(this);
// ISP_CONTROL_S
allocPalControlBuffers();
// ISP_CONTROL_E
CLEAR(mSofTimestamp);
+
+ if (PlatformData::isSchedulerEnabled(mCameraId))
+ mScheduler = new CameraScheduler();
}
PSysProcessor::~PSysProcessor() {
// ISP_CONTROL_S
- for (int i = 0; i < IA_PAL_CONTROL_BUFFER_SIZE; i++)
- free(mPalCtrlBuffers[i].data);
+ for (int i = 0; i < IA_PAL_CONTROL_BUFFER_SIZE; i++) free(mPalCtrlBuffers[i].data);
mUpdatedIspIndex = -1;
mUsedIspIndex = -1;
+
// ISP_CONTROL_E
mProcessThread->join();
delete mProcessThread;
+
+ // Delete PSysDAG before Scheduler because ~PSysDAG() needs Scheduler
+ mPSysDAGs.clear();
+ if (mScheduler) delete mScheduler;
}
int PSysProcessor::configure(const std::vector& configModes) {
- //Create PSysDAGs actually
+ // Create PSysDAGs actually
CheckAndLogError(mStatus == PIPELINE_CREATED, -1,
"@%s mStatus is in wrong status: PIPELINE_CREATED", __func__);
@@ -100,7 +108,7 @@ int PSysProcessor::configure(const std::vector& configModes) {
std::map outputFrameInfo;
stream_t stillStream = {}, videoStream = {};
- for (auto &outFrameInfo : mOutputFrameInfo) {
+ for (auto& outFrameInfo : mOutputFrameInfo) {
// Check if it's required to output raw image from ISYS
if (outFrameInfo.second.format == V4L2_PIX_FMT_SGRBG12) {
mRawPort = outFrameInfo.first;
@@ -123,8 +131,8 @@ int PSysProcessor::configure(const std::vector& configModes) {
}
int ret = OK;
- //Create PSysDAG according to real configure mode
- for (auto &cfg : mConfigModes) {
+ // Create PSysDAG according to real configure mode
+ for (auto& cfg : mConfigModes) {
if (mPSysDAGs.find(cfg) != mPSysDAGs.end()) {
continue;
}
@@ -134,32 +142,30 @@ int PSysProcessor::configure(const std::vector& configModes) {
CheckAndLogError(ret != OK, ret, "%s: can't get config for mode %d", __func__, cfg);
LOG1("%s, Create PSysDAG for ConfigMode %d", __func__, cfg);
- unique_ptr pSysDAG = unique_ptr(new PSysDAG(mCameraId, this));
+ unique_ptr pSysDAG = unique_ptr(new PSysDAG(mCameraId, mScheduler, this));
pSysDAG->setFrameInfo(mInputFrameInfo, outputFrameInfo);
bool useTnrOutBuffer = mOpaqueRawPort != INVALID_PORT;
- ret = pSysDAG->configure(tuningConfig.configMode, tuningConfig.tuningMode,
- useTnrOutBuffer);
+ ret = pSysDAG->configure(tuningConfig.configMode, tuningConfig.tuningMode, useTnrOutBuffer);
CheckAndLogError(ret != OK, ret, "@%s configure psys dag failed:%d", __func__, ret);
mPSysDAGs[tuningConfig.configMode] = std::move(pSysDAG);
- //Update default active config mode
+ // Update default active config mode
mCurConfigMode = tuningConfig.configMode;
mTuningMode = tuningConfig.tuningMode;
}
if (ret == OK) mStatus = PIPELINE_CREATED;
return ret;
-
}
-int PSysProcessor::registerUserOutputBufs(Port port, const shared_ptr