#define RK29_CAM_SENSOR_NT99340 nt99340 //oyyf@rock-chips.com
#define RK29_CAM_ISP_ICATCH7002_MI1040 icatchmi1040
#define RK29_CAM_ISP_ICATCH7002_OV5693 icatchov5693
+#define RK29_CAM_ISP_ICATCH7002_OV8825 icatchov8825 //zyt
+#define RK29_CAM_ISP_ICATCH7002_OV2720 icatchov2720 //zyt
#define RK29_CAM_SENSOR_NAME_OV7675 "ov7675"
#define RK29_CAM_SENSOR_NAME_OV9650 "ov9650"
#define RK29_CAM_SENSOR_NAME_HM5065 "hm5065"
#define RK29_CAM_ISP_NAME_ICATCH7002_MI1040 "icatchmi1040"
#define RK29_CAM_ISP_NAME_ICATCH7002_OV5693 "icatchov5693"
+#define RK29_CAM_ISP_NAME_ICATCH7002_OV8825 "icatchov8825" //zyt
+#define RK29_CAM_ISP_NAME_ICATCH7002_OV2720 "icatchov2720" //zyt
//Sensor full resolution define
#define ov7675_FULL_RESOLUTION 0x30000 // 0.3 megapixel
#define nt99340_FULL_RESOLUTION 0x300000 // oyyf@rock-chips.com: 3 megapixel 2048*1536
#define icatchmi1040_FULL_RESOLUTION 0x200000
#define icatchov5693_FULL_RESOLUTION 0x500000
+#define icatchov8825_FULL_RESOLUTION 0x800000 //zyt
+#define icatchov2720_FULL_RESOLUTION 0x210000 //zyt
#define end_FULL_RESOLUTION 0x00
//Sensor i2c addr define
#define hm5065_I2C_ADDR 0x3e
#define icatchmi1040_I2C_ADDR 0x78
#define icatchov5693_I2C_ADDR 0x78
+#define icatchov8825_I2C_ADDR 0x78 //zyt
+#define icatchov2720_I2C_ADDR 0x78 //zyt
#define end_I2C_ADDR INVALID_VALUE
#define icatchov5693_PWRSEQ (SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_PWR,0)|\
SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_HWRST,2)|\
SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_CLKIN,1))
+
+#define icatchov8825_PWRSEQ (SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_PWR,0)|\
+ SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_HWRST,2)|\
+ SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_CLKIN,1)) //zyt
+
+#define icatchov2720_PWRSEQ (SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_PWR,0)|\
+ SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_HWRST,2)|\
+ SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_CLKIN,1)) //zyt
+
#define icatchmi1040_PWRSEQ (SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_PWR,0)|\
SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_HWRST,2)|\
SENSOR_PWRSEQ_SET(SENSOR_PWRSEQ_CLKIN,1))
*h = 1200;\r
break;\r
}\r
+ case 0x210000:\r
+ {\r
+ *w = 1920;\r
+ *h = 1080;\r
+ break;\r
+ }\r
\r
case 0x300000:\r
{\r
default n
help
Choose Y here if you have this this sensor and it is attach to icatch7002
+
+config ICATCH7002_OV8825
+ depends on SOC_CAMERA_ICATCH7002
+ tristate "icatch7002 attached ov8825"
+ default n
+ help
+	  Choose Y here if you have this sensor and it is attached to icatch7002
+
+config ICATCH7002_OV2720
+ depends on SOC_CAMERA_ICATCH7002
+ tristate "icatch7002 attached ov2720"
+ default n
+ help
+	  Choose Y here if you have this sensor and it is attached to icatch7002
+
config ICATCH7002_MI1040
depends on SOC_CAMERA_ICATCH7002
tristate "icatch7002 attached mi1040"
#obj-$(CONFIG_SOC_CAMERA_ICATCH7002) += icatch_spi_host.o
obj-$(CONFIG_ICATCH7002_MI1040) += icatch7002_mi1040.o
obj-$(CONFIG_ICATCH7002_OV5693) += icatch7002_ov5693.o
+obj-$(CONFIG_ICATCH7002_OV8825) += icatch7002_ov8825.o
+obj-$(CONFIG_ICATCH7002_OV2720) += icatch7002_ov2720.o
obj-$(CONFIG_SOC_CAMERA_ICATCH7002) += burn_spi_sample_code_0910.o
obj-$(CONFIG_SOC_CAMERA_ICATCH7002) += app_i2c_lib_icatch.o
}else if(value == WqCmd_af_continues)
set_val = 3;
else{
-
+ DEBUG_TRACE("%s:focus value is invalidate!\n",__func__);
}
EXISP_I2C_FocusModeSet(set_val);
}
}
+ int staus_value = 0;
+ if(value == WqCmd_af_continues)
+ //staus_value = 0x10;
+ goto icatch_sensor_set_auto_focus_end;
+ else if(value == WqCmd_af_single)
+ staus_value = 0x0;
+
+
while (cnt--) {
- if (EXISP_I2C_AFStatusGet() == 0) {
+ if (EXISP_I2C_AFStatusGet() == staus_value) {
break;
}
msleep(30);
if (cnt <= 0) {
DEBUG_TRACE("%s: focus timeout %d\n",__func__, value);
- //__dump_i2c(0x7005, 0x7005);
+ __dump_i2c(0x7200, 0x727f);
+
+ __dump_i2c(0x7005, 0x7006);
return 1;
}
DEBUG_TRACE("%s: focus fail %d\n",__func__, value);
return 1;
}
-
+icatch_sensor_set_auto_focus_end:
DEBUG_TRACE("%s: focus success %d\n\n",__func__, value);
return 0;
}
- int icatch_s_fmt(struct i2c_client *client, struct v4l2_mbus_framefmt *mf)
+ int icatch_s_fmt(struct i2c_client *client, struct v4l2_mbus_framefmt *mf,bool is_capture)
{
const struct sensor_datafmt *fmt;
struct generic_sensor*sgensor = to_generic_sensor(client);
set_h = 1944;
res_set = OUTPUT_QSXGA;
}
+ else if (((set_w <= 3264) && (set_h <= 2448)) && (supported_size & OUTPUT_QUXGA))
+ {
+ set_w = 3264;
+ set_h = 2448;
+ res_set = OUTPUT_QUXGA;
+ }
else
{
set_w = 1280;
// sensor_set_isp_output_res(client,res_set);
//res will be setted
sensor->isp_priv_info.curRes = res_set;
+ if(is_capture)
+ sensor->isp_priv_info.curPreviewCapMode = CAPTURE_MODE;
+ else
+ sensor->isp_priv_info.curPreviewCapMode = PREVIEW_MODE;
+
mf->width = set_w;
mf->height = set_h;
//enter capture or preview mode
fsize->discrete.width = 2592;
fsize->discrete.height = 1944;
fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
+ }
+ else if ((sensor->isp_priv_info.supportedSize[fsize->index] & OUTPUT_QUXGA))
+ {
+
+ fsize->discrete.width = 3264;
+ fsize->discrete.height = 2448;
+ fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
} else {
err = -1;
}
static int icatch_set_isp_output_res(struct i2c_client *client,enum ISP_OUTPUT_RES outputSize){
struct generic_sensor*sgensor = to_generic_sensor(client);
struct specific_sensor*sensor = to_specific_sensor(sgensor);
-#if 0
+#if 1
u8 res_sel = 0;
switch(outputSize) {
case OUTPUT_QCIF:
case OUTPUT_SXGA:
case OUTPUT_UXGA:
case OUTPUT_1080P:
+ res_sel = 0x02;
case OUTPUT_QXGA:
case OUTPUT_QSXGA:
- res_sel = IMAGE_CAP_NONZSL_SINGLE;// non-zsl single
+ res_sel = 0x0A;// non-zsl single
+ break;
+ case OUTPUT_QUXGA:
+ res_sel = 0x01;// non-zsl single
break;
default:
DEBUG_TRACE("%s %s isp not support this resolution!\n",sgensor->dev_name,__FUNCTION__);
#endif
int cnt = 16;
//preview mode set
- if(outputSize == OUTPUT_QSXGA){
+ if((sensor->isp_priv_info.curPreviewCapMode == CAPTURE_MODE)
+ /*(outputSize == OUTPUT_QSXGA) || (outputSize == OUTPUT_QSXGA)*/){
+ //in capture mode , isp output full size if size have not been set.
if(sensor->isp_priv_info.hdr == FALSE){
if(IsZSL){
printk("IsZSL EXISP_PvSizeSet(0x0A)\n");
- EXISP_PvSizeSet(0x0A);
+ EXISP_PvSizeSet(res_sel);
//polling until AE ready
while (((EXISP_I2C_3AStatusGet() & 0x1) == 0) && (cnt -- > 0)) {
DEBUG_TRACE("%s %s polling AE ready\n",sgensor->dev_name,__FUNCTION__);
EXISP_ImageCapSet(IMAGE_CAP_HDR);
sensor_interrupt_wait_clear();
}
- sensor->isp_priv_info.curPreviewCapMode = CAPTURE_NONE_ZSL_MODE;
+ //sensor->isp_priv_info.curPreviewCapMode = CAPTURE_NONE_ZSL_MODE;
}
else{
- EXISP_PvSizeSet(IMAGE_CAP_SINGLE);
+ EXISP_PvSizeSet(res_sel);
//polling until AE ready
while (((EXISP_I2C_3AStatusGet() & 0x1) == 0) && (cnt -- > 0)) {
DEBUG_TRACE("%s %s polling AE ready\n",sgensor->dev_name,__FUNCTION__);
mdelay(50);
}
sensor_interrupt_wait_clear();
- sensor->isp_priv_info.curPreviewCapMode = PREVIEW_MODE;
+ //sensor->isp_priv_info.curPreviewCapMode = PREVIEW_MODE;
#if 1
DEBUG_TRACE("\n %s pid = 0x%x\n", sgensor->dev_name, EXISP_I2C_VendorIdGet);
DEBUG_TRACE("fw version is 0x%x\n ",EXISP_I2C_FWVersionGet());
int icatch_get_rearid_by_lowlevelmode(struct soc_camera_device *icd,UINT16 *rear_id);
int icatch_get_frontid_by_lowlevelmode(struct soc_camera_device *icd,UINT16 *front_id);
extern int icatch_sensor_init(struct i2c_client *client);
-extern int icatch_s_fmt(struct i2c_client *client, struct v4l2_mbus_framefmt *mf);
+extern int icatch_s_fmt(struct i2c_client *client, struct v4l2_mbus_framefmt *mf,bool is_capture);
extern int icatch_s_stream(struct v4l2_subdev *sd, int enable);
extern int sensor_set_get_control_cb(struct soc_camera_device *icd, struct sensor_v4l2ctrl_info_s *ctrl_info,
struct v4l2_ext_control *ext_ctrl,bool is_set);
OUTPUT_1080P =0x0800, //1920*1080
OUTPUT_QXGA =0x1000, // 2048*1536
OUTPUT_QSXGA =0x2000, // 2592*1944
+ OUTPUT_QUXGA =0x4000, //3264*2448
};
*/
static int sensor_s_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)
{
- return icatch_s_fmt(client, mf);
+ return icatch_s_fmt(client, mf,capture);
}
/*
* the function is called after sensor register setting finished in VIDIOC_S_FMT
--- /dev/null
+
+#include "icatch7002_common.h"
+/*
+* Driver Version Note
+*v0.0.1: this driver is compatible with generic_sensor
+*v0.0.3:
+* add sensor_focus_af_const_pause_usr_cb;
+*/
+static int version = KERNEL_VERSION(0,0,3);
+module_param(version, int, S_IRUGO);
+
+
+
+static int debug;
+module_param(debug, int, S_IRUGO|S_IWUSR);
+
+#define dprintk(level, fmt, arg...) do { \
+ if (debug >= level) \
+ printk(KERN_WARNING fmt , ## arg); } while (0)
+
+/* Sensor Driver Configuration Begin */
+#define SENSOR_NAME RK29_CAM_ISP_ICATCH7002_OV2720
+#define SENSOR_V4L2_IDENT V4L2_IDENT_ICATCH7002_OV2720
+#define SENSOR_ID 0x2720
+#define SENSOR_BUS_PARAM (SOCAM_MASTER | SOCAM_PCLK_SAMPLE_RISING|\
+ SOCAM_HSYNC_ACTIVE_HIGH| SOCAM_VSYNC_ACTIVE_HIGH|\
+ SOCAM_DATA_ACTIVE_HIGH|SOCAM_DATAWIDTH_8 |SOCAM_MCLK_24MHZ)
+#define SENSOR_PREVIEW_W 1280
+#define SENSOR_PREVIEW_H 960
+#define SENSOR_PREVIEW_FPS                   30000     // 30fps
+#define SENSOR_FULLRES_L_FPS                 15000     // 15fps
+#define SENSOR_FULLRES_H_FPS                 15000     // 15fps
+#define SENSOR_720P_FPS 30000
+#define SENSOR_1080P_FPS 0
+
+
+static unsigned int SensorConfiguration = 0;
+static unsigned int SensorChipID[] = {SENSOR_ID};
+/* Sensor Driver Configuration End */
+
+
+#define SENSOR_NAME_STRING(a) STR(CONS(SENSOR_NAME, a))
+//#define SENSOR_NAME_VARFUN(a) CONS(SENSOR_NAME, a)
+
+//#define SensorRegVal(a,b) CONS4(SensorReg,SENSOR_REGISTER_LEN,Val,SENSOR_VALUE_LEN)(a,b)
+//#define sensor_write(client,reg,v) CONS4(sensor_write_reg,SENSOR_REGISTER_LEN,val,SENSOR_VALUE_LEN)(client,(reg),(v))
+//#define sensor_read(client,reg,v) CONS4(sensor_read_reg,SENSOR_REGISTER_LEN,val,SENSOR_VALUE_LEN)(client,(reg),(v))
+//#define sensor_write_array generic_sensor_write_array
+
+
+
+/*
+* The follow setting need been filled.
+*
+* Must Filled:
+* sensor_init_data : Sensor initial setting;
+*   sensor_fullres_lowfps_data : Sensor full resolution setting with best quality, recommended for video;
+* sensor_preview_data : Sensor preview resolution setting, recommand it is vga or svga;
+* sensor_softreset_data : Sensor software reset register;
+*   sensor_check_id_data :  Sensor chip id register;
+*
+* Optional filled:
+* sensor_fullres_highfps_data: Sensor full resolution setting with high framerate, recommand for video;
+* sensor_720p: Sensor 720p setting, it is for video;
+* sensor_1080p: Sensor 1080p setting, it is for video;
+*
+* :::::WARNING:::::
+* The SensorEnd which is the setting end flag must be filled int the last of each setting;
+*/
+
+/* Sensor initial setting */
+static struct rk_sensor_reg sensor_init_data[] ={
+ SensorStreamChk,
+ SensorEnd
+};
+/* Senor full resolution setting: recommand for capture */
+static struct rk_sensor_reg sensor_fullres_lowfps_data[] ={
+ SensorEnd
+
+};
+
+/* Senor full resolution setting: recommand for video */
+static struct rk_sensor_reg sensor_fullres_highfps_data[] ={
+ SensorEnd
+};
+/* Preview resolution setting*/
+static struct rk_sensor_reg sensor_preview_data[] =
+{
+ SensorStreamChk,
+ SensorEnd
+};
+/* 1280x720 */
+static struct rk_sensor_reg sensor_720p[]={
+ SensorStreamChk,
+ SensorEnd
+};
+
+/* 1920x1080 */
+static struct rk_sensor_reg sensor_1080p[]={
+ SensorEnd
+};
+
+
+static struct rk_sensor_reg sensor_softreset_data[]={
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_check_id_data[]={
+ SensorEnd
+};
+/*
+* The following setting must been filled, if the function is turn on by CONFIG_SENSOR_xxxx
+*/
+static struct rk_sensor_reg sensor_WhiteB_Auto[]=
+{
+ SensorEnd
+};
+/* Cloudy Colour Temperature : 6500K - 8000K */
+static struct rk_sensor_reg sensor_WhiteB_Cloudy[]=
+{
+ SensorEnd
+};
+/* ClearDay Colour Temperature : 5000K - 6500K */
+static struct rk_sensor_reg sensor_WhiteB_ClearDay[]=
+{
+ //Sunny
+ SensorEnd
+};
+/* Office Colour Temperature : 3500K - 5000K */
+static struct rk_sensor_reg sensor_WhiteB_TungstenLamp1[]=
+{
+ //Office
+ SensorEnd
+
+};
+/* Home Colour Temperature : 2500K - 3500K */
+static struct rk_sensor_reg sensor_WhiteB_TungstenLamp2[]=
+{
+ //Home
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_WhiteBalanceSeqe[] = {sensor_WhiteB_Auto, sensor_WhiteB_TungstenLamp1,sensor_WhiteB_TungstenLamp2,
+ sensor_WhiteB_ClearDay, sensor_WhiteB_Cloudy,NULL,
+};
+
+static struct rk_sensor_reg sensor_Brightness0[]=
+{
+ // Brightness -2
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Brightness1[]=
+{
+ // Brightness -1
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Brightness2[]=
+{
+ // Brightness 0
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Brightness3[]=
+{
+ // Brightness +1
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Brightness4[]=
+{
+ // Brightness +2
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Brightness5[]=
+{
+ // Brightness +3
+
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_BrightnessSeqe[] = {sensor_Brightness0, sensor_Brightness1, sensor_Brightness2, sensor_Brightness3,
+ sensor_Brightness4, sensor_Brightness5,NULL,
+};
+
+static struct rk_sensor_reg sensor_Effect_Normal[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Effect_WandB[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Effect_Sepia[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Effect_Negative[] =
+{
+ //Negative
+ SensorEnd
+};
+static struct rk_sensor_reg sensor_Effect_Bluish[] =
+{
+ // Bluish
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Effect_Green[] =
+{
+ // Greenish
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_EffectSeqe[] = {sensor_Effect_Normal, sensor_Effect_WandB, sensor_Effect_Negative,sensor_Effect_Sepia,
+ sensor_Effect_Bluish, sensor_Effect_Green,NULL,
+};
+
+static struct rk_sensor_reg sensor_Exposure0[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure1[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure2[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure3[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure4[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure5[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Exposure6[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg *sensor_ExposureSeqe[] = {sensor_Exposure0, sensor_Exposure1, sensor_Exposure2, sensor_Exposure3,
+ sensor_Exposure4, sensor_Exposure5,sensor_Exposure6,NULL,
+};
+
+static struct rk_sensor_reg sensor_Saturation0[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Saturation1[]=
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Saturation2[]=
+{
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_SaturationSeqe[] = {sensor_Saturation0, sensor_Saturation1, sensor_Saturation2, NULL,};
+
+static struct rk_sensor_reg sensor_Contrast0[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Contrast1[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Contrast2[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Contrast3[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Contrast4[]=
+{
+
+ SensorEnd
+};
+
+
+static struct rk_sensor_reg sensor_Contrast5[]=
+{
+
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Contrast6[]=
+{
+
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_ContrastSeqe[] = {sensor_Contrast0, sensor_Contrast1, sensor_Contrast2, sensor_Contrast3,
+ sensor_Contrast4, sensor_Contrast5, sensor_Contrast6, NULL,
+};
+static struct rk_sensor_reg sensor_SceneAuto[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_SceneNight[] =
+{
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_SceneSeqe[] = {sensor_SceneAuto, sensor_SceneNight,NULL,};
+
+static struct rk_sensor_reg sensor_Zoom0[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Zoom1[] =
+{
+ SensorEnd
+};
+
+static struct rk_sensor_reg sensor_Zoom2[] =
+{
+ SensorEnd
+};
+
+
+static struct rk_sensor_reg sensor_Zoom3[] =
+{
+ SensorEnd
+};
+static struct rk_sensor_reg *sensor_ZoomSeqe[] = {sensor_Zoom0, sensor_Zoom1, sensor_Zoom2, sensor_Zoom3, NULL,};
+
+/*
+* User could be add v4l2_querymenu in sensor_controls by new_usr_v4l2menu
+*/
+static struct v4l2_querymenu sensor_menus[] =
+{
+ //white balance
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,0,"auto",0),
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,1,"incandescent",0),
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,2,"fluorescent",0),
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,3,"daylight",0),
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,4,"cloudy-daylight",0),
+
+ //speical effect
+ new_usr_v4l2menu(V4L2_CID_EFFECT,0,"normal",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,1,"aqua",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,2,"negative",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,3,"sepia",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,4,"mono",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,5,"none",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,6,"aura",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,7,"vintage",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,8,"vintage2",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,9,"lomo",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,10,"red",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,11,"blue",0),
+ new_usr_v4l2menu(V4L2_CID_EFFECT,12,"green",0),
+
+ //scence
+ new_usr_v4l2menu(V4L2_CID_SCENE,0,"normal",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,1,"auto",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,2,"landscape",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,3,"night",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,4,"night_portrait",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,5,"snow",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,6,"sports",0),
+ new_usr_v4l2menu(V4L2_CID_SCENE,7,"candlelight",0),
+
+ //antibanding
+ new_usr_v4l2menu(V4L2_CID_ANTIBANDING,0,"50hz",0),
+ new_usr_v4l2menu(V4L2_CID_ANTIBANDING,1,"60hz",0),
+
+ //ISO
+ new_usr_v4l2menu(V4L2_CID_ISO,0,"auto",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,1,"50",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,2,"100",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,3,"200",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,4,"400",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,5,"800",0),
+ new_usr_v4l2menu(V4L2_CID_ISO,6,"1600",0),
+};
+/*
+* User could be add v4l2_queryctrl in sensor_controls by new_user_v4l2ctrl
+*/
+static struct sensor_v4l2ctrl_usr_s sensor_controls[] =
+{
+ new_user_v4l2ctrl(V4L2_CID_DO_WHITE_BALANCE,V4L2_CTRL_TYPE_MENU,"White Balance Control", 0, 4, 1, 0,sensor_set_get_control_cb, NULL),
+// new_user_v4l2ctrl(V4L2_CID_BRIGHTNESS,V4L2_CTRL_TYPE_INTEGER,"Brightness Control", -3, 2, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_EXPOSURE,V4L2_CTRL_TYPE_INTEGER,"Exposure Control", -3, 3, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_EFFECT,V4L2_CTRL_TYPE_MENU,"Effect Control", 0, 12, 1, 5,sensor_set_get_control_cb, NULL),
+// new_user_v4l2ctrl(V4L2_CID_CONTRAST,V4L2_CTRL_TYPE_INTEGER,"Contrast Control", -4, 4, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_SCENE,V4L2_CTRL_TYPE_MENU,"Scene Control", 0, 7, 1, 1,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_ANTIBANDING,V4L2_CTRL_TYPE_MENU,"Antibanding Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_WHITEBALANCE_LOCK,V4L2_CTRL_TYPE_BOOLEAN,"WhiteBalanceLock Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_EXPOSURE_LOCK,V4L2_CTRL_TYPE_BOOLEAN,"ExposureLock Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_METERING_AREAS,V4L2_CTRL_TYPE_INTEGER,"MeteringAreas Control", 0, 1, 1, 1,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_WDR,V4L2_CTRL_TYPE_BOOLEAN,"WDR Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_EDGE,V4L2_CTRL_TYPE_BOOLEAN,"EDGE Control", 0, 1, 1, 1,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_JPEG_EXIF,V4L2_CTRL_TYPE_BOOLEAN,"Exif Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),
+ new_user_v4l2ctrl(V4L2_CID_ISO,V4L2_CTRL_TYPE_MENU,"Exif Control", 0, 6, 1, 0,sensor_set_get_control_cb, NULL),
+};
+
+//MUST define the current used format as the first item
+static struct rk_sensor_datafmt sensor_colour_fmts[] = {
+ {V4L2_MBUS_FMT_UYVY8_2X8, V4L2_COLORSPACE_JPEG}
+};
+
+
+/*
+**********************************************************
+* Following is local code:
+*
+* Please codeing your program here
+**********************************************************
+*/
+/*
+**********************************************************
+* Following is callback
+* If necessary, you could coding these callback
+**********************************************************
+*/
+/*
+* the function is called in open sensor
+*/
+/*
+ * Open-sensor hook: delegates all bring-up to the shared icatch7002 ISP
+ * init path (icatch_sensor_init), which this file includes via
+ * icatch7002_common.h. Returns whatever the common init returns.
+ */
+static int sensor_activate_cb(struct i2c_client *client)
+{
+	return icatch_sensor_init(client);
+}
+/*
+* the function is called in close sensor
+*/
+/*
+ * Close-sensor hook. Intentionally a no-op here; power-down is presumably
+ * handled by the common icatch7002 layer / board power sequence — TODO confirm.
+ */
+static int sensor_deactivate_cb(struct i2c_client *client)
+{
+
+	return 0;
+}
+/*
+* the function is called before sensor register setting in VIDIOC_S_FMT
+*/
+/*
+ * VIDIOC_S_FMT top-half: forwards the requested mbus format to the common
+ * icatch handler. 'capture' selects capture vs preview programming in
+ * icatch_s_fmt (see the is_capture parameter there).
+ */
+static int sensor_s_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)
+{
+	return icatch_s_fmt(client, mf,capture);
+}
+/*
+* the function is called after sensor register setting finished in VIDIOC_S_FMT
+*/
+static int sensor_s_fmt_cb_bh (struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)
+{
+ return 0;
+}
+static int sensor_try_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf)
+{
+ return 0;
+}
+
+static int sensor_softrest_usr_cb(struct i2c_client *client,struct rk_sensor_reg *series)
+{
+
+ return 0;
+}
+/*
+ * Chip-id check callback: returns the cached chip id instead of reading the
+ * sensor over I2C (the sensor sits behind the iCatch ISP). NOTE(review):
+ * assumes info_priv.chip_id[0] was populated earlier by the common layer —
+ * TODO confirm against icatch7002_common.
+ */
+static int sensor_check_id_usr_cb(struct i2c_client *client,struct rk_sensor_reg *series)
+{
+	struct generic_sensor *sensor = to_generic_sensor(client);
+	return sensor->info_priv.chip_id[0];
+}
+/*
+ * PM suspend hook: accepts only PM_EVENT_SUSPEND (logs and returns 0);
+ * any other event is logged and rejected with -EINVAL. No hardware
+ * access is performed here.
+ */
+static int sensor_suspend(struct soc_camera_device *icd, pm_message_t pm_msg)
+{
+	//struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd));
+
+	if (pm_msg.event == PM_EVENT_SUSPEND) {
+		SENSOR_DG("Suspend");
+
+	} else {
+		SENSOR_TR("pm_msg.event(0x%x) != PM_EVENT_SUSPEND\n",pm_msg.event);
+		return -EINVAL;
+	}
+	return 0;
+}
+
+/*
+ * PM resume hook: only logs; re-initialization is presumably done on the
+ * next open via sensor_activate_cb — TODO confirm.
+ */
+static int sensor_resume(struct soc_camera_device *icd)
+{
+
+	SENSOR_DG("Resume");
+
+	return 0;
+
+}
+/*
+* the function is v4l2 control V4L2_CID_HFLIP callback
+*/
+static int sensor_v4l2ctrl_mirror_cb(struct soc_camera_device *icd, struct sensor_v4l2ctrl_info_s *ctrl_info,
+ struct v4l2_ext_control *ext_ctrl)
+{
+ return 0;
+}
+
+/*
+* the function is v4l2 control V4L2_CID_VFLIP callback
+*/
+static int sensor_v4l2ctrl_flip_cb(struct soc_camera_device *icd, struct sensor_v4l2ctrl_info_s *ctrl_info,
+ struct v4l2_ext_control *ext_ctrl)
+{
+ return 0;
+}
+/*
+* the function is v4l2 control V4L2_CID_HFLIP callback
+*/
+
+static int sensor_flip_cb(struct i2c_client *client, int flip)
+{
+ int err = 0;
+
+ return err;
+}
+static int sensor_mirror_cb(struct i2c_client *client, int flip)
+{
+ int err = 0;
+
+ return err;
+}
+
+/*
+* the functions are focus callbacks
+*/
+static int sensor_focus_init_usr_cb(struct i2c_client *client){
+ return 0;
+}
+
+static int sensor_focus_af_single_usr_cb(struct i2c_client *client){
+ return 0;
+}
+
+static int sensor_focus_af_near_usr_cb(struct i2c_client *client){
+ return 0;
+}
+
+static int sensor_focus_af_far_usr_cb(struct i2c_client *client){
+ return 0;
+}
+
+static int sensor_focus_af_specialpos_usr_cb(struct i2c_client *client,int pos){
+ return 0;
+}
+
+static int sensor_focus_af_const_usr_cb(struct i2c_client *client){
+
+ return 0;
+}
+static int sensor_focus_af_const_pause_usr_cb(struct i2c_client *client)
+{
+ return 0;
+}
+static int sensor_focus_af_close_usr_cb(struct i2c_client *client){
+ return 0;
+}
+
+static int sensor_focus_af_zoneupdate_usr_cb(struct i2c_client *client, int *zone_tm_pos)
+{
+ return 0;
+}
+
+/*
+face defect call back
+*/
+static int sensor_face_detect_usr_cb(struct i2c_client *client,int on){
+ return 0;
+}
+
+/*
+* The function can been run in sensor_init_parametres which run in sensor_probe, so user can do some
+* initialization in the function.
+*/
+/*
+ * Per-sensor probe-time initialization (called from sensor_init_parameters):
+ * wires the common icatch stream/enum-framesizes callbacks and declares the
+ * ISP output sizes this OV2720 build supports. supportedSizeNum must match
+ * the number of supportedSize[] entries filled below (2: QUADVGA and 1080P).
+ */
+static void sensor_init_parameters_user(struct specific_sensor* spsensor,struct soc_camera_device *icd)
+{
+	spsensor->common_sensor.sensor_cb.sensor_s_stream_cb = icatch_s_stream;
+	spsensor->common_sensor.sensor_cb.sensor_enum_framesizes = icatch_enum_framesizes;
+
+	spsensor->isp_priv_info.outputSize =OUTPUT_1080P|OUTPUT_QUADVGA;
+	spsensor->isp_priv_info.supportedSizeNum = 2;
+	spsensor->isp_priv_info.supportedSize[0] = OUTPUT_QUADVGA;
+	spsensor->isp_priv_info.supportedSize[1] = OUTPUT_1080P;
+	return;
+}
+
+/*
+* :::::WARNING:::::
+* It is not allowed to modify the following code
+*/
+
+sensor_init_parameters_default_code();
+
+sensor_v4l2_struct_initialization();
+
+sensor_probe_default_code();
+
+sensor_remove_default_code();
+
+sensor_driver_default_module_code();
+
+
+
+
*/\r
static int sensor_s_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)\r
{\r
- return icatch_s_fmt(client, mf);\r
+ return icatch_s_fmt(client, mf,capture);\r
}\r
/*\r
* the function is called after sensor register setting finished in VIDIOC_S_FMT \r
--- /dev/null
+\r
+#include "icatch7002_common.h"\r
+/*\r
+* Driver Version Note\r
+*v0.0.1: this driver is compatible with generic_sensor\r
+*v0.0.3:\r
+* add sensor_focus_af_const_pause_usr_cb;\r
+*/\r
+static int version = KERNEL_VERSION(0,0,3);\r
+module_param(version, int, S_IRUGO);\r
+\r
+\r
+\r
+static int debug;\r
+module_param(debug, int, S_IRUGO|S_IWUSR);\r
+\r
+#define dprintk(level, fmt, arg...) do { \\r
+ if (debug >= level) \\r
+ printk(KERN_WARNING fmt , ## arg); } while (0)\r
+\r
+/* Sensor Driver Configuration Begin */\r
+#define SENSOR_NAME RK29_CAM_ISP_ICATCH7002_OV8825\r
+#define SENSOR_V4L2_IDENT V4L2_IDENT_ICATCH7002_OV8825\r
+#define SENSOR_ID 0x8825\r
+#define SENSOR_BUS_PARAM (SOCAM_MASTER | SOCAM_PCLK_SAMPLE_RISING|\\r
+ SOCAM_HSYNC_ACTIVE_HIGH| SOCAM_VSYNC_ACTIVE_HIGH|\\r
+ SOCAM_DATA_ACTIVE_HIGH|SOCAM_DATAWIDTH_8 |SOCAM_MCLK_24MHZ)\r
+#define SENSOR_PREVIEW_W 1280\r
+#define SENSOR_PREVIEW_H 960\r
+#define SENSOR_PREVIEW_FPS                   30000     // 30fps \r
+#define SENSOR_FULLRES_L_FPS                 15000     // 15fps\r
+#define SENSOR_FULLRES_H_FPS                 15000     // 15fps\r
+#define SENSOR_720P_FPS 30000\r
+#define SENSOR_1080P_FPS 0\r
+\r
+ \r
+static unsigned int SensorConfiguration = CFG_Focus |CFG_FocusContinues|CFG_FocusZone;\r
+static unsigned int SensorChipID[] = {SENSOR_ID};\r
+/* Sensor Driver Configuration End */\r
+\r
+\r
+#define SENSOR_NAME_STRING(a) STR(CONS(SENSOR_NAME, a))\r
+#define SENSOR_NAME_VARFUN(a) CONS(SENSOR_NAME, a)\r
+\r
+//#define SensorRegVal(a,b) CONS4(SensorReg,SENSOR_REGISTER_LEN,Val,SENSOR_VALUE_LEN)(a,b)\r
+//#define sensor_write(client,reg,v) CONS4(sensor_write_reg,SENSOR_REGISTER_LEN,val,SENSOR_VALUE_LEN)(client,(reg),(v))\r
+//#define sensor_read(client,reg,v) CONS4(sensor_read_reg,SENSOR_REGISTER_LEN,val,SENSOR_VALUE_LEN)(client,(reg),(v))\r
+//#define sensor_write_array generic_sensor_write_array\r
+\r
+\r
+\r
+/*\r
+* The follow setting need been filled.\r
+* \r
+* Must Filled:\r
+* sensor_init_data : Sensor initial setting;\r
+*   sensor_fullres_lowfps_data : Sensor full resolution setting with best quality, recommended for video;\r
+* sensor_preview_data : Sensor preview resolution setting, recommand it is vga or svga;\r
+* sensor_softreset_data : Sensor software reset register;\r
+*   sensor_check_id_data :  Sensor chip id register;\r
+*\r
+* Optional filled:\r
+* sensor_fullres_highfps_data: Sensor full resolution setting with high framerate, recommand for video;\r
+* sensor_720p: Sensor 720p setting, it is for video;\r
+* sensor_1080p: Sensor 1080p setting, it is for video;\r
+*\r
+* :::::WARNING:::::\r
+* The SensorEnd which is the setting end flag must be filled int the last of each setting;\r
+*/\r
+\r
+/* Sensor initial setting */\r
+static struct rk_sensor_reg sensor_init_data[] ={\r
+ SensorStreamChk,\r
+ SensorEnd\r
+};\r
+/* Senor full resolution setting: recommand for capture */\r
+static struct rk_sensor_reg sensor_fullres_lowfps_data[] ={\r
+ SensorStreamChk,\r
+ SensorEnd\r
+\r
+};\r
+\r
+/* Senor full resolution setting: recommand for video */\r
+static struct rk_sensor_reg sensor_fullres_highfps_data[] ={\r
+ SensorEnd\r
+};\r
+/* Preview resolution setting*/\r
+static struct rk_sensor_reg sensor_preview_data[] =\r
+{\r
+ SensorStreamChk,\r
+ SensorEnd\r
+};\r
+/* 1280x720 */\r
+static struct rk_sensor_reg sensor_720p[]={\r
+ SensorStreamChk,\r
+ SensorEnd\r
+};\r
+\r
+/* 1920x1080 */\r
+static struct rk_sensor_reg sensor_1080p[]={\r
+ SensorEnd\r
+};\r
+\r
+\r
+static struct rk_sensor_reg sensor_softreset_data[]={\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_check_id_data[]={\r
+ SensorEnd\r
+};\r
+/*\r
+* The following setting must been filled, if the function is turn on by CONFIG_SENSOR_xxxx\r
+*/\r
+static struct rk_sensor_reg sensor_WhiteB_Auto[]=\r
+{\r
+ SensorEnd\r
+};\r
+/* Cloudy Colour Temperature : 6500K - 8000K */\r
+static struct rk_sensor_reg sensor_WhiteB_Cloudy[]=\r
+{\r
+ SensorEnd\r
+};\r
+/* ClearDay Colour Temperature : 5000K - 6500K */\r
+static struct rk_sensor_reg sensor_WhiteB_ClearDay[]=\r
+{\r
+ //Sunny\r
+ SensorEnd\r
+};\r
+/* Office Colour Temperature : 3500K - 5000K */\r
+static struct rk_sensor_reg sensor_WhiteB_TungstenLamp1[]=\r
+{\r
+ //Office\r
+ SensorEnd\r
+\r
+};\r
+/* Home Colour Temperature : 2500K - 3500K */\r
+static struct rk_sensor_reg sensor_WhiteB_TungstenLamp2[]=\r
+{\r
+ //Home\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_WhiteBalanceSeqe[] = {sensor_WhiteB_Auto, sensor_WhiteB_TungstenLamp1,sensor_WhiteB_TungstenLamp2,\r
+ sensor_WhiteB_ClearDay, sensor_WhiteB_Cloudy,NULL,\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness0[]=\r
+{\r
+ // Brightness -2\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness1[]=\r
+{\r
+ // Brightness -1\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness2[]=\r
+{\r
+ // Brightness 0\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness3[]=\r
+{\r
+ // Brightness +1\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness4[]=\r
+{\r
+ // Brightness +2\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Brightness5[]=\r
+{\r
+ // Brightness +3\r
+\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_BrightnessSeqe[] = {sensor_Brightness0, sensor_Brightness1, sensor_Brightness2, sensor_Brightness3,\r
+ sensor_Brightness4, sensor_Brightness5,NULL,\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Effect_Normal[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Effect_WandB[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Effect_Sepia[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Effect_Negative[] =\r
+{\r
+ //Negative\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg sensor_Effect_Bluish[] =\r
+{\r
+ // Bluish\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Effect_Green[] =\r
+{\r
+ // Greenish\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_EffectSeqe[] = {sensor_Effect_Normal, sensor_Effect_WandB, sensor_Effect_Negative,sensor_Effect_Sepia,\r
+ sensor_Effect_Bluish, sensor_Effect_Green,NULL,\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure0[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure1[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure2[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure3[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure4[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure5[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Exposure6[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg *sensor_ExposureSeqe[] = {sensor_Exposure0, sensor_Exposure1, sensor_Exposure2, sensor_Exposure3,\r
+ sensor_Exposure4, sensor_Exposure5,sensor_Exposure6,NULL,\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Saturation0[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Saturation1[]=\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Saturation2[]=\r
+{\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_SaturationSeqe[] = {sensor_Saturation0, sensor_Saturation1, sensor_Saturation2, NULL,};\r
+\r
+static struct rk_sensor_reg sensor_Contrast0[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Contrast1[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Contrast2[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Contrast3[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Contrast4[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+\r
+static struct rk_sensor_reg sensor_Contrast5[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Contrast6[]=\r
+{\r
+\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_ContrastSeqe[] = {sensor_Contrast0, sensor_Contrast1, sensor_Contrast2, sensor_Contrast3,\r
+ sensor_Contrast4, sensor_Contrast5, sensor_Contrast6, NULL,\r
+};\r
+static struct rk_sensor_reg sensor_SceneAuto[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_SceneNight[] =\r
+{\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_SceneSeqe[] = {sensor_SceneAuto, sensor_SceneNight,NULL,};\r
+\r
+static struct rk_sensor_reg sensor_Zoom0[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Zoom1[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+static struct rk_sensor_reg sensor_Zoom2[] =\r
+{\r
+ SensorEnd\r
+};\r
+\r
+\r
+static struct rk_sensor_reg sensor_Zoom3[] =\r
+{\r
+ SensorEnd\r
+};\r
+static struct rk_sensor_reg *sensor_ZoomSeqe[] = {sensor_Zoom0, sensor_Zoom1, sensor_Zoom2, sensor_Zoom3, NULL,};\r
+\r
+/*\r
+* Users can add v4l2_querymenu entries to sensor_menus via new_usr_v4l2menu\r
+*/\r
+static struct v4l2_querymenu sensor_menus[] =\r
+{\r
+ //white balance\r
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,0,"auto",0),\r
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,1,"incandescent",0),\r
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,2,"fluorescent",0),\r
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,3,"daylight",0),\r
+ new_usr_v4l2menu(V4L2_CID_DO_WHITE_BALANCE,4,"cloudy-daylight",0),\r
+\r
+ //special effect\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,0,"normal",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,1,"aqua",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,2,"negative",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,3,"sepia",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,4,"mono",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,5,"none",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,6,"aura",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,7,"vintage",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,8,"vintage2",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,9,"lomo",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,10,"red",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,11,"blue",0),\r
+ new_usr_v4l2menu(V4L2_CID_EFFECT,12,"green",0),\r
+\r
+ //scene\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,0,"normal",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,1,"auto",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,2,"landscape",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,3,"night",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,4,"night_portrait",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,5,"snow",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,6,"sports",0),\r
+ new_usr_v4l2menu(V4L2_CID_SCENE,7,"candlelight",0),\r
+\r
+ //antibanding\r
+ new_usr_v4l2menu(V4L2_CID_ANTIBANDING,0,"50hz",0),\r
+ new_usr_v4l2menu(V4L2_CID_ANTIBANDING,1,"60hz",0),\r
+\r
+ //ISO\r
+ new_usr_v4l2menu(V4L2_CID_ISO,0,"auto",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,1,"50",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,2,"100",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,3,"200",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,4,"400",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,5,"800",0),\r
+ new_usr_v4l2menu(V4L2_CID_ISO,6,"1600",0),\r
+};\r
+/*\r
+* Users can add v4l2_queryctrl entries to sensor_controls via new_user_v4l2ctrl\r
+*/\r
+static struct sensor_v4l2ctrl_usr_s sensor_controls[] =\r
+{\r
+ new_user_v4l2ctrl(V4L2_CID_DO_WHITE_BALANCE,V4L2_CTRL_TYPE_MENU,"White Balance Control", 0, 4, 1, 0,sensor_set_get_control_cb, NULL),\r
+// new_user_v4l2ctrl(V4L2_CID_BRIGHTNESS,V4L2_CTRL_TYPE_INTEGER,"Brightness Control", -3, 2, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_EXPOSURE,V4L2_CTRL_TYPE_INTEGER,"Exposure Control", -3, 3, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_EFFECT,V4L2_CTRL_TYPE_MENU,"Effect Control", 0, 12, 1, 5,sensor_set_get_control_cb, NULL),\r
+// new_user_v4l2ctrl(V4L2_CID_CONTRAST,V4L2_CTRL_TYPE_INTEGER,"Contrast Control", -4, 4, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_SCENE,V4L2_CTRL_TYPE_MENU,"Scene Control", 0, 7, 1, 1,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_ANTIBANDING,V4L2_CTRL_TYPE_MENU,"Antibanding Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_WHITEBALANCE_LOCK,V4L2_CTRL_TYPE_BOOLEAN,"WhiteBalanceLock Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_EXPOSURE_LOCK,V4L2_CTRL_TYPE_BOOLEAN,"ExposureLock Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_METERING_AREAS,V4L2_CTRL_TYPE_INTEGER,"MeteringAreas Control", 0, 1, 1, 1,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_WDR,V4L2_CTRL_TYPE_BOOLEAN,"WDR Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_EDGE,V4L2_CTRL_TYPE_BOOLEAN,"EDGE Control", 0, 1, 1, 1,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_JPEG_EXIF,V4L2_CTRL_TYPE_BOOLEAN,"Exif Control", 0, 1, 1, 0,sensor_set_get_control_cb, NULL),\r
+ new_user_v4l2ctrl(V4L2_CID_ISO,V4L2_CTRL_TYPE_MENU,"Exif Control", 0, 6, 1, 0,sensor_set_get_control_cb, NULL),\r
+\r
+// new_user_v4l2ctrl(V4L2_CID_FOCUSZONE,V4L2_CTRL_TYPE_BOOLEAN,"FocusZone Control", 0, 1, 1, 1,sensor_focus_default_cb, NULL),\r
+// new_user_v4l2ctrl(V4L2_CID_FOCUS_ABSOLUTE,V4L2_CTRL_TYPE_INTEGER,"Focus Control", 0, 0xff, 1, 0,sensor_focus_default_cb, NULL),\r
+// new_user_v4l2ctrl(V4L2_CID_FOCUS_AUTO,V4L2_CTRL_TYPE_BOOLEAN,"Focus Control", 0, 1, 1, 0,sensor_focus_default_cb, NULL),\r
+// new_user_v4l2ctrl(V4L2_CID_FOCUS_CONTINUOUS,V4L2_CTRL_TYPE_BOOLEAN,"Focus Control", 0, 1, 1, 0,sensor_focus_default_cb, NULL),\r
+};\r
+\r
+//MUST define the currently used format as the first item\r
+static struct rk_sensor_datafmt sensor_colour_fmts[] = {\r
+ {V4L2_MBUS_FMT_UYVY8_2X8, V4L2_COLORSPACE_JPEG} \r
+};\r
+\r
+\r
+/*\r
+**********************************************************\r
+* Following is local code:\r
+* \r
+* Please write your code here\r
+**********************************************************\r
+*/\r
+/*\r
+**********************************************************\r
+* Following is callback\r
+* If necessary, you can implement these callbacks\r
+**********************************************************\r
+*/\r
+/*\r
+* This function is called when the sensor is opened\r
+*/\r
+static int sensor_activate_cb(struct i2c_client *client)\r
+{\r
+ return icatch_sensor_init(client);\r
+}\r
+/*\r
+* This function is called when the sensor is closed\r
+*/\r
+static int sensor_deactivate_cb(struct i2c_client *client)\r
+{\r
+ \r
+ return 0;\r
+}\r
+/*\r
+* the function is called before sensor register setting in VIDIOC_S_FMT \r
+*/\r
+static int sensor_s_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)\r
+{\r
+ return icatch_s_fmt(client, mf,capture);\r
+}\r
+/*\r
+* the function is called after sensor register setting finished in VIDIOC_S_FMT \r
+*/\r
+static int sensor_s_fmt_cb_bh (struct i2c_client *client,struct v4l2_mbus_framefmt *mf, bool capture)\r
+{\r
+ return 0;\r
+}\r
+static int sensor_try_fmt_cb_th(struct i2c_client *client,struct v4l2_mbus_framefmt *mf)\r
+{\r
+ return 0;\r
+}\r
+\r
+static int sensor_softrest_usr_cb(struct i2c_client *client,struct rk_sensor_reg *series)\r
+{\r
+ \r
+ return 0;\r
+}\r
+static int sensor_check_id_usr_cb(struct i2c_client *client,struct rk_sensor_reg *series)\r
+{\r
+ struct generic_sensor *sensor = to_generic_sensor(client);\r
+ return sensor->info_priv.chip_id[0];\r
+}\r
+static int sensor_suspend(struct soc_camera_device *icd, pm_message_t pm_msg)\r
+{\r
+ //struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd));\r
+ \r
+ if (pm_msg.event == PM_EVENT_SUSPEND) {\r
+ SENSOR_DG("Suspend");\r
+ \r
+ } else {\r
+ SENSOR_TR("pm_msg.event(0x%x) != PM_EVENT_SUSPEND\n",pm_msg.event);\r
+ return -EINVAL;\r
+ }\r
+ return 0;\r
+}\r
+\r
+static int sensor_resume(struct soc_camera_device *icd)\r
+{\r
+\r
+ SENSOR_DG("Resume");\r
+\r
+ return 0;\r
+\r
+}\r
+/*\r
+* the function is v4l2 control V4L2_CID_HFLIP callback \r
+*/\r
+static int sensor_v4l2ctrl_mirror_cb(struct soc_camera_device *icd, struct sensor_v4l2ctrl_info_s *ctrl_info, \r
+ struct v4l2_ext_control *ext_ctrl)\r
+{\r
+ return 0;\r
+}\r
+\r
+/*\r
+* the function is v4l2 control V4L2_CID_VFLIP callback \r
+*/\r
+static int sensor_v4l2ctrl_flip_cb(struct soc_camera_device *icd, struct sensor_v4l2ctrl_info_s *ctrl_info, \r
+ struct v4l2_ext_control *ext_ctrl)\r
+{\r
+ return 0;\r
+}\r
+/*\r
+* the following are the sensor flip/mirror callbacks for the i2c client\r
+*/\r
+\r
+static int sensor_flip_cb(struct i2c_client *client, int flip)\r
+{\r
+ int err = 0; \r
+\r
+ return err; \r
+}\r
+static int sensor_mirror_cb(struct i2c_client *client, int flip)\r
+{\r
+ int err = 0; \r
+\r
+ return err; \r
+}\r
+\r
+/*\r
+* the functions are focus callbacks\r
+*/\r
+static int sensor_focus_init_usr_cb(struct i2c_client *client){\r
+ return 0;\r
+}\r
+\r
+static int sensor_focus_af_single_usr_cb(struct i2c_client *client){\r
+ return icatch_sensor_set_auto_focus(client, WqCmd_af_single,NULL);\r
+}\r
+\r
+static int sensor_focus_af_near_usr_cb(struct i2c_client *client){\r
+ return 0;\r
+}\r
+\r
+static int sensor_focus_af_far_usr_cb(struct i2c_client *client){\r
+ return 0;\r
+}\r
+\r
+static int sensor_focus_af_specialpos_usr_cb(struct i2c_client *client,int pos){\r
+ return 0;\r
+}\r
+\r
+static int sensor_focus_af_const_usr_cb(struct i2c_client *client){\r
+\r
+ return icatch_sensor_set_auto_focus(client, WqCmd_af_continues,NULL);\r
+}\r
+static int sensor_focus_af_const_pause_usr_cb(struct i2c_client *client)\r
+{\r
+ return 0;\r
+}\r
+static int sensor_focus_af_close_usr_cb(struct i2c_client *client){\r
+ return 0;\r
+}\r
+\r
+static int sensor_focus_af_zoneupdate_usr_cb(struct i2c_client *client, int *zone_tm_pos)\r
+{\r
+ return icatch_sensor_set_auto_focus(client, WqCmd_af_update_zone,zone_tm_pos);\r
+}\r
+\r
+/*\r
+face detect callback\r
+*/\r
+static int sensor_face_detect_usr_cb(struct i2c_client *client,int on){\r
+ return 0;\r
+}\r
+\r
+/*\r
+* This function runs in sensor_init_parameters, which runs in sensor_probe, so users can do some\r
+* initialization in the function. \r
+*/\r
+static void sensor_init_parameters_user(struct specific_sensor* spsensor,struct soc_camera_device *icd)\r
+{\r
+ spsensor->common_sensor.sensor_cb.sensor_s_stream_cb = icatch_s_stream;\r
+ spsensor->isp_priv_info.focus_zone.lx = 256;\r
+ spsensor->isp_priv_info.focus_zone.rx = 768;\r
+ spsensor->isp_priv_info.focus_zone.ty = 256;\r
+ spsensor->isp_priv_info.focus_zone.dy = 768;\r
+ spsensor->common_sensor.sensor_cb.sensor_enum_framesizes = icatch_enum_framesizes;\r
+\r
+ spsensor->isp_priv_info.outputSize = OUTPUT_QUXGA|OUTPUT_QUADVGA;\r
+ spsensor->isp_priv_info.supportedSizeNum = 2;\r
+ spsensor->isp_priv_info.supportedSize[0] = OUTPUT_QUADVGA;\r
+ spsensor->isp_priv_info.supportedSize[1] = OUTPUT_QUXGA;\r
+ return;\r
+}\r
+\r
+/*\r
+* :::::WARNING:::::\r
+* It is not allowed to modify the following code\r
+*/\r
+\r
+sensor_init_parameters_default_code();\r
+\r
+sensor_v4l2_struct_initialization();\r
+\r
+sensor_probe_default_code();\r
+\r
+sensor_remove_default_code();\r
+\r
+sensor_driver_default_module_code();\r
+\r
+\r
+\r
+\r
case 0x800000:\r
default:\r
{\r
- cam_ipp_mem = 0x800000;\r
+ cam_ipp_mem = 0xC00000;\r
cam_pmem = 0x1900000;\r
break;\r
}\r
cam_pmem = 0xc00000;\r
break;\r
}\r
-\r
+ case 0x210000:\r
+ {\r
+ cam_ipp_mem = 0xc00000;\r
+ cam_pmem = 0xc00000;\r
+ break;\r
+ }\r
case 0x100000:\r
{\r
cam_ipp_mem = 0x600000;\r
bool en;
struct hdr_exposure frame[3];
};
+struct rk_cif_crop
+{
+ spinlock_t lock;
+ struct v4l2_rect c;
+ struct v4l2_rect bounds;
+};
+
#define CONFIG_CIF_STOP_SYNC 1
struct rk_camera_dev
int hostid;
int icd_width;
int icd_height;
+ struct rk_cif_crop cropinfo;
struct rk29camera_platform_data *pdata;
struct resource *res;
static int rk_camera_s_stream(struct soc_camera_device *icd, int enable);
static void rk_camera_capture_process(struct work_struct *work);
-#define OPTIMIZE_MEMORY_USE
+// #define OPTIMIZE_MEMORY_USE
/*
* Videobuf operations
struct v4l2_subdev *sd;
int ret,i,icd_catch;
struct rk_camera_frmivalenum *fival_list,*fival_nxt;
+ struct v4l2_cropcap cropcap;
+ struct v4l2_mbus_framefmt mf;
+ const struct soc_camera_format_xlate *xlate = NULL;
mutex_lock(&camera_lock);
goto ebusy;
#endif
v4l2_subdev_call(sd, core, ioctl, RK29_CAM_SUBDEV_CB_REGISTER,(void*)(&pcdev->icd_cb));
+
+ if (v4l2_subdev_call(sd, video, cropcap, &cropcap) == 0) {
+ memcpy(&pcdev->cropinfo.bounds ,&cropcap.bounds,sizeof(struct v4l2_rect));
+ } else {
+ xlate = soc_camera_xlate_by_fourcc(icd, V4L2_PIX_FMT_NV12);
+ mf.width = 10000;
+ mf.height = 10000;
+ mf.field = V4L2_FIELD_NONE;
+ mf.code = xlate->code;
+ mf.reserved[6] = 0xfefe5a5a;
+ v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+
+ pcdev->cropinfo.bounds.left = 0;
+ pcdev->cropinfo.bounds.top = 0;
+ pcdev->cropinfo.bounds.width = mf.width;
+ pcdev->cropinfo.bounds.height = mf.height;
+ }
}
pcdev->icd = icd;
pcdev->icd_init = 0;
{
return;
}
+static int rk_camera_get_crop(struct soc_camera_device *icd,struct v4l2_crop *crop)
+{
+ struct soc_camera_host *ici =to_soc_camera_host(icd->dev.parent);
+ struct rk_camera_dev *pcdev = ici->priv;
+ spin_lock(&pcdev->cropinfo.lock);
+ memcpy(&crop->c,&pcdev->cropinfo.c,sizeof(struct v4l2_rect));
+ spin_unlock(&pcdev->cropinfo.lock);
+
+ return 0;
+}
static int rk_camera_set_crop(struct soc_camera_device *icd,
struct v4l2_crop *a)
{
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_mbus_framefmt mf;
struct v4l2_rect rect;
- int ret,usr_w,usr_h;
+ int ret,usr_w,usr_h,sensor_w,sensor_h;
int stream_on = 0;
-
+ int ratio, bounds_aspect;
+
usr_w = pix->width;
usr_h = pix->height;
- RK30_CAM_DEBUG_TRACE("%s enter width:%d height:%d\n",__FUNCTION__,usr_w,usr_h);
+
+ RK30_CAM_DEBUG_TRACE("enter width:%d height:%d\n",usr_w,usr_h);
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
if (!xlate) {
dev_err(dev, "Format %x not found\n", pix->pixelformat);
stream_on = read_cif_reg(pcdev->base,CIF_CIF_CTRL);
if (stream_on & ENABLE_CAPTURE)
write_cif_reg(pcdev->base,CIF_CIF_CTRL, (stream_on & (~ENABLE_CAPTURE)));
-
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
- if (mf.code != xlate->code)
- return -EINVAL;
- #ifdef CONFIG_VIDEO_RK29_WORK_IPP
+
+ mf.width = pix->width;
+ mf.height = pix->height;
+ mf.field = pix->field;
+ mf.colorspace = pix->colorspace;
+ mf.code = xlate->code;
+ mf.reserved[0] = pix->priv; /* ddl@rock-chips.com : v0.3.3 */
+ mf.reserved[1] = 0;
+
+ ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ if (mf.code != xlate->code)
+ return -EINVAL;
+
+ if ((pcdev->cropinfo.c.width == pcdev->cropinfo.bounds.width) &&
+ (pcdev->cropinfo.c.height == pcdev->cropinfo.bounds.height)) {
+ bounds_aspect = (pcdev->cropinfo.bounds.width*10/pcdev->cropinfo.bounds.height);
+ if ((mf.width*10/mf.height) != bounds_aspect) {
+ RK30_CAM_DEBUG_TRACE("User request fov unchanged in %dx%d, But sensor %dx%d is croped, so switch to full resolution %dx%d\n",
+ usr_w,usr_h,mf.width, mf.height,pcdev->cropinfo.bounds.width,pcdev->cropinfo.bounds.height);
+
+ mf.width = pcdev->cropinfo.bounds.width/4;
+ mf.height = pcdev->cropinfo.bounds.height/4;
+
+ mf.field = pix->field;
+ mf.colorspace = pix->colorspace;
+ mf.code = xlate->code;
+ mf.reserved[0] = pix->priv;
+ mf.reserved[1] = 0;
+
+ ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ if (mf.code != xlate->code)
+ return -EINVAL;
+ }
+ }
+
+ sensor_w = mf.width;
+ sensor_h = mf.height;
+
+ ratio = ((mf.width*mf.reserved[1])/100)&(~0x03); // 4 align
+ mf.width -= ratio;
+
+ ratio = ((ratio*mf.height/mf.width)+1)&(~0x01); // 2 align
+ mf.height -= ratio;
+
if ((mf.width != usr_w) || (mf.height != usr_h)) {
- int ratio;
- if (unlikely((mf.width <16) || (mf.width > 8190) || (mf.height < 16) || (mf.height > 8190))) {
- RK30_CAM_DEBUG_TRACE("Senor and IPP both invalid source resolution(%dx%d)\n",mf.width,mf.height);
- ret = -EINVAL;
- goto RK_CAMERA_SET_FMT_END;
- }
- if (unlikely((usr_w <16)||(usr_h < 16))) {
- RK30_CAM_DEBUG_TRACE("Senor and IPP both invalid destination resolution(%dx%d)\n",usr_w,usr_h);
- ret = -EINVAL;
- goto RK_CAMERA_SET_FMT_END;
- }
- //need crop ?
- if((mf.width*10/mf.height) != (usr_w*10/usr_h)) {
- ratio = ((mf.width*10/usr_w) >= (mf.height*10/usr_h))?(mf.height*10/usr_h):(mf.width*10/usr_w);
- pcdev->host_width = ratio*usr_w/10;
- pcdev->host_height = ratio*usr_h/10;
- //for ipp ,need 4 bit alligned.
- pcdev->host_width &= ~CROP_ALIGN_BYTES;
- pcdev->host_height &= ~CROP_ALIGN_BYTES;
- RK30_CAM_DEBUG_TRACE("ratio = %d ,host:%d*%d\n",ratio,pcdev->host_width,pcdev->host_height);
- }
- else { // needn't crop ,just scaled by ipp
- pcdev->host_width = mf.width;
- pcdev->host_height = mf.height;
- }
- }
- else {
- pcdev->host_width = usr_w;
- pcdev->host_height = usr_h;
- }
- #else
- //according to crop and scale capability to change , here just cropt to user needed
+
if (unlikely((mf.width <16) || (mf.width > 8190) || (mf.height < 16) || (mf.height > 8190))) {
- RK30_CAM_DEBUG_TRACE("Senor invalid source resolution(%dx%d)\n",mf.width,mf.height);
+ RKCAMERA_TR("Senor and IPP both invalid source resolution(%dx%d)\n",mf.width,mf.height);
ret = -EINVAL;
goto RK_CAMERA_SET_FMT_END;
- }
+ }
if (unlikely((usr_w <16)||(usr_h < 16))) {
- RK30_CAM_DEBUG_TRACE("Senor invalid destination resolution(%dx%d)\n",usr_w,usr_h);
+ RKCAMERA_TR("Senor and IPP both invalid destination resolution(%dx%d)\n",usr_w,usr_h);
ret = -EINVAL;
goto RK_CAMERA_SET_FMT_END;
- }
- pcdev->host_width = usr_w;
- pcdev->host_height = usr_h;
- #endif
+ }
+
+ spin_lock(&pcdev->cropinfo.lock);
+ if (((mf.width*10/mf.height) != (usr_w*10/usr_h))) {
+ if ((pcdev->cropinfo.c.width == 0)&&(pcdev->cropinfo.c.height == 0)) {
+ //Scale + crop center to keep the aspect ratio unchanged
+ ratio = ((mf.width*10/usr_w) >= (mf.height*10/usr_h))?(mf.height*10/usr_h):(mf.width*10/usr_w);
+ pcdev->host_width = ratio*usr_w/10;
+ pcdev->host_height = ratio*usr_h/10;
+ pcdev->host_width &= ~CROP_ALIGN_BYTES;
+ pcdev->host_height &= ~CROP_ALIGN_BYTES;
+
+ pcdev->host_left = ((sensor_w-pcdev->host_width )>>1);
+ pcdev->host_top = ((sensor_h-pcdev->host_height)>>1);
+ } else {
+ //Scale + crop(user define)
+ pcdev->host_width = pcdev->cropinfo.c.width*mf.width/pcdev->cropinfo.bounds.width;
+ pcdev->host_height = pcdev->cropinfo.c.height*mf.height/pcdev->cropinfo.bounds.height;
+ pcdev->host_left = (pcdev->cropinfo.c.left*mf.width/pcdev->cropinfo.bounds.width);
+ pcdev->host_top = (pcdev->cropinfo.c.top*mf.height/pcdev->cropinfo.bounds.height);
+ }
+
+ pcdev->host_left &= (~0x01);
+ pcdev->host_top &= (~0x01);
+ } else {
+ if ((pcdev->cropinfo.c.width == 0)&&(pcdev->cropinfo.c.height == 0)) {
+ //Crop Center for cif can work , then scale
+ pcdev->host_width = mf.width;
+ pcdev->host_height = mf.height;
+ pcdev->host_left = ((sensor_w - mf.width)>>1)&(~0x01);
+ pcdev->host_top = ((sensor_h - mf.height)>>1)&(~0x01);
+ } else {
+ //Crop center for cif can work + crop(user define), then scale
+ pcdev->host_width = pcdev->cropinfo.c.width*mf.width/pcdev->cropinfo.bounds.width;
+ pcdev->host_height = pcdev->cropinfo.c.height*mf.height/pcdev->cropinfo.bounds.height;
+ pcdev->host_left = (pcdev->cropinfo.c.left*mf.width/pcdev->cropinfo.bounds.width)+((sensor_w - mf.width)>>1);
+ pcdev->host_top = (pcdev->cropinfo.c.top*mf.height/pcdev->cropinfo.bounds.height)+((sensor_h - mf.height)>>1);
+ }
+
+ pcdev->host_left &= (~0x01);
+ pcdev->host_top &= (~0x01);
+ }
+ spin_unlock(&pcdev->cropinfo.lock);
+ } else {
+ spin_lock(&pcdev->cropinfo.lock);
+ if ((pcdev->cropinfo.c.width == 0)&&(pcdev->cropinfo.c.height == 0)) {
+ pcdev->host_width = mf.width;
+ pcdev->host_height = mf.height;
+ pcdev->host_left = 0;
+ pcdev->host_top = 0;
+ } else {
+ pcdev->host_width = pcdev->cropinfo.c.width*mf.width/pcdev->cropinfo.bounds.width;
+ pcdev->host_height = pcdev->cropinfo.c.height*mf.height/pcdev->cropinfo.bounds.height;
+ pcdev->host_left = (pcdev->cropinfo.c.left*mf.width/pcdev->cropinfo.bounds.width);
+ pcdev->host_top = (pcdev->cropinfo.c.top*mf.height/pcdev->cropinfo.bounds.height);
+ }
+ spin_unlock(&pcdev->cropinfo.lock);
+ }
+
icd->sense = NULL;
if (!ret) {
- RK30_CAM_DEBUG_TRACE("%s..%d.. host:%d*%d , sensor output:%d*%d,user demand:%d*%d\n",__FUNCTION__,__LINE__,
- pcdev->host_width,pcdev->host_height,mf.width,mf.height,usr_w,usr_h);
rect.width = pcdev->host_width;
rect.height = pcdev->host_height;
- rect.left = ((mf.width-pcdev->host_width )>>1)&(~0x01);
- rect.top = ((mf.height-pcdev->host_height)>>1)&(~0x01);
- pcdev->host_left = rect.left;
- pcdev->host_top = rect.top;
+ rect.left = pcdev->host_left;
+ rect.top = pcdev->host_top;
down(&pcdev->zoominfo.sem);
- #if CIF_DO_CROP
- pcdev->zoominfo.a.c.left = 0;
- pcdev->zoominfo.a.c.top = 0;
- pcdev->zoominfo.a.c.width = pcdev->host_width*100/pcdev->zoominfo.zoom_rate;
- pcdev->zoominfo.a.c.width &= ~CROP_ALIGN_BYTES;
- pcdev->zoominfo.a.c.height = pcdev->host_height*100/pcdev->zoominfo.zoom_rate;
- pcdev->zoominfo.a.c.height &= ~CROP_ALIGN_BYTES;
- pcdev->zoominfo.vir_width = pcdev->zoominfo.a.c.width;
- pcdev->zoominfo.vir_height = pcdev->zoominfo.a.c.height;
- //recalculate the CIF width & height
- rect.width = pcdev->zoominfo.a.c.width ;
- rect.height = pcdev->zoominfo.a.c.height;
- rect.left = ((((pcdev->host_width - pcdev->zoominfo.a.c.width)>>1))+pcdev->host_left)&(~0x01);
- rect.top = ((((pcdev->host_height - pcdev->zoominfo.a.c.height)>>1))+pcdev->host_top)&(~0x01);
- #else
- pcdev->zoominfo.a.c.width = pcdev->host_width*100/pcdev->zoominfo.zoom_rate;
- pcdev->zoominfo.a.c.width &= ~CROP_ALIGN_BYTES;
- pcdev->zoominfo.a.c.height = pcdev->host_height*100/pcdev->zoominfo.zoom_rate;
- pcdev->zoominfo.a.c.height &= ~CROP_ALIGN_BYTES;
- //now digital zoom use ipp to do crop and scale
- if(pcdev->zoominfo.zoom_rate != 100) {
- pcdev->zoominfo.a.c.left = ((pcdev->host_width - pcdev->zoominfo.a.c.width)>>1)&(~0x01);
- pcdev->zoominfo.a.c.top = ((pcdev->host_height - pcdev->zoominfo.a.c.height)>>1)&(~0x01);
- }
- else {
- pcdev->zoominfo.a.c.left = 0;
- pcdev->zoominfo.a.c.top = 0;
- }
- pcdev->zoominfo.vir_width = pcdev->host_width;
- pcdev->zoominfo.vir_height = pcdev->host_height;
- #endif
+#if CIF_DO_CROP // this crop is only for digital zoom
+ pcdev->zoominfo.a.c.left = 0;
+ pcdev->zoominfo.a.c.top = 0;
+ pcdev->zoominfo.a.c.width = pcdev->host_width*100/pcdev->zoominfo.zoom_rate;
+ pcdev->zoominfo.a.c.width &= ~CROP_ALIGN_BYTES;
+ pcdev->zoominfo.a.c.height = pcdev->host_height*100/pcdev->zoominfo.zoom_rate;
+ pcdev->zoominfo.a.c.height &= ~CROP_ALIGN_BYTES;
+ pcdev->zoominfo.vir_width = pcdev->zoominfo.a.c.width;
+ pcdev->zoominfo.vir_height = pcdev->zoominfo.a.c.height;
+ //recalculate the CIF width & height
+ rect.width = pcdev->zoominfo.a.c.width ;
+ rect.height = pcdev->zoominfo.a.c.height;
+ rect.left = ((((pcdev->host_width - pcdev->zoominfo.a.c.width)>>1))+pcdev->host_left)&(~0x01);
+ rect.top = ((((pcdev->host_height - pcdev->zoominfo.a.c.height)>>1))+pcdev->host_top)&(~0x01);
+#else
+ pcdev->zoominfo.a.c.width = pcdev->host_width*100/pcdev->zoominfo.zoom_rate;
+ pcdev->zoominfo.a.c.width &= ~CROP_ALIGN_BYTES;
+ pcdev->zoominfo.a.c.height = pcdev->host_height*100/pcdev->zoominfo.zoom_rate;
+ pcdev->zoominfo.a.c.height &= ~CROP_ALIGN_BYTES;
+ //now digital zoom use ipp to do crop and scale
+ if(pcdev->zoominfo.zoom_rate != 100){
+ pcdev->zoominfo.a.c.left = ((pcdev->host_width - pcdev->zoominfo.a.c.width)>>1)&(~0x01);
+ pcdev->zoominfo.a.c.top = ((pcdev->host_height - pcdev->zoominfo.a.c.height)>>1)&(~0x01);
+ } else {
+ pcdev->zoominfo.a.c.left = 0;
+ pcdev->zoominfo.a.c.top = 0;
+ }
+ pcdev->zoominfo.vir_width = pcdev->host_width;
+ pcdev->zoominfo.vir_height = pcdev->host_height;
+#endif
up(&pcdev->zoominfo.sem);
/* ddl@rock-chips.com: IPP work limit check */
if ((pcdev->zoominfo.a.c.width != usr_w) || (pcdev->zoominfo.a.c.height != usr_h)) {
if (usr_w > 0x7f0) {
if (((usr_w>>1)&0x3f) && (((usr_w>>1)&0x3f) <= 8)) {
- RK30_CAM_DEBUG_TRACE("IPP Destination resolution(%dx%d, ((%d div 1) mod 64)=%d is <= 8)",usr_w,usr_h, usr_w, (int)((usr_w>>1)&0x3f));
+ RKCAMERA_TR("IPP Destination resolution(%dx%d, ((%d div 1) mod 64)=%d is <= 8)",usr_w,usr_h, usr_w, (int)((usr_w>>1)&0x3f));
ret = -EINVAL;
goto RK_CAMERA_SET_FMT_END;
}
} else {
if ((usr_w&0x3f) && ((usr_w&0x3f) <= 8)) {
- RK30_CAM_DEBUG_TRACE("IPP Destination resolution(%dx%d, %d mod 64=%d is <= 8)",usr_w,usr_h, usr_w, (int)(usr_w&0x3f));
+ RKCAMERA_TR("IPP Destination resolution(%dx%d, %d mod 64=%d is <= 8)",usr_w,usr_h, usr_w, (int)(usr_w&0x3f));
ret = -EINVAL;
goto RK_CAMERA_SET_FMT_END;
}
}
}
- RK30_CAM_DEBUG_TRACE("%s..%s icd width:%d user width:%d (zoom: %dx%d@(%d,%d)->%dx%d)\n",__FUNCTION__,xlate->host_fmt->name,
- rect.width, pix->width, pcdev->zoominfo.a.c.width,pcdev->zoominfo.a.c.height, pcdev->zoominfo.a.c.left,pcdev->zoominfo.a.c.top,
+
+ RK30_CAM_DEBUG_TRACE("%s CIF Host:%dx%d@(%d,%d) Sensor:%dx%d->%dx%d User crop:(%d,%d,%d,%d)in(%d,%d) (zoom: %dx%d@(%d,%d)->%dx%d)\n",xlate->host_fmt->name,
+ pcdev->host_width,pcdev->host_height,pcdev->host_left,pcdev->host_top,
+ sensor_w,sensor_h,mf.width,mf.height,
+ pcdev->cropinfo.c.left,pcdev->cropinfo.c.top,pcdev->cropinfo.c.width,pcdev->cropinfo.c.height,
+ pcdev->cropinfo.bounds.width,pcdev->cropinfo.bounds.height,
+ pcdev->zoominfo.a.c.width,pcdev->zoominfo.a.c.height, pcdev->zoominfo.a.c.left,pcdev->zoominfo.a.c.top,
pix->width, pix->height);
rk_camera_setup_format(icd, pix->pixelformat, mf.code, &rect);
if (stream_on & ENABLE_CAPTURE)
write_cif_reg(pcdev->base,CIF_CIF_CTRL, (read_cif_reg(pcdev->base,CIF_CIF_CTRL) | ENABLE_CAPTURE));
if (ret)
- RK30_CAM_DEBUG_TRACE("\n%s..%d.. ret = %d \n",__FUNCTION__,__LINE__, ret);
+ RKCAMERA_TR("\n%s..%d.. ret = %d \n",__FUNCTION__,__LINE__, ret);
return ret;
}
static bool rk_camera_fmt_capturechk(struct v4l2_format *f)
char fov[9];
int i;
- strlcpy(cap->card, dev_name(pcdev->icd->pdev), 18);
+ strlcpy(cap->card, dev_name(pcdev->icd->pdev), 20);
memset(orientation,0x00,sizeof(orientation));
for (i=0; i<RK_CAM_NUM;i++) {
if ((pcdev->pdata->info[i].dev_name!=NULL) && (strcmp(dev_name(pcdev->icd->pdev), pcdev->pdata->info[i].dev_name) == 0)) {
INIT_LIST_HEAD(&pcdev->camera_work_queue);
spin_lock_init(&pcdev->lock);
spin_lock_init(&pcdev->camera_work_lock);
- // spin_lock_init(&pcdev->irq_lock);
+
+ memset(&pcdev->cropinfo.c,0x00,sizeof(struct v4l2_rect));
+ spin_lock_init(&pcdev->cropinfo.lock);
sema_init(&pcdev->zoominfo.sem,1);
/*
b->timestamp = vb->ts;
b->bytesused = vb->size;
b->sequence = vb->field_count >> 1;
+ b->reserved = vb->rk_code; /* ddl@rock-chips.com */
}
int videobuf_mmap_free(struct videobuf_queue *q)
V4L2_IDENT_MTK9335ISP = 320, /* ddl@rock-chips.com : MTK9335ISP support */
V4L2_IDENT_ICATCH7002_MI1040 = 321,
V4L2_IDENT_ICATCH7002_OV5693 =322,
+ V4L2_IDENT_ICATCH7002_OV8825 = 323, //zyt
+ V4L2_IDENT_ICATCH7002_OV2720 = 324, //zyt
/* Conexant MPEG encoder/decoders: reserved range 400-420 */
V4L2_IDENT_CX23418_843 = 403, /* Integrated A/V Decoder on the '418 */
V4L2_IDENT_CX23415 = 415,
struct rk29_vaddr vaddr;
#endif
void *priv;
+ unsigned int rk_code; /* ddl@rock-chips.com: this field must be copied to struct v4l2_buffer.reserved */
};
struct videobuf_queue_ops {