本文整理汇总了C语言中soc_camera_to_subdev函数的典型用法代码示例(示例均取自Linux内核的soc_camera驱动,为C代码而非C++)。如果您正苦于以下问题:C语言中soc_camera_to_subdev函数的具体用法?soc_camera_to_subdev怎么用?soc_camera_to_subdev使用的例子?那么恭喜您,这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了soc_camera_to_subdev函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C代码示例。
示例1: mx2_camera_set_crop
static int mx2_camera_set_crop(struct soc_camera_device *icd,
struct v4l2_crop *a)
{
struct v4l2_rect *rect = &a->c;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct v4l2_mbus_framefmt mf;
int ret;
soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
soc_camera_limit_side(&rect->top, &rect->height, 0, 2, 4096);
ret = v4l2_subdev_call(sd, video, s_crop, a);
if (ret < 0)
return ret;
/* The capture device might have changed its output */
ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
if (ret < 0)
return ret;
dev_dbg(icd->parent, "Sensor cropped %dx%d\n",
mf.width, mf.height);
icd->user_width = mf.width;
icd->user_height = mf.height;
return ret;
}
示例2: ak_camera_set_crop
/*
 * Set a crop rectangle on the ak host.  The request is range-checked,
 * handed to the ISP first and, only if the ISP rejects it, forwarded
 * to the sensor subdevice.  Cropping is refused while DMA is running.
 */
static int ak_camera_set_crop(struct soc_camera_device *icd,
		struct v4l2_crop *crop)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	int ret, width, height;

	isp_dbg("entry %s\n", __func__);

	/* Geometry must not change underneath an active capture */
	if (pcdev->dma_running) {
		/* make sure streaming is not started */
		v4l2_err(&ici->v4l2_dev,
			"Cannot change crop when streaming is ON\n");
		return -EBUSY;
	}

	/*
	 * NOTE(review): width is computed as c.width - c.left (and height
	 * as c.height - c.top).  In struct v4l2_rect, c.width/c.height
	 * already ARE the rectangle dimensions, so this reads as if they
	 * were right/bottom coordinates instead -- confirm the intent.
	 */
	width = crop->c.width - crop->c.left;
	height = crop->c.height - crop->c.top;

	/*
	 * Reject negative origins, windows smaller than 6x6 (x3 < 18) and
	 * windows larger than 1280x720.  NOTE(review): the error message
	 * only describes the negative top/left case, not the size limits.
	 */
	if ((crop->c.top < 0 || crop->c.left < 0)
		||(((width * 3) < 18) || (height * 3) < 18)
		||((width > 1280) || (height > 720))) {
		v4l2_err(&ici->v4l2_dev,
			"doesn't support negative values for top & left\n");
		return -EINVAL;
	}

	/* Prefer the ISP; fall back to the sensor only on ISP failure */
	if ((ret = isp_set_crop(&pcdev->isp, crop->c)) < 0)
		ret = v4l2_subdev_call(sd, video, s_crop, crop);

	return ret;
}
示例3: unicam_videobuf_stop_streaming_int
/*
 * Stop UNICAM capture: wait (bounded) for any in-flight frame to
 * complete, shut down and tear down the CSI receiver, reset software
 * capture state, and finally stop the sensor's stream.
 * Returns 0 on success or a negative error code.
 */
static int unicam_videobuf_stop_streaming_int(struct unicam_camera_dev
						*unicam_dev)
{
	struct soc_camera_device *icd = unicam_dev->icd;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	int ret = 0;
	unsigned long flags;
	struct rx_stat_list rx;

	/* grab the lock */
	spin_lock_irqsave(&unicam_dev->lock, flags);
	pr_debug("-enter");
	pr_debug("disabling csi");
	pr_debug("stopping stream");

	/* Nothing to do if capture was never started */
	if (!atomic_read(&unicam_dev->streaming)) {
		pr_err("stream already turned off\n");
		goto out;
	}

	if (unicam_dev->active) {
		/*
		 * A buffer is still being captured: raise the "stopping"
		 * flag and wait up to 500 ms (lock dropped meanwhile) for
		 * the completion path to post stop_sem.
		 */
		atomic_set(&unicam_dev->stopping, 1);
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
		ret = down_timeout(&unicam_dev->stop_sem,
				msecs_to_jiffies(500));
		atomic_set(&unicam_dev->stopping, 0);
		if (ret == -ETIME) {
			pr_err("Unicam: semaphore timed out waiting to STOP\n");
			unicam_reg_dump();
		}
	} else {
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
	}
	usleep_range(50, 60); /*TODO: Need to double-check with ASIC team*/

	/* Re-take the lock to quiesce the receiver and reset state */
	spin_lock_irqsave(&unicam_dev->lock, flags);
	unicam_stop();
	/* Restart rx stat */
	mm_csi0_get_rx_stat(&rx, 1);
	/* Don't bother what values were returned */
	mm_csi0_teardown();

	/* Reset all software capture state */
	unicam_dev->active = NULL;
	atomic_set(&unicam_dev->streaming, 0);
	memset(&unicam_dev->crop, 0x00, sizeof(struct v4l2_crop));
	unicam_dev->cap_done = 0;
	unicam_dev->cap_mode = 0;
out:
	pr_debug("-exit");
	atomic_set(&unicam_dev->cam_triggered, 0);
	spin_unlock_irqrestore(&unicam_dev->lock, flags);

	/* stop sensor streaming after UNICAM is disabled */
	ret = v4l2_subdev_call(sd, video, s_stream, 0);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		pr_err("failed to stop sensor streaming\n");
		ret = -1;
	}
	return ret;
}
示例4: omap1_cam_get_formats
/*
 * Enumerate the sensor's media bus formats and translate them into
 * host formats.  Called repeatedly with increasing @idx until the
 * sensor runs out of formats.  For the listed 8-bit YUV/RGB codes the
 * host can additionally byte-swap the data, so those codes produce two
 * entries: a byte-swapped variant plus the generic pass-through one.
 *
 * Returns the number of formats provided for this @idx (0 ends the
 * enumeration).  When @xlate is NULL only counting is performed.
 */
static int omap1_cam_get_formats(struct soc_camera_device *icd,
		unsigned int idx, struct soc_camera_format_xlate *xlate)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->dev.parent;
	int formats = 0, ret;
	enum v4l2_mbus_pixelcode code;
	const struct soc_mbus_pixelfmt *fmt;

	ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
	if (ret < 0)
		/* No more formats */
		return 0;

	fmt = soc_mbus_get_fmtdesc(code);
	if (!fmt) {
		dev_warn(dev, "%s: unsupported format code #%d: %d\n", __func__,
				idx, code);
		return 0;
	}

	/* Check support for the requested bits-per-sample */
	if (fmt->bits_per_sample != 8)
		return 0;

	switch (code) {
	case V4L2_MBUS_FMT_YUYV8_2X8:
	case V4L2_MBUS_FMT_YVYU8_2X8:
	case V4L2_MBUS_FMT_UYVY8_2X8:
	case V4L2_MBUS_FMT_VYUY8_2X8:
	case V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE:
	case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
	case V4L2_MBUS_FMT_RGB565_2X8_BE:
	case V4L2_MBUS_FMT_RGB565_2X8_LE:
		formats++;
		if (xlate) {
			xlate->host_fmt = soc_mbus_find_fmtdesc(code,
					omap1_cam_formats,
					ARRAY_SIZE(omap1_cam_formats));
			xlate->code = code;
			/*
			 * Log the entry just filled in *before* advancing
			 * the cursor: the original dereferenced
			 * xlate->host_fmt after xlate++, i.e. it read the
			 * next, still-uninitialized slot.
			 */
			dev_dbg(dev,
				"%s: providing format %s as byte swapped code #%d\n",
				__func__, xlate->host_fmt->name, code);
			xlate++;
		}
		/* fall through - also offer the pass-through variant */
	default:
		if (xlate)
			dev_dbg(dev,
				"%s: providing format %s in pass-through mode\n",
				__func__, fmt->name);
	}
	formats++;
	if (xlate) {
		xlate->host_fmt = fmt;
		xlate->code = code;
		xlate++;
	}

	return formats;
}
示例5: mx1_camera_set_crop
/* mx1 has no cropping hardware of its own - delegate fully to the sensor */
static int mx1_camera_set_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *a)
{
	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, s_crop, a);
}
示例6: ak_camera_remove_device
/**
 * @brief: Called when /dev/videoX is closed; shuts down the ISP and
 * the sensor device and releases their clocks.
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *icd: soc_camera_device information structure,
 * akcamera depends on the soc driver.
 */
static void ak_camera_remove_device(struct soc_camera_device *icd)
{
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);

	CAMDBG("entry %s\n", __func__);

	/* Only the device attached to this host may be removed */
	BUG_ON(icd != pcdev->icd);

	/* Put the sensor back into its power-on state */
	v4l2_subdev_call(sd, core, reset, 0);

	/* Quiesce the ISP before its clock is switched off below */
	isp_clear_irq(&pcdev->isp);
	isp_stop_capturing(&pcdev->isp);

	/* disable sensor clk */
	clk_disable(pcdev->cis_sclk);

	/* disable the clock of isp module */
	clk_disable(pcdev->clk);

	//ak_soft_reset(AK_SRESET_CAMERA);

	dev_info(icd->parent, "AK Camera driver detached from camera %d\n",
		icd->devnum);

	/* Drop the references so a new device can attach */
	pcdev->active = NULL;
	pcdev->icd = NULL;

	CAMDBG("Leave %s\n", __func__);
}
示例7: mx1_camera_try_fmt
static int mx1_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_format *f)
{
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_mbus_framefmt mf;
int ret;
/* TODO: limit to mx1 hardware capabilities */
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
if (!xlate) {
dev_warn(icd->parent, "Format %x not found\n",
pix->pixelformat);
return -EINVAL;
}
mf.width = pix->width;
mf.height = pix->height;
mf.field = pix->field;
mf.colorspace = pix->colorspace;
mf.code = xlate->code;
/* limit to sensor capabilities */
ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
if (ret < 0)
return ret;
pix->width = mf.width;
pix->height = mf.height;
pix->field = mf.field;
pix->colorspace = mf.colorspace;
return 0;
}
示例8: ak_camera_get_formats
/**
 * @brief: getting image format information
 *
 * Enumerates the sensor's media bus format at index @idx and maps it
 * to a host format.  The host format reported is always the
 * YUYV8_2X8 descriptor, independent of the sensor code (see the note
 * below about the ISP output).  Also decides the ISP's default
 * working mode as a side effect when @xlate is non-NULL.
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *icd: soc_camera_device information structure,
 * akcamera depends on the soc driver.
 * @param [in] idx: format index to enumerate.
 * @param [out] *xlate: translation entry to fill, or NULL to only count.
 * @return number of formats provided for this index (0 = no more).
 */
static int ak_camera_get_formats(struct soc_camera_device *icd, unsigned int idx,
		struct soc_camera_format_xlate *xlate)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->parent;
	struct soc_camera_host *ici = to_soc_camera_host(dev);
	struct ak_camera_dev *pcdev = ici->priv;
	int ret, formats = 0;
	enum v4l2_mbus_pixelcode code;
	const struct soc_mbus_pixelfmt *fmt;

	CAMDBG("entry %s\n", __func__);

	ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
	if (ret < 0)
		/* No more formats */
		return 0;

	/*
	 * @Note: ISP only support yuv420 output and jpeg out.
	 * FIXME1: We miss jpeg here.
	 * FIXME2: the output squence of YUV is actually UYVY.
	 */
	fmt = soc_mbus_get_fmtdesc(V4L2_MBUS_FMT_YUYV8_2X8);
	if (!fmt) {
		dev_warn(dev, "unsupported format code #%u: %d\n", idx, code);
		return 0;
	}

	CAMDBG("get format %s code=%d from sensor\n", fmt->name, code);

	/* Generic pass-through */
	formats++;
	if (xlate) {
		xlate->host_fmt = fmt;
		xlate->code = code;
		xlate++;

		/*
		 * @decide the default working mode of isp
		 * @prefer RGB mode
		 *
		 * NOTE(review): codes below V4L2_MBUS_FMT_Y8_1X8 are
		 * presumably the RGB bus codes in this kernel's mbus
		 * numbering -- confirm before relying on this boundary.
		 */
		if (code < V4L2_MBUS_FMT_Y8_1X8) {
			pcdev->def_mode = ISP_RGB_VIDEO_OUT;
			//pcdev->def_mode = ISP_RGB_OUT;
		}
		/* Anything not RGB-capable falls back to YUV bypass */
		if ((pcdev->def_mode != ISP_RGB_VIDEO_OUT)
			&& (pcdev->def_mode != ISP_RGB_OUT)) {
			pcdev->def_mode = ISP_YUV_VIDEO_BYPASS;
			//pcdev->def_mode = ISP_YUV_BYPASS;
		}
		pcdev->isp.cur_mode = pcdev->def_mode;
		update_cur_mode_class(&pcdev->isp);

		dev_dbg(dev, "Providing format %s in pass-through mode\n",
			fmt->name);
	}

	return formats;
}
示例9: unicam_camera_set_fmt
/*
 * Program the capture format: resolve the fourcc to a media bus code,
 * push the format into the sensor, query how many initial frames to
 * skip, and report the geometry the sensor actually accepted in @f.
 */
static int unicam_camera_set_fmt(struct soc_camera_device *icd,
		struct v4l2_format *f)
{
	struct device *dev = icd->dev.parent;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;
	const struct soc_camera_format_xlate *xlate = NULL;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	int ret;
	u32 skip_frames = 0;

	dprintk("-enter");

	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
	if (!xlate) {
		dev_warn(dev, "Format %x not found\n", pix->pixelformat);
		return -EINVAL;
	}

	mf.width = pix->width;
	mf.height = pix->height;
	mf.field = pix->field;
	mf.colorspace = pix->colorspace;
	mf.code = xlate->code;

	ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
	/*
	 * Check the call result before inspecting its output: the
	 * original tested mf.code first, which could mask the real
	 * error code when the subdev failed after modifying mf.
	 */
	if (ret < 0) {
		dev_warn(dev, "Failed to configure for format %x\n",
			pix->pixelformat);
		return ret;
	}
	/* The sensor must keep the exact bus code we requested */
	if (mf.code != xlate->code)
		return -EINVAL;

	/*TODO limit here any maximum size */

	/* Ask how many corrupt startup frames to drop; op is optional */
	ret = v4l2_subdev_call(sd, sensor, g_skip_frames, &skip_frames);
	if (ret < 0) {
		dev_warn(dev,
			"sensor driver doesn't implement g_skip_frames operation\n");
		dev_warn(dev, "assuming zero skip frames\n");
		skip_frames = 0;
		ret = 0;
	}
	unicam_dev->skip_frames = skip_frames;

	/* Propagate the geometry the sensor actually selected */
	pix->width = mf.width;
	pix->height = mf.height;
	pix->field = mf.field;
	pix->colorspace = mf.colorspace;
	icd->current_fmt = xlate;

	iprintk("format set to %c%c%c%c res=%dx%d success=%d",
		pixfmtstr(pix->pixelformat), pix->width, pix->height, ret);
	dprintk("-exit");
	return ret;
}
示例10: unicam_camera_set_crop
/*
 * Record the requested crop rectangle for later use by the capture
 * path.  The hardware is NOT reprogrammed here -- the windowing code
 * that would do so is compiled out below.
 */
static int unicam_camera_set_crop(struct soc_camera_device *icd,
		struct v4l2_crop *crop)
{
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;

	if(crop == NULL)
		return -EINVAL;

	unicam_dev->crop = *crop;
	return 0;
	/*
	 * NOTE(review): the disabled block below would stop the stream,
	 * program the CSI windowing registers and restart capture.  As
	 * written it references 'flags' and 'idesc' without declaring
	 * them, so it will not compile if simply re-enabled.
	 */
#if 0
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);

	pr_info("Configuring crop to %d %d\n", crop->c.width, crop->c.height);
	pr_info("Configuring top left to %d %d\n", crop->c.top, crop->c.left);

	v4l2_subdev_call(sd, video, s_stream, 0);
	spin_lock_irqsave(&unicam_dev->lock, flags);
	unicam_dev->crop = *crop;
	if(unicam_dev->streaming){
		pr_info("Stopping stream\n");
		unicam_stop();
	}
	/* Configure new crop parameters */
	mm_csi0_set_windowing_vertical(unicam_dev->crop.c.top,
		(unicam_dev->crop.c.top + unicam_dev->crop.c.height));
	mm_csi0_cfg_pipeline_unpack(PIX_UNPACK_NONE);
	mm_csi0_cfg_pipeline_dpcm_dec(DPCM_DEC_NONE);
	mm_csi0_set_windowing_horizontal(unicam_dev->crop.c.left,
		(unicam_dev->crop.c.left + unicam_dev->crop.c.width));
	mm_csi0_cfg_pipeline_dpcm_enc(DPCM_ENC_NONE);
	mm_csi0_cfg_pipeline_pack(PIX_PACK_NONE);
	mm_csi0_start_rx();
	/* Re-configure buffer parameters */
	unicam_camera_update_buf(unicam_dev);
	/* set data capture */
	if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) {
		idesc.fsi = 1;
		idesc.fei = 1;
		idesc.lci = 0;
		idesc.die = 1;
		idesc.dataline = 2;
		mm_csi0_config_int(&idesc, IMAGE_BUFFER);
		mm_csi0_config_int(&idesc, DATA_BUFFER);
		unicam_camera_capture(unicam_dev);
	} else {
		idesc.fsi = 0;
		idesc.fei = 0;
		idesc.lci = unicam_dev->icd->user_height;
		idesc.die = 0;
		idesc.dataline = 0;
		mm_csi0_config_int(&idesc, IMAGE_BUFFER);
	}
	spin_unlock_irqrestore(&unicam_dev->lock, flags);
	v4l2_subdev_call(sd, video, s_stream, 1);
	return 0;
#endif
}
示例11: mx3_camera_try_fmt
/*
 * Validate and adjust a requested capture format against both the MX3
 * hardware limits (max 4096x4096) and the sensor's capabilities.
 */
static int mx3_camera_try_fmt(struct soc_camera_device *icd,
		struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	__u32 pixfmt = pix->pixelformat;
	int bytes_per_line;
	int ret;

	xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
	if (!xlate) {
		/*
		 * The original only bailed out when pixfmt was non-zero
		 * ("if (pixfmt && !xlate)") and then dereferenced a NULL
		 * xlate below for pixfmt == 0.  Treat any failed lookup
		 * as an error.
		 */
		dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt);
		return -EINVAL;
	}

	/* limit to MX3 hardware capabilities */
	if (pix->height > 4096)
		pix->height = 4096;
	if (pix->width > 4096)
		pix->width = 4096;

	/*
	 * Keep the result in a signed local: pix->bytesperline is __u32,
	 * so the original "pix->bytesperline < 0" test could never fire
	 * and negative error codes were silently used as line lengths.
	 */
	bytes_per_line = soc_mbus_bytes_per_line(pix->width,
						xlate->host_fmt);
	if (bytes_per_line < 0)
		return bytes_per_line;
	pix->bytesperline = bytes_per_line;
	pix->sizeimage = pix->height * pix->bytesperline;

	/* limit to sensor capabilities */
	mf.width = pix->width;
	mf.height = pix->height;
	mf.field = pix->field;
	mf.colorspace = pix->colorspace;
	mf.code = xlate->code;

	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	pix->width = mf.width;
	pix->height = mf.height;
	pix->colorspace = mf.colorspace;

	switch (mf.field) {
	case V4L2_FIELD_ANY:
		pix->field = V4L2_FIELD_NONE;
		break;
	case V4L2_FIELD_NONE:
		break;
	default:
		dev_err(icd->dev.parent, "Field type %d unsupported.\n",
			mf.field);
		ret = -EINVAL;
	}

	return ret;
}
示例12: gc0329_suspend
/*
 * PM suspend hook for the gc0329 sensor.  Nothing needs to be saved,
 * so this is a no-op that reports success.
 *
 * The original body resolved the subdev and its gc0329_priv via
 * container_of() but never used either, leaving set-but-unused locals
 * (compiler warnings); the dead lookups are dropped.  Behaviour
 * (always return 0) is unchanged.
 */
static int gc0329_suspend(struct soc_camera_device *icd, pm_message_t state)
{
	return 0;
}
示例13: ak_camera_get_crop
/* Read the current crop rectangle straight from the sensor subdevice */
static int ak_camera_get_crop(struct soc_camera_device *icd,
			      struct v4l2_crop *crop)
{
	isp_dbg("entry %s\n", __func__);

	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, g_crop, crop);
}
示例14: ak_camera_cropcap
/* Query cropping capabilities; only video-capture buffers are handled */
static int ak_camera_cropcap(struct soc_camera_device *icd,
			     struct v4l2_cropcap *crop)
{
	struct v4l2_subdev *subdev = soc_camera_to_subdev(icd);

	isp_dbg("enter %s\n", __func__);

	if (crop->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return -EINVAL;

	/* The ISP can crop as well; for now the query goes to the sensor */
	return v4l2_subdev_call(subdev, video, cropcap, crop);
}
示例15: omap1_cam_set_crop
static int omap1_cam_set_crop(struct soc_camera_device *icd,
const struct v4l2_crop *crop)
{
const struct v4l2_rect *rect = &crop->c;
const struct soc_camera_format_xlate *xlate = icd->current_fmt;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->parent;
struct soc_camera_host *ici = to_soc_camera_host(dev);
struct omap1_cam_dev *pcdev = ici->priv;
struct v4l2_subdev_format fmt = {
.which = V4L2_SUBDEV_FORMAT_ACTIVE,
};
struct v4l2_mbus_framefmt *mf = &fmt.format;
int ret;
ret = subdev_call_with_sense(pcdev, dev, icd, sd, video, s_crop, crop);
if (ret < 0) {
dev_warn(dev, "%s: failed to crop to %ux%[email protected]%u:%u\n", __func__,
rect->width, rect->height, rect->left, rect->top);
return ret;
}
ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0) {
dev_warn(dev, "%s: failed to fetch current format\n", __func__);
return ret;
}
ret = dma_align(&mf->width, &mf->height, xlate->host_fmt, pcdev->vb_mode,
false);
if (ret < 0) {
dev_err(dev, "%s: failed to align %ux%u %s with DMA\n",
__func__, mf->width, mf->height,
xlate->host_fmt->name);
return ret;
}
if (!ret) {
/* sensor returned geometry not DMA aligned, trying to fix */
ret = set_format(pcdev, dev, icd, sd, &fmt, xlate);
if (ret < 0) {
dev_err(dev, "%s: failed to set format\n", __func__);
return ret;
}
}
icd->user_width = mf->width;
icd->user_height = mf->height;
return 0;
}