Files
luban-lite-t3e-pro/doc/topics/sdk/dvp/dvp_demo.html
2025-01-23 16:37:00 +08:00

914 lines
31 KiB
HTML

<!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml" xml:lang="zh-cn" lang="zh-cn" data-whc_version="26.0">
<head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/><meta name="viewport" content="width=device-width, initial-scale=1.0"/><meta http-equiv="X-UA-Compatible" content="IE=edge"/><meta name="description" content="APP 层的处理流程 APP 中实现从 Sensor -&gt; DVP -&gt; DE 的数据通路,整体的处理流程如下图(图中按照访问对象分为三列,实际上整体是串行执行): 图 1 . APP 中的处理流程 test-dvp 支持了 先旋转再显示 ,上图中为了简化并未展示 GE 的处理。在图中将当前 Buf 传给 Video Layer 环节,如果打开了旋转功能(参数 -a ),会先将 Buf 送给 GE ..."/><meta name="DC.rights.owner" content="(C) 版权 2025"/><meta name="copyright" content="(C) 版权 2025"/><meta name="generator" content="DITA-OT"/><meta name="DC.type" content="concept"/><meta name="DC.contributor" content="yan.wang"/><meta name="DC.date.modified" content="2024-12-13"/><meta name="DC.format" content="HTML5"/><meta name="DC.identifier" content="dvp-demo"/><title>APP Demo</title><!-- Build number 2023110923. --><meta name="wh-path2root" content="../../../"/><meta name="wh-toc-id" content=""/><meta name="wh-source-relpath" content="topics/sdk/dvp/dvp_demo.dita"/><meta name="wh-out-relpath" content="topics/sdk/dvp/dvp_demo.html"/>
<link rel="stylesheet" type="text/css" href="../../../webhelp/app/commons.css?buildId=2023110923"/>
<link rel="stylesheet" type="text/css" href="../../../webhelp/app/topic.css?buildId=2023110923"/>
<script src="../../../webhelp/app/options/properties.js?buildId=20250121171154"></script>
<script src="../../../webhelp/app/localization/strings.js?buildId=2023110923"></script>
<script src="../../../webhelp/app/search/index/keywords.js?buildId=20250121171154"></script>
<script defer="defer" src="../../../webhelp/app/commons.js?buildId=2023110923"></script>
<script defer="defer" src="../../../webhelp/app/topic.js?buildId=2023110923"></script>
<link rel="stylesheet" type="text/css" href="../../../webhelp/template/aic-styles-web.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/notes.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/aic-common.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/aic-images.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/footnote.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/aic-web-watermark.css?buildId=2023110923"/><link rel="stylesheet" type="text/css" href="../../../webhelp/template/topic-body-list.css?buildId=2023110923"/></head>
<body id="dvp-demo" class="wh_topic_page frmBody">
<a href="#wh_topic_body" class="sr-only sr-only-focusable">
跳转到主要内容
</a>
<header class="navbar navbar-default wh_header">
<div class="container-fluid">
<div class="wh_header_flex_container navbar-nav navbar-expand-md navbar-dark">
<div class="wh_logo_and_publication_title_container">
<div class="wh_logo_and_publication_title">
<a href="http://www.artinchip.com" class=" wh_logo d-none d-sm-block "><img src="../../../company-logo-white.png" alt="RTOS SDK 使用指南 — SDK 指南文件"/></a>
<div class=" wh_publication_title "><a href="../../../index.html"><span class="booktitle"><span class="ph mainbooktitle">RTOS SDK 使用指南</span><span class="ph booktitlealt">SDK 指南文件</span></span></a></div>
</div>
</div>
<div class="wh_top_menu_and_indexterms_link collapse navbar-collapse" id="wh_top_menu_and_indexterms_link">
</div>
</div>
</div>
</header>
<div class=" wh_search_input navbar-form wh_topic_page_search search " role="form">
<form id="searchForm" method="get" role="search" action="../../../search.html"><div><input type="search" placeholder="搜索 " class="wh_search_textfield" id="textToSearch" name="searchQuery" aria-label="搜索查询" required="required"/><button type="submit" class="wh_search_button" aria-label="搜索"><span class="search_input_text">搜索</span></button></div></form>
</div>
<div class="container-fluid" id="wh_topic_container">
<div class="row">
<nav class="wh_tools d-print-none navbar-expand-md" aria-label="Tools">
<div data-tooltip-position="bottom" class=" wh_breadcrumb "></div>
<div class="wh_right_tools">
<button class="wh_hide_highlight" aria-label="切换搜索突出显示" title="切换搜索突出显示"></button>
<button class="webhelp_expand_collapse_sections" data-next-state="collapsed" aria-label="折叠章节" title="折叠章节"></button>
<div class=" wh_print_link print d-none d-md-inline-block "><button onClick="window.print()" title="打印此页" aria-label="打印此页"></button></div>
</div>
</nav>
</div>
<div class="wh_content_area">
<div class="row">
<div class="col-lg-10 col-md-10 col-sm-10 col-xs-12" id="wh_topic_body">
<button id="wh_close_topic_toc_button" class="close-toc-button d-none" aria-label="Toggle topic table of content" aria-controls="wh_topic_toc" aria-expanded="true">
<span class="close-toc-icon-container">
<span class="close-toc-icon"></span>
</span>
</button>
<div class=" wh_topic_content body "><main role="main"><article class="- topic/topic concept/concept topic concept" role="article" aria-labelledby="ariaid-title1"><span class="edit-link" style="font-size:12px; opacity:0.6; text-align:right; vertical-align:middle"><a target="_blank" href="http://172.16.35.88/tasks/jdssno1uvvbf2mltu9kb9v3if05d5gopuakboe8hlud18rma/edit/F:/aicdita/aicdita-cn/topics/sdk/dvp/dvp_demo.dita">Edit online</a></span><h1 class="- topic/title title topictitle1" id="ariaid-title1">APP Demo</h1><div class="date inPage">13 Dec 2024</div><div style="color: gray;">
Read time: 21 minute(s)
</div><div class="- topic/body concept/conbody body conbody"><section class="- topic/section section" id="dvp-demo__section_gvw_npn_rdc" data-ofbid="dvp-demo__section_gvw_npn_rdc"><h2 class="- topic/title title sectiontitle">APP 层的处理流程</h2>
<p class="- topic/p p" data-ofbid="d29923e27__20250121171507">APP 中实现从 Sensor -&gt; DVP -&gt; DE 的数据通路,整体的处理流程如下图(图中按照访问对象分为三列,实际上整体是串行执行):</p>
<figure class="- topic/fig fig fignone" id="dvp-demo__fig_isj_ppn_rdc" data-ofbid="dvp-demo__fig_isj_ppn_rdc"><br/><div class="imagecenter"><img class="- topic/image image imagecenter" id="dvp-demo__image_hvw_npn_rdc" src="../../../images/dvp/demo_flow.png" width="480" alt="../../../../_images/demo_flow.png"/></div><br/><figcaption data-caption-side="bottom" class="- topic/title title figcapcenter"><span class="figtitleprefix fig--title-label"><span class="fig--title-label-number"> 1</span><span class="fig--title-label-punctuation">. </span></span><span class="fig--title">APP 中的处理流程</span></figcaption></figure>
<p class="- topic/p p" data-ofbid="d29923e37__20250121171507">test-dvp 支持了 <strong class="+ topic/ph hi-d/b ph b">先旋转再显示</strong>,上图中为了简化并未展示 GE 的处理。在图中将当前 Buf 传给 Video Layer
环节,如果打开了旋转功能(参数 <code class="+ topic/ph pr-d/codeph ph codeph">-a</code> ),会先将 Buf 送给 GE 旋转,然后才给 DE 去显示。</p>
</section><section class="- topic/section section" id="dvp-demo__section_ivw_npn_rdc" data-ofbid="dvp-demo__section_ivw_npn_rdc"><h2 class="- topic/title title sectiontitle">APP Demo 参考实现</h2>
<div class="- topic/p p" data-ofbid="d29923e51__20250121171507">Demo 代码见
<span class="+ topic/ph sw-d/filepath ph filepath">test-dvp/test_dvp.c</span>,如下:<pre class="+ topic/pre pr-d/codeblock pre codeblock" id="dvp-demo__codeblock_b1s_5pn_rdc" data-ofbid="dvp-demo__codeblock_b1s_5pn_rdc"><code>#define VID_BUF_NUM 3
#define DVP_PLANE_NUM 2
#define CMA_BUF_MAX (8 * 1024 * 1024)
#define DMA_HEAP_DEV "/dev/dma_heap/reserved"
#define FB_DEV "/dev/fb0"
#define VIDEO_DEV "/dev/video0"
#define SENSOR_DEV "/dev/v4l-subdev0"
#define DVP_SUBDEV_DEV "/dev/v4l-subdev1"
static const char sopts[] = "f:c:w:h:r:a:uv";
static const struct option lopts[] = {
{"format", required_argument, NULL, 'f'},
{"capture", required_argument, NULL, 'c'},
{"width", required_argument, NULL, 'w'},
{"height", required_argument, NULL, 'h'},
{"framerate", required_argument, NULL, 'r'},
{"angle", required_argument, NULL, 'a'},
{"usage", no_argument, NULL, 'u'},
{"verbose", required_argument, NULL, 'v'},
{0, 0, 0, 0}
};
struct video_plane {
int fd;
int buf;
int len;
};
struct video_buf_info {
char *vaddr;
u32 len;
u32 offset;
struct video_plane planes[DVP_PLANE_NUM];
};
struct aic_video_data {
int w;
int h;
int frame_size;
int fmt; // output format
struct v4l2_subdev_format src_fmt;
struct video_buf_info binfo[VID_BUF_NUM + 1];
};
struct aic_media_dev {
/* about Sensor */
int sensor_fd;
int sensor_width;
int sensor_height;
int sensor_fr;
/* about DVP */
int dvp_fd;
int video_fd;
/* about GE */
struct mpp_ge *ge_dev;
int rotation;
/* about DE */
int fb_fd;
int fb_xres;
int fb_yres;
};
static struct aic_media_dev g_mdev = {0};
static struct aic_video_data g_vdata = {0};
static int g_verbose = 0;
/* Functions */
void usage(char *program)
{
printf("Usage: %s [options]: \n", program);
printf("\t -f, --format\t\tformat of input video, NV16/NV12 etc\n");
printf("\t -c, --count\t\tthe number of capture frame \n");
printf("\t -w, --width\t\tthe width of sensor \n");
printf("\t -h, --height\t\tthe height of sensor \n");
printf("\t -r, --framerate\tthe framerate of sensor \n");
#ifdef SUPPORT_ROTATION
printf("\t -a, --angle\t\tthe angle of rotation \n");
#endif
printf("\t -u, --usage \n");
printf("\t -v, --verbose \n");
printf("\n");
printf("Example: %s -f nv16 -c 1\n", program);
}
/* Open a device file to be needed. */
int device_open(char *_fname, int _flag)
{
s32 fd = -1;
fd = open(_fname, _flag);
if (fd &lt; 0) {
ERR("Failed to open %s errno: %d[%s]\n",
_fname, errno, strerror(errno));
exit(0);
}
return fd;
}
int get_fb_size(void)
{
struct fb_var_screeninfo var;
if (ioctl(g_mdev.fb_fd, FBIOGET_VSCREENINFO, &amp;var) &lt; 0) {
ERR("ioctl FBIOGET_VSCREENINFO");
close(g_mdev.fb_fd);
return -1;
}
g_mdev.fb_xres = var.xres;
g_mdev.fb_yres = var.yres;
printf("Framebuf size: width %d, height %d\n", g_mdev.fb_xres, g_mdev.fb_yres);
return 0;
}
int set_ui_layer_alpha(int val)
{
int ret = 0;
struct aicfb_alpha_config alpha = {0};
alpha.layer_id = 1;
alpha.enable = 1;
alpha.mode = 1;
alpha.value = val;
ret = ioctl(g_mdev.fb_fd, AICFB_UPDATE_ALPHA_CONFIG, &amp;alpha);
if (ret &lt; 0)
ERR("ioctl() failed! errno: %d[%s]\n", errno, strerror(errno));
return ret;
}
void vidbuf_dmabuf_begin(struct aic_video_data *vdata, u32 num)
{
int i, j;
struct dma_buf_info fds = {0};
for (i = 0; i &lt; num; i++) {
struct video_plane *plane = vdata-&gt;binfo[i].planes;
for (j = 0; j &lt; DVP_PLANE_NUM; j++, plane++) {
fds.fd = plane-&gt;fd;
if (ioctl(g_mdev.fb_fd, AICFB_ADD_DMABUF, &amp;fds) &lt; 0)
ERR("Failed to add DMABUF for %d! %d[%s]\n",
plane-&gt;fd, errno, strerror(errno));
}
}
}
void vidbuf_dmabuf_end(struct aic_video_data *vdata, u32 num)
{
int i, j;
struct dma_buf_info fds = {0};
for (i = 0; i &lt; num; i++) {
struct video_plane *plane = vdata-&gt;binfo[i].planes;
for (j = 0; j &lt; DVP_PLANE_NUM; j++, plane++) {
fds.fd = plane-&gt;fd;
if (ioctl(g_mdev.fb_fd, AICFB_RM_DMABUF, &amp;fds) &lt; 0)
ERR("Failed to rm DMABUF for %d! err %d[%s]\n",
plane-&gt;fd, errno, strerror(errno));
}
}
}
int sensor_set_fmt(void)
{
int ret = 0;
struct v4l2_subdev_format f = {0};
struct v4l2_subdev_frame_interval fr = {0};
/* Set resolution */
f.pad = 0;
f.which = V4L2_SUBDEV_FORMAT_ACTIVE;
if (ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_G_FMT, &amp;f) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
if (f.format.width != g_mdev.sensor_width ||
f.format.height != g_mdev.sensor_height) {
printf("Set sensor resolution: %dx%d -&gt; %dx%d\n",
f.format.width, f.format.height,
g_mdev.sensor_width, g_mdev.sensor_height);
f.format.width = g_mdev.sensor_width;
f.format.height = g_mdev.sensor_height;
if (ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_S_FMT, &amp;f) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
}
/* Confirm the current resolution */
if (ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_G_FMT, &amp;f) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
/* Set framerate */
ret = ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_G_FRAME_INTERVAL, &amp;fr);
if ((ret == 0) &amp;&amp; (fr.interval.denominator != g_mdev.sensor_fr)) {
printf("Set sensor framerate: %d -&gt; %d\n",
fr.interval.denominator, g_mdev.sensor_fr);
fr.interval.denominator = g_mdev.sensor_fr;
ret = ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_S_FRAME_INTERVAL, &amp;fr);
if (ret &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
/* Confirm the current framerate */
ioctl(g_mdev.sensor_fd, VIDIOC_SUBDEV_G_FRAME_INTERVAL, &amp;fr);
}
g_vdata.src_fmt = f;
g_vdata.w = g_vdata.src_fmt.format.width;
g_vdata.h = g_vdata.src_fmt.format.height;
printf("Sensor format: w %d h %d, code %#x, colorspace %#x, fr %d\n",
f.format.width, f.format.height, f.format.code,
f.format.colorspace, fr.interval.denominator);
return 0;
}
int dvp_subdev_set_fmt(void)
{
struct v4l2_subdev_format f = g_vdata.src_fmt;
f.pad = 0;
f.which = V4L2_SUBDEV_FORMAT_ACTIVE;
if (ioctl(g_mdev.dvp_fd, VIDIOC_SUBDEV_S_FMT, &amp;f) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
int dvp_cfg(int width, int height, int format)
{
struct v4l2_format f = {0};
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
f.fmt.pix_mp.width = g_vdata.src_fmt.format.width;
f.fmt.pix_mp.height = g_vdata.src_fmt.format.height;
f.fmt.pix_mp.pixelformat = g_vdata.fmt;
f.fmt.pix_mp.num_planes = DVP_PLANE_NUM;
if (ioctl(g_mdev.video_fd, VIDIOC_S_FMT, &amp;f) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
int dvp_expbuf(int index)
{
int i;
struct video_buf_info *binfo = &amp;g_vdata.binfo[index];
struct v4l2_exportbuffer expbuf = {0};
for (i = 0; i &lt; DVP_PLANE_NUM; i++) {
memset(&amp;expbuf, 0, sizeof(struct v4l2_exportbuffer));
expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
expbuf.index = index;
expbuf.plane = i;
if (ioctl(g_mdev.video_fd, VIDIOC_EXPBUF, &amp;expbuf) &lt; 0) {
ERR("%d/%d: ioctl() failed! err %d[%s]\n",
i, index, errno, strerror(errno));
return -1;
}
binfo-&gt;planes[i].fd = expbuf.fd;
if (g_verbose)
DBG("%d-%d Export DMABUF fd %d\n", index, i, expbuf.fd);
}
return 0;
}
int dvp_request_buf(int num)
{
int i;
struct v4l2_buffer buf = {0};
struct v4l2_requestbuffers req = {0};
struct v4l2_plane planes[DVP_PLANE_NUM];
req.count = num;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
req.memory = V4L2_MEMORY_MMAP; // Only MMAP will do alloc memory
if (ioctl(g_mdev.video_fd, VIDIOC_REQBUFS, &amp;req) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
for (i = 0; i &lt; num; i++) {
if (dvp_expbuf(i) &lt; 0)
return -1;
memset(&amp;buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.index = i;
buf.length = DVP_PLANE_NUM;
buf.memory = V4L2_MEMORY_DMABUF;
buf.m.planes = planes;
if (ioctl(g_mdev.video_fd, VIDIOC_QUERYBUF, &amp;buf) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n",
errno, strerror(errno));
return -1;
}
}
return 0;
}
void dvp_release_buf(int num)
{
int i;
struct video_buf_info *binfo = NULL;
for (i = 0; i &lt; num; i++) {
binfo = &amp;g_vdata.binfo[i];
if (binfo-&gt;vaddr) {
munmap(binfo-&gt;vaddr, binfo-&gt;len);
binfo-&gt;vaddr = NULL;
}
}
}
int dvp_queue_buf(int index)
{
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[DVP_PLANE_NUM] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = index;
buf.length = DVP_PLANE_NUM;
buf.m.planes = planes;
if (ioctl(g_mdev.video_fd, VIDIOC_QBUF, &amp;buf) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
int dvp_dequeue_buf(int *index)
{
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[DVP_PLANE_NUM] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_MMAP;
buf.length = DVP_PLANE_NUM;
buf.m.planes = planes;
if (ioctl(g_mdev.video_fd, VIDIOC_DQBUF, &amp;buf) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
*index = buf.index;
return 0;
}
int dvp_start(void)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
if (ioctl(g_mdev.video_fd, VIDIOC_STREAMON, &amp;type) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
int dvp_stop(void)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
if (ioctl(g_mdev.video_fd, VIDIOC_STREAMOFF, &amp;type) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
#ifdef SUPPORT_ROTATION
int do_rotate(struct aic_video_data *vdata, int index)
{
struct ge_bitblt blt = {0};
struct mpp_buf *src = &amp;blt.src_buf;
struct mpp_buf *dst = &amp;blt.dst_buf;
int ret = 0;
if (g_vdata.fmt == V4L2_PIX_FMT_NV16) {
src-&gt;format = MPP_FMT_NV16;
dst-&gt;format = MPP_FMT_NV16;
} else {
src-&gt;format = MPP_FMT_NV12;
dst-&gt;format = MPP_FMT_NV12;
}
src-&gt;buf_type = MPP_DMA_BUF_FD;
src-&gt;fd[0] = vdata-&gt;binfo[index].planes[0].fd;
src-&gt;fd[1] = vdata-&gt;binfo[index].planes[1].fd;
src-&gt;stride[0] = vdata-&gt;w;
src-&gt;stride[1] = vdata-&gt;w;
src-&gt;size.width = vdata-&gt;w;
src-&gt;size.height = vdata-&gt;h;
dst-&gt;buf_type = MPP_DMA_BUF_FD;
dst-&gt;fd[0] = vdata-&gt;binfo[VID_BUF_NUM].planes[0].fd;
dst-&gt;fd[1] = vdata-&gt;binfo[VID_BUF_NUM].planes[1].fd;
if (g_mdev.rotation == MPP_ROTATION_180) {
dst-&gt;stride[0] = vdata-&gt;w;
dst-&gt;stride[1] = vdata-&gt;w;
dst-&gt;size.width = vdata-&gt;w;
dst-&gt;size.height = vdata-&gt;h;
} else {
dst-&gt;stride[0] = vdata-&gt;h;
dst-&gt;stride[1] = vdata-&gt;h;
dst-&gt;size.width = vdata-&gt;h;
dst-&gt;size.height = vdata-&gt;w;
}
blt.ctrl.flags = g_mdev.rotation;
#if 0
printf("GE: %d(%d) * %d -&gt; %d * %d, canvas %d(%d) * %d\n",
src-&gt;size.width, src-&gt;stride[0],
src-&gt;size.height,
dst-&gt;crop.width, dst-&gt;crop.height,
dst-&gt;size.width, dst-&gt;stride[0],
dst-&gt;size.height);
#endif
ret = mpp_ge_bitblt(g_mdev.ge_dev, &amp;blt);
if (ret &lt; 0) {
ERR("GE bitblt failed\n");
return -1;
}
ret = mpp_ge_emit(g_mdev.ge_dev);
if (ret &lt; 0) {
ERR("GE emit failed\n");
return -1;
}
ret = mpp_ge_sync(g_mdev.ge_dev);
if (ret &lt; 0) {
ERR("GE sync failed\n");
return -1;
}
return 0;
}
#endif
#define DVP_SCALE_OFFSET 10
int video_layer_set(struct aic_video_data *vdata, int index)
{
struct aicfb_layer_data layer = {0};
struct video_buf_info *binfo = &amp;vdata-&gt;binfo[index];
u32 video_w, video_h, fb_xres, fb_yres;
layer.layer_id = 0;
layer.enable = 1;
if (g_mdev.rotation == MPP_ROTATION_90
|| g_mdev.rotation == MPP_ROTATION_270) {
video_w = vdata-&gt;h;
video_h = vdata-&gt;w;
fb_xres = g_mdev.fb_yres;
fb_yres = g_mdev.fb_xres;
} else {
video_w = vdata-&gt;w;
video_h = vdata-&gt;h;
fb_xres = g_mdev.fb_xres;
fb_yres = g_mdev.fb_yres;
}
if ((video_w &lt; fb_xres - 2 * DVP_SCALE_OFFSET) &amp;&amp;
(video_h &lt; fb_yres - 2 * DVP_SCALE_OFFSET)) {
layer.scale_size.width = video_w;
layer.scale_size.height = video_h;
} else {
layer.scale_size.width = fb_xres - 2 * DVP_SCALE_OFFSET;
layer.scale_size.height = fb_yres - 2 * DVP_SCALE_OFFSET;
}
layer.pos.x = DVP_SCALE_OFFSET;
layer.pos.y = DVP_SCALE_OFFSET;
layer.buf.size.width = video_w;
layer.buf.size.height = video_h;
if (g_vdata.fmt == V4L2_PIX_FMT_NV16)
layer.buf.format = MPP_FMT_NV16;
else
layer.buf.format = MPP_FMT_NV12;
layer.buf.buf_type = MPP_DMA_BUF_FD;
layer.buf.fd[0] = binfo-&gt;planes[0].fd;
layer.buf.fd[1] = binfo-&gt;planes[1].fd;
layer.buf.stride[0] = video_w;
layer.buf.stride[1] = video_w;
if (ioctl(g_mdev.fb_fd, AICFB_UPDATE_LAYER_CONFIG, &amp;layer) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
int video_layer_disable(void)
{
struct aicfb_layer_data layer = {0};
layer.layer_id = 0;
if (ioctl(g_mdev.fb_fd, AICFB_UPDATE_LAYER_CONFIG, &amp;layer) &lt; 0) {
ERR("ioctl() failed! err %d[%s]\n", errno, strerror(errno));
return -1;
}
return 0;
}
#define US_PER_SEC 1000000
static void show_fps(struct timeval *start, struct timeval *end, int cnt)
{
double diff;
if (end-&gt;tv_usec &lt; start-&gt;tv_usec) {
diff = (double)(US_PER_SEC + end-&gt;tv_usec - start-&gt;tv_usec)/US_PER_SEC;
diff += end-&gt;tv_sec - 1 - start-&gt;tv_sec;
} else {
diff = (double)(end-&gt;tv_usec - start-&gt;tv_usec)/US_PER_SEC;
diff += end-&gt;tv_sec - start-&gt;tv_sec;
}
printf("\nDVP frame rate: %.1f, frame %d / %.1f seconds\n",
(double)cnt/diff, cnt, diff);
}
int dvp_capture(u32 cnt)
{
int i, index = 0;
struct timeval start, end;
gettimeofday(&amp;start, NULL);
for (i = 0; i &lt; cnt; i++) {
if (dvp_dequeue_buf(&amp;index) &lt; 0)
return -1;
if (g_verbose)
DBG("Set the buf %d to video layer\n", index);
if (g_mdev.rotation) {
#ifdef SUPPORT_ROTATION
if (do_rotate(&amp;g_vdata, index) &lt; 0)
return -1;
if (video_layer_set(&amp;g_vdata, VID_BUF_NUM) &lt; 0)
return -1;
#endif
} else {
if (video_layer_set(&amp;g_vdata, index) &lt; 0)
return -1;
}
dvp_queue_buf(index);
if (i &amp;&amp; (i % 1000 == 0)) {
gettimeofday(&amp;end, NULL);
show_fps(&amp;start, &amp;end, i);
}
}
gettimeofday(&amp;end, NULL);
show_fps(&amp;start, &amp;end, i);
return 0;
}
int media_dev_open(void)
{
g_mdev.sensor_fd = device_open(SENSOR_DEV, O_RDWR);
if (g_mdev.sensor_fd &lt; 0)
return -1;
g_mdev.video_fd = device_open(VIDEO_DEV, O_RDWR);
if (g_mdev.video_fd &lt; 0)
return -1;
g_mdev.dvp_fd = device_open(DVP_SUBDEV_DEV, O_RDWR);
if (g_mdev.dvp_fd &lt; 0)
return -1;
g_mdev.fb_fd = device_open(FB_DEV, O_RDWR);
if (g_mdev.fb_fd &lt; 0)
return -1;
#ifdef SUPPORT_ROTATION
if (g_mdev.rotation) {
g_mdev.ge_dev = mpp_ge_open();
if (!g_mdev.ge_dev) {
ERR("Failed to open GE\n");
return -1;
}
}
#endif
return 0;
}
void media_dev_close(void)
{
if (g_mdev.sensor_fd &gt; 0)
close(g_mdev.sensor_fd);
if (g_mdev.video_fd &gt; 0)
close(g_mdev.video_fd);
if (g_mdev.dvp_fd &gt; 0)
close(g_mdev.dvp_fd);
if (g_mdev.fb_fd &gt; 0)
close(g_mdev.fb_fd);
#ifdef SUPPORT_ROTATION
if (g_mdev.ge_dev)
mpp_ge_close(g_mdev.ge_dev);
#endif
}
int main(int argc, char **argv)
{
int c, i, frame_cnt = 1;
g_mdev.sensor_width = 640;
g_mdev.sensor_height = 480;
g_mdev.sensor_fr = 30;
g_vdata.fmt = V4L2_PIX_FMT_NV16;
while ((c = getopt_long(argc, argv, sopts, lopts, NULL)) != -1) {
switch (c) {
case 'f':
if (strncasecmp("nv12", optarg, strlen(optarg)) == 0)
g_vdata.fmt = V4L2_PIX_FMT_NV12;
break;
case 'c':
frame_cnt = str2int(optarg);
break;
case 'w':
g_mdev.sensor_width = str2int(optarg);
break;
case 'h':
g_mdev.sensor_height = str2int(optarg);
break;
case 'r':
g_mdev.sensor_fr = str2int(optarg);
break;
#ifdef SUPPORT_ROTATION
case 'a':
g_mdev.rotation = (str2int(optarg) % 360) / 90;
break;
#endif
case 'u':
usage(argv[0]);
return 0;
case 'v':
g_verbose = 1;
break;
default:
break;
}
}
printf("Capture %d frames from camera\n", frame_cnt);
printf("DVP out format: %s\n",
g_vdata.fmt == V4L2_PIX_FMT_NV16 ? "NV16" : "NV12");
if (media_dev_open())
goto end;
if (sensor_set_fmt() &lt; 0)
return -1;
if (dvp_subdev_set_fmt() &lt; 0)
return -1;
if (g_vdata.fmt == V4L2_PIX_FMT_NV16)
g_vdata.frame_size = g_vdata.w * g_vdata.h * 2;
else
g_vdata.frame_size = (g_vdata.w * g_vdata.h * 3) &gt;&gt; 1;
if (get_fb_size())
goto end;
if (set_ui_layer_alpha(15) &lt; 0)
goto end;
if (dvp_cfg(g_vdata.w, g_vdata.h, g_vdata.fmt) &lt; 0)
goto end;
if (g_mdev.rotation) {
printf("Rotate %d by GE\n", g_mdev.rotation * 90);
/* Use the last buf connect GE and Video layer */
if (dvp_request_buf(VID_BUF_NUM + 1) &lt; 0)
goto end;
vidbuf_dmabuf_begin(&amp;g_vdata, VID_BUF_NUM + 1);
} else {
if (dvp_request_buf(VID_BUF_NUM) &lt; 0)
goto end;
vidbuf_dmabuf_begin(&amp;g_vdata, VID_BUF_NUM);
}
for (i = 0; i &lt; VID_BUF_NUM; i++)
if (dvp_queue_buf(i) &lt; 0)
goto end;
if (dvp_start() &lt; 0)
goto end;
dvp_capture(frame_cnt);
dvp_stop();
if (g_mdev.rotation) {
vidbuf_dmabuf_end(&amp;g_vdata, VID_BUF_NUM + 1);
dvp_release_buf(VID_BUF_NUM + 1);
} else {
vidbuf_dmabuf_end(&amp;g_vdata, VID_BUF_NUM);
dvp_release_buf(VID_BUF_NUM);
}
end:
video_layer_disable();
media_dev_close();
return 0;
}</code></pre></div>
</section></div></article></main></div>
</div>
<nav role="navigation" id="wh_topic_toc" aria-label="On this page" class="col-lg-2 d-none d-lg-block navbar d-print-none">
<div id="wh_topic_toc_content">
<div class=" wh_topic_toc "><div class="wh_topic_label">在本页上</div><ul><li class="section-item"><div class="section-title"><a href="#dvp-demo__section_gvw_npn_rdc" data-tocid="dvp-demo__section_gvw_npn_rdc">APP 层的处理流程</a></div></li><li class="section-item"><div class="section-title"><a href="#dvp-demo__section_ivw_npn_rdc" data-tocid="dvp-demo__section_ivw_npn_rdc">APP Demo 参考实现</a></div></li></ul></div>
</div>
</nav>
</div>
</div>
</div>
<footer class="navbar navbar-default wh_footer">
<div class=" footer-container mx-auto ">
<!-- footer def -->
<style><!--
.p1 {
font-family: FangZhengShuSong, Times, serif;
}
.p2 {
font-family: Arial, Helvetica, sans-serif;
}
.p3 {
font-family: "Lucida Console", "Courier New", monospace;
}
--></style>
<div class="webhelp.fragment.footer">
<p class="p1">Copyright © 2019-2024 广东匠芯创科技有限公司. All rights reserved.</p>
</div><div>
<div class="generation_time">
Update Time: 2025-01-21
</div>
</div>
</div>
</footer>
<div id="go2top" class="d-print-none">
<span class="oxy-icon oxy-icon-up"></span>
</div>
<div id="modal_img_large" class="modal">
<span class="close oxy-icon oxy-icon-remove"></span>
<div id="modal_img_container"></div>
<div id="caption"></div>
</div>
<script src="../../../publishing/publishing-styles-AIC-template/js/custom.js" defer="defer"></script>
</body>
</html>