Found a couple of must-read references on GitHub; too good to keep to myself:
Reference link 1: allwinner-zh/media-codec
-----------------------------------
Also found a few related documents:
Reference link 2: Building a third-party app for CamDroid
Reference link 3: the Linux NDK release used by camdroid, android-ndk-r8b-linux-x86.tar.bz2
Reference link 4: the Windows NDK release used by camdroid, android-ndk-r8b-windows.zip
Reference link 5: How to use the Android NDK toolchain (Standalone Toolchain)
Reference link 6: Compiling Android executables directly with the NDK's arm-linux-androideabi-gcc
Running make-standalone-toolchain.sh --platform=android-14 creates a standalone toolchain:
# /d/Downloads/android-ndk-r8b/build/tools/make-standalone-toolchain.sh --platform=android-14
Auto-config: --toolchain=arm-linux-androideabi-4.6
Copying prebuilt binaries...
Copying sysroot headers and libraries...
Copying libstdc++ headers and libraries...
Creating package file: /tmp/ndk-/arm-linux-androideabi-4.6.tar.bz2
Cleaning up...
Done.
Extract arm-linux-androideabi-4.6.tar.bz2, add it to the system PATH, and then a camdroid executable can be built like this:
arm-linux-androideabi-gcc.exe -o test test.c
camdroid source code: https://github.com/qq516333132/camdroid
-----------------------------------
Video encoder library APIs:
VideoEncCreate: create a video encoder
VideoEncDestroy: destroy the video encoder
VideoEncInit: initialize the video encoder
VideoEncUnInit: de-initialize the video encoder
AllocInputBuffer: allocate input frame buffers through the vencoder
GetOneAllocInputBuffer: get one frame buffer allocated by the vencoder
FlushCacheAllocInputBuffer: flush the cache to keep the data consistent
ReturnOneAllocInputBuffer: hand back a frame buffer allocated by the vencoder
ReleaseAllocInputBuffer: release the frame buffers allocated by the vencoder
AddOneInputBuffer: queue one input frame to the encoder
VideoEncodeOneFrame: encode one frame
AlreadyUsedInputBuffer: retrieve a frame the encoder has finished with
ValidBitstreamFrameNum: get the number of valid output bitstream buffers
GetOneBitstreamFrame: get one bitstream buffer
FreeOneBitStreamFrame: hand back a bitstream buffer
VideoEncGetParameter: get an encoder parameter
VideoEncSetParameter: set an encoder parameter
All of the functions above are exported by the shared library camdroid/frameworks/av/media/CedarX-Projects/CedarAndroidLib/LIB_JB42_F81/libvencoder.so (a sketch of the typical call order follows the readelf output below).
How to check:
# arm-linux-gnueabihf-readelf -s ./camdroid/frameworks/av/media/CedarX-Projects/CedarAndroidLib/LIB_JB42_F81/libvencoder.so |grep VideoEnc
3: 00003799 172 FUNC GLOBAL DEFAULT 7 VideoEncCreate
13: 00003845 236 FUNC GLOBAL DEFAULT 7 VideoEncInit
19: 00003931 120 FUNC GLOBAL DEFAULT 7 VideoEncUnInit
22: 000039a9 44 FUNC GLOBAL DEFAULT 7 VideoEncDestroy
36: 00003b8d 172 FUNC GLOBAL DEFAULT 7 VideoEncodeOneFrame
45: 00003ceb 12 FUNC GLOBAL DEFAULT 7 VideoEncGetParameter
46: 00003cf7 12 FUNC GLOBAL DEFAULT 7 VideoEncSetParameter
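To make the flow concrete, here is a minimal sketch of the call order these APIs seem to expect (pieced together from the demo in the next post; setup of the config/parameter structs and all error handling are elided, so treat it as an outline rather than verified code):
/* sketch: H.264 encode loop using encoder-allocated input buffers */
VideoEncoder *enc = VideoEncCreate(VENC_CODEC_H264);
VideoEncSetParameter(enc, VENC_IndexParamH264Param, &h264Param);
VideoEncInit(enc, &baseConfig);
VideoEncGetParameter(enc, VENC_IndexParamH264SPSPPS, &spsPps);  /* write this header first */
AllocInputBuffer(enc, &bufferParam);
while (have_more_frames) {                  /* placeholder loop condition */
    GetOneAllocInputBuffer(enc, &in);       /* fill in.pAddrVirY / in.pAddrVirC with one frame */
    FlushCacheAllocInputBuffer(enc, &in);
    AddOneInputBuffer(enc, &in);
    VideoEncodeOneFrame(enc);
    AlreadyUsedInputBuffer(enc, &in);
    ReturnOneAllocInputBuffer(enc, &in);
    GetOneBitstreamFrame(enc, &out);        /* write out.pData0 / out.pData1 to the stream */
    FreeOneBitStreamFrame(enc, &out);
}
VideoEncUnInit(enc);
VideoEncDestroy(enc);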
-----------------------------------
Here is an encoding demo; I'll give it a try later and see whether it builds and runs:
#include "log.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "vencoder.h"
#include <time.h>
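/* Convert the chroma planes of a planar YU12 (I420) frame into the interleaved UV
 * layout of semi-planar NV12 (VENC_PIXEL_YUV420SP), using addr_tmp_uv as scratch. */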
int yu12_nv12(unsigned int width, unsigned int height, unsigned char *addr_uv, unsigned char *addr_tmp_uv)
{
unsigned int i, chroma_bytes;
unsigned char *u_addr = NULL;
unsigned char *v_addr = NULL;
unsigned char *tmp_addr = NULL;
chroma_bytes = width*height/4;
u_addr = addr_uv;
v_addr = addr_uv + chroma_bytes;
tmp_addr = addr_tmp_uv;
for(i=0; i<chroma_bytes; i++)
{
*(tmp_addr++) = *(u_addr++);
*(tmp_addr++) = *(v_addr++);
}
memcpy(addr_uv, addr_tmp_uv, chroma_bytes*2);
return 0;
}
int main()
{
VencBaseConfig baseConfig;
VencAllocateBufferParam bufferParam;
VideoEncoder* pVideoEnc = NULL;
VencInputBuffer inputBuffer;
VencOutputBuffer outputBuffer;
VencHeaderData sps_pps_data;
VencH264Param h264Param;
VencH264FixQP fixQP;
EXIFInfo exifinfo;
VencCyclicIntraRefresh sIntraRefresh;
unsigned int src_width,src_height,dst_width,dst_height;
unsigned char *uv_tmp_buffer = NULL;
VencROIConfig sRoiConfig[4];
src_width = 1920;
src_height = 1080;
dst_width = 1920;
dst_height = 1080;
// roi
sRoiConfig[0].bEnable = 1;
sRoiConfig[0].index = 0;
sRoiConfig[0].nQPoffset = 10;
sRoiConfig[0].sRect.nLeft = 320;
sRoiConfig[0].sRect.nTop = 180;
sRoiConfig[0].sRect.nWidth = 320;
sRoiConfig[0].sRect.nHeight = 180;
sRoiConfig[1].bEnable = 1;
sRoiConfig[1].index = 1;
sRoiConfig[1].nQPoffset = 10;
sRoiConfig[1].sRect.nLeft = 320;
sRoiConfig[1].sRect.nTop = 180;
sRoiConfig[1].sRect.nWidth = 320;
sRoiConfig[1].sRect.nHeight = 180;
sRoiConfig[2].bEnable = 1;
sRoiConfig[2].index = 2;
sRoiConfig[2].nQPoffset = 10;
sRoiConfig[2].sRect.nLeft = 320;
sRoiConfig[2].sRect.nTop = 180;
sRoiConfig[2].sRect.nWidth = 320;
sRoiConfig[2].sRect.nHeight = 180;
sRoiConfig[3].bEnable = 1;
sRoiConfig[3].index = 3;
sRoiConfig[3].nQPoffset = 10;
sRoiConfig[3].sRect.nLeft = 320;
sRoiConfig[3].sRect.nTop = 180;
sRoiConfig[3].sRect.nWidth = 320;
sRoiConfig[3].sRect.nHeight = 180;
//intraRefresh
sIntraRefresh.bEnable = 1;
sIntraRefresh.nBlockNumber = 10;
//fix qp mode
fixQP.bEnable = 1;
fixQP.nIQp = 20;
fixQP.nPQp = 30;
exifinfo.ThumbWidth = 176;
exifinfo.ThumbHeight = 144;
//* h264 param
h264Param.bEntropyCodingCABAC = 1;
h264Param.nBitrate = 4*1024*1024; /* bps */
h264Param.nFramerate = 30; /* fps */
h264Param.nCodingMode = VENC_FRAME_CODING;
// h264Param.nCodingMode = VENC_FIELD_CODING;
h264Param.nMaxKeyInterval = 30;
h264Param.sProfileLevel.nProfile = VENC_H264ProfileMain;
h264Param.sProfileLevel.nLevel = VENC_H264Level31;
h264Param.sQPRange.nMinqp = 10;
h264Param.sQPRange.nMaxqp = 40;
int codecType = VENC_CODEC_H264;
int testNumber = 70;
strcpy((char*)exifinfo.CameraMake, "allwinner make test");
strcpy((char*)exifinfo.CameraModel, "allwinner model test");
strcpy((char*)exifinfo.DateTime, "2014:02:21 10:54:05");
strcpy((char*)exifinfo.gpsProcessingMethod, "allwinner gps");
exifinfo.Orientation = 0;
exifinfo.ExposureTime.num = 2;
exifinfo.ExposureTime.den = 1000;
exifinfo.FNumber.num = 20;
exifinfo.FNumber.den = 10;
exifinfo.ISOSpeed = 50;
exifinfo.ExposureBiasValue.num= -4;
exifinfo.ExposureBiasValue.den= 1;
exifinfo.MeteringMode = 1;
exifinfo.FlashUsed = 0;
exifinfo.FocalLength.num = 1400;
exifinfo.FocalLength.den = 100;
exifinfo.DigitalZoomRatio.num = 4;
exifinfo.DigitalZoomRatio.den = 1;
exifinfo.WhiteBalance = 1;
exifinfo.ExposureMode = 1;
exifinfo.enableGpsInfo = 1;
exifinfo.gps_latitude = 23.2368;
exifinfo.gps_longitude = 24.3244;
exifinfo.gps_altitude = 1234.5;
exifinfo.gps_timestamp = (long)time(NULL);
FILE *in_file = NULL;
FILE *out_file = NULL;
if(codecType == VENC_CODEC_H264)
{
in_file = fopen("/root/mnt/repos/codec-lte/demo/data/stream/1080p.yuv", "r");
if(in_file == NULL)
{
loge("open in_file fail\n");
return -1;
}
out_file = fopen("./1080p.264", "wb");
if(out_file == NULL)
{
loge("open out_file fail\n");
return -1;
}
}
else
{
in_file = fopen("/data/camera/720p-30zhen.yuv", "r");
if(in_file == NULL)
{
loge("open in_file fail\n");
return -1;
}
out_file = fopen("/data/camera/test.jpg", "wb");
if(out_file == NULL)
{
loge("open out_file fail\n");
return -1;
}
}
memset(&baseConfig, 0 ,sizeof(VencBaseConfig));
memset(&bufferParam, 0 ,sizeof(VencAllocateBufferParam));
baseConfig.nInputWidth= src_width;
baseConfig.nInputHeight = src_height;
baseConfig.nStride = src_width;
baseConfig.nDstWidth = dst_width;
baseConfig.nDstHeight = dst_height;
baseConfig.eInputFormat = VENC_PIXEL_YUV420SP;
bufferParam.nSizeY = baseConfig.nInputWidth*baseConfig.nInputHeight;
bufferParam.nSizeC = baseConfig.nInputWidth*baseConfig.nInputHeight/2;
bufferParam.nBufferNum = 4;
pVideoEnc = VideoEncCreate(codecType);
if(codecType == VENC_CODEC_JPEG)
{
VideoEncSetParameter(pVideoEnc, VENC_IndexParamJpegExifInfo, &exifinfo);
}
else if(codecType == VENC_CODEC_H264)
{
int value;
VideoEncSetParameter(pVideoEnc, VENC_IndexParamH264Param, &h264Param);
value = 0;
VideoEncSetParameter(pVideoEnc, VENC_IndexParamIfilter, &value);
value = 0; //degree
VideoEncSetParameter(pVideoEnc, VENC_IndexParamRotation, &value);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamH264FixQP, &fixQP);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamH264CyclicIntraRefresh, &sIntraRefresh);
value = 720/4;
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamSliceHeight, &value);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamROIConfig, &sRoiConfig[0]);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamROIConfig, &sRoiConfig[1]);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamROIConfig, &sRoiConfig[2]);
//VideoEncSetParameter(pVideoEnc, VENC_IndexParamROIConfig, &sRoiConfig[3]);
}
VideoEncInit(pVideoEnc, &baseConfig);
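/* For H.264, fetch the encoder-generated SPS/PPS and write it at the head of the raw .264 file. */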
if(codecType == VENC_CODEC_H264)
{
unsigned int head_num = 0;
VideoEncGetParameter(pVideoEnc, VENC_IndexParamH264SPSPPS, &sps_pps_data);
fwrite(sps_pps_data.pBuffer, 1, sps_pps_data.nLength, out_file);
logd("sps_pps_data.nLength: %d", sps_pps_data.nLength);
for(head_num=0; head_num<sps_pps_data.nLength; head_num++)
logd("the sps_pps :%02x\n", *(sps_pps_data.pBuffer+head_num));
}
if(codecType == VENC_CODEC_JPEG)
{
testNumber = 1;
}
AllocInputBuffer(pVideoEnc, &bufferParam);
if(baseConfig.eInputFormat == VENC_PIXEL_YUV420SP)
{
uv_tmp_buffer = (unsigned char*)malloc(baseConfig.nInputWidth*baseConfig.nInputHeight/2);
if(uv_tmp_buffer == NULL)
{
loge("malloc uv_tmp_buffer fail\n");
return -1;
}
}
while(testNumber > 0)
{
GetOneAllocInputBuffer(pVideoEnc, &inputBuffer);
{
unsigned int size1, size2;
size1 = fread(inputBuffer.pAddrVirY, 1, baseConfig.nInputWidth*baseConfig.nInputHeight, in_file);
size2 = fread(inputBuffer.pAddrVirC, 1, baseConfig.nInputWidth*baseConfig.nInputHeight/2, in_file);
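/* If the input file ran out, rewind and re-read so the clip loops for all testNumber frames. */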
if((size1!= baseConfig.nInputWidth*baseConfig.nInputHeight) || (size2!= baseConfig.nInputWidth*baseConfig.nInputHeight/2))
{
fseek(in_file, 0L, SEEK_SET);
size1 = fread(inputBuffer.pAddrVirY, 1, baseConfig.nInputWidth*baseConfig.nInputHeight, in_file);
size2 = fread(inputBuffer.pAddrVirC, 1, baseConfig.nInputWidth*baseConfig.nInputHeight/2, in_file);
}
if(baseConfig.eInputFormat == VENC_PIXEL_YUV420SP)
{
yu12_nv12(baseConfig.nInputWidth, baseConfig.nInputHeight, inputBuffer.pAddrVirC, uv_tmp_buffer);
}
}
inputBuffer.bEnableCorp = 0;
inputBuffer.sCropInfo.nLeft = 240;
inputBuffer.sCropInfo.nTop = 240;
inputBuffer.sCropInfo.nWidth = 240;
inputBuffer.sCropInfo.nHeight = 240;
FlushCacheAllocInputBuffer(pVideoEnc, &inputBuffer);
AddOneInputBuffer(pVideoEnc, &inputBuffer);
VideoEncodeOneFrame(pVideoEnc);
AlreadyUsedInputBuffer(pVideoEnc,&inputBuffer);
ReturnOneAllocInputBuffer(pVideoEnc, &inputBuffer);
GetOneBitstreamFrame(pVideoEnc, &outputBuffer);
//logi("size: %d,%d", outputBuffer.nSize0,outputBuffer.nSize1);
fwrite(outputBuffer.pData0, 1, outputBuffer.nSize0, out_file);
if(outputBuffer.nSize1)
{
fwrite(outputBuffer.pData1, 1, outputBuffer.nSize1, out_file);
}
FreeOneBitStreamFrame(pVideoEnc, &outputBuffer);
if(h264Param.nCodingMode==VENC_FIELD_CODING && codecType==VENC_CODEC_H264)
{
GetOneBitstreamFrame(pVideoEnc, &outputBuffer);
//logi("size: %d,%d", outputBuffer.nSize0,outputBuffer.nSize1);
fwrite(outputBuffer.pData0, 1, outputBuffer.nSize0, out_file);
if(outputBuffer.nSize1)
{
fwrite(outputBuffer.pData1, 1, outputBuffer.nSize1, out_file);
}
FreeOneBitStreamFrame(pVideoEnc, &outputBuffer);
}
testNumber--;
}
out:
fclose(out_file);
fclose(in_file);
if(uv_tmp_buffer)
free(uv_tmp_buffer);
return 0;
}
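By the way, the demo exits without tearing the encoder down; judging from the API list above, the intended cleanup would presumably be something like:
ReleaseAllocInputBuffer(pVideoEnc);
VideoEncUnInit(pVideoEnc);
VideoEncDestroy(pVideoEnc);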
-----------------------------------
I even found the source of the encoder implementation:
/*
* Cedarx framework.
* Copyright (c) 2008-2015 Allwinner Technology Co. Ltd.
* Author: Ning Fang <fangning@allwinnertech.com>
*
* This file is part of Cedarx.
*
* Cedarx is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed "as is" WITHOUT ANY WARRANTY of any
* kind, whether express or implied; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
#ifndef _VENCODER_H_
#define _VENCODER_H_
#define DATA_TIME_LENGTH 24
#define INFO_LENGTH 64
#define GPS_PROCESS_METHOD_LENGTH 100
#define VENC_BUFFERFLAG_KEYFRAME 0x00000001
#define VENC_BUFFERFLAG_EOS 0x00000002
typedef struct rational_t
{
unsigned int num;
unsigned int den;
}rational_t;
typedef struct srational_t
{
int num;
int den;
}srational_t;
typedef enum ExifMeteringModeType{
EXIF_METERING_UNKNOWN,
EXIF_METERING_AVERAGE,
EXIF_METERING_CENTER,
EXIF_METERING_SPOT,
EXIF_METERING_MULTISPOT,
EXIF_METERING_PATTERN,
EXIF_METERING_PARTIAL,
EXIF_METERING_OTHER = 255,
} ExifMeteringModeType;
typedef enum ExifExposureModeType
{
EXIF_EXPOSURE_AUTO,
EXIF_EXPOSURE_MANUAL,
EXIF_EXPOSURE_AUTO_BRACKET,
}ExifExposureModeType;
typedef struct EXIFInfo
{
unsigned char CameraMake[INFO_LENGTH];
unsigned char CameraModel[INFO_LENGTH];
unsigned char DateTime[DATA_TIME_LENGTH];
unsigned int ThumbWidth;
unsigned int ThumbHeight;
int Orientation; //value can be 0,90,180,270 degree
rational_t ExposureTime; //tag 0x829A
rational_t FNumber; //tag 0x829D
short ISOSpeed;//tag 0x8827
srational_t ShutterSpeedValue; //tag 0x9201
//srational_t BrightnessValue; //tag 0x9203
srational_t ExposureBiasValue; //tag 0x9204
short MeteringMode; //tag 0x9207
short FlashUsed; //tag 0x9209
rational_t FocalLength; //tag 0x920A
rational_t DigitalZoomRatio; // tag A404
short WhiteBalance; //tag 0xA403
short ExposureMode; //tag 0xA402
// gps info
int enableGpsInfo;
double gps_latitude;
double gps_longitude;
double gps_altitude;
long gps_timestamp;
unsigned char gpsProcessingMethod[GPS_PROCESS_METHOD_LENGTH];
}EXIFInfo;
typedef struct VencRect
{
int nLeft;
int nTop;
int nWidth;
int nHeight;
}VencRect;
typedef enum VENC_COLOR_SPACE
{
VENC_BT601,
VENC_BT709,
VENC_YCC,
}VENC_COLOR_SPACE;
typedef enum VENC_YUV2YUV
{
VENC_YCCToBT601,
VENC_BT601ToYCC,
}VENC_YUV2YUV;
typedef enum VENC_CODING_MODE
{
VENC_FRAME_CODING = 0,
VENC_FIELD_CODING = 1,
}VENC_CODING_MODE;
typedef enum VENC_CODEC_TYPE
{
VENC_CODEC_H264,
VENC_CODEC_JPEG,
VENC_CODEC_VP8,
}VENC_CODEC_TYPE;
typedef enum VENC_PIXEL_FMT
{
VENC_PIXEL_YUV420SP,
VENC_PIXEL_YVU420SP,
VENC_PIXEL_YUV420P,
VENC_PIXEL_YVU420P,
VENC_PIXEL_YUV422SP,
VENC_PIXEL_YVU422SP,
VENC_PIXEL_YUV422P,
VENC_PIXEL_YVU422P,
VENC_PIXEL_YUYV422,
VENC_PIXEL_UYVY422,
VENC_PIXEL_YVYU422,
VENC_PIXEL_VYUY422,
VENC_PIXEL_ARGB,
VENC_PIXEL_RGBA,
VENC_PIXEL_ABGR,
VENC_PIXEL_BGRA,
VENC_PIXEL_TILE_32X32,
VENC_PIXEL_TILE_128X32,
}VENC_PIXEL_FMT;
typedef struct VencBaseConfig
{
unsigned int nInputWidth;
unsigned int nInputHeight;
unsigned int nDstWidth;
unsigned int nDstHeight;
unsigned int nStride;
VENC_PIXEL_FMT eInputFormat;
}VencBaseConfig;
/**
* H264 profile types
*/
typedef enum VENC_H264PROFILETYPE
{
VENC_H264ProfileBaseline = 66, /**< Baseline profile */
VENC_H264ProfileMain = 77, /**< Main profile */
VENC_H264ProfileHigh = 100, /**< High profile */
}VENC_H264PROFILETYPE;
/**
* H264 level types
*/
typedef enum VENC_H264LEVELTYPE
{
VENC_H264Level1 = 10, /**< Level 1 */
VENC_H264Level11 = 11, /**< Level 1.1 */
VENC_H264Level12 = 12, /**< Level 1.2 */
VENC_H264Level13 = 13, /**< Level 1.3 */
VENC_H264Level2 = 20, /**< Level 2 */
VENC_H264Level21 = 21, /**< Level 2.1 */
VENC_H264Level22 = 22, /**< Level 2.2 */
VENC_H264Level3 = 30, /**< Level 3 */
VENC_H264Level31 = 31, /**< Level 3.1 */
VENC_H264Level32 = 32, /**< Level 3.2 */
VENC_H264Level4 = 40, /**< Level 4 */
VENC_H264Level41 = 41, /**< Level 4.1 */
VENC_H264Level42 = 42, /**< Level 4.2 */
VENC_H264Level5 = 50, /**< Level 5 */
VENC_H264Level51 = 51, /**< Level 5.1 */
}VENC_H264LEVELTYPE;
typedef struct VencH264ProfileLevel
{
VENC_H264PROFILETYPE nProfile;
VENC_H264LEVELTYPE nLevel;
}VencH264ProfileLevel;
typedef struct VencQPRange
{
int nMaxqp;
int nMinqp;
}VencQPRange;
typedef struct MotionParam
{
int nMotionDetectEnable;
int nMotionDetectRatio; /* 0~12, 0 is the best sensitive */
}MotionParam;
typedef struct VencHeaderData
{
unsigned char* pBuffer;
unsigned int nLength;
}VencHeaderData;
typedef struct VencInputBuffer
{
unsigned long nID;
long long nPts;
unsigned int nFlag;
unsigned char* pAddrPhyY;
unsigned char* pAddrPhyC;
unsigned char* pAddrVirY;
unsigned char* pAddrVirC;
int bEnableCorp;
VencRect sCropInfo;
int ispPicVar;
}VencInputBuffer;
typedef struct VencOutputBuffer
{
int nID;
long long nPts;
unsigned int nFlag;
unsigned int nSize0;
unsigned int nSize1;
unsigned char* pData0;
unsigned char* pData1;
}VencOutputBuffer;
typedef struct VencAllocateBufferParam
{
unsigned int nBufferNum;
unsigned int nSizeY;
unsigned int nSizeC;
}VencAllocateBufferParam;
typedef struct VencH264FixQP
{
int bEnable;
int nIQp;
int nPQp;
}VencH264FixQP;
typedef struct VencCyclicIntraRefresh
{
int bEnable;
int nBlockNumber;
}VencCyclicIntraRefresh;
typedef struct VencSize
{
int nWidth;
int nHeight;
}VencSize;
typedef struct VencH264Param
{
VencH264ProfileLevel sProfileLevel;
int bEntropyCodingCABAC; /* 0:CAVLC 1:CABAC*/
VencQPRange sQPRange;
int nFramerate; /* fps*/
int nBitrate; /* bps*/
int nMaxKeyInterval;
VENC_CODING_MODE nCodingMode;
}VencH264Param;
/* support 4 ROI region */
typedef struct VencROIConfig
{
int bEnable;
int index; /* (0~3) */
int nQPoffset;
VencRect sRect;
}VencROIConfig;
typedef struct VencCheckColorFormat
{
int index;
VENC_PIXEL_FMT eColorFormat;
}VencCheckColorFormat;
typedef struct VencVP8Param
{
int nFramerate; /* fps*/
int nBitrate; /* bps*/
int nMaxKeyInterval;
}VencVP8Param;
typedef enum VENC_INDEXTYPE
{
VENC_IndexParamBitrate = 0x0, /**< reference type: int */
VENC_IndexParamFramerate, /**< reference type: int */
VENC_IndexParamMaxKeyInterval, /**< reference type: int */
VENC_IndexParamIfilter, /**< reference type: int */
VENC_IndexParamRotation, /**< reference type: int */
VENC_IndexParamSliceHeight, /**< reference type: int */
VENC_IndexParamForceKeyFrame, /**< reference type: int (write only)*/
VENC_IndexParamMotionDetectEnable, /**< reference type: MotionParam(write only) */
VENC_IndexParamMotionDetectStatus, /**< reference type: int(read only) */
VENC_IndexParamRgb2Yuv, /**< reference type: VENC_COLOR_SPACE */
VENC_IndexParamYuv2Yuv, /**< reference type: VENC_YUV2YUV */
VENC_IndexParamROIConfig, /**< reference type: VencROIConfig */
VENC_IndexParamStride, /**< reference type: int */
VENC_IndexParamColorFormat, /**< reference type: VENC_PIXEL_FMT */
VENC_IndexParamSize, /**< reference type: VencSize(read only) */
VENC_IndexParamSetVbvSize, /**< reference type: setVbvSize(write only) */
/* check capabiliy */
VENC_IndexParamMAXSupportSize, /**< reference type: VencSize(read only) */
VENC_IndexParamCheckColorFormat, /**< reference type: VencCheckFormat(read only) */
/* H264 param */
VENC_IndexParamH264Param, /**< reference type: VencH264Param */
VENC_IndexParamH264SPSPPS, /**< reference type: VencHeaderData (read only)*/
VENC_IndexParamH264QPRange = 0x100, /**< reference type: VencQPRange */
VENC_IndexParamH264ProfileLevel, /**< reference type: VencProfileLevel */
VENC_IndexParamH264EntropyCodingCABAC, /**< reference type: int(0:CAVLC 1:CABAC) */
VENC_IndexParamH264CyclicIntraRefresh, /**< reference type: VencCyclicIntraRefresh */
VENC_IndexParamH264FixQP, /**< reference type: VencH264FixQP */
/* jpeg param */
VENC_IndexParamJpegQuality = 0x200, /**< reference type: int (1~100) */
VENC_IndexParamJpegExifInfo, /**< reference type: EXIFInfo */
/* VP8 param */
VENC_IndexParamVP8Param,
}VENC_INDEXTYPE;
typedef enum VENC_RESULT_TYPE
{
VENC_RESULT_ERROR = -1,
VENC_RESULT_OK = 0,
VENC_RESULT_NO_FRAME_BUFFER = 1,
VENC_RESULT_BITSTREAM_IS_FULL = 2,
}VENC_RESULT_TYPE;
typedef struct JpegEncInfo
{
VencBaseConfig sBaseInfo;
int bNoUseAddrPhy;
unsigned char* pAddrPhyY;
unsigned char* pAddrPhyC;
unsigned char* pAddrVirY;
unsigned char* pAddrVirC;
int bEnableCorp;
VencRect sCropInfo;
int quality;
}JpegEncInfo;
int AWJpecEnc(JpegEncInfo* pJpegInfo, EXIFInfo* pExifInfo, void* pOutBuffer, int* pOutBufferSize);
typedef void* VideoEncoder;
VideoEncoder* VideoEncCreate(VENC_CODEC_TYPE eCodecType);
void VideoEncDestroy(VideoEncoder* pEncoder);
int VideoEncInit(VideoEncoder* pEncoder, VencBaseConfig* pConfig);
int VideoEncUnInit(VideoEncoder* pEncoder);
int AllocInputBuffer(VideoEncoder* pEncoder, VencAllocateBufferParam *pBufferParam);
int GetOneAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pInputbuffer);
int FlushCacheAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer *pInputbuffer);
int ReturnOneAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer *pInputbuffer);
int ReleaseAllocInputBuffer(VideoEncoder* pEncoder);
int AddOneInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pInputbuffer);
int VideoEncodeOneFrame(VideoEncoder* pEncoder);
int AlreadyUsedInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pBuffer);
int ValidBitstreamFrameNum(VideoEncoder* pEncoder);
int GetOneBitstreamFrame(VideoEncoder* pEncoder, VencOutputBuffer* pBuffer);
int FreeOneBitStreamFrame(VideoEncoder* pEncoder, VencOutputBuffer* pBuffer);
int VideoEncGetParameter(VideoEncoder* pEncoder, VENC_INDEXTYPE indexType, void* paramData);
int VideoEncSetParameter(VideoEncoder* pEncoder, VENC_INDEXTYPE indexType, void* paramData);
#endif //_VENCODER_H_
#ifdef __cplusplus
}
#endif /* __cplusplus */
/*
* Cedarx framework.
* Copyright (c) 2008-2015 Allwinner Technology Co. Ltd.
* Author: Ning Fang <fangning@allwinnertech.com>
*
* This file is part of Cedarx.
*
* Cedarx is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed "as is" WITHOUT ANY WARRANTY of any
* kind, whether express or implied; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
#include "log.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include "vencoder.h"
#include "FrameBufferManager.h"
#include "venc_device.h"
#include "EncAdapter.h"
#define FRAME_BUFFER_NUM 4
typedef struct VencContext
{
VENC_DEVICE* pVEncDevice;
void* pEncoderHandle;
FrameBufferManager* pFBM;
VencBaseConfig baseConfig;
unsigned int nFrameBufferNum;
VencHeaderData headerData;
VencInputBuffer curEncInputbuffer;
VENC_CODEC_TYPE codecType;
unsigned int ICVersion;
int bInit;
}VencContext;
VideoEncoder* VideoEncCreate(VENC_CODEC_TYPE eCodecType)
{
VencContext* venc_ctx = NULL;
if(EncAdapterInitialize() != 0)
{
loge("can not set up video engine runtime environment.");
return NULL;
}
venc_ctx = (VencContext*)malloc(sizeof(VencContext));
if(!venc_ctx){
loge("malloc VencContext fail!");
return NULL;
}
memset(venc_ctx, 0,sizeof(VencContext));
venc_ctx->nFrameBufferNum = FRAME_BUFFER_NUM;
venc_ctx->codecType = eCodecType;
venc_ctx->ICVersion = EncAdapterGetICVersion();
venc_ctx->bInit = 0;
venc_ctx->pVEncDevice = VencoderDeviceCreate(eCodecType);
if(venc_ctx->pVEncDevice == NULL)
{
free(venc_ctx);
return NULL;
}
venc_ctx->pEncoderHandle = venc_ctx->pVEncDevice->open();
if(!venc_ctx->pEncoderHandle)
{
VencoderDeviceDestroy(venc_ctx->pVEncDevice);
venc_ctx->pVEncDevice = NULL;
free(venc_ctx);
return NULL;
}
return (VideoEncoder*)venc_ctx;
}
void VideoEncDestroy(VideoEncoder* pEncoder)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
VideoEncUnInit(pEncoder);
if(venc_ctx->pVEncDevice)
{
venc_ctx->pVEncDevice->close(venc_ctx->pEncoderHandle);
VencoderDeviceDestroy(venc_ctx->pVEncDevice);
venc_ctx->pVEncDevice = NULL;
venc_ctx->pEncoderHandle = NULL;
}
EncAdpaterRelease();
if(venc_ctx)
{
free(venc_ctx);
}
}
int VideoEncInit(VideoEncoder* pEncoder, VencBaseConfig* pConfig)
{
int result = 0;
VencContext* venc_ctx = (VencContext*)pEncoder;
if(pEncoder == NULL || pConfig == NULL || venc_ctx->bInit)
{
loge("InitVideoEncoder, param is NULL");
return -1;
}
venc_ctx->pFBM = FrameBufferManagerCreate(venc_ctx->nFrameBufferNum);
if(venc_ctx->pFBM == NULL)
{
loge("venc_ctx->pFBM == NULL");
return -1;
}
logd("(f:%s, l:%d)", __FUNCTION__, __LINE__);
if(venc_ctx->ICVersion == 0x1639)
{
if(pConfig->nDstWidth >= 3840 || pConfig->nDstHeight>= 2160)
{
VeInitEncoderPerformance(1);
}
else
{
VeInitEncoderPerformance(0);
logd("VeInitEncoderPerformance");
}
}
logd("(f:%s, l:%d)", __FUNCTION__, __LINE__);
memcpy(&venc_ctx->baseConfig, pConfig, sizeof(VencBaseConfig));
EncAdapterLockVideoEngine();
result = venc_ctx->pVEncDevice->init(venc_ctx->pEncoderHandle, &venc_ctx->baseConfig);
EncAdapterUnLockVideoEngine();
venc_ctx->bInit = 1;
return result;
}
int VideoEncUnInit(VideoEncoder* pEncoder)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(!venc_ctx->bInit)
{
return -1;
}
venc_ctx->pVEncDevice->uninit(venc_ctx->pEncoderHandle);
if(venc_ctx->ICVersion == 0x1639)
{
if(venc_ctx->baseConfig.nDstWidth >= 3840 || venc_ctx->baseConfig.nDstHeight >= 2160)
{
VeUninitEncoderPerformance(1);
}
else
{
VeUninitEncoderPerformance(0);
logd("VeUninitEncoderPerformance");
}
}
if(venc_ctx->pFBM)
{
FrameBufferManagerDestroy(venc_ctx->pFBM);
venc_ctx->pFBM = NULL;
}
venc_ctx->bInit = 0;
return 0;
}
int AllocInputBuffer(VideoEncoder* pEncoder, VencAllocateBufferParam *pBufferParam)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(pEncoder == NULL || pBufferParam == NULL)
{
loge("InitVideoEncoder, param is NULL");
return -1;
}
if(venc_ctx->pFBM == NULL)
{
loge("venc_ctx->pFBM == NULL, must call InitVideoEncoder firstly");
return -1;
}
if(AllocateInputBuffer(venc_ctx->pFBM, pBufferParam)!=0)
{
loge("allocat inputbuffer failed");
return -1;
}
return 0;
}
int GetOneAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pInputbuffer)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(pEncoder == NULL)
{
loge("pEncoder == NULL");
return -1;
}
if(GetOneAllocateInputBuffer(venc_ctx->pFBM, pInputbuffer) != 0)
{
loge("get one allocate inputbuffer failed");
return -1;
}
return 0;
}
int FlushCacheAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer *pInputbuffer)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(venc_ctx == NULL)
{
loge("pEncoder == NULL");
return -1;
}
FlushCacheAllocateInputBuffer(venc_ctx->pFBM, pInputbuffer);
return 0;
}
int ReturnOneAllocInputBuffer(VideoEncoder* pEncoder, VencInputBuffer *pInputbuffer)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(pEncoder == NULL)
{
loge("pEncoder == NULL");
return -1;
}
if(ReturnOneAllocateInputBuffer(venc_ctx->pFBM, pInputbuffer) != 0)
{
loge("get one allocate inputbuffer failed");
return -1;
}
return 0;
}
int ReleaseAllocInputBuffer(VideoEncoder* pEncoder)
{
if(pEncoder == NULL)
{
loge("ReleaseAllocInputBuffer, pEncoder is NULL");
return -1;
}
return 0;
}
int AddOneInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pBuffer)
{
int result = 0;
VencContext* venc_ctx = (VencContext*)pEncoder;
if(venc_ctx == NULL || pBuffer == NULL)
{
loge("AddInputBuffer, param is NULL");
return -1;
}
result = AddInputBuffer(venc_ctx->pFBM, pBuffer);
return result;
}
int VideoEncodeOneFrame(VideoEncoder* pEncoder)
{
int result = 0;
VencContext* venc_ctx = (VencContext*)pEncoder;
if(!venc_ctx) {
return -1;
}
if(GetInputBuffer(venc_ctx->pFBM, &venc_ctx->curEncInputbuffer) != 0)
{
return VENC_RESULT_NO_FRAME_BUFFER;
}
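/* The VE apparently works on DRAM-relative bus addresses, so strip what looks like the DRAM
 * physical base from the buffer's physical address: 0x20000000 on IC 0x1639, 0x40000000 otherwise. */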
if(venc_ctx->ICVersion == 0x1639)
{
if((unsigned long)(venc_ctx->curEncInputbuffer.pAddrPhyY) >= 0x20000000)
{
venc_ctx->curEncInputbuffer.pAddrPhyY -= 0x20000000;
}
else
{
logw("venc_ctx->curEncInputbuffer.pAddrPhyY: %p, maybe not right", venc_ctx->curEncInputbuffer.pAddrPhyY);
}
if((unsigned long)venc_ctx->curEncInputbuffer.pAddrPhyC >= 0x20000000)
{
venc_ctx->curEncInputbuffer.pAddrPhyC -= 0x20000000;
}
}
else
{
if((unsigned long)venc_ctx->curEncInputbuffer.pAddrPhyY >= 0x40000000)
{
venc_ctx->curEncInputbuffer.pAddrPhyY -= 0x40000000;
}
else
{
logw("venc_ctx->curEncInputbuffer.pAddrPhyY: %p, maybe not right", venc_ctx->curEncInputbuffer.pAddrPhyY);
}
if((unsigned long)venc_ctx->curEncInputbuffer.pAddrPhyC >= 0x40000000)
{
venc_ctx->curEncInputbuffer.pAddrPhyC -= 0x40000000;
}
}
EncAdapterLockVideoEngine();
result = venc_ctx->pVEncDevice->encode(venc_ctx->pEncoderHandle, &venc_ctx->curEncInputbuffer);
EncAdapterUnLockVideoEngine();
AddUsedInputBuffer(venc_ctx->pFBM, &venc_ctx->curEncInputbuffer);
return result;
}
int AlreadyUsedInputBuffer(VideoEncoder* pEncoder, VencInputBuffer* pBuffer)
{
int result = 0;
VencContext* venc_ctx = (VencContext*)pEncoder;
if(venc_ctx == NULL || pBuffer == NULL)
{
loge("AddInputBuffer, param is NULL");
return -1;
}
result = GetUsedInputBuffer(venc_ctx->pFBM, pBuffer);
return result;
}
int ValidBitstreamFrameNum(VideoEncoder* pEncoder)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
return venc_ctx->pVEncDevice->ValidBitStreamFrameNum(venc_ctx->pEncoderHandle);
}
int GetOneBitstreamFrame(VideoEncoder* pEncoder, VencOutputBuffer* pBuffer)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(!venc_ctx) {
return -1;
}
if(venc_ctx->pVEncDevice->GetOneBitStreamFrame(venc_ctx->pEncoderHandle, pBuffer)!=0)
{
return -1;
}
return 0;
}
int FreeOneBitStreamFrame(VideoEncoder* pEncoder, VencOutputBuffer* pBuffer)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
if(!venc_ctx) {
return -1;
}
if(venc_ctx->pVEncDevice->FreeOneBitStreamFrame(venc_ctx->pEncoderHandle, pBuffer)!=0)
{
return -1;
}
return 0;
}
int VideoEncGetParameter(VideoEncoder* pEncoder, VENC_INDEXTYPE indexType, void* paramData)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
return venc_ctx->pVEncDevice->GetParameter(venc_ctx->pEncoderHandle, indexType, paramData);
}
int VideoEncSetParameter(VideoEncoder* pEncoder, VENC_INDEXTYPE indexType, void* paramData)
{
VencContext* venc_ctx = (VencContext*)pEncoder;
return venc_ctx->pVEncDevice->SetParameter(venc_ctx->pEncoderHandle, indexType, paramData);
}
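/* One-shot JPEG helper: wraps create / set-parameter / init / encode / destroy around a single frame
 * described by pJpegInfo and copies the JPEG bitstream into the caller-provided pOutBuffer
 * (which must be large enough). */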
int AWJpecEnc(JpegEncInfo* pJpegInfo, EXIFInfo* pExifInfo, void* pOutBuffer, int* pOutBufferSize)
{
VencAllocateBufferParam bufferParam;
VideoEncoder* pVideoEnc = NULL;
VencInputBuffer inputBuffer;
VencOutputBuffer outputBuffer;
int result = 0;
pVideoEnc = VideoEncCreate(VENC_CODEC_JPEG);
VideoEncSetParameter(pVideoEnc, VENC_IndexParamJpegExifInfo, pExifInfo);
VideoEncSetParameter(pVideoEnc, VENC_IndexParamJpegQuality, &pJpegInfo->quality);
if(VideoEncInit(pVideoEnc, &pJpegInfo->sBaseInfo)< 0)
{
result = -1;
goto ERROR;
}
if(pJpegInfo->bNoUseAddrPhy)
{
bufferParam.nSizeY = pJpegInfo->sBaseInfo.nStride*pJpegInfo->sBaseInfo.nInputHeight;
bufferParam.nSizeC = bufferParam.nSizeY>>1;
bufferParam.nBufferNum = 1;
if(AllocInputBuffer(pVideoEnc, &bufferParam)<0)
{
result = -1;
goto ERROR;
}
GetOneAllocInputBuffer(pVideoEnc, &inputBuffer);
memcpy(inputBuffer.pAddrVirY, pJpegInfo->pAddrPhyY, bufferParam.nSizeY);
memcpy(inputBuffer.pAddrVirC, pJpegInfo->pAddrPhyC, bufferParam.nSizeC);
FlushCacheAllocInputBuffer(pVideoEnc, &inputBuffer);
}
else
{
inputBuffer.pAddrPhyY = pJpegInfo->pAddrPhyY;
inputBuffer.pAddrPhyC = pJpegInfo->pAddrPhyC;
}
inputBuffer.bEnableCorp = pJpegInfo->bEnableCorp;
inputBuffer.sCropInfo.nLeft = pJpegInfo->sCropInfo.nLeft;
inputBuffer.sCropInfo.nTop = pJpegInfo->sCropInfo.nTop;
inputBuffer.sCropInfo.nWidth = pJpegInfo->sCropInfo.nWidth;
inputBuffer.sCropInfo.nHeight = pJpegInfo->sCropInfo.nHeight;
AddOneInputBuffer(pVideoEnc, &inputBuffer);
if(VideoEncodeOneFrame(pVideoEnc)!= 0)
{
loge("jpeg encoder error");
}
AlreadyUsedInputBuffer(pVideoEnc,&inputBuffer);
if(pJpegInfo->bNoUseAddrPhy)
{
ReturnOneAllocInputBuffer(pVideoEnc, &inputBuffer);
}
GetOneBitstreamFrame(pVideoEnc, &outputBuffer);
memcpy(pOutBuffer, outputBuffer.pData0, outputBuffer.nSize0);
if(outputBuffer.nSize1)
{
memcpy(((unsigned char*)pOutBuffer + outputBuffer.nSize0), outputBuffer.pData1, outputBuffer.nSize1);
*pOutBufferSize = outputBuffer.nSize0 + outputBuffer.nSize1;
}
else
{
*pOutBufferSize = outputBuffer.nSize0;
}
FreeOneBitStreamFrame(pVideoEnc, &outputBuffer);
ERROR:
if(pVideoEnc)
{
VideoEncDestroy(pVideoEnc);
}
return result;
}
#ifdef __cplusplus
}
#endif /* __cplusplus */
-----------------------------------
Core source of libVE.so: ve.c
/*
* Cedarx framework.
* Copyright (c) 2008-2015 Allwinner Technology Co. Ltd.
* Copyright (c) 2014 BZ Chen <bzchen@allwinnertech.com>
*
* This file is part of Cedarx.
*
* Cedarx is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed "as is" WITHOUT ANY WARRANTY of any
* kind, whether express or implied; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
#include "log.h"
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <fcntl.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <time.h>
#include <sys/mman.h>
#include <pthread.h>
#include "ve.h"
#include <sys/ioctl.h>
//#include "vdecoder.h"
#include "cedardev_api.h"
#define PAGE_OFFSET (0xc0000000) // from kernel
#define PAGE_SIZE (4096)
static pthread_mutex_t gVeMutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t gVeRegisterMutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t gVeDecoderMutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t gVeEncoderMutex = PTHREAD_MUTEX_INITIALIZER;
#if (CONFIG_MEMORY_DRIVER == OPTION_MEMORY_DRIVER_VE)
static pthread_mutex_t gVeMemoryMutex = PTHREAD_MUTEX_INITIALIZER;
struct MemChunkS
{
unsigned int physAddr;
int size;
void *virtAddr;
struct MemChunkS *next;
};
struct MemChunkS firstMemchunk;
struct CedarvCacheRangeS
{
long start;
long end;
};
#endif
static int gVeRefCount = 0;
int gVeDriverFd = -1;
struct cedarv_env_infomation gVeEnvironmentInfo;
#define VE_MODE_SELECT 0x00
#define VE_RESET 0x04
static int gNomalEncRefCount = 0;
static int gPerfEncRefCount = 0;
int VeInitialize(void)
{
pthread_mutex_lock(&gVeMutex);
if(gVeRefCount == 0)
{
//* open Ve driver.
gVeDriverFd = open("/dev/cedar_dev", O_RDWR);
if(gVeDriverFd < 0)
{
loge("open /dev/cedar_dev fail.");
pthread_mutex_unlock(&gVeMutex);
return -1;
}
//* set ve reference count to zero.
//* we must reset it to zero to fix refcount error when process crash.
ioctl(gVeDriverFd, IOCTL_SET_REFCOUNT, 0);
//* request ve.
ioctl(gVeDriverFd, IOCTL_ENGINE_REQ, 0);
//* map registers.
ioctl(gVeDriverFd, IOCTL_GET_ENV_INFO, (unsigned long)&gVeEnvironmentInfo);
gVeEnvironmentInfo.address_macc = (unsigned int)mmap(NULL,
2048,
PROT_READ | PROT_WRITE, MAP_SHARED,
gVeDriverFd,
(int)gVeEnvironmentInfo.address_macc);
//* reset ve.
VeReset();
#if (CONFIG_MEMORY_DRIVER == OPTION_MEMORY_DRIVER_VE)
firstMemchunk.physAddr = gVeEnvironmentInfo.phymem_start - PAGE_OFFSET;
firstMemchunk.size = gVeEnvironmentInfo.phymem_total_size;
logd("xxxxxxx firstMemchunk.size(%d) '0x%x'", firstMemchunk.size, firstMemchunk.physAddr);
#endif
}
gVeRefCount++;
pthread_mutex_unlock(&gVeMutex);
return 0;
}
void VeRelease(void)
{
pthread_mutex_lock(&gVeMutex);
if(gVeRefCount <= 0)
{
loge("invalid status, gVeRefCount=%d at AdpaterRelease", gVeRefCount);
pthread_mutex_unlock(&gVeMutex);
return;
}
gVeRefCount--;
if(gVeRefCount == 0)
{
if(gVeDriverFd != -1)
{
ioctl(gVeDriverFd, IOCTL_ENGINE_REL, 0);
munmap((void *)gVeEnvironmentInfo.address_macc, 2048);
close(gVeDriverFd);
gVeDriverFd = -1;
}
}
pthread_mutex_unlock(&gVeMutex);
return;
}
int VeLock(void)
{
return pthread_mutex_lock(&gVeDecoderMutex);
}
void VeUnLock(void)
{
pthread_mutex_unlock(&gVeDecoderMutex);
}
int VeEncoderLock(void)
{
return VeLock();
}
void VeEncoderUnLock(void)
{
VeUnLock();
}
void VeSetDramType()
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
switch (VeGetDramType())
{
case DDRTYPE_DDR1_16BITS:
pVeModeSelect->ddr_mode = 0;
break;
case DDRTYPE_DDR1_32BITS:
case DDRTYPE_DDR2_16BITS:
pVeModeSelect->ddr_mode = 1;
break;
case DDRTYPE_DDR2_32BITS:
case DDRTYPE_DDR3_16BITS:
pVeModeSelect->ddr_mode = 2;
break;
case DDRTYPE_DDR3_32BITS:
case DDRTYPE_DDR3_64BITS:
pVeModeSelect->ddr_mode = 3;
pVeModeSelect->rec_wr_mode = 1;
break;
default:
break;
}
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeReset(void)
{
ioctl(gVeDriverFd, IOCTL_RESET_VE, 0);
VeSetDramType();
}
int VeWaitInterrupt(void)
{
int ret;
ret = ioctl(gVeDriverFd, IOCTL_WAIT_VE_DE, 1);
if(ret <= 0)
{
logw("wait ve interrupt timeout.");
return -1; //* wait ve interrupt fail.
}
else
return 0;
}
int VeWaitEncoderInterrupt(void)
{
int ret;
ret = ioctl(gVeDriverFd, IOCTL_WAIT_VE_EN, 1);
if(ret <= 0)
return -1; //* wait ve interrupt fail.
else
return 0;
}
void* VeGetRegisterBaseAddress(void)
{
return (void*)gVeEnvironmentInfo.address_macc;
}
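/* The IC version is read from the top 16 bits of VE register 0xf0; only valid after
 * VeInitialize() has mapped the register window. */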
unsigned int VeGetIcVersion()
{
if(gVeRefCount >0)
{
volatile unsigned int value;
value = *((unsigned int*)((char *)gVeEnvironmentInfo.address_macc + 0xf0));
return (value>>16);
}
else
{
loge("must call VeGetIcVersion(), affer VeInitialize");
return 0;
}
}
int VeGetDramType(void)
{
//* can we know memory type by some system api?
#if CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR1_16BITS
return DDRTYPE_DDR1_16BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR1_32BITS
return DDRTYPE_DDR1_32BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR2_16BITS
return DDRTYPE_DDR2_16BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR2_32BITS
return DDRTYPE_DDR2_32BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR3_16BITS
return DDRTYPE_DDR3_16BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR3_32BITS
return DDRTYPE_DDR3_32BITS;
#elif CONFIG_DRAM_INTERFACE == OPTION_DRAM_INTERFACE_DDR3_64BITS
return DDRTYPE_DDR3_64BITS;
#else
#error "invalid ddr type configuration."
#endif
}
int VeSetSpeed(int nSpeedMHz)
{
return ioctl(gVeDriverFd, IOCTL_SET_VE_FREQ, nSpeedMHz);
}
void VeEnableEncoder()
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
pVeModeSelect->mode = 11;
pVeModeSelect->enc_enable = 1;
pVeModeSelect->enc_isp_enable = 1;
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeDisableEncoder()
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
pVeModeSelect->mode = 0x7;
pVeModeSelect->enc_enable = 0;
pVeModeSelect->enc_isp_enable = 0;
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeEnableDecoder(enum VeRegionE region)
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
// pVeModeSelect->mode = nDecoderMode;
switch (region)
{
case VE_REGION_0:
pVeModeSelect->mode = 0;
break;
case VE_REGION_1:
pVeModeSelect->mode = 1; //* MPEG1/2/4 or JPEG decoder.
break;
case VE_REGION_2:
case VE_REGION_3:
default:
pVeModeSelect->mode = 0; //* MPEG1/2/4 or JPEG decoder.
break;
}
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeDisableDecoder()
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
pVeModeSelect->mode = 7;
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeDecoderWidthMode(int nWidth)
{
volatile vetop_reg_mode_sel_t* pVeModeSelect;
pthread_mutex_lock(&gVeRegisterMutex);
pVeModeSelect = (vetop_reg_mode_sel_t*)(gVeEnvironmentInfo.address_macc + VE_MODE_SELECT);
if(nWidth >= 4096)
{
pVeModeSelect->pic_width_more_2048 = 1;
}
else if(nWidth >= 2048)
{
pVeModeSelect->pic_width_more_2048 = 1;
}
else
{
pVeModeSelect->pic_width_more_2048 = 0;
}
pthread_mutex_unlock(&gVeRegisterMutex);
}
void VeResetDecoder()
{
VeReset();
}
void VeResetEncoder()
{
VeReset();
}
void VeInitEncoderPerformance(int nMode) //* 0: normal performance; 1. high performance
{
CEDARX_UNUSE(nMode);
}
void VeUninitEncoderPerformance(int nMode) //* 0: normal performance; 1. high performance
{
CEDARX_UNUSE(nMode);
}
//******************************************************************************************
//* for malloc from ve
#if (CONFIG_MEMORY_DRIVER == OPTION_MEMORY_DRIVER_VE)
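/* A simple best-fit allocator over the reserved VE physical memory region reported by the driver:
 * chunks are split on allocation, mmap()ed into user space, and merged back together in VeFree(). */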
void *VeMalloc(int size)
{
if(gVeDriverFd == -1)
{
loge("invalid fd.");
return NULL;
}
pthread_mutex_lock(&gVeMemoryMutex);
void *addr = NULL;
size = (size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1); /* align to 4096 */
struct MemChunkS *c, *bestChunk = NULL;
for (c = &firstMemchunk; c != NULL; c = c->next)
{
if(c->virtAddr == NULL && c->size >= size)
{
if(bestChunk == NULL || c->size < bestChunk->size)
{
bestChunk = c;
}
if(c->size == size)
{
break;
}
}
}
if(!bestChunk)
{
logw("no bestChunk");
goto out;
}
int leftSize = bestChunk->size - size;
addr = mmap(NULL,
size,
PROT_READ | PROT_WRITE,
MAP_SHARED,
gVeDriverFd,
bestChunk->physAddr+PAGE_OFFSET);
if(addr == MAP_FAILED)
{
loge("map failed.");
addr = NULL;
goto out;
}
bestChunk->virtAddr = addr;
bestChunk->size = size;
if(leftSize > 0)
{
c = malloc(sizeof(struct MemChunkS));
c->physAddr = bestChunk->physAddr + size;
c->size = leftSize;
c->virtAddr = NULL;
c->next = bestChunk->next;
bestChunk->next = c;
}
out:
pthread_mutex_unlock(&gVeMemoryMutex);
return addr;
}
void VeFree(void *ptr)
{
if(gVeDriverFd == -1 || ptr == NULL)
{
loge("fd(%d), ptr(%p).", gVeDriverFd, ptr);
return;
}
pthread_mutex_lock(&gVeMemoryMutex);
struct MemChunkS *c;
for(c = &firstMemchunk; c != NULL; c = c->next)
{
if(c->virtAddr == ptr)
{
munmap(ptr, c->size);
c->virtAddr = NULL;
break;
}
}
for(c = &firstMemchunk; c != NULL; c = c->next)
{
if(c->virtAddr == NULL)
{
while(c->next != NULL && c->next->virtAddr == NULL)
{
struct MemChunkS *n = c->next;
c->size += n->size;
c->next = n->next;
free(n);
}
}
}
pthread_mutex_unlock(&gVeMemoryMutex);
}
unsigned int VeVir2Phy(void *ptr)
{
if(gVeDriverFd == -1)
{
loge("invalid fd.");
return 0;
}
pthread_mutex_lock(&gVeMemoryMutex);
unsigned int addr = 0;
struct MemChunkS *c;
for(c = &firstMemchunk; c != NULL; c = c->next)
{
if(c->virtAddr == NULL)
continue;
if(c->virtAddr == ptr)
{
addr = c->physAddr;
break;
}
else if(ptr > c->virtAddr && ptr < (c->virtAddr + c->size))
{
addr = c->physAddr + (ptr - c->virtAddr);
break;
}
}
pthread_mutex_unlock(&gVeMemoryMutex);
return addr;
}
unsigned int VePhy2Vir(void *ptr) //*
{
unsigned int addrPhy = (unsigned int)ptr;
if(gVeDriverFd == -1)
{
loge("invalid fd.");
return 0;
}
pthread_mutex_lock(&gVeMemoryMutex);
unsigned int addr = 0;
struct MemChunkS *c;
for(c = &firstMemchunk; c != NULL; c = c->next)
{
if(c->physAddr == 0)
continue;
if(c->physAddr == addrPhy)
{
addr = (unsigned int)c->virtAddr;
break;
}
else if(addrPhy > c->physAddr && addrPhy < (c->physAddr + c->size))
{
addr = (unsigned int)c->virtAddr + (addrPhy - c->physAddr);
break;
}
}
pthread_mutex_unlock(&gVeMemoryMutex);
return addr;
}
void VeFlushCache(void *startAddr, int size)
{
if(gVeDriverFd == -1)
{
loge("invalid fd.");
return ;
}
struct CedarvCacheRangeS range =
{
.start = (int)startAddr,
.end = (int)(startAddr + size)
};
ioctl(gVeDriverFd, IOCTL_FLUSH_CACHE, (void*)(&range));
}
#endif
/*
* Cedarx framework.
* Copyright (c) 2008-2015 Allwinner Technology Co. Ltd.
* Copyright (c) 2014 BZ Chen <bzchen@allwinnertech.com>
*
* This file is part of Cedarx.
*
* Cedarx is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed "as is" WITHOUT ANY WARRANTY of any
* kind, whether express or implied; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
#ifndef VE_H
#define VE_H
#ifdef __cplusplus
extern "C" {
#endif
enum DRAMTYPE
{
DDRTYPE_DDR1_16BITS = 0,
DDRTYPE_DDR1_32BITS = 1,
DDRTYPE_DDR2_16BITS = 2,
DDRTYPE_DDR2_32BITS = 3,
DDRTYPE_DDR3_16BITS = 4,
DDRTYPE_DDR3_32BITS = 5,
DDRTYPE_DDR3_64BITS = 6,
DDRTYPE_MIN = DDRTYPE_DDR1_16BITS,
DDRTYPE_MAX = DDRTYPE_DDR3_64BITS,
};
enum VeRegionE
{
VE_REGION_INVALID,
VE_REGION_0, /* Mpeg1/2/4, mjpeg */
VE_REGION_1, /* H264,VP8 */
VE_REGION_2, /* VC-1 */
VE_REGION_3 /* H265 */
};
typedef struct VETOP_REG_MODE_SELECT
{
volatile unsigned int mode :4; //* 0: mpeg/jpeg, 1:h264
volatile unsigned int reserved0 :1;
volatile unsigned int jpg_dec_en :1; //
volatile unsigned int enc_isp_enable :1; //
volatile unsigned int enc_enable :1; //* H264 encoder enable.
volatile unsigned int read_counter_sel :1;
volatile unsigned int write_counter_sel :1;
volatile unsigned int decclkgen :1;
volatile unsigned int encclkgen :1;
volatile unsigned int reserved1 :1;
volatile unsigned int rabvline_spu_dis :1;
volatile unsigned int deblk_spu_dis :1;
volatile unsigned int mc_spu_dis :1;
volatile unsigned int ddr_mode :2; //* 00: 16-DDR1, 01: 32-DDR1 or DDR2, 10: 32-DDR2 or 16-DDR3, 11: 32-DDR3
volatile unsigned int reserved2 :1;
volatile unsigned int mbcntsel :1;
volatile unsigned int rec_wr_mode :1;
volatile unsigned int pic_width_more_2048 :1;
volatile unsigned int reserved3 :1;
volatile unsigned int reserved4 :9;
}vetop_reg_mode_sel_t;
//* 0x04
typedef struct VETOP_REG_RESET
{
volatile unsigned int reset :1;
volatile unsigned int reserved0 :3;
volatile unsigned int mem_sync_mask :1;
volatile unsigned int wdram_clr :1;
volatile unsigned int reserved1 :2;
volatile unsigned int write_dram_finish :1;
volatile unsigned int ve_sync_idle :1; //* this bit can be used to check the status of sync module before rest.
volatile unsigned int reserved2 :6;
volatile unsigned int decoder_reset :1; //* 1: reset assert, 0: reset de-assert.
volatile unsigned int dec_req_mask_enable :1; //* 1: mask, 0: pass.
// volatile unsigned int reserved3 :6;
volatile unsigned int dec_vebk_reset :1; //* 1: reset assert, 0: reset de-assert. used in decoder.
volatile unsigned int reserved3 :5;
volatile unsigned int encoder_reset :1; //* 1. reset assert, 0: reset de-assert.
volatile unsigned int enc_req_mask_enable :1; //* 1: mask, 0: pass.
volatile unsigned int reserved4 :6;
}vetop_reg_reset_t;
int VeInitialize(void);
void VeRelease(void);
int VeLock(void);
void VeUnLock(void);
int VeEncoderLock(void);
void VeEncoderUnLock(void);
void VeSetDramType();
void VeReset(void);
int VeWaitInterrupt(void);
int VeWaitEncoderInterrupt(void);
void* VeGetRegisterBaseAddress(void);
unsigned int VeGetIcVersion();
int VeGetDramType(void);
int VeSetSpeed(int nSpeedMHz);
void VeEnableEncoder();
void VeDisableEncoder();
void VeEnableDecoder(enum VeRegionE region);
void VeDisableDecoder();
void VeDecoderWidthMode(int nWidth);
void VeResetDecoder();
void VeResetEncoder();
void VeInitEncoderPerformance(int nMode);
void VeUninitEncoderPerformance(int nMode);
#if (CONFIG_MEMORY_DRIVER == OPTION_MEMORY_DRIVER_VE)
void *VeMalloc(int size);
void VeFree(void *ptr);
unsigned int VeVir2Phy(void *ptr);
unsigned int VePhy2Vir(void *ptr); //*
void VeFlushCache(void *startAddr, int size);
#endif
#ifdef __cplusplus
}
#endif
#endif
-----------------------------------
晕哥, where is camdroid's entry function? I can't find it, and after flashing the image to the board I can't tell where the project starts executing. Could you point me to it?
The entry function is in this file: camdroid/device/softwinner/tiger-cdr/app/ccdr/include/window/MainWindow.h (a usage sketch follows the header below)
#ifndef _MAINWINDOW_H
#define _MAINWINDOW_H
#include <ProcessState.h>
#include <IPCThreadState.h>
#include <minigui/common.h>
#include <minigui/minigui.h>
#include <minigui/gdi.h>
#include <minigui/window.h>
#include "windows.h"
#include "cdr_message.h"
#include "keyEvent.h"
#include "PowerManager.h"
#include "EventManager.h"
/* MainWindow */
#define ID_TIMER_KEY 100
#define CDRMain \
MiniGUIAppMain (int args, const char* argv[], CdrMain*); \
int main_entry (int args, const char* argv[]) \
{ \
sys_log_init(); \
sys_log("main entry\n"); \
int iRet = 0; \
CdrMain *cdrMain = new CdrMain(); \
cdrMain->initPreview(NULL); \
if (InitGUI (args, argv) != 0) { \
return 1; \
} \
iRet = MiniGUIAppMain (args, argv, cdrMain); \
TerminateGUI (iRet); \
return iRet; \
} \
int MiniGUIAppMain
#endif
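So the app source that includes this header supplies the body of MiniGUIAppMain through the CDRMain macro. A hypothetical usage (everything inside the function body is illustrative, not taken from the camdroid sources) would look roughly like this:
#include "MainWindow.h"
CDRMain (int args, const char* argv[], CdrMain *cdrMain)
{
    /* By the time this runs, the macro's main_entry() has already called sys_log_init(),
       created the CdrMain object, and run initPreview() and InitGUI(). */
    MSG msg;
    /* illustrative only: a real app would create its main window here and pump the MiniGUI message loop */
    while (GetMessage(&msg, HWND_DESKTOP)) {
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }
    return 0;
}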
An analysis of how MiniGUI works: https://blog.csdn.net/lieye_leaves/article/details/8947165
-----------------------------------
I registered an account just to support 晕哥. A question: when I build camdroid I get
arm-linux-gnueabi-gcc: error trying to exec 'cc1': execvp: No such file or directory
Is my compiler version wrong? Which compiler version does this build environment need?
Your PATH is probably not set up correctly: https://blog.csdn.net/hello404/article/details/17099903
It could also be a 32-bit/64-bit Linux compatibility issue (e.g. a 32-bit prebuilt cc1 missing its 32-bit runtime libraries on a 64-bit host).