#include <stdbool.h>
#include <time.h>
/* ctype.h and pthread.h are used unconditionally (toupper, pthread_mutex), so keep them outside the kernel-version guard. */
#include <ctype.h>
#include <pthread.h>
#include <linux/version.h>
#include <linux/videodev2.h>

#if LINUX_VERSION_CODE > KERNEL_VERSION (3, 0, 36)
#include <linux/uvcvideo.h>
#endif
|
#include "ImageProc.h"
|
#include "libyuv.h"
|
#include "charencode.h"
|
#include "jpeg.h"
|
#include "watermark.h"
|
#include "SonixCamera/XUOptDev.h"
|
#include "SonixCamera/SonixCamera.h"
|
|
#define LIBENC_ARRAY_ELEMS(a) (sizeof(a) / sizeof(a[0]))
|
|
#define XU_SONIX_SYS_ASIC_RW 0x01
|
|
#if LINUX_VERSION_CODE > KERNEL_VERSION (3, 0, 36)
|
#define UVC_SET_CUR 0x01
|
#define UVC_GET_CUR 0x81
|
#define UVCIOC_CTRL_MAP _IOWR('u', 0x20, struct uvc_xu_control_mapping)
|
#define UVCIOC_CTRL_QUERY _IOWR('u', 0x21, struct uvc_xu_control_query)
|
#else
|
#define UVCIOC_CTRL_ADD _IOW('U', 1, struct uvc_xu_control_info)
|
#define UVCIOC_CTRL_MAP _IOWR('U', 2, struct uvc_xu_control_mapping)
|
#define UVCIOC_CTRL_GET _IOWR('U', 3, struct uvc_xu_control)
|
#define UVCIOC_CTRL_SET _IOW('U', 4, struct uvc_xu_control)
|
#endif
|
|
struct buffer {
|
void * start;
|
size_t length;
|
};
|
|
static char dev_name[32];
|
static int fd = -1;
|
struct buffer * buffers = NULL;
|
static unsigned int n_buffers = 0;
|
|
static int *rgb = NULL;
|
static int *ybuf = NULL;
|
|
static int yuv_tbl_ready=0;
|
static int y1192_tbl[256];
|
static int v1634_tbl[256];
|
static int v833_tbl[256];
|
static int u400_tbl[256];
|
static int u2066_tbl[256];
|
|
static int IMG_WIDTH = 640;
|
static int IMG_HEIGHT = 480;
|
|
static uint32_t img_pixelformat = 0;
|
static int camera_version = 0;
|
static char camera_true_version[64] = {0};
|
static char camera_sn[32] = {0};
|
static char camera_name[32] = {0};
|
static int camera_index = 0;
|
|
static JavaVM *sg_jvm = NULL;
|
static JNIEnv *sg_jenv = NULL;
|
static jobject sg_obj = NULL;
|
|
static struct camera_info{
|
char name[32];
|
int resolution_num;
|
int resolution[20][2];
|
}CameraInfo[16];
|
|
static int CameraInfoNum = 0;
|
|
static bool wm_enable = false;
|
|
static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
|
static int init_module = 0;
|
static bool cam_inited = false;
|
|
static void mjpegToRGBA(const uint8_t *data, int length);
|
|
static int errnoexit(const char *s);
|
static int xioctl(int fd, int request, void *arg);
|
|
static int opendevice(int videoid);
|
static int initdevice(int width, int height, const char *name, bool define_camera);
|
static int initmmap(void);
|
static int startcapturing(void);
|
|
static int readframeonce(void);
|
static int readframe(void);
|
static void processimage (const void *p, int length);
|
|
static int stopcapturing(void);
|
static int uninitdevice(void);
|
static int closedevice(void);
|
|
static int handshake(void);
|
|
static void yuyv422toABGRY(unsigned char *src);
|
|
static int errnoexit(const char *s) {
|
LOGE("%s error %d, %s", s, errno, strerror (errno));
|
return ERROR_LOCAL;
|
}
|
|
static int xioctl(int fd, int request, void *arg) {
|
int r;
|
// LOGE("xioctl(int fd, int request, void *arg)");
|
do
|
r = ioctl(fd, request, arg);
|
while (-1 == r && EINTR == errno);
|
|
return r;
|
}
|
|
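/*
 * opendevice: stat()s /dev/video<i>, reads the firmware version through the
 * Sonix vendor library while the node is still free, then opens the node with
 * O_RDWR | O_NONBLOCK for V4L2 streaming.
 */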
static int opendevice(int i) {
struct stat st;

snprintf(dev_name, sizeof(dev_name), "/dev/video%d", i);

LOGI("Trying to open camera %s...", dev_name);

if (-1 == stat(dev_name, &st)) {
LOGE("Cannot identify '%s': %d, %s", dev_name, errno, strerror(errno));
return ERROR_LOCAL;
}

if (!S_ISCHR(st.st_mode)) {
LOGE("%s is not a character device", dev_name);
return ERROR_LOCAL;
}
memset(camera_true_version, 0, sizeof(camera_true_version));
if (SonixCam_Init(i)) {
char buf[100] = {0};
if (SonixCam_GetFwVersion(buf, 100, true)) {
LOGI("fw version=%s", buf);
snprintf(camera_true_version, sizeof(camera_true_version), "%s", buf);
}
SonixCam_UnInit();
}

fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);

if (-1 == fd) {
LOGE("Open Fail '%s': %d, %s", dev_name, errno, strerror(errno));
return ERROR_LOCAL;
}
LOGI("Open Success fd: %d", fd);
return SUCCESS_LOCAL;
}
|
|
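/*
 * initdevice: negotiation sequence for the capture node.
 *   1. VIDIOC_QUERYCAP      - verify capture + streaming support (and, optionally, the device name).
 *   2. optional handshake() - vendor challenge/response for "ay" cameras.
 *   3. VIDIOC_ENUM_FMT / VIDIOC_ENUM_FRAMESIZES - cache formats and sizes in CameraInfo[].
 *   4. choose MJPEG (preferred) or YUYV and the resolution closest to the request.
 *   5. VIDIOC_S_FMT (only if the current format differs), then initmmap().
 */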
static int initdevice(int width, int height, const char *name, bool ay_camera) {
|
LOGE("initdevice width %d height %d", width, height);
|
|
struct v4l2_capability cap;
|
struct v4l2_cropcap cropcap;
|
struct v4l2_crop crop;
|
struct v4l2_format fmt;
|
unsigned int min;
|
|
memset(camera_name, 0, sizeof(camera_name));
|
memset(camera_sn, 0, sizeof(camera_sn));
|
camera_version = -1;
|
|
if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) {
if (EINVAL == errno) {
LOGE("%s is not a V4L2 device", dev_name);
return ERROR_LOCAL;
} else {
return errnoexit("VIDIOC_QUERYCAP");
}
}
LOGE("initdevice: checking device capabilities...");
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
LOGE("%s is not a video capture device", dev_name);
return ERROR_LOCAL;
}

if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
LOGE("%s does not support streaming i/o", dev_name);
return ERROR_LOCAL;
}

LOGI("Driver = %s, DevName = %s, path = %s, aycam = %d", cap.driver, cap.card, cap.bus_info, ay_camera);
|
|
if (name != NULL) {
LOGI("expected name: %s", name);
if (strlen(name) != strlen((const char *)cap.card)) {
LOGE("name does not match");
return ERROR_USB_NAME;
}
for (int x = 0, y = 0; x < strlen(name) && y < strlen((const char *)cap.card); ++x, ++y) {
if (toupper(name[x]) != toupper(cap.card[y])) {
LOGE("name does not match");
return ERROR_USB_NAME;
}
}
}
|
|
if (ay_camera) {
|
if (handshake() != 0) {
|
LOGE("handshake fail");
|
return ERROR_AY_CAMERA;
|
}
|
}
|
|
struct v4l2_fmtdesc fmtdesc;
struct v4l2_frmsizeenum frmsize;
uint32_t support_pixelformat[16] = {0};
int support_pixelformat_num = 0;

fmtdesc.index = 0;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

CameraInfoNum = 0;
CLEAR(CameraInfo);
|
|
LOGE("Support format:");
|
while(xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1) {
if (fmtdesc.index >= LIBENC_ARRAY_ELEMS(CameraInfo)) {
break;
}
LOGE("%d.%s 0x%x flag: %d", fmtdesc.index + 1, fmtdesc.description, fmtdesc.pixelformat, fmtdesc.flags);

strcpy(CameraInfo[fmtdesc.index].name, (const char *)fmtdesc.description);

frmsize.pixel_format = fmtdesc.pixelformat;
frmsize.index = 0;
|
|
while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) != -1) {
if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
LOGE("A %dx%d\n", frmsize.discrete.width, frmsize.discrete.height);

if (frmsize.index < LIBENC_ARRAY_ELEMS(CameraInfo[0].resolution)) {
CameraInfo[fmtdesc.index].resolution[frmsize.index][0] = frmsize.discrete.width;
CameraInfo[fmtdesc.index].resolution[frmsize.index][1] = frmsize.discrete.height;
CameraInfo[fmtdesc.index].resolution_num++;
}
} else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
LOGE("B stepwise %ux%u - %ux%u\n", frmsize.stepwise.min_width, frmsize.stepwise.min_height, frmsize.stepwise.max_width, frmsize.stepwise.max_height);
}
frmsize.index++;
}
|
|
if (frmsize.index == 0) {
|
return ERROR_LOCAL;
|
}
|
|
support_pixelformat[support_pixelformat_num++] = fmtdesc.pixelformat;
|
fmtdesc.index++;
|
|
CameraInfoNum++;
|
}
|
|
if (fmtdesc.index == 0) {
|
return ERROR_LOCAL;
|
}
|
|
img_pixelformat = 0;
|
int pfidx = 0;
|
|
// Both MJPEG and YUYV are supported; prefer MJPEG when the camera offers it.
|
for (int i = 0; i < support_pixelformat_num; ++i) {
|
if (support_pixelformat[i] == V4L2_PIX_FMT_MJPEG) {
|
img_pixelformat = V4L2_PIX_FMT_MJPEG;
|
pfidx = i;
|
break;
|
} else if (support_pixelformat[i] == V4L2_PIX_FMT_YUYV) {
|
img_pixelformat = V4L2_PIX_FMT_YUYV;
|
pfidx = i;
|
}
|
}
|
|
if (img_pixelformat == 0) {
|
LOGI("None of pixelformat support");
|
return ERROR_LOCAL;
|
}
|
|
// Pick the supported resolution (for the chosen format) closest to the requested one.
int optimal = 0;

for (int i = 1; i < CameraInfo[pfidx].resolution_num; ++i) {
if ((abs(width - CameraInfo[pfidx].resolution[i][0]) < abs(width - CameraInfo[pfidx].resolution[optimal][0])) &&
(abs(height - CameraInfo[pfidx].resolution[i][1]) < abs(height - CameraInfo[pfidx].resolution[optimal][1]))) {
optimal = i;
}
}

LOGE("Selected: img_pixelformat %d pfidx %d optimal %d %dx%d", img_pixelformat, pfidx, optimal, CameraInfo[pfidx].resolution[optimal][0], CameraInfo[pfidx].resolution[optimal][1]);
|
/*
|
CLEAR(cropcap);
|
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
LOGE("initdevice 设置video捕获模式...");
|
if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) {
|
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
crop.c = cropcap.defrect;
|
|
if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) {
|
LOGE("initdevice VIDIOC_S_CROP fail");
|
switch (errno) {
|
case EINVAL:
|
break;
|
default:
|
break;
|
}
|
}
|
} else {
|
LOGE("initdevice VIDIOC_CROPCAP fail");
|
}
|
LOGE("initdevice 设置video捕获模式完毕");
|
*/
|
CLEAR(fmt);
|
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
|
IMG_WIDTH = CameraInfo[pfidx].resolution[optimal][0];

IMG_HEIGHT = CameraInfo[pfidx].resolution[optimal][1];
|
|
fmt.fmt.pix.width = IMG_WIDTH;
|
fmt.fmt.pix.height = IMG_HEIGHT;
|
|
fmt.fmt.pix.pixelformat = img_pixelformat;
|
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
|
|
struct v4l2_format fmt2;
|
CLEAR(fmt2);
|
fmt2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
if(-1 == xioctl(fd, VIDIOC_G_FMT, &fmt2)) {
|
LOGI("VIDIOC_G_FMT error");
|
} else {
|
if (fmt2.fmt.pix.width == IMG_WIDTH && fmt2.fmt.pix.height == IMG_HEIGHT && fmt2.fmt.pix.pixelformat == img_pixelformat) {
|
LOGI("无需设置!!");
|
} else {
|
if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt))
|
return errnoexit("VIDIOC_S_FMT");
|
|
CLEAR(fmt2);
|
fmt2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
if(-1 == xioctl(fd, VIDIOC_G_FMT, &fmt2)) {
|
LOGI("VIDIOC_G_FMT error");
|
}
|
|
img_pixelformat = fmt2.fmt.pix.pixelformat;
|
|
LOGI("复查:W:%d, H:%d, %s", fmt2.fmt.pix.width, fmt2.fmt.pix.height, V4L2_PIX_FMT_MJPEG == fmt2.fmt.pix.pixelformat? "V4L2_PIX_FMT_MJPEG" : "V4L2_PIX_FMT_YUYV");
|
}
|
}
|
|
min = fmt.fmt.pix.width * 2;
|
if (fmt.fmt.pix.bytesperline < min)
|
fmt.fmt.pix.bytesperline = min;
|
min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
|
if (fmt.fmt.pix.sizeimage < min)
|
fmt.fmt.pix.sizeimage = min;
|
|
LOGE("initdevice 设置内存映射...");
|
|
return initmmap();
|
}
|
#if 0
|
int XU_Get_Cur(__u8 xu_unit, __u8 xu_selector, __u16 xu_size, __u8 *xu_data)
|
{
|
int err=0;
|
#if LINUX_VERSION_CODE > KERNEL_VERSION (3, 0, 36)
|
struct uvc_xu_control_query xctrl;
|
xctrl.unit = xu_unit;
|
xctrl.selector = xu_selector;
|
xctrl.query = UVC_GET_CUR;
|
xctrl.size = xu_size;
|
xctrl.data = xu_data;
|
err=ioctl(fd, UVCIOC_CTRL_QUERY, &xctrl);
|
#else
|
struct uvc_xu_control xctrl;
|
xctrl.unit = xu_unit;
|
xctrl.selector = xu_selector;
|
xctrl.size = xu_size;
|
xctrl.data = xu_data;
|
err=ioctl(fd, UVCIOC_CTRL_GET, &xctrl);
|
#endif
|
return err;
|
}
|
|
int XU_Set_Cur(__u8 xu_unit, __u8 xu_selector, __u16 xu_size, __u8 *xu_data)
|
{
|
int err=0;
|
#if LINUX_VERSION_CODE > KERNEL_VERSION (3, 0, 36)
|
struct uvc_xu_control_query xctrl;
|
xctrl.unit = xu_unit;
|
xctrl.selector = xu_selector;
|
xctrl.query = UVC_SET_CUR;
|
xctrl.size = xu_size;
|
xctrl.data = xu_data;
|
err=ioctl(fd, UVCIOC_CTRL_QUERY, &xctrl);
|
#else
|
struct uvc_xu_control xctrl;
|
xctrl.unit = xu_unit;
|
xctrl.selector = xu_selector;
|
xctrl.size = xu_size;
|
xctrl.data = xu_data;
|
err=ioctl(fd, UVCIOC_CTRL_SET, &xctrl);
|
#endif
|
return err;
|
}
|
|
static bool XU_ReadFromASIC(uint16_t addr, uint8_t *pValue)
|
{
|
int ret = 0;
|
__u8 ctrldata[4];
|
|
//uvc_xu_control parmeters
|
__u8 xu_unit= 3;
|
__u8 xu_selector= XU_SONIX_SYS_ASIC_RW;
|
__u16 xu_size= 4;
|
__u8 *xu_data= ctrldata;
|
|
xu_data[0] = (addr & 0xFF);
|
xu_data[1] = ((addr >> 8) & 0xFF);
|
xu_data[2] = 0x0;
|
xu_data[3] = 0xFF; /* Dummy Write */
|
|
/* Dummy Write */
|
if ((ret=XU_Set_Cur(xu_unit, xu_selector, xu_size, xu_data)) < 0)
|
{
|
LOGI("ioctl(UVCIOC_CTRL_SET) FAILED (%i) \n",ret);
|
//if(ret==EINVAL) printf("Invalid arguments\n");
|
return false;
|
}
|
|
/* Asic Read */
|
xu_data[3] = 0x00;
|
if ((ret=XU_Get_Cur(xu_unit, xu_selector, xu_size, xu_data)) < 0)
|
{
|
LOGI("ioctl(UVCIOC_CTRL_GET) FAILED (%i)\n",ret);
|
//if(ret==EINVAL) printf("Invalid arguments\n");
|
return false;
|
}
|
*pValue = xu_data[2];
|
if(ret < 0)
|
return false;
|
return true;
|
}
|
|
static bool XU_WriteToASIC(uint16_t addr, uint8_t value)
|
{
|
int ret = 0;
|
__u8 ctrldata[4];
|
|
//uvc_xu_control parmeters
|
__u8 xu_unit= 3;
|
__u8 xu_selector= XU_SONIX_SYS_ASIC_RW;
|
__u16 xu_size= 4;
|
__u8 *xu_data= ctrldata;
|
|
xu_data[0] = (addr & 0xFF); /* Addr Low */
|
xu_data[1] = ((addr >> 8) & 0xFF); /* Addr High */
|
xu_data[2] = value;
|
xu_data[3] = 0x0; /* Normal Write */
|
|
/* Normal Write */
|
if ((ret=XU_Set_Cur(xu_unit, xu_selector, xu_size, xu_data)) < 0)
|
{
|
LOGI("ioctl(UVCIOC_CTRL_SET) FAILED (%i) \n",ret);
|
//if(ret==EINVAL) printf("Invalid arguments\n");
|
return false;
|
}
|
|
if(ret < 0)
|
return false;
|
return true;
|
}
|
#endif
|
|
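/*
 * Camencrypt: XORs the buffer with a keystream derived from the seed CamCrpt.
 * The keystream is a simple linear-congruential generator (Key = IA1 * (Key % M1) + IC1)
 * whose constants are themselves derived from the seed; the same call both
 * "encrypts" and "decrypts" since XOR is its own inverse.
 */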
static void Camencrypt(uint8_t *buffer,uint16_t size ,uint32_t CamCrpt)
|
{
|
uint32_t CamKey=0,CamM1=0,CamIA1=0,CamIC1=0;
|
uint16_t idx = 0;
|
uint32_t Key_0;
|
|
CamKey=CamCrpt*0x1398+0x1F678632;
|
CamM1=(CamCrpt%0x693E)+0x91305EA3;
|
CamIA1=CamCrpt*35-0x98DA63CC;
|
CamIC1=CamCrpt*17+0xB01A4967;
|
if(CamKey == 0 ) Key_0= 1;
|
else Key_0= CamKey;
|
while( idx < size )
|
{
|
Key_0= CamIA1 * (Key_0% CamM1 ) + CamIC1;
|
buffer[idx++] ^= (uint8_t)((( Key_0>>20)+0x9e)&0xFF);
|
}
|
}
|
|
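/*
 * handshake: challenge/response against the camera ASIC over the UVC extension unit.
 *   1. Build a random 11-byte payload and a 32-bit seed, encrypt the payload with
 *      Camencrypt(), and XOR the whole 16-byte command with comm_xor_code_1.
 *   2. Read the 4-byte BCD firmware version from ASIC registers 0x0474..0x0477.
 *   3. Write the command to registers 0x0CFA.., wait, read the reply back, XOR it
 *      with comm_xor_code_2 and decrypt it with the echoed seed.
 *   4. Accept only if the seed matches, the status byte is 0x8A and the payload
 *      starts with the expected serial pattern; camera_sn is then filled in.
 * Returns 0 on success, -1 on a protocol mismatch, -2 on an I/O error.
 */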
static int handshake(void)
|
{
|
const uint8_t comm_xor_code_1[8]= {0x98,0x32,0xa0,0xaa,0x02,0x0d,0x09,0xa9};
|
const uint8_t comm_xor_code_2[8]= {0x65,0x89,0xA3,0xEE,0x69,0x8D,0x06,0xE2};
|
|
uint8_t cmd[16] = {0};
|
uint32_t seed, seed_client;
|
|
uint8_t op[11] = {0};
|
|
srand(time(NULL));
|
|
for (int i = 0; i < sizeof(op); ++i) {
|
do {
|
op[i] = rand();
|
} while (op[i] == 0 || op[i] == 0xFF);
|
}
|
seed = (op[0]<<24)+(op[1]<<16)+(op[2]<<8)+op[3];
|
|
// for (int i = 0; i < sizeof(op); ++i) {
|
// LOGI("初始 %d - 0x%02X", i, op[i]);
|
// }
|
|
Camencrypt(op, sizeof(op), seed);
|
|
// for (int i = 0; i < sizeof(op); ++i) {
|
// LOGI("Camencrypt后 %d - 0x%02X", i, op[i]);
|
// }
|
|
cmd[0] = 0x0A;
|
cmd[1] = (seed>>24)&0xFF;
|
cmd[2] = (seed>>16)&0xFF;
|
cmd[3] = (seed>>8)&0xFF;
|
cmd[4] = seed&0xFF;
|
memcpy(cmd+5, op, sizeof(op));
|
|
// for (int i = 0; i < sizeof(cmd); ++i) {
|
// LOGI("CMD %d - 0x%02X", i, cmd[i]);
|
// }
|
|
for (int i = 0; i < sizeof(cmd); ++i) {
|
if (i < sizeof(comm_xor_code_1)) {
|
cmd[i] ^= comm_xor_code_1[i];
|
} else {
|
cmd[i] ^= comm_xor_code_1[i-sizeof(comm_xor_code_1)];
|
}
|
}
|
|
// for (int i = 0; i < sizeof(cmd); ++i) {
|
// LOGI("发送的CMD %d - 0x%02X", i, cmd[i]);
|
// }
|
|
uint8_t ver[4];
|
for (int i = 0; i < 4; ++i) {
|
if (!XU_ReadFromASIC(0x0474 + i, ver + i))
|
return -2;
|
|
// LOGI("版本 0x%02X", ver[i]);
|
}
|
|
camera_version = 0;
|
for (int i = 0; i < 4; ++i) {
|
camera_version *= 10;
|
camera_version += (ver[i]>>4)&0x0F;
|
camera_version *= 10;
|
camera_version += ver[i]&0x0F;
|
}
|
// LOGI("版本 %ld", camera_version);
|
|
for (int i = 0; i < 16; ++i) {
|
uint8_t dat = 0;
|
if (!XU_ReadFromASIC(0x0CFA + i, &dat))
|
return -2;
|
|
// LOGI("首次读出的<%d>: 0x%02X", i, dat);
|
//if (dat != 0xFF)
|
// return -1;
|
}
|
////
|
// LOGI("写入数据");
|
for (int i = 1; i < sizeof(cmd); ++i) {
|
if (!XU_WriteToASIC(0x0CFA + i, cmd[i])) {
|
return -2;
|
}
|
}
|
if (!XU_WriteToASIC(0x0CFA, cmd[0])) {
|
return -2;
|
}
|
|
usleep(100000);
|
|
// LOGI("读取数据");
|
for (int i = 0; i < sizeof(cmd); ++i) {
|
if (!XU_ReadFromASIC(0x0CFA + i, cmd + i)) {
|
return -2;
|
}
|
}
|
|
// for (int i = 0; i < sizeof(cmd); ++i) {
|
// LOGI("读出的CMD %d - 0x%02X", i, cmd[i]);
|
// }
|
|
for (int i = 0; i < sizeof(cmd); ++i) {
|
if (i < sizeof(comm_xor_code_2)) {
|
cmd[i] ^= comm_xor_code_2[i];
|
} else {
|
cmd[i] ^= comm_xor_code_2[i-sizeof(comm_xor_code_2)];
|
}
|
}
|
|
// for (int i = 0; i < sizeof(cmd); ++i) {
|
// LOGI("异或后 %d - 0x%02X", i, cmd[i]);
|
// }
|
|
memcpy(op, cmd + 5, 11);
|
seed_client = (cmd[1]<<24)+(cmd[2]<<16)+(cmd[3]<<8)+cmd[4];
|
|
if (seed != seed_client || cmd[0] != 0x8A) {
|
return -1;
|
}
|
|
Camencrypt(op, sizeof(op), seed_client);
|
|
// for (int i = 0; i < sizeof(op); ++i) {
|
// LOGI("解密后 %d - 0x%02X", i, op[i]);
|
// }
|
|
const uint8_t cam_sn[] = {0x01, 0x09, 0x09, 0x04, 0x01, 0x01, 0x02, 0x02};
|
|
if (!memcmp(op, cam_sn, sizeof(cam_sn))) {
|
// LOGI("握手成功");
|
strcpy(camera_sn, "0109090401010202");
|
return 0;
|
}
|
// LOGI("握手失败");
|
return -1;
|
}
|
|
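/*
 * initmmap: asks the driver for 4 MMAP buffers (VIDIOC_REQBUFS), queries each
 * one (VIDIOC_QUERYBUF) and maps it into this process with mmap(). At least
 * two buffers are required for streaming to work.
 */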
static int initmmap(void) {
|
struct v4l2_requestbuffers req;
|
|
CLEAR(req);
|
|
req.count = 4;
|
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
req.memory = V4L2_MEMORY_MMAP;
|
|
if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) {
|
if (EINVAL == errno) {
|
LOGE("%s does not support memory mapping", dev_name);
|
return ERROR_LOCAL;
|
} else {
|
return errnoexit("VIDIOC_REQBUFS");
|
}
|
}
|
LOGE("initdevice initmmap calloc = %d", req.count*sizeof(*buffers));
|
if (req.count < 2) {
|
LOGE("Insufficient buffer memory on %s", dev_name);
|
return ERROR_LOCAL;
|
}
|
|
buffers = calloc(req.count, sizeof(*buffers));
|
|
if (!buffers) {
|
LOGE("Out of memory");
|
return ERROR_LOCAL;
|
}
|
|
for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
|
struct v4l2_buffer buf;
|
|
CLEAR(buf);
|
|
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
buf.memory = V4L2_MEMORY_MMAP;
|
buf.index = n_buffers;
|
|
if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf))
|
return errnoexit("VIDIOC_QUERYBUF");
|
|
buffers[n_buffers].length = buf.length;
|
|
LOGI("buff[%d] length = %ld offset = %ld", n_buffers, buf.length, buf.m.offset);
|
|
buffers[n_buffers].start = mmap(NULL, buf.length,
|
PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
|
LOGI("buff[%d] start = %lu", n_buffers, (unsigned long)buffers[n_buffers].start);
|
|
if (MAP_FAILED == buffers[n_buffers].start)
|
return errnoexit("mmap");
|
}
|
|
LOGI("initmmap OK");
|
|
return SUCCESS_LOCAL;
|
}
|
|
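/*
 * startcapturing: queues every mapped buffer (VIDIOC_QBUF) and starts the
 * stream with VIDIOC_STREAMON.
 */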
static int startcapturing(void) {
|
unsigned int i;
|
enum v4l2_buf_type type;
|
|
for (i = 0; i < n_buffers; ++i) {
|
struct v4l2_buffer buf;
|
|
CLEAR(buf);
|
|
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
buf.memory = V4L2_MEMORY_MMAP;
|
buf.index = i;
|
|
if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
|
return errnoexit("VIDIOC_QBUF");
|
}
|
LOGE("startcapturing startcapturing");
|
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
|
if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
|
return errnoexit("VIDIOC_STREAMON");
|
|
return SUCCESS_LOCAL;
|
}
|
|
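/*
 * readframeonce: waits up to 2 s for the fd to become readable (select), then
 * tries readframe(); failed attempts are retried up to 10 times before the
 * error is propagated to the caller.
 */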
static int readframeonce(void) {
|
int err_cnt = 0;
|
|
if (!cam_inited) {
|
return ERROR_LOCAL;
|
}
|
|
for (;;) {
|
fd_set fds;
|
struct timeval tv;
|
int r;
|
|
FD_ZERO(&fds);
|
FD_SET(fd, &fds);
|
|
tv.tv_sec = 2;
|
tv.tv_usec = 0;
|
|
r = select(fd + 1, &fds, NULL, NULL, &tv);
|
|
if (-1 == r) {
|
if (EINTR == errno)
|
continue;
|
|
return errnoexit("select");
|
}
|
|
if (0 == r) {
|
LOGE("select timeout");
|
|
return ERROR_LOCAL;
|
|
}
|
// handshake();
|
r = readframe();
|
|
if (r == 1)
|
break;
|
else {
|
err_cnt++;
|
if (err_cnt >= 10) {
|
return r;
|
}
|
}
|
}
|
|
return SUCCESS_LOCAL;
|
}
|
|
static void processimage(const void *p, int length) {
|
if (img_pixelformat == V4L2_PIX_FMT_YUYV)
|
yuyv422toABGRY((unsigned char *) p);
|
else if (img_pixelformat == V4L2_PIX_FMT_MJPEG) {
|
mjpegToRGBA((unsigned char *) p, length);
|
}
|
|
if (wm_enable) {
|
AddWatermark((uint8_t *)rgb);
|
}
|
}
|
|
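/*
 * readframe: dequeues one filled buffer (VIDIOC_DQBUF), converts it into the
 * global rgb buffer via processimage(), and requeues it (VIDIOC_QBUF).
 * Returns 1 on success, 0 on EAGAIN (no frame ready yet).
 */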
static int readframe(void) {
|
struct v4l2_buffer buf;
|
unsigned int i;
|
|
CLEAR(buf);
|
|
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
buf.memory = V4L2_MEMORY_MMAP;
|
if (buffers == NULL){
|
return 0;
|
}
|
if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
|
switch (errno) {
|
case EAGAIN:
|
return 0;
|
case EIO:
|
default:
|
return errnoexit("VIDIOC_DQBUF");
|
}
|
} else {
|
// LOGI("readframe %d %ld", buf.index, buf.length);
|
}
|
|
assert(buf.index < n_buffers);
|
|
// LOGI("readframe %d: 0x%X %ld", buf.index, buffers[buf.index].start, buf.bytesused);
|
processimage(buffers[buf.index].start, buf.bytesused);
|
|
if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
|
return errnoexit("VIDIOC_QBUF");
|
|
return 1;
|
}
|
|
static int stopcapturing(void) {
|
enum v4l2_buf_type type;
|
|
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
|
if (fd <= 0) {
|
return SUCCESS_LOCAL;
|
}
|
|
if (-1 == xioctl(fd, VIDIOC_STREAMOFF, &type))
|
return errnoexit("VIDIOC_STREAMOFF");
|
|
LOGE("stopcaptured!");
|
return SUCCESS_LOCAL;
|
}
|
|
static int uninitdevice(void) {
|
unsigned int i;
|
|
if (buffers == NULL){
|
return SUCCESS_LOCAL;
|
}
|
for (i = 0; i < n_buffers; ++i)
|
if (-1 == munmap(buffers[i].start, buffers[i].length))
|
return errnoexit("munmap");
|
|
free(buffers);
|
buffers = NULL;
|
|
LOGE("uninitdeviced!");
|
return SUCCESS_LOCAL;
|
}
|
|
static int closedevice(void) {
|
LOGI("Close fd: %d", fd);
|
if (fd <= 0) {
|
return SUCCESS_LOCAL;
|
}
|
if (-1 == close(fd)) {
|
fd = -1;
|
return errnoexit("close");
|
}
|
|
fd = -1;
|
|
LOGI("Closed!");
|
return SUCCESS_LOCAL;
|
}
|
|
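/*
 * yuyv422toABGRY: converts a packed YUYV (YUY2) frame into 32-bit pixels in rgb[]
 * and copies the luma samples into ybuf[]. Uses the classic fixed-point BT.601
 * conversion, precomputed into lookup tables on first use:
 *   R = (1192*(Y-16) + 1634*(V-128)) >> 10
 *   G = (1192*(Y-16) -  833*(V-128) - 400*(U-128)) >> 10
 *   B = (1192*(Y-16) + 2066*(U-128)) >> 10
 * Each 4-byte group Y1 U Y2 V yields two output pixels sharing the same chroma.
 */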
static void yuyv422toABGRY(unsigned char *src) {
|
|
int width = 0;
|
int height = 0;
|
|
width = IMG_WIDTH;
|
height = IMG_HEIGHT;
|
|
int frameSize = width * height * 2;
|
|
int i;
|
|
if ((!rgb || !ybuf)) {
|
return;
|
}
|
int *lrgb = NULL;
|
int *lybuf = NULL;
|
|
lrgb = &rgb[0];
|
lybuf = &ybuf[0];
|
|
if (yuv_tbl_ready == 0) {
|
for (i = 0; i < 256; i++) {
|
y1192_tbl[i] = 1192 * (i - 16);
|
if (y1192_tbl[i] < 0) {
|
y1192_tbl[i] = 0;
|
}
|
|
v1634_tbl[i] = 1634 * (i - 128);
|
v833_tbl[i] = 833 * (i - 128);
|
u400_tbl[i] = 400 * (i - 128);
|
u2066_tbl[i] = 2066 * (i - 128);
|
}
|
yuv_tbl_ready = 1;
|
}
|
|
for (i = 0; i < frameSize; i += 4) {
|
unsigned char y1, y2, u, v;
|
y1 = src[i];
|
u = src[i + 1];
|
y2 = src[i + 2];
|
v = src[i + 3];
|
|
int y1192_1 = y1192_tbl[y1];
|
int r1 = (y1192_1 + v1634_tbl[v]) >> 10;
|
int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u]) >> 10;
|
int b1 = (y1192_1 + u2066_tbl[u]) >> 10;
|
|
int y1192_2 = y1192_tbl[y2];
|
int r2 = (y1192_2 + v1634_tbl[v]) >> 10;
|
int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u]) >> 10;
|
int b2 = (y1192_2 + u2066_tbl[u]) >> 10;
|
|
r1 = r1 > 255 ? 255 : r1 < 0 ? 0 : r1;
|
g1 = g1 > 255 ? 255 : g1 < 0 ? 0 : g1;
|
b1 = b1 > 255 ? 255 : b1 < 0 ? 0 : b1;
|
r2 = r2 > 255 ? 255 : r2 < 0 ? 0 : r2;
|
g2 = g2 > 255 ? 255 : g2 < 0 ? 0 : g2;
|
b2 = b2 > 255 ? 255 : b2 < 0 ? 0 : b2;
|
|
*lrgb++ = 0xff000000 | b1 << 16 | g1 << 8 | r1;
|
*lrgb++ = 0xff000000 | b2 << 16 | g2 << 8 | r2;
|
|
if (lybuf != NULL) {
|
*lybuf++ = y1;
|
*lybuf++ = y2;
|
}
|
}
|
}
|
|
void InitNVLog(void)
|
{
|
/*if (0 != access("/storage/sdcard0/nvlog", F_OK)) {
|
if (0 == mkdir("/storage/sdcard0/nvlog", S_IRWXU | S_IRWXG | S_IRWXO)) {
|
|
}
|
}*/
|
}
|
|
void WriteNVLog(const char *fmt, ...)
|
{
|
/*va_list argp;
|
char buffer[1024];
|
char fname[64];
|
FILE *fp;
|
|
sprintf(fname, "/storage/sdcard0/nvlog/log%d.txt", 0);
|
|
// LOGD("WriteNVLog 写文件 log%d.txt", index);
|
|
fp = fopen(fname, "a+");
|
if (fp != NULL) {
|
time_t time1 = time(NULL);
|
struct tm *ts = localtime(&time1);
|
|
sprintf(buffer, "%04d-%02d-%02d %02d:%02d:%02d\t\t", 1900 + ts->tm_year, 1 + ts->tm_mon, ts->tm_mday, ts->tm_hour, ts->tm_min, ts->tm_sec);
|
|
va_start(argp, fmt);
|
vsprintf(buffer + strlen(buffer), fmt, argp);
|
va_end(argp);
|
|
strcat(buffer, "\r\n");
|
|
fwrite(buffer, 1, strlen(buffer), fp);
|
fclose(fp);
|
}*/
|
}
|
|
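/*
 * rgba: converts the most recently captured frame (held in rgb[]) into the YUV
 * layout selected by `type` and writes it into the caller-supplied byte array:
 *   0 -> BGRA_YUV420SP (recording), 1 -> RGBA_YUV420P (streaming),
 *   2 -> BGRA_NV21, anything else -> RGBA_NV21.
 */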
void rgba(JNIEnv* env, jobject thiz, jint type, jbyteArray output) {
if (!rgb || !ybuf)
return;

jbyte *outputBuffer = (*env)->GetByteArrayElements(env, output, 0);

// type: 0 - recording, 1 - live streaming
if (type == 0){
|
rgbaToYuvSIMD(IMG_WIDTH, IMG_HEIGHT, (uint8_t *)rgb, (uint8_t *)outputBuffer, BGRA_YUV420SP);
|
} else if (type == 1) {
|
rgbaToYuvSIMD(IMG_WIDTH, IMG_HEIGHT, (uint8_t *)rgb, (uint8_t *)outputBuffer, RGBA_YUV420P);
|
} else if (type == 2) {
|
rgbaToYuvSIMD(IMG_WIDTH, IMG_HEIGHT, (uint8_t *)rgb, (uint8_t *)outputBuffer, BGRA_NV21);
|
} else {
|
rgbaToYuvSIMD(IMG_WIDTH, IMG_HEIGHT, (uint8_t *)rgb, (uint8_t *)outputBuffer, RGBA_NV21);
|
}
|
|
(*env)->ReleaseByteArrayElements(env, output, outputBuffer, 0);
|
}
|
|
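/*
 * pixeltobmp: copies rgb[] into an RGBA_8888 Android Bitmap. If the bitmap size
 * differs from the capture size, only the overlapping top-left region is copied.
 */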
void pixeltobmp(JNIEnv* env, jobject thiz, jobject bitmap) {
|
AndroidBitmapInfo info;
|
void* pixels;
|
int ret;
|
int i;
|
int *colors;
|
|
int width = 0;
|
int height = 0;
|
|
if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
|
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
|
return;
|
}
|
|
width = info.width;
|
height = info.height;
|
|
if (!rgb || !ybuf)
|
return;
|
|
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
|
LOGE("Bitmap format is not RGBA_8888 !");
|
return;
|
}
|
|
if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return;
}
|
|
colors = (int*) pixels;
|
int *lrgb = NULL;
|
|
lrgb = &rgb[0];
|
|
if (width == IMG_WIDTH && height == IMG_HEIGHT) {
|
for (i = 0; i < IMG_WIDTH * IMG_HEIGHT; i++) {
|
*colors++ = *lrgb++;
|
}
|
} else {
|
for (int y = 0; y < IMG_HEIGHT && y < height; ++y) {
|
for (int x = 0; x < IMG_WIDTH && x < width; ++x) {
|
colors[y * width + x] = lrgb[y * IMG_WIDTH + x];
|
}
|
}
|
}
|
|
AndroidBitmap_unlockPixels(env, bitmap);
|
}
|
|
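/*
 * prepareCamera: JNI entry point that probes /dev/video[ids[0] .. ids[1]) until a
 * device opens and negotiates successfully, starts streaming, allocates the
 * rgb/ybuf conversion buffers and enables the default watermark. On return,
 * resolution[] is overwritten with the resolution that was actually negotiated.
 */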
jint prepareCamera(JNIEnv* env, jobject thiz, jintArray ids, jstring name, jintArray resolution, jboolean ay_camera) {
int ret = ERROR_LOCAL;
const char *expected_camera_name = NULL;

InitNVLog();

jint *ptr_resolution = (*env)->GetIntArrayElements(env, resolution, 0);
LOGE("Opening camera, requested resolution (%d x %d) - %d", ptr_resolution[0], ptr_resolution[1], ++init_module);

if (name != NULL) {
expected_camera_name = (*env)->GetStringUTFChars(env, name, 0);
}

jint *ptr_index = (*env)->GetIntArrayElements(env, ids, 0);
LOGE("Opening camera, index range [%d - %d)", ptr_index[0], ptr_index[1]);
|
|
pthread_mutex_lock(&mutex);
|
for (int index = ptr_index[0]; index < ptr_index[1]; ++index) {
|
ret = opendevice(index);
|
LOGI("open device %d", ret);
|
if (ret != ERROR_LOCAL) {
|
ret = initdevice(ptr_resolution[0], ptr_resolution[1], expected_camera_name, ay_camera);
|
}
|
if (ret == SUCCESS_LOCAL) {
|
camera_index = index;
|
break;
|
}
|
else if (fd != -1) {
|
closedevice();
|
}
|
}
|
|
if (ret == SUCCESS_LOCAL) {
|
ret = startcapturing();
|
|
if (ret != SUCCESS_LOCAL) {
|
LOGE("device reset.....");
|
stopcapturing();
|
uninitdevice();
|
closedevice();
|
LOGE("device resetted");
|
}
|
}
|
|
if (ret == SUCCESS_LOCAL) {
|
rgb = (int *) malloc(sizeof(int) * (IMG_WIDTH * IMG_HEIGHT));
|
ybuf = (int *) malloc(sizeof(int) * (IMG_WIDTH * IMG_HEIGHT));
|
init_JPEG_turbo();
|
|
cam_inited = true;
|
} else {
|
cam_inited = false;
|
}
|
|
ptr_resolution[0] = IMG_WIDTH;
|
ptr_resolution[1] = IMG_HEIGHT;
|
|
LOGE("##### 初始化摄像头 %s #####", ret == SUCCESS_LOCAL? "成功": "失败");
|
|
pthread_mutex_unlock(&mutex);
|
|
(*env)->ReleaseIntArrayElements(env, resolution, ptr_resolution, 0);
|
(*env)->ReleaseIntArrayElements(env, ids, ptr_index, 0);
|
|
if (name != NULL) {
|
(*env)->ReleaseStringUTFChars(env, name, expected_camera_name);
|
}
|
|
if (ret == SUCCESS_LOCAL) {
|
InitWatermark("/system/ms_unicode_24.bin", IMG_WIDTH, IMG_HEIGHT);
|
wm_enable = true;
|
}
|
|
return ret;
|
}
|
|
jint processCamera(JNIEnv* env, jobject thiz) {
|
int ret;
|
pthread_mutex_lock(&mutex);
|
ret = readframeonce();
|
pthread_mutex_unlock(&mutex);
|
return ret;
|
}
|
|
void stopCamera(JNIEnv* env, jobject thiz) {
|
pthread_mutex_lock(&mutex);
|
cam_inited = false;
|
|
wm_enable = false;
|
UninitWatermark();
|
|
LOGI("stopcapturing");
|
stopcapturing();
|
|
LOGI("uninitdevice");
|
uninitdevice();
|
|
LOGI("closedevice");
|
closedevice();
|
|
if (rgb){
|
free(rgb);
|
rgb=NULL;
|
}
|
if (ybuf){
|
free(ybuf);
|
ybuf = NULL;
|
}
|
|
fd = -1;
|
|
uninit_JPEG_turbo();
|
|
LOGE("##### 反初始化摄像头 #####");
|
|
pthread_mutex_unlock(&mutex);
|
}
|
|
static void mjpegToRGBA(const uint8_t *data, int length)
|
{
|
read_JPEG_turbo(data, length, (uint8_t *)rgb);
|
}
|
|
void regenv(JNIEnv *env, jobject instance) {
// TODO
// Save the global JavaVM so it can be used from worker threads
// (*env)->GetJavaVM(env, &sg_jvm);
// Cannot assign directly (g_obj = obj); a global reference must be taken instead
sg_obj = (*env)->NewGlobalRef(env, instance);
|
|
if (init_module == 0) {
|
pthread_mutex_init(&mutex, NULL);
|
init_module++;
|
}
|
}
|
|
jstring getPixelFormat(JNIEnv *env, jobject instance) {
|
switch (img_pixelformat) {
|
case V4L2_PIX_FMT_MJPEG:
|
return (*env)->NewStringUTF(env, "MJPEG");
|
case V4L2_PIX_FMT_YUYV:
|
return (*env)->NewStringUTF(env, "YUYV");
|
default:
|
break;
|
}
|
return (*env)->NewStringUTF(env, "CLOSE");
|
}
|
|
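/*
 * getUsbCameraInfo: builds a com.anyun.libusbcamera.UsbCameraInfo object from the
 * values cached during initdevice(): device index, name, version, firmware string,
 * serial number and one UsbCameraFormat (name + resolution list) per enumerated format.
 */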
jobject getUsbCameraInfo(JNIEnv *env, jobject instance) {
|
jclass objectClass = (*env)->FindClass(env, "com/anyun/libusbcamera/UsbCameraInfo");
|
jmethodID init1 = (*env)->GetMethodID(env, objectClass, "<init>",
|
"(ILjava/lang/String;ILjava/lang/String;Ljava/lang/String;[Lcom/anyun/libusbcamera/UsbCameraFormat;)V");
|
|
jclass objectClass2 = (*env)->FindClass(env, "com/anyun/libusbcamera/UsbCameraFormat");
|
jmethodID jidResultInfo = (*env)->GetMethodID(env, objectClass2, "<init>",
|
"(Ljava/lang/String;[[I)V");
|
|
jobjectArray args = (*env)->NewObjectArray(env, CameraInfoNum, objectClass2, 0);
|
|
jclass intArrCls = (*env)->FindClass(env, "[I");
|
|
for (int i = 0; i < CameraInfoNum; ++i) {
|
jobjectArray resArray = (*env)->NewObjectArray(env, CameraInfo[i].resolution_num, intArrCls, NULL);
|
|
for (int j = 0; j < CameraInfo[i].resolution_num; j++) {
|
jint buff1[2] = {CameraInfo[i].resolution[j][0], CameraInfo[i].resolution[j][1]};
|
|
jintArray whArray = (*env)->NewIntArray(env, 2);
|
(*env)->SetIntArrayRegion(env, whArray, 0, 2, buff1);
|
|
(*env)->SetObjectArrayElement(env, resArray, j, whArray);
|
|
(*env)->DeleteLocalRef(env, whArray);
|
}
|
jobject son = (*env)->NewObject(env, objectClass2, jidResultInfo, (*env)->NewStringUTF(env, CameraInfo[i].name), resArray);
|
(*env)->SetObjectArrayElement(env, args, i, son);
|
|
(*env)->DeleteLocalRef(env, resArray);
|
(*env)->DeleteLocalRef(env, son);
|
}
|
|
jobject ret = (*env)->NewObject(env, objectClass, init1,
|
camera_index,
|
(*env)->NewStringUTF(env, camera_name[0] > 0? camera_name: "Unknown"),
|
camera_version,
|
(*env)->NewStringUTF(env, camera_true_version[0] > 0? camera_true_version: "Unknown"),
|
(*env)->NewStringUTF(env, camera_sn[0] > 0? camera_sn: "Unknown"),
|
args);
|
(*env)->DeleteLocalRef(env, args);
|
|
return ret;
|
}
|
|
jint getUsbCameraVersion(JNIEnv *env, jobject thiz) {
|
// TODO: implement getUsbCameraVersion()
|
return camera_version;
|
}
|
|
jstring getUsbCameraTrueVersion(JNIEnv *env, jobject thiz) {
|
// TODO: implement getUsbCameraVersion()
|
return (*env)->NewStringUTF(env, camera_true_version);
|
}
|
|
jstring getUsbCameraSn(JNIEnv *env, jobject thiz) {
|
// TODO: implement getUsbCameraSn()
|
return (*env)->NewStringUTF(env, camera_sn);
|
}
|
|
void enableWatermark(JNIEnv *env, jobject thiz, jboolean en, jstring font_)
|
{
|
if (en && font_ != NULL) {
|
const char *font = (*env)->GetStringUTFChars(env, font_, 0);
|
InitWatermark(font, IMG_WIDTH, IMG_HEIGHT);
|
(*env)->ReleaseStringUTFChars(env, font_, font);
|
wm_enable = true;
|
} else {
|
UninitWatermark();
|
wm_enable = false;
|
}
|
}
|
|
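/*
 * setWatermark: marshals a java.util.ArrayList of text items (each with int fields
 * `x`, `y` and a String field `text`) into a native text_t array and hands it,
 * together with color/font_size/multiple, to PrepareWatermark().
 */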
void setWatermark(JNIEnv *env, jobject thiz, jint color, jint font_size, jint multiple, jobject texts)
|
{
|
jclass cls_arraylist = (*env)->GetObjectClass(env, texts);
|
|
if (cls_arraylist != NULL) {
|
jmethodID arraylist_get = (*env)->GetMethodID(env, cls_arraylist,"get","(I)Ljava/lang/Object;");
|
jmethodID arraylist_size = (*env)->GetMethodID(env, cls_arraylist,"size","()I");
|
|
jint nums = (*env)->CallIntMethod(env, texts, arraylist_size);
|
|
LOGI("Texts num = %d", nums);
|
|
text_t *text_array = NULL;
|
|
if (nums > 0) {
|
text_array = malloc(nums * sizeof(text_t));
|
}
|
|
for (int i = 0; i < nums; ++i) {
|
jobject obj_user = (*env)->CallObjectMethod(env, texts, arraylist_get, i);
|
jclass cls_user = (*env)->GetObjectClass(env, obj_user);
|
|
jfieldID idx = (*env)->GetFieldID(env, cls_user,"x","I");
|
jfieldID idy = (*env)->GetFieldID(env, cls_user,"y","I");
|
jfieldID idtext = (*env)->GetFieldID(env, cls_user,"text","Ljava/lang/String;");
|
|
jint x = (*env)->GetIntField(env, obj_user, idx);
|
jint y = (*env)->GetIntField(env, obj_user, idy);
|
jstring value = (jstring )(*env)->GetObjectField(env, obj_user, idtext);
|
const char *soc = (*env)->GetStringUTFChars(env, value, 0);
|
LOGI("Text x = %d, y = %d, text = %s", x, y, soc);
|
|
text_array[i].x = x;
|
text_array[i].y = y;
|
strcpy(text_array[i].text, soc);
|
|
(*env)->ReleaseStringUTFChars(env, value, soc);
|
(*env)->DeleteLocalRef(env, obj_user);
|
(*env)->DeleteLocalRef(env, cls_user);
|
}
|
|
PrepareWatermark(color, font_size, multiple, nums, text_array);
|
if (text_array != NULL) {
|
free(text_array);
|
}
|
}
|
|
LOGI("setWatermark end");
|
|
// const char *school_temp = NULL;
|
// const char *teacher_temp = NULL;
|
// const char *student_temp = NULL;
|
// const char *speed_temp = NULL;
|
// const char *czh_temp = NULL;
|
// const char *jwd_temp = NULL;
|
// const char *date_temp = NULL;
|
//
|
// if (school != NULL) {
|
// school_temp = (*env)->GetStringUTFChars(env, school, 0);
|
// }
|
// if (teacher != NULL) {
|
// teacher_temp = (*env)->GetStringUTFChars(env, teacher, 0);
|
// }
|
// if (student != NULL) {
|
// student_temp = (*env)->GetStringUTFChars(env, student, 0);
|
// }
|
// if (speed != NULL) {
|
// speed_temp = (*env)->GetStringUTFChars(env, speed, 0);
|
// }
|
// if (czh != NULL) {
|
// czh_temp = (*env)->GetStringUTFChars(env, czh, 0);
|
// }
|
// if (jwd != NULL) {
|
// jwd_temp = (*env)->GetStringUTFChars(env, jwd, 0);
|
// }
|
// if (date != NULL) {
|
// date_temp = (*env)->GetStringUTFChars(env, date, 0);
|
// }
|
//
|
// PrepareWatermark(color, school_temp, teacher_temp, student_temp, speed_temp, czh_temp, jwd_temp, date_temp);
|
//
|
// if (school != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, school, school_temp);
|
// }
|
// if (teacher != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, teacher, teacher_temp);
|
// }
|
// if (student != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, student, student_temp);
|
// }
|
// if (speed != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, speed, speed_temp);
|
// }
|
// if (czh != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, czh, czh_temp);
|
// }
|
// if (jwd != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, jwd, jwd_temp);
|
// }
|
// if (date != NULL) {
|
// (*env)->ReleaseStringUTFChars(env, date, date_temp);
|
// }
|
}
|
|
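/*
 * hardRebootUsbCamera: power-cycles the USB host port by toggling the controller's
 * sysfs mode_switch node to "device" and back to "host", which forces re-enumeration
 * of the attached camera. The path below looks platform-specific (a Spreadtrum/Unisoc
 * "ap-ahb" USB controller is assumed); other boards will expose a different node.
 */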
jboolean hardRebootUsbCamera(JNIEnv *env, jobject thiz)
|
{
|
FILE *fp = fopen("/sys/devices/platform/soc/soc:ap-ahb/20200000.usb/mode_switch", "w");
|
|
if (fp != NULL) {
fwrite("device", 1, 6, fp);
fflush(fp);
usleep(1000000);
fwrite("host", 1, 4, fp);
fflush(fp);
fclose(fp);
|
return true;
|
}
|
|
return false;
|
}
|
|
static JNINativeMethod methods[] = {
|
{"rgba", "(I[B)V", (void *)rgba},
|
{"getPixelFormat", "()Ljava/lang/String;", (void *)getPixelFormat},
|
{"pixeltobmp", "(Landroid/graphics/Bitmap;)V", (void *)pixeltobmp},
|
{"prepareCamera", "([ILjava/lang/String;[IZ)I", (void *)prepareCamera},
|
{"processCamera", "()I", (void *)processCamera},
|
{"stopCamera", "()V", (void *)stopCamera},
|
{"setenv", "()V", (void *)regenv},
|
{"getUsbCameraInfo", "()Lcom/anyun/libusbcamera/UsbCameraInfo;", (void *)getUsbCameraInfo},
|
{"getUsbCameraVersion", "()I", (void *)getUsbCameraVersion},
|
{"getUsbCameraTrueVersion", "()Ljava/lang/String;", (void *)getUsbCameraTrueVersion},
|
{"getUsbCameraSn", "()Ljava/lang/String;", (void *)getUsbCameraSn},
|
{"enableWatermark", "(ZLjava/lang/String;)V", (void *)enableWatermark},
|
{"setWatermark", "(IIILjava/util/ArrayList;)V", (void *)setWatermark},
|
{"hardRebootUsbCamera", "()Z", (void *)hardRebootUsbCamera}
|
};
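/*
 * Typical call sequence from the Java side (illustrative only; assumes
 * SUCCESS_LOCAL == 0 as defined in ImageProc.h, and the class registered in
 * JNI_OnLoad below):
 *
 *   int[] ids = {0, 10};            // probe /dev/video0 .. /dev/video9
 *   int[] res = {1280, 720};        // request; overwritten with the negotiated size
 *   int rc = prepareCamera(ids, null, res, false);
 *   while (rc == 0 && running) {
 *       if (processCamera() == 0) {
 *           pixeltobmp(bitmap);     // or rgba(type, buffer) for YUV output
 *       }
 *   }
 *   stopCamera();
 */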
|
|
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
|
LOGI("JNI_OnLoad");
|
|
sg_jvm = vm;
|
|
if ((*sg_jvm)->GetEnv(sg_jvm, (void **) (&sg_jenv), JNI_VERSION_1_6) != JNI_OK) {
|
LOGI("Env not got");
|
return JNI_ERR;
|
}
|
|
jclass clz = (*sg_jenv)->FindClass(sg_jenv, "com/anyun/libusbcamera/UsbCamera");
|
if (clz == NULL) {
|
LOGI("目标类未找到");
|
return JNI_ERR;
|
}
|
|
if ((*sg_jenv)->RegisterNatives(sg_jenv, clz, methods, LIBENC_ARRAY_ELEMS(methods))) {
|
LOGI("methods not registered");
|
return JNI_ERR;
|
}
|
|
return JNI_VERSION_1_6;
|
}
|
|
void JNI_OnUnload(JavaVM* vm, void* reserved) {
|
LOGI("JNI_OnUnload");
|
|
JNIEnv* env;
|
|
if ((*vm)->GetEnv(vm, (void **) (&env), JNI_VERSION_1_6) != JNI_OK) {
|
return;
|
}
|
|
jclass clz = (*env)->FindClass(env, "com/anyun/libusbcamera/UsbCamera");
|
if (clz == NULL) {
|
return;
|
}
|
|
(*env)->UnregisterNatives(env, clz);
|
}
|