Original article (please credit the source when reposting):
http://t.csdnimg.cn/SJ9gd
Source code based on: Android Q
Reference projects:
https://bitbucket.org/neuralassembly/simplewebcam/src/master/src/com/camera/simplewebcam/
https://github.com/chenguangxiang/simplewebcam
The project layout is as follows:
UVC
├── AndroidManifest.xml
├── Android.mk
├── jni
│ ├── Android.mk
│ ├── ImageProc.c
│ └── ImageProc.h
├── libs
├── ReadMe
├── res
│ ├── drawable
│ │ └── ic_launcher_background.xml
│ ├── drawable-v24
│ │ └── ic_launcher_foreground.xml
│ ├── layout
│ │ └── activity_main.xml
│ ├── mipmap-anydpi-v26
│ │ ├── ic_launcher_round.xml
│ │ └── ic_launcher.xml
│ ├── mipmap-hdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ ├── mipmap-mdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ ├── mipmap-xhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ ├── mipmap-xxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ ├── mipmap-xxxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ ├── values
│ │ ├── colors.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ └── xml
│ └── device_filter.xml
└── src
└── com
└── camera
└── simplewebcam
├── CameraPreview.java
└── MainActivity.java
AndroidManifest.xml:
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.camera.simplewebcam"
android:sharedUserId="android.uid.system">
<uses-sdk
android:minSdkVersion="23"
android:targetSdkVersion="26" />
<uses-permission android:name="android.permission.INTERACT_ACROSS_USERS_FULL" />
<uses-permission android:name="android.permission.SET_ORIENTATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.NFC" />
<uses-permission android:name="android.permission.READ_SYNC_SETTINGS" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.VIBRATE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_MEDIA_STORAGE" />
<uses-permission android:name="android.permission.WRITE_SETTINGS" />
<uses-permission android:name="android.permission.WRITE_SYNC_SETTINGS" />
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity"
android:screenOrientation="landscape">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
Android.mk:
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
# Must come after CLEAR_VARS, otherwise the assignment is wiped out.
LOCAL_JNI_SHARED_LIBRARIES := \
libImageProc
LOCAL_PACKAGE_NAME := SimpleWebCam
# On Android P (9.0)+, building with LOCAL_SDK_VERSION := current hides system (non-SDK) APIs,
# so an app that uses hidden APIs fails to build with it; build against platform APIs instead.
#LOCAL_SDK_VERSION := current
#LOCAL_USE_AAPT2 := true
LOCAL_PRIVATE_PLATFORM_APIS := true
LOCAL_CERTIFICATE := platform
LOCAL_PRIVILEGED_MODULE := true
LOCAL_MODULE_TAGS := optional
LOCAL_SRC_FILES := $(call all-java-files-under, src)
LOCAL_RESOURCE_DIR += $(LOCAL_PATH)/res
LOCAL_STATIC_JAVA_LIBRARIES := \
android-support-v4 \
android-support-v13 \
xmp_toolkit \
android-slices-builders \
android-slices-core \
android-slices-view \
android-support-v7-cardview \
android-support-v7-preference \
android-support-v7-recyclerview \
android-support-v14-preference \
androidx.appcompat_appcompat
LOCAL_DEX_PREOPT := false
LOCAL_PROGUARD_ENABLED:= disabled
include $(BUILD_PACKAGE)
# Use the following include to make our test apk.
include $(call all-makefiles-under,$(LOCAL_PATH))
jni/Android.mk:
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
ImageProc.c
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := libImageProc
LOCAL_LDLIBS := -llog -ljnigraphics
include $(BUILD_SHARED_LIBRARY)
jni/ImageProc.c:
#include "ImageProc.h"
int errnoexit(const char *s)
{
LOGE("%s error %d, %s", s, errno, strerror (errno));
return ERROR_LOCAL;
}
int xioctl(int fd, int request, void *arg)
{
int r;
do r = ioctl (fd, request, arg);
while (-1 == r && EINTR == errno);
return r;
}
int checkCamerabase(void){
struct stat st;
int i;
int start_from_4 = 1;
/* if /dev/video[0-3] all exist, camerabase = 4; otherwise camerabase = 0 */
for(i=0 ; i<4 ; i++){
sprintf(dev_name,"/dev/video%d",i);
if (-1 == stat (dev_name, &st)) {
start_from_4 &= 0;
}else{
start_from_4 &= 1;
}
}
if(start_from_4){
return 4;
}else{
return 0;
}
}
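This base-4 heuristic assumes the SoC's built-in cameras always occupy /dev/video0-3. A more robust alternative (a hypothetical sketch, not part of the original project, reusing this file's xioctl()) would probe each node with VIDIOC_QUERYCAP and return the first one that actually reports capture support:
// Hypothetical helper: scan /dev/video0..63 and return the index of the
// first node that supports video capture, or -1 if none is found.
int findcapturenode(void)
{
    struct v4l2_capability cap;
    char name[16];
    int i, tmpfd;
    for (i = 0; i < 64; i++) {
        snprintf(name, sizeof(name), "/dev/video%d", i);
        tmpfd = open(name, O_RDWR | O_NONBLOCK, 0);
        if (tmpfd < 0)
            continue; /* node absent or not openable */
        if (0 == xioctl(tmpfd, VIDIOC_QUERYCAP, &cap) &&
            (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
            close(tmpfd);
            return i;
        }
        close(tmpfd);
    }
    return -1;
}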
// Open the /dev/videoN device node
int opendevice(int i)
{
struct stat st;
sprintf(dev_name,"/dev/video%d",i);
// stat() fetches the file attributes; the node must be a character device
if (-1 == stat (dev_name, &st)) {
LOGE("Cannot identify '%s': %d, %s", dev_name, errno, strerror (errno));
return ERROR_LOCAL;
}
if (!S_ISCHR (st.st_mode)) {
LOGE("%s is no device", dev_name);
return ERROR_LOCAL;
}
fd = open (dev_name, O_RDWR | O_NONBLOCK, 0);
if (-1 == fd) {
LOGE("Cannot open '%s': %d, %s", dev_name, errno, strerror (errno));
return ERROR_LOCAL;
}
return SUCCESS_LOCAL;
}
// Query driver capabilities and configure the capture format
int initdevice(void)
{
struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
struct v4l2_format fmt;
unsigned int min;
if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) {
if (EINVAL == errno) {
LOGE("%s is no V4L2 device", dev_name);
return ERROR_LOCAL;
} else {
return errnoexit ("VIDIOC_QUERYCAP");
}
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
LOGE("%s is no video capture device", dev_name);
return ERROR_LOCAL;
}
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
LOGE("%s does not support streaming i/o", dev_name);
return ERROR_LOCAL;
}
CLEAR (cropcap);
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect; /* reset cropping to the default rectangle */
if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) {
switch (errno) {
case EINVAL:
/* cropping not supported; ignore */
break;
default:
/* other errors are ignored as well */
break;
}
}
} else {
/* VIDIOC_CROPCAP failed; errors ignored */
}
// Set the capture format: 640x480, YUYV (YUV 4:2:2)
CLEAR (fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = IMG_WIDTH;
fmt.fmt.pix.height = IMG_HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt))
return errnoexit ("VIDIOC_S_FMT");
min = fmt.fmt.pix.width * 2;
if (fmt.fmt.pix.bytesperline < min)
fmt.fmt.pix.bytesperline = min;
min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
if (fmt.fmt.pix.sizeimage < min)
fmt.fmt.pix.sizeimage = min;
return initmmap ();
}
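Note that VIDIOC_S_FMT may silently adjust the requested width/height/pixelformat to whatever the driver supports, and not every UVC camera offers YUYV at 640x480. A minimal sketch (hypothetical, not in the original source; it reuses this project's fd global, xioctl() and LOGI macro) that lists the formats the driver actually reports before committing to V4L2_PIX_FMT_YUYV:
// Hypothetical helper: enumerate the pixel formats of the capture device.
static void enumformats(void)
{
    struct v4l2_fmtdesc desc;
    CLEAR(desc);
    desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // Increment index until the driver returns EINVAL (no more formats).
    while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &desc)) {
        LOGI("format %u: %s (fourcc 0x%08x)",
             desc.index, desc.description, desc.pixelformat);
        desc.index++;
    }
}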
// Request frame buffers from the driver and mmap() them into user space
int initmmap(void)
{
struct v4l2_requestbuffers req;
CLEAR (req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl (fd, VIDIOC_REQBUFS, &req)) {
if (EINVAL == errno) {
LOGE("%s does not support memory mapping", dev_name);
return ERROR_LOCAL;
} else {
return errnoexit ("VIDIOC_REQBUFS");
}
}
if (req.count < 2) {
LOGE("Insufficient buffer memory on %s", dev_name);
return ERROR_LOCAL;
}
buffers = calloc (req.count, sizeof (*buffers));
if (!buffers) {
LOGE("Out of memory");
return ERROR_LOCAL;
}
for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == xioctl (fd, VIDIOC_QUERYBUF, &buf))
return errnoexit ("VIDIOC_QUERYBUF");
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap (NULL ,
buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
fd, buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start)
return errnoexit ("mmap");
}
return SUCCESS_LOCAL;
}
// Queue all frame buffers and start streaming
int startcapturing(void)
{
unsigned int i;
enum v4l2_buf_type type;
for (i = 0; i < n_buffers; ++i) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == xioctl (fd, VIDIOC_QBUF, &buf))
return errnoexit ("VIDIOC_QBUF");
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl (fd, VIDIOC_STREAMON, &type))
return errnoexit ("VIDIOC_STREAMON");
return SUCCESS_LOCAL;
}
// Wait (select with a 2s timeout) for a frame and dequeue it
int readframeonce(void)
{
for (;;) {
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);
FD_SET (fd, &fds);
tv.tv_sec = 2;
tv.tv_usec = 0;
r = select (fd + 1, &fds, NULL, NULL, &tv);
if (-1 == r) {
if (EINTR == errno)
continue;
return errnoexit ("select");
}
if (0 == r) {
LOGE("select timeout");
return ERROR_LOCAL;
}
if (readframe ()==1)
break;
}
return SUCCESS_LOCAL;
}
void processimage (const void *p)
{
yuyv422toABGRY((unsigned char *)p);
}
int readframe(void)
{
struct v4l2_buffer buf;
unsigned int i;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl (fd, VIDIOC_DQBUF, &buf)) {
switch (errno) {
case EAGAIN:
return 0;
case EIO:
/* EIO (transient stream error) could be ignored; here it falls through */
default:
return errnoexit ("VIDIOC_DQBUF");
}
}
assert (buf.index < n_buffers);
processimage (buffers[buf.index].start);
if (-1 == xioctl (fd, VIDIOC_QBUF, &buf))
return errnoexit ("VIDIOC_QBUF");
return 1;
}
int stopcapturing(void)
{
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl (fd, VIDIOC_STREAMOFF, &type))
return errnoexit ("VIDIOC_STREAMOFF");
return SUCCESS_LOCAL;
}
int uninitdevice(void)
{
unsigned int i;
for (i = 0; i < n_buffers; ++i)
if (-1 == munmap (buffers[i].start, buffers[i].length))
return errnoexit ("munmap");
free (buffers);
return SUCCESS_LOCAL;
}
int closedevice(void)
{
if (-1 == close (fd)){
fd = -1;
return errnoexit ("close");
}
fd = -1;
return SUCCESS_LOCAL;
}
void yuyv422toABGRY(unsigned char *src)
{
int width=0;
int height=0;
width = IMG_WIDTH;
height = IMG_HEIGHT;
int frameSize =width*height*2;
int i;
if(!rgb || !ybuf){
return;
}
int *lrgb = NULL;
int *lybuf = NULL;
lrgb = &rgb[0];
lybuf = &ybuf[0];
if(yuv_tbl_ready==0){
for(i=0 ; i<256 ; i++){
y1192_tbl[i] = 1192*(i-16);
if(y1192_tbl[i]<0){
y1192_tbl[i]=0;
}
v1634_tbl[i] = 1634*(i-128);
v833_tbl[i] = 833*(i-128);
u400_tbl[i] = 400*(i-128);
u2066_tbl[i] = 2066*(i-128);
}
yuv_tbl_ready=1;
}
for(i=0 ; i<frameSize ; i+=4){
unsigned char y1, y2, u, v;
y1 = src[i];
u = src[i+1];
y2 = src[i+2];
v = src[i+3];
int y1192_1=y1192_tbl[y1];
int r1 = (y1192_1 + v1634_tbl[v])>>10;
int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u])>>10;
int b1 = (y1192_1 + u2066_tbl[u])>>10;
int y1192_2=y1192_tbl[y2];
int r2 = (y1192_2 + v1634_tbl[v])>>10;
int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u])>>10;
int b2 = (y1192_2 + u2066_tbl[u])>>10;
r1 = r1>255 ? 255 : r1<0 ? 0 : r1;
g1 = g1>255 ? 255 : g1<0 ? 0 : g1;
b1 = b1>255 ? 255 : b1<0 ? 0 : b1;
r2 = r2>255 ? 255 : r2<0 ? 0 : r2;
g2 = g2>255 ? 255 : g2<0 ? 0 : g2;
b2 = b2>255 ? 255 : b2<0 ? 0 : b2;
*lrgb++ = 0xff000000 | b1<<16 | g1<<8 | r1;
*lrgb++ = 0xff000000 | b2<<16 | g2<<8 | r2;
if(lybuf!=NULL){
*lybuf++ = y1;
*lybuf++ = y2;
}
}
}
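The magic numbers are the ITU-R BT.601 conversion coefficients in fixed point, scaled by 1024 (hence the >>10): 1192 ≈ 1.164*1024, 1634 ≈ 1.596*1024, 833 ≈ 0.813*1024, 400 ≈ 0.391*1024 and 2066 ≈ 2.018*1024, with the tables precomputing each product for every 8-bit input. A floating-point reference of the same per-pixel math (a sketch for clarity, not code from the project):
// Reference version of the table-driven conversion above, for one Y/U/V triple.
// Clamping (y - 16) below 0 mirrors the y1192_tbl initialization.
static inline void yuv2rgb_ref(unsigned char y, unsigned char u, unsigned char v,
                               int *r, int *g, int *b)
{
    int yc = (int)y - 16;
    float yf = 1.164f * (yc < 0 ? 0 : yc);
    *r = (int)(yf + 1.596f * ((int)v - 128));
    *g = (int)(yf - 0.813f * ((int)v - 128) - 0.391f * ((int)u - 128));
    *b = (int)(yf + 2.018f * ((int)u - 128));
    /* caller clamps r/g/b to [0,255], as yuyv422toABGRY() does */
}
Despite the ABGR name, the packed word 0xff000000 | b<<16 | g<<8 | r stores the bytes R,G,B,A in memory on a little-endian CPU, which is exactly the layout ANDROID_BITMAP_FORMAT_RGBA_8888 expects in pixeltobmp().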
void
Java_com_camera_simplewebcam_CameraPreview_pixeltobmp( JNIEnv* env,jobject thiz,jobject bitmap){
jboolean bo;
AndroidBitmapInfo info;
void* pixels;
int ret;
int i;
int *colors;
int width=0;
int height=0;
if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
width = info.width;
height = info.height;
if(!rgb || !ybuf) return;
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Bitmap format is not RGBA_8888 !");
return;
}
if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return; /* do not touch pixels if the lock failed */
}
colors = (int*)pixels;
int *lrgb =NULL;
lrgb = &rgb[0];
for(i=0 ; i<width*height ; i++){
*colors++ = *lrgb++;
}
AndroidBitmap_unlockPixels(env, bitmap);
}
jint
Java_com_camera_simplewebcam_CameraPreview_prepareCamera( JNIEnv* env,jobject thiz, jint videoid){
int ret;
if(camerabase<0){
camerabase = checkCamerabase();
}
ret = opendevice(camerabase + videoid);
if(ret != ERROR_LOCAL){
ret = initdevice();
}
if(ret != ERROR_LOCAL){
ret = startcapturing();
if(ret != SUCCESS_LOCAL){
stopcapturing();
uninitdevice ();
closedevice ();
LOGE("device resetted");
}
}
if(ret != ERROR_LOCAL){
rgb = (int *)malloc(sizeof(int) * (IMG_WIDTH*IMG_HEIGHT));
ybuf = (int *)malloc(sizeof(int) * (IMG_WIDTH*IMG_HEIGHT));
}
return ret;
}
jint
Java_com_camera_simplewebcam_CameraPreview_prepareCameraWithBase( JNIEnv* env,jobject thiz, jint videoid, jint videobase){
int ret;
camerabase = videobase;
return Java_com_camera_simplewebcam_CameraPreview_prepareCamera(env,thiz,videoid);
}
void
Java_com_camera_simplewebcam_CameraPreview_processCamera( JNIEnv* env,
jobject thiz){
readframeonce();
}
void
Java_com_camera_simplewebcam_CameraPreview_stopCamera(JNIEnv* env,jobject thiz){
stopcapturing ();
uninitdevice ();
closedevice ();
if(rgb){ free(rgb); rgb = NULL; } /* NULL the pointers so a late pixeltobmp() bails out */
if(ybuf){ free(ybuf); ybuf = NULL; }
fd = -1;
}
jni/ImageProc.h:
#include <jni.h>
#include <android/log.h>
#include <android/bitmap.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <fcntl.h> /* low-level i/o */
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h> /* for videodev2.h */
#include <linux/videodev2.h>
#include <linux/usbdevice_fs.h>
#define LOG_TAG "WebCam"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define CLEAR(x) memset (&(x), 0, sizeof (x))
#define IMG_WIDTH 640
#define IMG_HEIGHT 480
#define ERROR_LOCAL -1
#define SUCCESS_LOCAL 0
struct buffer {
void * start;
size_t length;
};
static char dev_name[16];
static int fd = -1;
struct buffer * buffers = NULL;
static unsigned int n_buffers = 0;
int camerabase = -1;
int *rgb = NULL;
int *ybuf = NULL;
int yuv_tbl_ready=0;
int y1192_tbl[256];
int v1634_tbl[256];
int v833_tbl[256];
int u400_tbl[256];
int u2066_tbl[256];
int errnoexit(const char *s);
int xioctl(int fd, int request, void *arg);
int checkCamerabase(void);
int opendevice(int videoid);
int initdevice(void);
int initmmap(void);
int startcapturing(void);
int readframeonce(void);
int readframe(void);
void processimage (const void *p);
int stopcapturing(void);
int uninitdevice(void);
int closedevice(void);
void yuyv422toABGRY(unsigned char *src);
jint Java_com_camera_simplewebcam_CameraPreview_prepareCamera( JNIEnv* env,jobject thiz, jint videoid);
jint Java_com_camera_simplewebcam_CameraPreview_prepareCameraWithBase( JNIEnv* env,jobject thiz, jint videoid, jint videobase);
void Java_com_camera_simplewebcam_CameraPreview_processCamera( JNIEnv* env,jobject thiz);
void Java_com_camera_simplewebcam_CameraPreview_stopCamera(JNIEnv* env,jobject thiz);
void Java_com_camera_simplewebcam_CameraPreview_pixeltobmp( JNIEnv* env,jobject thiz,jobject bitmap);
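Because every exported symbol follows the Java_<package>_<class>_<method> naming convention, System.loadLibrary("ImageProc") alone is enough to bind them; no explicit registration is needed. For reference, an equivalent explicit registration via JNI_OnLoad (a hypothetical sketch, not in the original source) would look like:
// Hypothetical alternative: register the natives explicitly instead of
// relying on the Java_... naming convention.
static const JNINativeMethod gMethods[] = {
    {"prepareCamera", "(I)I", (void *)Java_com_camera_simplewebcam_CameraPreview_prepareCamera},
    {"prepareCameraWithBase", "(II)I", (void *)Java_com_camera_simplewebcam_CameraPreview_prepareCameraWithBase},
    {"processCamera", "()V", (void *)Java_com_camera_simplewebcam_CameraPreview_processCamera},
    {"stopCamera", "()V", (void *)Java_com_camera_simplewebcam_CameraPreview_stopCamera},
    {"pixeltobmp", "(Landroid/graphics/Bitmap;)V", (void *)Java_com_camera_simplewebcam_CameraPreview_pixeltobmp},
};

jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
    JNIEnv *env;
    if ((*vm)->GetEnv(vm, (void **)&env, JNI_VERSION_1_6) != JNI_OK)
        return JNI_ERR;
    jclass clazz = (*env)->FindClass(env, "com/camera/simplewebcam/CameraPreview");
    if (clazz == NULL)
        return JNI_ERR;
    if ((*env)->RegisterNatives(env, clazz, gMethods,
                                sizeof(gMethods) / sizeof(gMethods[0])) < 0)
        return JNI_ERR;
    return JNI_VERSION_1_6;
}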
res/layout/activity_main.xml:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<com.camera.simplewebcam.CameraPreview
android:id="@+id/cp"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"/>
</LinearLayout>
src/com/camera/simplewebcam/CameraPreview.java:
package com.camera.simplewebcam;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final boolean DEBUG = true;
private static final String TAG="WebCam";
protected Context context;
private SurfaceHolder holder;
Thread mainLoop = null;
private Bitmap bmp=null;
private boolean cameraExists=false;
private boolean shouldStop=false;
// /dev/videoX (X = cameraId + cameraBase) is used.
// On some OMAP devices the system already occupies /dev/video[0-3],
// so the external webcam appears at /dev/video4 or later.
// In that case use cameraId=0 and cameraBase=4.
private int cameraId=4;
private int cameraBase=0;
// This definition also exists in ImageProc.h.
// Webcam must support the resolution 640x480 with YUYV format.
static final int IMG_WIDTH=640;
static final int IMG_HEIGHT=480;
// The following variables are used to draw camera images.
private int winWidth=0;
private int winHeight=0;
private Rect rect;
private int dw, dh;
private float rate;
// JNI functions
public native int prepareCamera(int videoid);
public native int prepareCameraWithBase(int videoid, int camerabase);
public native void processCamera();
public native void stopCamera();
public native void pixeltobmp(Bitmap bitmap);
static {
System.loadLibrary("ImageProc");
}
public CameraPreview(Context context) {
super(context);
this.context = context;
if(DEBUG) Log.d(TAG,"CameraPreview constructed");
setFocusable(true);
holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
public CameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
this.context = context;
if(DEBUG) Log.d(TAG,"CameraPreview constructed");
setFocusable(true);
holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
@Override
public void run() {
while (cameraExists) {
//obtaining display area to draw a large image
if(winWidth==0){
winWidth=this.getWidth();
winHeight=this.getHeight();
if(winWidth*3/4<=winHeight){
dw = 0;
dh = (winHeight-winWidth*3/4)/2;
rate = ((float)winWidth)/IMG_WIDTH;
rect = new Rect(dw,dh,dw+winWidth-1,dh+winWidth*3/4-1);
}else{
dw = (winWidth-winHeight*4/3)/2;
dh = 0;
rate = ((float)winHeight)/IMG_HEIGHT;
rect = new Rect(dw,dh,dw+winHeight*4/3 -1,dh+winHeight-1);
}
}
// obtaining a camera image (pixel data are stored in an array in JNI).
processCamera();
// camera image to bmp
pixeltobmp(bmp);
Canvas canvas = getHolder().lockCanvas();
if (canvas != null)
{
// draw camera bmp on canvas
canvas.drawBitmap(bmp,null,rect,null);
getHolder().unlockCanvasAndPost(canvas);
}
if(shouldStop){
shouldStop = false;
break;
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if(DEBUG) Log.d(TAG, "surfaceCreated");
if(bmp==null){
bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
}
// /dev/videox (x=cameraId + cameraBase) is used
int ret = prepareCameraWithBase(cameraId, cameraBase);
if(ret!=-1) cameraExists = true;
mainLoop = new Thread(this);
mainLoop.start();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if(DEBUG) Log.d(TAG, "surfaceChanged");
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if(DEBUG) Log.d(TAG, "surfaceDestroyed");
if(cameraExists){
shouldStop = true;
while(shouldStop){
try{
Thread.sleep(100); // wait for the render thread to stop
}catch(Exception e){}
}
}
stopCamera();
}
}
src/com/camera/simplewebcam/MainActivity.java:
package com.camera.simplewebcam;
import android.app.Activity;
import android.os.Bundle;
public class MainActivity extends Activity {
CameraPreview cp;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cp = (CameraPreview) findViewById(R.id.cp);
}
}
Finally, the platform must allow a system app to open the video node directly. The required device-side changes (exact paths vary by vendor) touch the SELinux file contexts, the system_app policy, and the ueventd permissions:
file_contexts:
/dev/v4l-subdev.* u:object_r:video_device:s0
+/dev/video4 u:object_r:video_device:s0
/dev/vm_bms u:object_r:vm_bms_device:s0
system_app.te:
+#uvc Camera
+allow system_app video_device:chr_file { read write open getattr ioctl};
hal_client_domain(system_app, hal_perf)
ueventd.rc:
/dev/video* 0660 system camera
+/dev/video4 0666 root root
In addition, the sepolicy neverallow rule (in domain.te) that forbids app domains from reading/writing video_device must be relaxed by excluding video_device from the set:
@@ -380,7 +380,7 @@ neverallow appdomain {
dm_device
radio_device
rpmsg_device
- video_device
+ -video_device
}:chr_file { read write };