Blogger QQ: 1356438802
Test platform for this article: Eclipse + OpenCV 2.4.10 + an MTK Android 4.4 tablet (this has been my Android test platform throughout).
Readers of the earlier posts may find them scattered: one moment one platform, the next another.
My approach is actually simple: for any OpenCV feature, I first verify it on Windows, then on Ubuntu, and finally on Android.
Windows, being the most broadly supported system, is where verification is most likely to succeed, so I always get a feature working there first and only then move it to Android. The earlier experience serves as a reference, so while debugging on Android I can quickly pinpoint where a problem lies. Along the way I become familiar with all three platforms, which will also help when porting my applications across platforms later.
Looking back over this period, the work falls into three stages:
1. Call OpenCV functions successfully on each platform. For example, the JNI layer of the yanzi_OpenCV4Android app has a grayscale function, Java_luo_uvc_jni_ImageProc_grayProc, whose only purpose is to verify that OpenCV can be called.
2. Capture images and show a real-time preview on each platform. This was essentially achieved earlier on Windows, Ubuntu, and Android, with only differences in quality.
3. Upgrade the calls from stage 2: use the opencv2 C++ classes for preview and recording on each platform. The opencv2 C++ classes are really just a wrapper over the opencv1 C functions, but the code is cleaner and works more naturally with Mat; see the sketch after this list. Windows and Ubuntu are already verified; refer to the earlier posts in this series.
Next, let's verify VideoCapture and VideoWriter on Android!
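As a reminder of what stage 3 looks like, here is a minimal desktop-style sketch of the two opencv2 C++ classes in question (the device index, codec, frame size, and output path are illustrative placeholders, not the values from the app):

#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"

int main()
{
	// VideoCapture/VideoWriter are thin C++ wrappers over cvCreateCameraCapture,
	// cvCreateVideoWriter, etc.; both release their resources in their destructors.
	cv::VideoCapture capture(0);          // open the default camera
	if (!capture.isOpened())
		return -1;

	// assumes the camera delivers 640x480 BGR frames at roughly 30 fps
	cv::VideoWriter writer("out.avi", CV_FOURCC('M', 'J', 'P', 'G'), 30, cv::Size(640, 480), true);
	cv::Mat frame;
	for (int i = 0; i < 300; i++)         // record roughly 10 seconds
	{
		capture >> frame;                 // grab and decode one frame
		if (frame.empty())
			break;
		if (writer.isOpened())
			writer.write(frame);          // append the frame to the file
	}
	return 0;
}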
Copy the example project yanzi_OpenCV4Android from《我的Opencv4Android添加V4L2支持的移植记录(2)》and make the following changes:
yanzi_OpenCV4Android6
ImageProc.cpp
#include "ImageProc.h"
#include "cv.h"
#include "highgui.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <string>
#include <vector>
using namespace cv;
using namespace std;
#ifdef __cplusplus
extern "C"
{
#pragma message("------------------------ ImageProc.cpp")
#endif
#define NAMELEN	(64)
Mat frame;
VideoCapture capture;
char *prefix = NULL;
char fileName[NAMELEN] = {0};
VideoWriter writer;
int id;
//forward declaration, assuming ImageProc.h does not already declare it
char* jstringToChar(JNIEnv* env, jstring jstr);
JNIEXPORT jintArray JNICALL Java_luo_uvc_jni_ImageProc_grayProc(JNIEnv* env,
		jclass obj, jintArray buf, jint w, jint h)
{
	jint *cbuf;
	cbuf = env->GetIntArrayElements(buf, NULL);
	if (cbuf == NULL)
	{
		return 0;
	}
	Mat imgData(h, w, CV_8UC4, (unsigned char*) cbuf);
	uchar* ptr = imgData.ptr(0);
	for (int i = 0; i < w * h; i++)
	{
		//formula: Y (luma) = 0.299*R + 0.587*G + 0.114*B
		//within one 4-byte int, the color bytes are laid out as BGRA
		int grayScale = (int) (ptr[4 * i + 2] * 0.299 + ptr[4 * i + 1] * 0.587
				+ ptr[4 * i + 0] * 0.114);
		ptr[4 * i + 1] = grayScale;
		ptr[4 * i + 2] = grayScale;
		ptr[4 * i + 0] = grayScale;
	}
	int size = w * h;
	jintArray result = env->NewIntArray(size);
	env->SetIntArrayRegion(result, 0, size, cbuf);
	env->ReleaseIntArrayElements(buf, cbuf, 0);
	return result;
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_connectCamera(JNIEnv* env,
		jclass obj, jint device)
{
	id = device;
	if (capture.isOpened())
	{
		LOGE("camera is already opened!");
		return -2;
	}
	else
	{
		//open a default camera: -1 lets OpenCV pick any available device
		//(the requested device index is saved in id but not used here)
		capture.open(-1);
		if (capture.isOpened())
		{
			LOGE("camera open success!");
			return 0;
		}
	}
//	LOGI("usb camera Java_luo_uvc_jni_ImageProc_connectCamera end ....\n");
	return -1;
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_releaseCamera(JNIEnv* env,
		jclass obj)
{
//	LOGI("usb camera Java_luo_uvc_jni_ImageProc_releaseCamera start ....\n");
	if (capture.isOpened())
	{
		capture.release();
		LOGE("camera release success!");
		return 0;
	}
//	LOGI("usb camera Java_luo_uvc_jni_ImageProc_releaseCamera end ....\n");
	return -1;
}
struct FrameInfoClass
{
	jfieldID width;
	jfieldID heigth;
	jfieldID imageSize;
	jfieldID pixels;
};
JNIEXPORT jobject JNICALL Java_luo_uvc_jni_ImageProc_getFrame(JNIEnv* _env,
		jclass obj)
{
//	LOGI("------------Java_luo_uvc_jni_ImageProc_getFrame\n");
	if (capture.isOpened())
	{
		LOGI("------------start capture frame\n");
		//grab one frame from the camera
		capture >> frame;
		if (frame.empty())
		{
			LOGE("capture frame empty!");
			return NULL;
		}
		//write the frame into the video file
		if(writer.isOpened())
		{
			//the frame recorded to video needs no format conversion
			writer.write(frame);
		}
		struct FrameInfoClass frameInfoClass;
		//JNI class descriptors use $ for inner classes
		//luo/uvc/jni/ImageProc$FrameInfo
		jclass class2 = _env->FindClass("luo/uvc/jni/ImageProc$FrameInfo");
		frameInfoClass.width = _env->GetFieldID(class2, "mWidth", "I");
		frameInfoClass.heigth = _env->GetFieldID(class2, "mHeight", "I");
		frameInfoClass.imageSize = _env->GetFieldID(class2, "mImageSize", "I");
		frameInfoClass.pixels = _env->GetFieldID(class2, "mPixels", "[I");
		//
		jobject joFrame = _env->AllocObject(class2);
		LOGI("frame->cols = %d\n", frame.cols);
		LOGI("frame->rows = %d\n", frame.rows);
//		LOGI("frame->imageSize = %d\n", frame->imageSize);
		_env->SetIntField(joFrame, frameInfoClass.width, frame.cols);
		_env->SetIntField(joFrame, frameInfoClass.heigth, frame.rows);
//		_env->SetIntField(joFrame, frameInfoClass.imageSize, frame->imageSize);
		int size = frame.cols * frame.rows;
		//create a new Java array (jintArray); a jarray is not a C array and cannot be dereferenced directly
		jintArray jiArr = _env->NewIntArray(size);
		jint *ji;
#if 1   //works
		//append an alpha channel: BGR --> BGRA bytes, which Android reads as ARGB_8888 ints
		//(CV_RGB2RGBA and CV_BGR2BGRA denote the same channel-append conversion)
		Mat frameARGB;
		cvtColor(frame, frameARGB, CV_RGB2RGBA);
		//JNI provides Get/Release<Type>ArrayElements functions that give native code a pointer to the elements of a primitive-type array
		ji = _env->GetIntArrayElements(jiArr, 0);
		memcpy((jbyte *) ji, frameARGB.data, 4 * size);
		_env->ReleaseIntArrayElements(jiArr, ji, 0); //pairs with GetIntArrayElements: commits the data and releases the native copy
		_env->SetObjectField(joFrame, frameInfoClass.pixels, jiArr);
#else   //alternative: SetIntArrayRegion copies a C buffer into the Java array
		//(GetIntArrayRegion is the inverse, copying array elements into a C buffer)
		Mat frameARGB;
		cvtColor(frame, frameARGB, CV_RGB2RGBA);
		_env->SetIntArrayRegion(jiArr, 0, size, (jint *) frameARGB.data);
		_env->SetObjectField(joFrame, frameInfoClass.pixels, jiArr);
#endif
//		LOGI("Java_luo_uvc_jni_ImageProc_getFrame end\n");
		return joFrame;
	}
//	LOGI("=================Java_luo_uvc_jni_ImageProc_getFrame failed\n");
	return 0;
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_startRecord
  (JNIEnv *env, jclass, jstring jstr)
{
	if(writer.isOpened() == false)
	{
		 prefix = jstringToChar(env, jstr);
		 memset(fileName, 0, NAMELEN);
		 sprintf(fileName, "/storage/sdcard0/Movies/%s.avi", prefix);
		 LOGI("fileName: %s", fileName);
		 FREE(prefix);
		 writer.open(fileName, CV_FOURCC('F', 'L', 'V', '1')/* works */, 30, cv::Size(640, 480), true);
//		 writer.open(fileName, CV_FOURCC('M', 'J', 'P', 'G')/* works */, 30, cv::Size(640, 480), true);
//		 writer.open(fileName, CV_FOURCC('D', 'I', 'V', 'X')/* works */, 30, cv::Size(640, 480), true);
//		 writer.open(fileName, CV_FOURCC('X', 'V', 'I', 'D')/* works */, 30, cv::Size(640, 480), true);
		 if(writer.isOpened())
		 {
			 LOGE("writer open successful!");
			 return 0;
		 }
		 else
		 {
			 LOGE("writer open failed!");
			 return -2;
		 }
	}
	else
	{
		//already recording
		return -1;
	}
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_stopRecord
  (JNIEnv *, jclass)
{
	if (writer.isOpened())
	{
		//if recording, stop it
		writer.release();
		LOGI("%s end record!", fileName);
		return 0;
	}
	return -1;
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_getWidth(JNIEnv* env,
		jclass obj)
{
	return 0;
}
JNIEXPORT jint JNICALL Java_luo_uvc_jni_ImageProc_getHeight(JNIEnv* env,
		jclass obj)
{
	return 0;
}
//jstring to char*
char* jstringToChar(JNIEnv* env, jstring jstr)
{
	char* rtn = NULL;
	jclass clsstring = env->FindClass("java/lang/String");
	jstring strencode = env->NewStringUTF("utf-8");
	jmethodID mid = env->GetMethodID(clsstring, "getBytes", "(Ljava/lang/String;)[B");
	jbyteArray barr= (jbyteArray)env->CallObjectMethod(jstr, mid, strencode);
	jsize alen = env->GetArrayLength(barr);
	jbyte* ba = env->GetByteArrayElements(barr, JNI_FALSE);
	if (alen > 0)
	{
		rtn = (char*)calloc(1, alen + 1);
		memcpy(rtn, ba, alen);
		rtn[alen] = 0;
	}
	env->ReleaseByteArrayElements(barr, ba, 0);
	return rtn;
}
#ifdef __cplusplus
}
#endif
/* end of extern */
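The post does not show ImageProc.java itself. From the JNI function names above, the fields that getFrame looks up ("mWidth", "mHeight", "mImageSize", "mPixels"), and the getters CameraPreview calls below, the Java side presumably looks roughly like this sketch (the exact class body and the native library name are assumptions; the misspelled getHeigth is kept to match the caller):

package luo.uvc.jni;

public class ImageProc
{
	// inner class matching the JNI descriptor luo/uvc/jni/ImageProc$FrameInfo
	public static class FrameInfo
	{
		public int mWidth;
		public int mHeight;
		public int mImageSize;
		public int[] mPixels;

		public int getWidth() { return mWidth; }
		public int getHeigth() { return mHeight; } // spelling matches the caller below
		public int[] getPixels() { return mPixels; }
	}

	static
	{
		System.loadLibrary("ImageProc"); // library name is an assumption
	}

	// signatures derived from the Java_luo_uvc_jni_ImageProc_* functions above
	public static native int[] grayProc(int[] buf, int w, int h);
	public static native int connectCamera(int device);
	public static native int releaseCamera();
	public static native FrameInfo getFrame();
	public static native int startRecord(String prefix);
	public static native int stopRecord();
	public static native int getWidth();
	public static native int getHeight();
}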
CameraPreview.java
package luo.uvc.jni;
import java.text.SimpleDateFormat;
import java.util.Date;
import luo.uvc.jni.ImageProc.FrameInfo;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.Bitmap.Config;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable
{
	public static final String TAG = "UVCCameraPreview";
	protected Context context;
	private SurfaceHolder holder;
	Thread mainLoop = null;
	
	public static final int SET_PREVIEW_TEXT = 0;
	public static final int SET_RECORD_TEXT = 1;
	private boolean mIsOpened = false;
	private boolean mIsRecording = false;
	private boolean shouldStop = false;
	public callback textCallback; // 'callback' is an interface defined elsewhere in the project
	// The following variables are used to draw camera images.
	private int winWidth = 0;
	private int winHeight = 0;
	private Rect rect;
	private int dw, dh;
	private float rate;
	public CameraPreview(Context context)
	{
		super(context);
		this.context = context;
		Log.d(TAG, "CameraPreview constructed");
		setFocusable(true);
		holder = getHolder();
		holder.addCallback(this);
		holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
	}
	//note: when findViewById returns a CameraPreview from a layout, this is the constructor invoked
	public CameraPreview(Context context, AttributeSet attrs)
	{
		super(context, attrs);
		this.context = context;
		Log.d(TAG, "CameraPreview constructed");
		setFocusable(true);
		holder = getHolder();
		holder.addCallback(this);
		holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
		
	}	
	public void initPreview()
	{
		int index = -1;
		if (mIsOpened == false)
		{
			if (0 == ImageProc.connectCamera(index))
			{
				Log.i(TAG, "open uvc success!!!");
				mIsOpened = true;
				textCallback.setViewText(SET_PREVIEW_TEXT, "Close");
				if (null != mainLoop)
				{
					shouldStop = false;
					Log.i(TAG, "preview mainloop starting...");
					mainLoop.start();
				}
				Toast.makeText(context.getApplicationContext(), "Camera opened", Toast.LENGTH_SHORT).show();
			} else
			{
				Log.i(TAG, "open uvc fail!!!");
				mIsOpened = false;
				Toast.makeText(context.getApplicationContext(), "Failed to open camera", Toast.LENGTH_SHORT).show();
			}
		} else
		{
			uninitPreview();
		}
	}
	public void uninitPreview()
	{
		//stop recording first
		uninitRecord();
		
		//stop the preview thread
		if (null != mainLoop)
		{
			Log.i(TAG, mainLoop.isAlive() ? "mainloop is alive!" : "mainloop is not alive!");
			if (mainLoop.isAlive())
			{
				shouldStop = true;
				while (shouldStop)
				{
					try
					{
						Thread.sleep(100); // wait for thread stopping
					} catch (Exception e)
					{
					}
				}
			}
		}
		//close the camera
		if (mIsOpened)
		{
			mIsOpened = false;
			ImageProc.releaseCamera();
			textCallback.setViewText(SET_PREVIEW_TEXT, "Open");
			Log.i(TAG, "release camera...");
		}
	}
	
	public void initRecord()
	{
		if(mIsOpened)
		{
			if(mIsRecording == false)
			{
				Log.i(TAG, "init camera record!");
				Date date = new Date();
				SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");				
				String dateString = simpleDateFormat.format(date);
				if(null == dateString)
				{
					dateString = "luoyouren";
				}
				Log.i(TAG, dateString);
				
				if(0 == ImageProc.startRecord(dateString))
				{
					mIsRecording = true;
					textCallback.setViewText(SET_RECORD_TEXT, "Stop");
					Toast.makeText(context.getApplicationContext(), "Recording started...", Toast.LENGTH_SHORT).show();
				}
				else 
				{
					mIsRecording = false;
					Log.e(TAG, "init camera record failed!");
					Toast.makeText(context.getApplicationContext(), "Failed to start recording!", Toast.LENGTH_SHORT).show();
				}
				return;
			}
			else 
			{
				uninitRecord();
				return;
			}
		}
		else 
		{
			Log.e(TAG, "camera has not been opened!");
			return;
		}
	}
	
	public void uninitRecord()
	{
		if(mIsRecording)
		{
			Log.i(TAG, "camera is already recording! So we stop it.");
			ImageProc.stopRecord();
			mIsRecording = false;
			textCallback.setViewText(SET_RECORD_TEXT, "Record");
			return;
		}
	}
	public boolean isOpen()
	{
		return mIsOpened;
	}
	
	public boolean isRecording()
	{
		return mIsRecording;
	}
	@Override
	public void run()
	{
		while (mIsOpened)
		{
			// get camera frame
			FrameInfo frame = ImageProc.getFrame();
			if (null == frame)
			{
				continue;
			}
			int w = frame.getWidth();
			int h = frame.getHeigth();
			Log.i(TAG, "frame.width = " + w + " frame.height = " + h);
			// update the display rect from the frame size
			// normally the frame size does not change: 640x480
			updateRect(w, h);
			Bitmap resultImg = Bitmap.createBitmap(w, h, Config.ARGB_8888);
			resultImg.setPixels(frame.getPixels(), 0, w, 0, 0, w, h);
			// refresh the SurfaceView
			Canvas canvas = getHolder().lockCanvas();
			if (canvas != null)
			{
				// draw camera bmp on canvas
				canvas.drawBitmap(resultImg, null, rect, null);
				getHolder().unlockCanvasAndPost(canvas);
			}
			if (shouldStop)
			{
				shouldStop = false;
				Log.i(TAG, "mainloop will stop!");
				break;
			}
		}
		
		Log.i(TAG, "mainloop break while!");
	}
	public void updateRect(int frame_w, int frame_h)
	{
		// obtaining display area to draw a large image
		if (winWidth == 0)
		{
			winWidth = this.getWidth();
			winHeight = this.getHeight();
			if (winWidth * 3 / 4 <= winHeight)
			{
				dw = 0;
				dh = (winHeight - winWidth * 3 / 4) / 2;
				rate = ((float) winWidth) / frame_w;
				rect = new Rect(dw, dh, dw + winWidth - 1, dh + winWidth * 3 / 4 - 1);
			} else
			{
				dw = (winWidth - winHeight * 4 / 3) / 2;
				dh = 0;
				rate = ((float) winHeight) / frame_h;
				rect = new Rect(dw, dh, dw + winHeight * 4 / 3 - 1, dh + winHeight - 1);
			}
		}
	}
	@Override
	public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3)
	{
	}
	@Override
	public void surfaceCreated(SurfaceHolder arg0)
	{
		mainLoop = new Thread(this);
		
		updateRect(512, 512); // updateRect computes its rect only once (the winWidth == 0 guard), so this call also fixes the rect reused later for camera frames
		// load the bundled lena image and display it as a placeholder
		Bitmap resultImg = BitmapFactory.decodeResource(getResources(), R.drawable.lena);
		// refresh the SurfaceView
		Canvas canvas = getHolder().lockCanvas();
		if (canvas != null)
		{
			// draw the placeholder bitmap on the canvas
			canvas.drawBitmap(resultImg, null, rect, null);
			getHolder().unlockCanvasAndPost(canvas);
		}
	}
	@Override
	public void surfaceDestroyed(SurfaceHolder arg0)
	{
	}
}
After compiling, installing, and running, the live camera preview works correctly, just as in《我的Opencv4Android添加V4L2支持的移植记录(2)》. But recording fails!
Let's look at how OpenCV dispatches the VideoWriter functionality to the backends of each platform (the #pragma message lines were added to trace which branches actually get compiled):
/**
 * Videowriter dispatching method: it tries to find the first
 * API that can write a given stream.
 */
CV_IMPL CvVideoWriter* cvCreateVideoWriter( const char* filename, int fourcc,
                                            double fps, CvSize frameSize, int is_color )
{
    //CV_FUNCNAME( "cvCreateVideoWriter" );
    CvVideoWriter *result = 0;
#pragma message("------------------------ cvCreateVideoWriter")
    if(!fourcc || !fps)
        result = cvCreateVideoWriter_Images(filename);
#ifdef HAVE_FFMPEG
#pragma message("------------------------ HAVE_FFMPEG")
    if(!result)
        result = cvCreateVideoWriter_FFMPEG_proxy (filename, fourcc, fps, frameSize, is_color);
#endif
#ifdef HAVE_VFW
#pragma message("------------------------ HAVE_VFW")
    if(!result)
        result = cvCreateVideoWriter_VFW(filename, fourcc, fps, frameSize, is_color);
#endif
#ifdef HAVE_MSMF
#pragma message("------------------------ HAVE_MSMF")
    if (!result)
        result = cvCreateVideoWriter_MSMF(filename, fourcc, fps, frameSize, is_color);
#endif
/*  #ifdef HAVE_XINE
    if(!result)
        result = cvCreateVideoWriter_XINE(filename, fourcc, fps, frameSize, is_color);
    #endif
*/
#ifdef HAVE_AVFOUNDATION
#pragma message("------------------------ HAVE_AVFOUNDATION")
    if (! result)
        result = cvCreateVideoWriter_AVFoundation(filename, fourcc, fps, frameSize, is_color);
#endif
#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT)
#pragma message("------- defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT)")
    if(!result)
        result = cvCreateVideoWriter_QT(filename, fourcc, fps, frameSize, is_color);
#endif
#ifdef HAVE_GSTREAMER
#pragma message("------------------------ HAVE_GSTREAMER")
    if (! result)
        result = cvCreateVideoWriter_GStreamer(filename, fourcc, fps, frameSize, is_color);
#endif
#if !defined(HAVE_FFMPEG) &&     !defined(HAVE_VFW) &&     !defined(HAVE_MSMF) &&     !defined(HAVE_AVFOUNDATION) &&     !defined(HAVE_QUICKTIME) &&     !defined(HAVE_QTKIT) &&     !defined(HAVE_GSTREAMER)
// If none of the writers is used
// these statements suppress 'unused parameter' warnings.
    (void)frameSize;
    (void)is_color;
#endif
#pragma message("------------------------ cvCreateVideoWriter_Images")
    if(!result)
        result = cvCreateVideoWriter_Images(filename);
    return result;
}
enum
{
    CV_CAP_ANY      =0,     // autodetect
    CV_CAP_MIL      =100,   // MIL proprietary drivers
    CV_CAP_VFW      =200,   // platform native
    CV_CAP_V4L      =200,
    CV_CAP_V4L2     =200,
    CV_CAP_FIREWARE =300,   // IEEE 1394 drivers
    CV_CAP_FIREWIRE =300,
    CV_CAP_IEEE1394 =300,
    CV_CAP_DC1394   =300,
    CV_CAP_CMU1394  =300,
    CV_CAP_STEREO   =400,   // TYZX proprietary drivers
    CV_CAP_TYZX     =400,
    CV_TYZX_LEFT    =400,
    CV_TYZX_RIGHT   =401,
    CV_TYZX_COLOR   =402,
    CV_TYZX_Z       =403,
    CV_CAP_QT       =500,   // QuickTime
    CV_CAP_UNICAP   =600,   // Unicap drivers
    CV_CAP_DSHOW    =700,   // DirectShow (via videoInput)
    CV_CAP_MSMF     =1400,  // Microsoft Media Foundation (via videoInput)
    CV_CAP_PVAPI    =800,   // PvAPI, Prosilica GigE SDK
    CV_CAP_OPENNI   =900,   // OpenNI (for Kinect)
    CV_CAP_OPENNI_ASUS =910,   // OpenNI (for Asus Xtion)
    CV_CAP_ANDROID  =1000,  // Android
    CV_CAP_ANDROID_BACK =CV_CAP_ANDROID+99, // Android back camera
    CV_CAP_ANDROID_FRONT =CV_CAP_ANDROID+98, // Android front camera
    CV_CAP_XIAPI    =1100,   // XIMEA Camera API
    CV_CAP_AVFOUNDATION = 1200,  // AVFoundation framework for iOS (OS X Lion will have the same API)
    CV_CAP_GIGANETIX = 1300,  // Smartek Giganetix GigEVisionSDK
    CV_CAP_INTELPERC = 1500 // Intel Perceptual Computing SDK
};
A comparison with the earlier CMake output makes the cause clear:
After my V4L2 addition, this OpenCV build supports two Video I/O paths, NativeCamera and V4L2, whereas the Ubuntu build also has FFMPEG support (FFMPEG is the library OpenCV relies on for encoding and decoding video files; search online if it is unfamiliar to you). And cvCreateVideoWriter has no NativeCamera branch at all, so with no writer backend compiled in, my app's recording fails.
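Besides diffing the CMake output, you can also query the compiled library at runtime: cv::getBuildInformation() (available in OpenCV 2.4) returns the full build configuration, whose Video I/O section lists exactly which backends were compiled in. A minimal sketch:

#include <cstdio>
#include "opencv2/core/core.hpp"

int main()
{
	// the "Video I/O" section of this report shows FFMPEG, V4L/V4L2,
	// GStreamer, etc. as YES/NO for the library actually being linked
	printf("%s\n", cv::getBuildInformation().c_str());
	return 0;
}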
So for yanzi_OpenCV4Android to record video, FFMPEG support must be added!
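Until FFMPEG is in place, a quick way to check whether any writer backend works on a given build is to probe a few FOURCCs, mirroring the commented-out alternatives in startRecord above. A minimal sketch (the output path and the candidate list are placeholders):

#include <cstdio>
#include "opencv2/highgui/highgui.hpp"

int main()
{
	const int candidates[] = {
		CV_FOURCC('F', 'L', 'V', '1'),
		CV_FOURCC('M', 'J', 'P', 'G'),
		CV_FOURCC('D', 'I', 'V', 'X'),
		CV_FOURCC('X', 'V', 'I', 'D'),
	};
	for (int i = 0; i < 4; i++)
	{
		// open fails (isOpened() == false) when no backend compiled
		// into the library can encode the requested codec
		cv::VideoWriter writer;
		writer.open("/tmp/probe.avi", candidates[i], 30, cv::Size(640, 480), true);
		printf("candidate %d: %s\n", i, writer.isOpened() ? "opened" : "failed");
	}
	return 0;
}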
PS:
Yesterday I kept puzzling over one question. VFW and DirectShow are both Microsoft video frameworks, and DirectShow is the successor to VFW. Yet in OpenCV:
cvCreateCameraCapture reads from a camera
cvCreateFileCapture reads a video file
cvCreateVideoWriter writes a video file
The camera-side cvCreateCameraCapture supports both DirectShow and VFW, but the two file-side functions only support VFW.
My guess is that DirectShow is simply not well suited to reading and writing video files.
我的Opencv4Android添加V4L2支持的移植记录(3)
Original article: http://blog.csdn.net/luoyouren/article/details/51895475