
Face Detection API Example

The listing below collects three source files from the developer.com tutorial "Face Detection with Android APIs": MyImageView, a custom ImageView that overlays detected feature points on a photo, and two Activity variants, TutorialOnFaceDetect (detection in a background thread) and TutorialOnFaceDetect1 (detection directly in onCreate()).



package cliu.TutorialOnFaceDetect;

/*
* MyImageView.java
* Download by http://www.codefans.net
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.widget.ImageView;

class MyImageView extends ImageView {
    private Bitmap mBitmap;
    private Canvas mCanvas;
    private int mBitmapWidth = 200;
    private int mBitmapHeight = 200;
    private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private int mDisplayStyle = 0;
    private int[] mPX = null;
    private int[] mPY = null;

    public MyImageView(Context c) {
        super(c);
        init();
    }

    public MyImageView(Context c, AttributeSet attrs) {
        super(c, attrs);
        init();
    }

    private void init() {
        mBitmap = Bitmap.createBitmap(mBitmapWidth, mBitmapHeight, Bitmap.Config.RGB_565);
        mCanvas = new Canvas(mBitmap);

        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeCap(Paint.Cap.ROUND);
        mPaint.setColor(0x80ff0000);
        mPaint.setStrokeWidth(3);
    }

    public Bitmap getBitmap() {
        return mBitmap;
    }

    @Override
    public void setImageBitmap(Bitmap bm) {
        if (bm != null) {
            mBitmapWidth = bm.getWidth();
            mBitmapHeight = bm.getHeight();

            // keep an RGB_565 copy of the photo to draw the markers over
            mBitmap = Bitmap.createBitmap(mBitmapWidth, mBitmapHeight, Bitmap.Config.RGB_565);
            mCanvas = new Canvas();
            mCanvas.setBitmap(mBitmap);
            mCanvas.drawBitmap(bm, 0, 0, null);
        }

        super.setImageBitmap(bm);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);

        mBitmapWidth = (mBitmap != null) ? mBitmap.getWidth() : 0;
        mBitmapHeight = (mBitmap != null) ? mBitmap.getHeight() : 0;
        if (mBitmapWidth == w && mBitmapHeight == h) {
            return;
        }

        if (mBitmapWidth < w) mBitmapWidth = w;
        if (mBitmapHeight < h) mBitmapHeight = h;
    }

    // set up detected face features for display
    public void setDisplayPoints(int[] xx, int[] yy, int total, int style) {
        mDisplayStyle = style;
        mPX = null;
        mPY = null;

        if (xx != null && yy != null && total > 0) {
            mPX = new int[total];
            mPY = new int[total];

            for (int i = 0; i < total; i++) {
                mPX[i] = xx[i];
                mPY[i] = yy[i];
            }
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (mBitmap != null) {
            canvas.drawBitmap(mBitmap, 0, 0, null);

            if (mPX != null && mPY != null) {
                for (int i = 0; i < mPX.length; i++) {
                    if (mDisplayStyle == 1) {
                        // style 1: a circle around each point
                        canvas.drawCircle(mPX[i], mPY[i], 10.0f, mPaint);
                    } else {
                        // any other style: a 40x40 rectangle centered on the point
                        canvas.drawRect(mPX[i] - 20, mPY[i] - 20, mPX[i] + 20, mPY[i] + 20, mPaint);
                    }
                }
            }
        }
    }
}
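
A minimal usage sketch (not part of the original tutorial; context, photo, xs and ys are illustrative names). MyImageView keeps an RGB_565 copy of the bitmap passed to setImageBitmap(), and onDraw() paints the markers over that copy once setDisplayPoints() has been called and the view is invalidated:

// Usage sketch with illustrative names (not from the tutorial)
MyImageView view = new MyImageView(context);
view.setImageBitmap(photo);                   // the view keeps an RGB_565 copy of the photo
view.setDisplayPoints(xs, ys, xs.length, 1);  // style 1 = circles; any other style = 40x40 rectangles
view.invalidate();                            // triggers onDraw(), which paints the markers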

 

 

----------------------------------------------

package cliu.TutorialOnFaceDetect;

/*
* TutorialOnFaceDetect
* Download by http://www.codefans.net
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.widget.LinearLayout.LayoutParams;

public class TutorialOnFaceDetect extends Activity {
    private MyImageView mIV;
    private Bitmap mFaceBitmap;
    private int mFaceWidth = 200;
    private int mFaceHeight = 200;
    private static final int MAX_FACES = 10;
    private static final String TAG = "TutorialOnFaceDetect";
    private static final boolean DEBUG = false;

    protected static final int GUIUPDATE_SETFACE = 999;
    protected Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            mIV.invalidate();
            super.handleMessage(msg);
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mIV = new MyImageView(this);
        setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));

        // load the photo
        Bitmap b = BitmapFactory.decodeResource(getResources(), R.drawable.face3);
        mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
        b.recycle();

        mFaceWidth = mFaceBitmap.getWidth();
        mFaceHeight = mFaceBitmap.getHeight();
        mIV.setImageBitmap(mFaceBitmap);
        mIV.invalidate();

        // perform face detection in setFace() in a background thread
        doLengthyCalc();
    }

    public void setFace() {
        FaceDetector fd;
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        PointF eyescenter = new PointF();
        float eyesdist = 0.0f;
        int[] fpx = null;
        int[] fpy = null;
        int count = 0;

        try {
            fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
            count = fd.findFaces(mFaceBitmap, faces);
        } catch (Exception e) {
            Log.e(TAG, "setFace(): " + e.toString());
            return;
        }

        // check if we detected any faces
        if (count > 0) {
            // two points per face: one for each eye
            fpx = new int[count * 2];
            fpy = new int[count * 2];

            for (int i = 0; i < count; i++) {
                try {
                    faces[i].getMidPoint(eyescenter);
                    eyesdist = faces[i].eyesDistance();

                    // set up left eye location
                    fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
                    fpy[2 * i] = (int) eyescenter.y;

                    // set up right eye location
                    fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
                    fpy[2 * i + 1] = (int) eyescenter.y;

                    if (DEBUG)
                        Log.e(TAG, "setFace(): face " + i + ": confidence = " + faces[i].confidence()
                                + ", eyes distance = " + faces[i].eyesDistance()
                                + ", pose = (" + faces[i].pose(FaceDetector.Face.EULER_X) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Y) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Z) + ")"
                                + ", eyes midpoint = (" + eyescenter.x + "," + eyescenter.y + ")");
                } catch (Exception e) {
                    Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
                }
            }
        }

        // style 1 draws a circle at each eye position
        mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
    }

    private void doLengthyCalc() {
        Thread t = new Thread() {
            Message m = new Message();

            public void run() {
                try {
                    setFace();
                    m.what = TutorialOnFaceDetect.GUIUPDATE_SETFACE;
                    TutorialOnFaceDetect.this.mHandler.sendMessage(m);
                } catch (Exception e) {
                    Log.e(TAG, "doLengthyCalc(): " + e.toString());
                }
            }
        };

        t.start();
    }
}
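
One practical caveat that the tutorial code does not handle: android.media.FaceDetector works on RGB_565 bitmaps, and its documentation notes that the image width must be even, so odd-width photos are a common reason detection finds nothing. A hedged helper sketch, assuming the same imports as the activity above (the method name prepareForDetection is my own):

// Pre-detection guard (sketch; helper name is illustrative)
private static Bitmap prepareForDetection(Bitmap src) {
    int w = src.getWidth();
    if (w % 2 != 0) {
        // drop the last pixel column so the width becomes even
        src = Bitmap.createBitmap(src, 0, 0, w - 1, src.getHeight());
    }
    // findFaces() expects a 565-format bitmap
    return (src.getConfig() == Bitmap.Config.RGB_565)
            ? src
            : src.copy(Bitmap.Config.RGB_565, false);
}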

 

-------------------------------------------------------

package cliu.TutorialOnFaceDetect;

/*
* TutorialOnFaceDetect1
* Download by http://www.codefans.net
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.os.Bundle;
import android.util.Log;
import android.widget.LinearLayout.LayoutParams;

public class TutorialOnFaceDetect1 extends Activity {
    private MyImageView mIV;
    private Bitmap mFaceBitmap;
    private int mFaceWidth = 200;
    private int mFaceHeight = 200;
    private static final int MAX_FACES = 10;
    private static final String TAG = "TutorialOnFaceDetect";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mIV = new MyImageView(this);
        setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));

        // load the photo
        Bitmap b = BitmapFactory.decodeResource(getResources(), R.drawable.face3);
        mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
        b.recycle();

        mFaceWidth = mFaceBitmap.getWidth();
        mFaceHeight = mFaceBitmap.getHeight();
        mIV.setImageBitmap(mFaceBitmap);

        // perform face detection and set the feature points (here on the UI thread)
        setFace();

        mIV.invalidate();
    }

    public void setFace() {
        FaceDetector fd;
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        PointF midpoint = new PointF();
        int[] fpx = null;
        int[] fpy = null;
        int count = 0;

        try {
            fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
            count = fd.findFaces(mFaceBitmap, faces);
        } catch (Exception e) {
            Log.e(TAG, "setFace(): " + e.toString());
            return;
        }

        // check if we detected any faces
        if (count > 0) {
            // one point per face: the midpoint between the eyes
            fpx = new int[count];
            fpy = new int[count];

            for (int i = 0; i < count; i++) {
                try {
                    faces[i].getMidPoint(midpoint);

                    fpx[i] = (int) midpoint.x;
                    fpy[i] = (int) midpoint.y;
                } catch (Exception e) {
                    Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
                }
            }
        }

        // style 0 draws a rectangle centered on each midpoint
        mIV.setDisplayPoints(fpx, fpy, count, 0);
    }
}
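
TutorialOnFaceDetect1 marks only the midpoint between the eyes with a fixed 40x40 rectangle. If a box around the whole face is wanted instead, the eye distance reported by the detector can serve as a scale. A rough sketch only: the proportions below are guesses, and face, canvas and paint are assumed to be in scope:

// Rough face-box sketch (proportions are approximate, names illustrative)
PointF mid = new PointF();
face.getMidPoint(mid);          // midpoint between the eyes
float d = face.eyesDistance();  // eye-to-eye distance in pixels
// frame roughly one eye-distance to each side and a bit more below the eyes
canvas.drawRect(mid.x - d, mid.y - d, mid.x + d, mid.y + d * 1.5f, paint);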


Original source: http://www.cnblogs.com/qiaoxu/p/3989893.html
