Android Camera2 Photo Capture (Part 2): Using TextureView
The previous post briefly introduced taking photos with the Camera2 API, using a SurfaceView as the preview surface. In practice, TextureView is usually the better fit for video and still capture; SurfaceView has its own use cases, which will be covered in a separate post. Here we use a TextureView as the preview surface to demonstrate, once again, a simple application of the Camera2 API.
Add a TextureView to the layout file, look it up in code, and implement its listener:
mTextureView = (TextureView) findViewById(R.id.textureView);
Then, in onResume(), we can register a listener for the SurfaceTexture events:
mTextureView.setSurfaceTextureListener(mTextureListener);
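The post does not show the surrounding lifecycle code. Below is a minimal sketch of what onResume()/onPause() and the background handler setup might look like, written Fragment-style to match the getContext()/getActivity() calls used elsewhere in the post; startBackgroundThread(), stopBackgroundThread() and mBackgroundThread are hypothetical names of mine, while mCameraHandler is the handler the snippets below post to.

@Override
public void onResume() {
    super.onResume();
    startBackgroundThread();
    if (mTextureView.isAvailable()) {
        // After a pause/resume cycle the SurfaceTexture may already exist and the
        // availability callback will not fire again, so open the camera directly
        setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
        openCamera();
    } else {
        mTextureView.setSurfaceTextureListener(mTextureListener);
    }
}

@Override
public void onPause() {
    // Closing mCameraCaptureSession and mCameraDevice here is also advisable
    stopBackgroundThread();
    super.onPause();
}

private HandlerThread mBackgroundThread;

private void startBackgroundThread() {
    // mCameraHandler is the background handler used by the camera callbacks below
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mCameraHandler = new Handler(mBackgroundThread.getLooper());
}

private void stopBackgroundThread() {
    if (mBackgroundThread != null) {
        mBackgroundThread.quitSafely();
        mBackgroundThread = null;
        mCameraHandler = null;
    }
}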
When the SurfaceTexture is ready, the SurfaceTextureListener's onSurfaceTextureAvailable() method is called back:
private TextureView.SurfaceTextureListener mTextureListener = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        // Once the SurfaceTexture is available, configure the camera parameters and open the camera
        setupCamera(width, height);
        openCamera();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
private void setupCamera(int width, int height) {
    // Get the camera manager (CameraManager)
    CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
    try {
        // Iterate over all cameras
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            // Skip the front camera; the back camera is opened by default here
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT)
                continue;
            // The StreamConfigurationMap describes all output formats and sizes the camera supports
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            assert map != null;
            // Choose the preview size based on the TextureView's dimensions
            mPreviewSize = getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
            // Pick the largest JPEG size the camera supports for still capture
            mCaptureSize = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new Comparator<Size>() {
                @Override
                public int compare(Size lhs, Size rhs) {
                    // Multiply as long to avoid int overflow when comparing areas
                    return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                            - (long) rhs.getWidth() * rhs.getHeight());
                }
            });
            // This ImageReader is where the still capture will be delivered
            setupImageReader();
            mCameraId = cameraId;
            break;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
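For reference, the snippets in this post assume roughly the following member fields. The original does not show the declarations, so the exact types and the REQUEST_CAMERA_CODE value here are inferred from how they are used:

private String mCameraId;
private Size mPreviewSize;
private Size mCaptureSize;
private ImageReader mImageReader;
private Handler mCameraHandler;
private CameraManager mCameraManager;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCameraCaptureSession;
private CaptureRequest.Builder mCaptureRequestBuilder;
private CaptureRequest mCaptureRequest;
private TextureView mTextureView;
private ImageView ivShow;
private static final int REQUEST_CAMERA_CODE = 1; // request code value chosen arbitrarily here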
// Choose the smallest size in sizeMap that is still larger than width and height
private Size getOptimalSize(Size[] sizeMap, int width, int height) {
    List<Size> sizeList = new ArrayList<>();
    for (Size option : sizeMap) {
        if (width > height) {
            if (option.getWidth() > width && option.getHeight() > height) {
                sizeList.add(option);
            }
        } else {
            // The view is in portrait while camera sizes are reported in landscape, so compare swapped
            if (option.getWidth() > height && option.getHeight() > width) {
                sizeList.add(option);
            }
        }
    }
    if (sizeList.size() > 0) {
        return Collections.min(sizeList, new Comparator<Size>() {
            @Override
            public int compare(Size lhs, Size rhs) {
                // Multiply as long to avoid int overflow when comparing areas
                return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                        - (long) rhs.getWidth() * rhs.getHeight());
            }
        });
    }
    return sizeMap[0];
}
private void setupImageReader() {
    // maxImages = 1: at most one Image can be acquired from this ImageReader at a time
    mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),
            ImageFormat.JPEG, 1);
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            final Image image = reader.acquireNextImage();
            // Copy the JPEG bytes once, here on the camera handler thread, so the UI thread
            // never touches the Image buffer after it has been consumed or closed
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            final byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            // Hand the Image over to be written to disk; imageSaver closes it when done
            mCameraHandler.post(new imageSaver(image));
            getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // Decode the copied bytes and show the captured photo
                    final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                    if (bitmap != null) {
                        ivShow.setImageBitmap(bitmap);
                    }
                }
            });
        }
    }, mCameraHandler);
}
public static class imageSaver implements Runnable {
    private Image mImage;

    public imageSaver(Image image) {
        mImage = image;
    }

    @Override
    public void run() {
        // Rewind in case the buffer was already read when the preview bitmap was extracted
        ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
        buffer.rewind();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        String path = Environment.getExternalStorageDirectory() + "/DCIM/361camera/";
        File mImageFile = new File(path);
        if (!mImageFile.exists()) {
            boolean ret = mImageFile.mkdirs();
            assert (ret);
        }
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
        String fileName = path + "IMG_" + timeStamp + ".jpg";
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(fileName);
            fos.write(data, 0, data.length);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            mImage.close();
        }
    }
}
private void openCamera() {
    mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
    try {
        if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            // The CAMERA permission has not been granted yet, so request it first;
            // the camera is opened later, once the permission result arrives
            requestPermissions(new String[]{Manifest.permission.CAMERA},
                    REQUEST_CAMERA_CODE);
        } else {
            mCameraManager.openCamera(mCameraId, mStateCallback, mCameraHandler);
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
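The post does not show how the permission result is handled. A minimal sketch, assuming REQUEST_CAMERA_CODE is the request code used above, might look like this:

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_CAMERA_CODE) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // Permission granted: retry opening the camera
            openCamera();
        } else {
            // Permission denied: the preview cannot be started
            Toast.makeText(getContext(), "Camera permission is required", Toast.LENGTH_SHORT).show();
        }
    }
}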
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice camera) {
        mCameraDevice = camera;
        startPreview();
    }

    @Override
    public void onDisconnected(CameraDevice camera) {
        camera.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(CameraDevice camera, int error) {
        camera.close();
        mCameraDevice = null;
    }
};
private void startPreview() {
    SurfaceTexture mSurfaceTexture = mTextureView.getSurfaceTexture();
    mSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    Surface previewSurface = new Surface(mSurfaceTexture);
    try {
        mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequestBuilder.addTarget(previewSurface);
        // Create the session with both the preview surface and the ImageReader's surface,
        // so that still captures can later be routed to the ImageReader
        mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    mCaptureRequest = mCaptureRequestBuilder.build();
                    mCameraCaptureSession = session;
                    // Submit the preview request repeatedly to keep the preview running
                    mCameraCaptureSession.setRepeatingRequest(mCaptureRequest, null, mCameraHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, mCameraHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
To capture a photo, an ImageReader is first created and its onImageAvailable event is listened for (see setupImageReader() above). Then, when the preview is started, the ImageReader's surface is added as an output target of the capture session (see startPreview() above). Taking a picture then begins by locking focus:
public void takePicture() {
    lockFocus();
}

private void lockFocus() {
    try {
        // Trigger auto-focus; the result is handled in mCaptureCallback before the still capture is issued
        mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
        mCameraCaptureSession.capture(mCaptureRequestBuilder.build(), mCaptureCallback, mCameraHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
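The post stops here and does not show mCaptureCallback or the still-capture request itself. Below is a minimal sketch of what they might look like, following the common Camera2 still-capture pattern; the capturePicture() helper is my own name, and JPEG orientation handling is omitted for brevity:

private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
        // Treat completion of the AF-trigger request as focus locked and issue the still capture
        capturePicture();
    }
};

private void capturePicture() {
    try {
        CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        // Send the still capture to the ImageReader so onImageAvailable() receives the JPEG
        builder.addTarget(mImageReader.getSurface());
        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        mCameraCaptureSession.stopRepeating();
        mCameraCaptureSession.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                try {
                    // Cancel the AF trigger and restart the preview after the picture has been taken
                    mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
                    mCameraCaptureSession.setRepeatingRequest(mCaptureRequest, null, mCameraHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
        }, mCameraHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}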
Original post: https://www.cnblogs.com/lonelyxmas/p/9175770.html