/**
 * Screen-recording service
 */
public class RecordService extends Service {
private static final String TAG = "bqt";
// Output directory for recordings; note Environment.getExternalStorageDirectory() is deprecated on API 29+
private static final String mVideoPath = Environment.getExternalStorageDirectory().getPath() + "/";
private MediaProjection mMpj;
private VirtualDisplay mVirtualDisplay;
private int windowWidth;
private int windowHeight;
private int screenDensity;
private Surface mSurface;
private MediaCodec mMediaCodec;
private MediaMuxer mMuxer;
private LinearLayout mCaptureLl;
private WindowManager wm;
private boolean isRecordOn;
private AtomicBoolean mIsQuit = new AtomicBoolean(false);
private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
private boolean mMuxerStarted = false;
private int mVideoTrackIndex = -1;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
createEnvironment();
// configureMedia() is deliberately not called here: recordStart() configures a fresh
// encoder for every recording, and also configuring one in onCreate() would leak the first codec
createFloatView();
}
private void configureMedia() {
    // MediaFormat describes the video format. In "video/avc", AVC is Advanced Video Coding (H.264).
    // windowWidth/windowHeight set the video size; it must not exceed the captured size, or the codec crashes outright.
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", windowWidth, windowHeight);
    // Bit rate: higher usually means a clearer but larger video; 2000000 is 2 Mbps
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
    // Frame rate: higher looks smoother. 30 is a sensible default; don't go below 24, or playback will visibly stutter
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    // COLOR_FormatSurface means the input comes from a Surface (GraphicBuffer metadata) instead of raw byte buffers
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    // Key-frame (I-frame) interval in seconds. With 10, there is one key frame every 10 seconds, so any
    // seek/preview within those 10 seconds lands on the same frame; use 1 for fine-grained previews. 2 is used here.
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
    try {
        // Create an encoder instance for the format defined above
        mMediaCodec = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
        // Without an encoder the service cannot work, so fail fast instead of hitting an NPE in configure() below
        throw new RuntimeException("failed to create video/avc encoder", e);
    }
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // This step is the key one: MediaCodec does not take a stream as input. Instead it hands us an input
    // Surface, and whatever is rendered onto that Surface (here, the VirtualDisplay mirroring the screen) gets encoded.
    mSurface = mMediaCodec.createInputSurface();
    mMediaCodec.start();
}
private void createEnvironment() {
wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics metric = new DisplayMetrics();
// getMetrics() excludes system decorations; use getRealMetrics() if the navigation bar should be captured too
wm.getDefaultDisplay().getMetrics(metric);
windowWidth = metric.widthPixels;
windowHeight = metric.heightPixels;
screenDensity = metric.densityDpi;
}
@SuppressLint("InflateParams")
private void createFloatView() {
// TYPE_PHONE requires the SYSTEM_ALERT_WINDOW permission and is rejected on API 26+,
// where TYPE_APPLICATION_OVERLAY must be used instead (see the sketch after both services)
final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
        WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
params.x = windowWidth;
params.y = windowHeight / 2;
params.gravity = Gravity.LEFT | Gravity.TOP;
params.width = WindowManager.LayoutParams.WRAP_CONTENT;
params.height = WindowManager.LayoutParams.WRAP_CONTENT;
LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_record, null);
final ImageView mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_record);
wm.addView(mCaptureLl, params);
mCaptureIv.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
isRecordOn = !isRecordOn;
if (isRecordOn) {
mCaptureIv.setImageResource(R.drawable.ic_recording);
Toast.makeText(RecordService.this.getApplicationContext(), "Recording started", Toast.LENGTH_SHORT).show();
recordStart();
} else {
mCaptureIv.setImageResource(R.drawable.ic_record);
Toast.makeText(RecordService.this.getApplicationContext(), "Recording stopped", Toast.LENGTH_SHORT).show();
recordStop();
}
}
});
mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
wm.updateViewLayout(mCaptureLl, params);
return false;
}
});
}
private void recordStop() {
    mIsQuit.set(true);// signals the drain loop in recordVirtualDisplay() to exit; release() then runs in startRecord()'s finally block
}
private void recordStart() {
configureMedia();
startVirtual();
// Drain the encoder output on a background thread, since recordVirtualDisplay() blocks in a loop
new Thread() {
    @Override
    public void run() {
        Log.e(TAG, "start startRecord");
        startRecord();
    }
}.start();
}
private void startRecord() {
try {
mMuxer = new MediaMuxer(mVideoPath + System.currentTimeMillis() + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
recordVirtualDisplay();
} catch (IOException e) {
e.printStackTrace();
} finally {
release();
}
}
private void startVirtual() {
if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
mVirtualDisplay = mMpj.createVirtualDisplay("record_screen", windowWidth, windowHeight, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mSurface, null, null);
}
private void recordVirtualDisplay() {
    while (!mIsQuit.get()) {
        int index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000);
        Log.i(TAG, "dequeue output buffer index=" + index);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {// output format changed; happens once, before any data
            resetOutputFormat();
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {// the dequeue timed out; no output available yet
            Log.d(TAG, "retrieving buffers time out!");
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();// preserve the interrupt status instead of swallowing it
            }
        } else if (index >= 0) {// a valid output buffer
            if (!mMuxerStarted) {
                throw new IllegalStateException("MediaMuxer has not started: addTrack(format) was never called");
            }
            encodeToVideoTrack(index);
            mMediaCodec.releaseOutputBuffer(index, false);
        }
    }
}
private void encodeToVideoTrack(int index) {
ByteBuffer encodedData = mMediaCodec.getOutputBuffer(index);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {// codec-specific config data (SPS/PPS), not media data
    // The codec config data was already pulled out and fed to the muxer when we got the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size == 0) {
Log.d(TAG, "info.size == 0, drop it.");
encodedData = null;
} else {
Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size + ", presentationTimeUs=" + mBufferInfo.presentationTimeUs + ", offset="
+ mBufferInfo.offset);
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo);// write the encoded sample into the video track
Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer...");
}
}
private void resetOutputFormat() {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new IllegalStateException("output format already changed!");
}
MediaFormat newFormat = mMediaCodec.getOutputFormat();
Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
mVideoTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
}
private void release() {
    mIsQuit.set(false);
    Log.i(TAG, " release() ");
    if (mMediaCodec != null) {
        mMediaCodec.stop();
        mMediaCodec.release();
        mMediaCodec = null;
    }
    if (mVirtualDisplay != null) {
        mVirtualDisplay.release();
        mVirtualDisplay = null;
    }
    if (mMuxer != null) {
        // MediaMuxer.stop() throws IllegalStateException if the muxer never started
        // (e.g. recording was stopped before the first output-format change), so guard it
        if (mMuxerStarted) {
            mMuxer.stop();
        }
        mMuxer.release();
        mMuxer = null;
    }
    mMuxerStarted = false;
    mVideoTrackIndex = -1;
}
@Override
public void onDestroy() {
super.onDestroy();
release();
if (mMpj != null) {
mMpj.stop();
}
if (mCaptureLl != null) {
wm.removeView(mCaptureLl);
}
}
}
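Both services pull the projection grant from MyApplication.mMpmngr, MyApplication.mResultCode and MyApplication.mResultIntent, which this listing never populates. A minimal sketch of the Activity side, assuming those static fields exist on a custom MyApplication class (the Activity name and request code below are illustrative, not part of the original code):

public class ScreenShotActivity extends Activity {
    private static final int REQUEST_MEDIA_PROJECTION = 1;// hypothetical request code

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        MyApplication.mMpmngr = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        // Shows the system consent dialog for screen capture
        startActivityForResult(MyApplication.mMpmngr.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == RESULT_OK && data != null) {
            // Stash the grant so the services can call getMediaProjection() later
            MyApplication.mResultCode = resultCode;
            MyApplication.mResultIntent = data;
            startService(new Intent(this, RecordService.class));
        }
    }
}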
/**
 * Screenshot service
 * @author 白乾涛
 */
public class CaptureService extends Service {
private static final String TAG = "bqt";
private static final String mImagePath = Environment.getExternalStorageDirectory().getPath() + "/screenshot/";
private MediaProjection mMpj;
private ImageView mCaptureIv;
private LinearLayout mCaptureLl;
private ImageReader mImageReader;
private String mImageName;
private int screenDensity;
private int windowWidth;
private int windowHeight;
private VirtualDisplay mVirtualDisplay;
private WindowManager wm;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
createEnvironment();
createFloatView();
}
private void createEnvironment() {
wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics metric = new DisplayMetrics();
wm.getDefaultDisplay().getMetrics(metric);
windowWidth = metric.widthPixels;
windowHeight = metric.heightPixels;
screenDensity = metric.densityDpi;
// RGBA_8888 pixel format, with up to 2 images buffered in the reader
mImageReader = ImageReader.newInstance(windowWidth, windowHeight, PixelFormat.RGBA_8888, 2);
}
@SuppressLint("InflateParams")
private void createFloatView() {
// Same caveat as in RecordService: API 26+ requires TYPE_APPLICATION_OVERLAY instead of TYPE_PHONE
final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
        WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
params.x = 0;
params.y = windowHeight / 2;
params.gravity = Gravity.LEFT | Gravity.TOP;
params.width = WindowManager.LayoutParams.WRAP_CONTENT;
params.height = WindowManager.LayoutParams.WRAP_CONTENT;
LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_capture, null);
mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_capture);
wm.addView(mCaptureLl, params);
mCaptureIv.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
    // Hide the floating button first so it doesn't show up in the screenshot, then
    // stagger the steps on one Handler: create the virtual display, grab a frame, tear down
    mCaptureIv.setVisibility(View.INVISIBLE);
    Handler handler = new Handler();
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            Log.e(TAG, "start startVirtual");
            startVirtual();
        }
    }, 500);
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            Log.e(TAG, "start startCapture");
            startCapture();
        }
    }, 1000);
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            Log.e(TAG, "start stopVirtual");
            mCaptureIv.setVisibility(View.VISIBLE);
            stopVirtual();
        }
    }, 1500);
}
});
mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
wm.updateViewLayout(mCaptureLl, params);
return false;
}
});
}
private void stopVirtual() {
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
mVirtualDisplay = null;
}
}
private void startCapture() {
    mImageName = System.currentTimeMillis() + ".png";
    Log.e(TAG, "image name is : " + mImageName);
    Image image = mImageReader.acquireLatestImage();
    if (image == null) {// the virtual display may not have rendered a frame into the reader yet
        Log.e(TAG, "no image available yet");
        return;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    // Each row may be padded beyond width * pixelStride, so create a bitmap wide enough
    // for the padded rows, copy the raw buffer in, then crop back to the real width
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    image.close();
    Log.e(TAG, "bitmap create success ");
    try {
        File fileFolder = new File(mImagePath);
        if (!fileFolder.exists()) fileFolder.mkdirs();
        File file = new File(mImagePath, mImageName);
        if (!file.exists()) {
            Log.e(TAG, "file create success ");
            file.createNewFile();
        }
        FileOutputStream out = new FileOutputStream(file);
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        out.flush();
        out.close();
        Log.e(TAG, "file save success ");
        Toast.makeText(this.getApplicationContext(), "Screenshot saved", Toast.LENGTH_SHORT).show();
    } catch (IOException e) {
        Log.e(TAG, e.toString());
        e.printStackTrace();
    }
}
private void startVirtual() {
if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
mVirtualDisplay = mMpj.createVirtualDisplay("capture_screen", windowWidth, windowHeight, screenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mImageReader.getSurface(), null, null);
}
@Override
public void onDestroy() {
    super.onDestroy();
    if (mCaptureLl != null) {
        wm.removeView(mCaptureLl);
    }
    if (mMpj != null) {
        mMpj.stop();
        mMpj = null;
    }
    if (mImageReader != null) {
        mImageReader.close();// free the reader's image buffers
        mImageReader = null;
    }
}
}
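Both float views are added with TYPE_PHONE, which Android 8.0 (API 26) rejects for third-party overlays. A minimal sketch of a version check that could replace the hard-coded type in both services (the helper name is illustrative, not part of the original code):

// Hypothetical helper: pick an overlay window type the running platform accepts.
// SYSTEM_ALERT_WINDOW is required either way, and on API 23+ the user must also
// grant "draw over other apps", checkable via Settings.canDrawOverlays().
private static int overlayWindowType() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        return WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY;
    }
    return WindowManager.LayoutParams.TYPE_PHONE;
}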