Basic Concepts
Camera2 is the camera API introduced in Android 5.0 (API 21); it cannot be used on earlier versions. It follows a pipeline-style design and supports either a single request or multiple requests executed in sequence.
Differences from Camera1:
Camera2 is managed through CameraManager, rather than handing us a Camera object to operate on directly the way Camera1 does. In Camera2, both preview and still capture are requests submitted to a session (input goes into the pipeline, output comes back out).
It also adds many capabilities Camera1 lacks, such as richer control over focus modes, exposure modes, and the shutter, plus output of raw sensor data (RAW).
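To make the CameraManager model concrete, here is a minimal sketch (the helper name logCameraCapabilities and the log tag are ours, not part of the framework) that lists the device's cameras and checks which auto-focus modes they offer and whether they advertise RAW capture:
// Sketch: enumerating cameras and querying their capabilities through CameraManager.
private void logCameraCapabilities() throws CameraAccessException {
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    for (String cameraId : manager.getCameraIdList()) {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // Front or back lens.
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);

        // Auto-focus modes exposed by this camera (e.g. CONTINUOUS_PICTURE).
        int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);

        // Whether RAW capture is advertised by this camera.
        boolean supportsRaw = false;
        int[] capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        if (capabilities != null) {
            for (int capability : capabilities) {
                if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
                    supportsRaw = true;
                }
            }
        }

        Log.d("CameraInfo", "id=" + cameraId + " facing=" + facing
                + " afModes=" + Arrays.toString(afModes) + " raw=" + supportsRaw);
    }
}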
Basic Usage
- Create a TextureView and register a TextureView state callback.
- When the TextureView callback reports the surface is available, get the CameraManager, open a connection to the camera, and register the camera state callback.
- When the camera connection callback succeeds, create a capture session and register the session's callbacks.
- In the session's capture callback, handle taking the picture and generating the image.
Next, we will walk through each of these steps with real code.
Step 1: Create the TextureView and register its callback
/**
 * SurfaceTextureListener: tells us when the TextureView's surface is ready to use.
 */
private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        // The surface is ready, so we can open the camera now.
        openCamera(width, height);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // Re-apply the transform when the view size changes (e.g. on rotation).
        configureTransform(width, height);
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
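The listener only takes effect once it is attached to the TextureView. A typical place to do this (a sketch; mTextureView is assumed to be the TextureView from the layout) is onResume(), which also covers the case where the surface is already available when we come back from the background:
@Override
public void onResume() {
    super.onResume();
    // If the surface is already available, open the camera directly;
    // otherwise wait for onSurfaceTextureAvailable().
    if (mTextureView.isAvailable()) {
        openCamera(mTextureView.getWidth(), mTextureView.getHeight());
    } else {
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
    }
}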
Step 2: In onSurfaceTextureAvailable(), open the camera to obtain a CameraDevice object and connect to it. Here we also configure which camera we need, for example front or back lens, auto-focus, and so on.
private void openCamera(int width, int height) {
    // Pick the most suitable camera id from the list the system reports
    // (different resolutions, front/back lenses, etc.).
    setUpCameraOutputs(width, height);
    // Adjust the TextureView's transform to match the chosen camera output.
    configureTransform(width, height);
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        // mStateCallback is shown below.
        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
}
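setUpCameraOutputs() is referenced above but not shown. A simplified sketch of what it might look like, assuming we simply want the back-facing lens and do not bother matching the preview size to the view's aspect ratio (which a real implementation should do with the width/height arguments):
private void setUpCameraOutputs(int width, int height) {
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We want the back-facing lens here; skip front-facing cameras.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            // The output sizes this camera supports for each surface type.
            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // A real implementation would pick the SurfaceTexture size whose aspect
            // ratio matches the view; we take the first one to keep the sketch short.
            mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}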
The camera state callback, created ahead of time:
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        // The camera is open: release the lock, keep the device and start the preview session.
        mCameraOpenCloseLock.release();
        mCameraDevice = camera;
        createCameraPreviewSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        // Release the lock and close the camera when the device is disconnected.
        mCameraOpenCloseLock.release();
        camera.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        // Same cleanup on error; a real app would also report the error to the user.
        mCameraOpenCloseLock.release();
        camera.close();
        mCameraDevice = null;
    }
};
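The code above also relies on mBackgroundHandler, which is not shown either. A common setup, sketched below with assumed field and method names, is a dedicated HandlerThread that camera callbacks are delivered on, typically started in onResume() and stopped in onPause():
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;

// Start the thread the camera callbacks run on.
private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

// Stop the background thread cleanly when the screen is paused.
private void stopBackgroundThread() {
    mBackgroundThread.quitSafely();
    try {
        mBackgroundThread.join();
        mBackgroundThread = null;
        mBackgroundHandler = null;
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}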
Step 3: Once the camera connection callback succeeds, create the capture session
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;

        // Configure the buffer size of the TextureView to the chosen preview size.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        // This is the output Surface the preview frames will be drawn into.
        Surface surface = new Surface(texture);

        // Build a preview request targeting that Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);

        // Create a session with two outputs: the preview Surface and the ImageReader for stills.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {

                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        // The camera is already closed.
                        if (mCameraDevice == null) {
                            return;
                        }
                        // The session is ready; start showing the preview.
                        mCaptureSession = session;
                        try {
                            // Auto-focus should run continuously for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is enabled automatically when necessary.
                            setAutoFlash(mPreviewRequestBuilder);

                            // Kick off the repeating preview request.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest,
                                    mCaptureCallback, mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
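The second output Surface passed to createCaptureSession() belongs to mImageReader, which is where the JPEG ends up when a still is captured. A minimal sketch of how it might be created and wired up (largestSize and the saveJpeg() helper are placeholders, not from the original code):
// Sketch: an ImageReader sized for JPEG stills, with a listener that runs on the
// background thread whenever a captured image is ready.
mImageReader = ImageReader.newInstance(largestSize.getWidth(), largestSize.getHeight(),
        ImageFormat.JPEG, /*maxImages*/ 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // Copy the JPEG bytes out of the Image and hand them to whatever saves the file.
        try (Image image = reader.acquireNextImage()) {
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            saveJpeg(bytes); // hypothetical helper that writes the bytes to disk
        }
    }
}, mBackgroundHandler);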
Step 4: Finally, the session's capture callback, which handles taking the picture and generating the image
/**
 * A CaptureCallback that drives a small state machine:
 * preview -> waiting for focus lock -> (optional) pre-capture metering -> picture taken.
 */
private CameraCaptureSession.CaptureCallback mCaptureCallback =
        new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {
        switch (mState) {
            case STATE_PREVIEW: {
                // Nothing to do while the normal preview is running.
                break;
            }
            case STATE_WAITING_LOCK: {
                Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                if (afState == null) {
                    captureStillPicture();
                } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                        CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                    // Focus is locked; check whether exposure is also ready.
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    } else {
                        runPrecaptureSequence();
                    }
                }
                break;
            }
            case STATE_WAITING_PRECAPTURE: {
                // CONTROL_AE_STATE can be null on some devices.
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null ||
                        aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
                        aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                    mState = STATE_WAITING_NON_PRECAPTURE;
                }
                break;
            }
            case STATE_WAITING_NON_PRECAPTURE: {
                // CONTROL_AE_STATE can be null on some devices.
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    mState = STATE_PICTURE_TAKEN;
                    captureStillPicture();
                }
                break;
            }
        }
    }

    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                    @NonNull CaptureRequest request,
                                    @NonNull CaptureResult partialResult) {
        process(partialResult);
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        process(result);
    }
};
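The state machine above is driven by helpers that are not shown in the original: lockFocus() starts it when the user presses the shutter button, and captureStillPicture() submits the actual still-capture request toward the ImageReader. A condensed sketch of both (orientation handling, flash setup, and unlocking focus afterwards are trimmed; the STATE_* constants are the ones used above):
// Sketch: start the state machine by asking the camera to lock focus.
private void lockFocus() {
    try {
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                CameraMetadata.CONTROL_AF_TRIGGER_START);
        // mCaptureCallback will see the AF result and move to the next state.
        mState = STATE_WAITING_LOCK;
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
                mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

// Sketch: take the actual picture once focus and exposure are ready.
private void captureStillPicture() {
    try {
        if (mCameraDevice == null) {
            return;
        }
        CaptureRequest.Builder captureBuilder =
                mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        // Send the JPEG to the ImageReader created earlier.
        captureBuilder.addTarget(mImageReader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);

        // Stop the repeating preview and fire a single still-capture request; the full
        // sample then unlocks focus and restarts the preview in the capture callback.
        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}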