前言
- 源碼:
https://github.com/Peakmain/Video_Audio/tree/master/app/src/main/java/com/peakmain/video_audio/utils - 我的簡書:http://www.reibang.com/u/3ff32f5aea98
- 我的Github:https://github.com/peakmain
基礎(chǔ)知識
- 在Android 5.0以前我們用的都是Camera，雖然使用簡單，但是已經被拋棄了。Android 5.0之後便出現了Camera2，但是Camera2使用非常繁瑣，於是後來又出現了CameraX，但是CameraX實際也只是對Camera2的封裝
- Android的坐標系
- 傳感器與屏幕方向不一致，將圖像傳感器的坐標系逆時針旋轉90度，才能顯示到屏幕的坐標
- 所以看到的屏幕是逆時針旋轉(zhuǎn)了90度
-
因此我們將圖像順時針旋轉(zhuǎn)90度才能看到正常的畫面
image.png
- 相機捕獲的類型是nv21，但是手機識別的是nv12，所以通常我們需要將nv21數據轉成nv12。兩者都是yuv420
- nv21和nv12的區(qū)別
- nv12:YYYYYYYYY UVUV=>YUV420SP
-
nv21:YYYYYYYYY VUVU=>YUV420SP
兩者主要區(qū)別UV方向不同
Camera的封裝和使用
- 因為Camera使用比較簡單而且已經過時了，我們主要過一遍
- Camera.open打開相機，0代表後置攝像頭，1代表前置攝像頭
- Camera.getParameters();獲取相機的相關參數，比如大小
- Buffer需要設(shè)置成
width*height*3/2
,原因是 y:u:v = 4:1:1，而y的大小是width*height - setPreviewCallbackWithBuffer設置預覽的回調，會重寫onPreviewFrame方法
- startPreview:開始預(yù)覽
/**
 * Preview view for the legacy {@link android.hardware.Camera} API.
 * Opens the back camera when the surface is created, streams NV21 preview
 * frames through a reusable callback buffer, and can save the next frame
 * as a rotated portrait JPEG on external storage.
 */
public class CameraSurface extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
    private Camera mCamera;
    private Camera.Size size;
    byte[] mBuffer;

    public CameraSurface(Context context) {
        this(context, null);
    }

    public CameraSurface(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CameraSurface(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        startPrview();
    }

    private void startPrview() {
        // Open the back-facing camera.
        mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        // Query the camera parameters (e.g. the preview size).
        Camera.Parameters parameters = mCamera.getParameters();
        size = parameters.getPreviewSize();
        try {
            mCamera.setPreviewDisplay(getHolder());
            // The sensor is rotated 90 degrees relative to the portrait screen.
            mCamera.setDisplayOrientation(90);
            // NV21 frame size: Y plane is w*h bytes, interleaved VU adds w*h/2.
            mBuffer = new byte[size.width * size.height * 3 / 2];
            mCamera.addCallbackBuffer(mBuffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            mCamera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private volatile boolean isCaptrue;

    /** Requests that the next preview frame be saved as a JPEG. */
    public void startCaptrue() {
        isCaptrue = true;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (isCaptrue) {
            // Convert NV21 -> NV12, then rotate the data into portrait orientation.
            byte[] nv12 = FileUtils.nv21toNV12(data);
            FileUtils.portraitData2Raw(nv12, mBuffer, size.width, size.height);
            isCaptrue = false;
            captrue(mBuffer);
            ToastUtils.showLong("保存成功");
        }
        // Return the buffer so the camera can reuse it for the next frame.
        mCamera.addCallbackBuffer(data);
    }

    int index = 0;

    // Compresses the (already rotated) NV21 bytes to a JPEG on external storage.
    private void captrue(byte[] bytes) {
        String fileName = "Camera_" + index++ + ".jpg";
        File sdRoot = Environment.getExternalStorageDirectory();
        File pictureFile = new File(sdRoot, fileName);
        if (!pictureFile.exists()) {
            try {
                pictureFile.createNewFile();
                // FIX: the original never closed the stream, leaking the file
                // descriptor and potentially truncating the JPEG; use
                // try-with-resources so it is always flushed and closed.
                try (FileOutputStream fileOutputStream = new FileOutputStream(pictureFile)) {
                    // Width/height are swapped because the data was rotated to portrait.
                    YuvImage image = new YuvImage(bytes, ImageFormat.NV21, size.height, size.width, null);
                    image.compressToJpeg(
                            new Rect(0, 0, image.getWidth(), image.getHeight()),
                            100, fileOutputStream);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // FIX: the original leaked the camera; release it so other apps
        // (and re-creations of this surface) can open it again.
        if (mCamera != null) {
            mCamera.setPreviewCallbackWithBuffer(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }
}
Camera2的使用和封裝
-
流程
這里方便大家記住,我畫了一個流程圖
image.png - start封裝代碼
/**
 * Opens the requested camera and wires up the preview pipeline.
 *
 * @param textureView surface to render the preview into
 * @param cameraId 0 for the front-facing camera, anything else for the back camera
 */
fun start(textureView: TextureView?, cameraId: Int) {
    mTextureView = textureView
    // CameraManager is the system service for enumerating and opening cameras.
    val cameraManager =
        mContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    try {
        // BUG FIX: LENS_FACING_FRONT/BACK are characteristic VALUES (0/1),
        // not camera ids. Resolve the real id by matching the LENS_FACING
        // characteristic over cameraIdList; fall back to the legacy "0"/"1"
        // string only if no camera reports the requested facing.
        val wantedFacing =
            if (cameraId == 0) CameraCharacteristics.LENS_FACING_FRONT
            else CameraCharacteristics.LENS_FACING_BACK
        val resolvedId = cameraManager.cameraIdList.firstOrNull { id ->
            cameraManager.getCameraCharacteristics(id)
                .get(CameraCharacteristics.LENS_FACING) == wantedFacing
        } ?: ("" + wantedFacing)
        val characteristics = cameraManager.getCameraCharacteristics(resolvedId)
        // All output formats and sizes this camera supports.
        val map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
        // Pick the best-fitting preview size from the SurfaceTexture output sizes.
        mPreviewSize = getBestSupportedSize(
            ArrayList(listOf(*map.getOutputSizes(SurfaceTexture::class.java)))
        )
        mImageReader = ImageReader.newInstance(
            mPreviewSize.width, mPreviewSize.height,
            ImageFormat.YUV_420_888, 2
        )
        mBackgroundThread = HandlerThread("Camera2Helper")
        mBackgroundThread?.start()
        mBackgroundHandler = Handler(mBackgroundThread!!.looper)
        // Frame-available callback, delivered on the background thread.
        mImageReader.setOnImageAvailableListener(
            OnImageAvailableListenerImpl(),
            mBackgroundHandler
        )
        if (ActivityCompat.checkSelfPermission(
                mContext,
                Manifest.permission.CAMERA
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            // No camera permission — bail out silently, as before.
            return
        }
        cameraManager.openCamera(resolvedId, mDeviceStateCallback, mBackgroundHandler)
    } catch (e: CameraAccessException) {
        e.printStackTrace()
    }
}
setOnImageAvailableListener是獲得數(shù)據(jù)的回調(diào)鸥昏,這里我們可以對數(shù)據(jù)進行回調(diào)
/**
 * Receives YUV_420_888 frames from the ImageReader and forwards the three
 * plane payloads (plus size and row stride) to [mCamera2Listener].
 */
private inner class OnImageAvailableListenerImpl : OnImageAvailableListener {
    // Per-plane byte arrays, allocated once on the first frame and reused.
    private var y: ByteArray? = null
    private var u: ByteArray? = null
    private var v: ByteArray? = null

    override fun onImageAvailable(reader: ImageReader) {
        val image = reader.acquireNextImage()
        val planes = image.planes
        // First frame: size each array to the data available in its plane buffer.
        if (y == null) {
            y = ByteArray(planes[0].buffer.limit() - planes[0].buffer.position())
            u = ByteArray(planes[1].buffer.limit() - planes[1].buffer.position())
            v = ByteArray(planes[2].buffer.limit() - planes[2].buffer.position())
        }
        // Copy only when the frame still matches the allocated size
        // (guards against a mid-stream size change).
        if (image.planes[0].buffer.remaining() == y?.size) {
            planes[0].buffer.get(y)
            planes[1].buffer.get(u)
            planes[2].buffer.get(v)
        }
        mCamera2Listener?.invoke(y, u, v, mPreviewSize, planes[0].rowStride)
        // Release the Image so the reader can produce the next frame.
        image.close()
    }
}
- mDeviceStateCallback打開相機回調(diào)
// State callbacks for the asynchronous CameraManager.openCamera() call.
private val mDeviceStateCallback: CameraDevice.StateCallback =
    object : CameraDevice.StateCallback() {
        // Camera opened: keep the handle and start the preview session.
        override fun onOpened(camera: CameraDevice) {
            mCameraDevice = camera
            createCameraPreviewSession()
        }

        // Camera no longer available (e.g. taken by another app): release it.
        override fun onDisconnected(camera: CameraDevice) {
            camera.close()
            mCameraDevice = null
        }

        // Fatal device error: release and forget the handle.
        override fun onError(camera: CameraDevice, error: Int) {
            camera.close()
            mCameraDevice = null
        }
    }
- 創(chuàng)建相機預(yù)覽
/**
 * Builds the preview CaptureRequest (TextureView surface + ImageReader
 * surface) and creates the capture session for the opened camera.
 */
private fun createCameraPreviewSession() {
    try {
        val texture = mTextureView!!.surfaceTexture
        // Match the SurfaceTexture buffer size to the chosen preview size.
        texture.setDefaultBufferSize(mPreviewSize.width, mPreviewSize.height)
        val surface = Surface(texture)
        // CaptureRequest.Builder for the repeating preview request.
        mPreviewRequestBuilder =
            mCameraDevice!!.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
        // BUG FIX: continuous-picture autofocus must be set on the
        // CONTROL_AF_MODE key. The original set this AF value on
        // CONTROL_AE_ANTIBANDING_MODE, which misconfigures anti-banding
        // and leaves autofocus at its default.
        mPreviewRequestBuilder.set(
            CaptureRequest.CONTROL_AF_MODE,
            CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
        )
        mPreviewRequestBuilder.addTarget(surface)
        mPreviewRequestBuilder.addTarget(mImageReader.surface)
        // The session manages both preview and still-capture requests.
        mCameraDevice!!.createCaptureSession(
            listOf(
                surface,
                mImageReader.surface
            ), mCaptureStateCallback, mBackgroundHandler
        )
    } catch (e: CameraAccessException) {
        e.printStackTrace()
    }
}
- setRepeatingRequest創(chuàng)建捕獲會話塞俱,可以在這里做拍照等功能,我這里是音視頻吏垮,所以我這里是個空實現(xiàn)
// Session lifecycle callback: once the session is configured, start the
// repeating preview request on the background handler.
private val mCaptureStateCallback: CameraCaptureSession.StateCallback =
    object : CameraCaptureSession.StateCallback() {
        override fun onConfigured(session: CameraCaptureSession) {
            // The device may already have been closed — nothing to stream to.
            mCameraDevice ?: return
            mCaptureSession = session
            try {
                // Empty CaptureCallback: frames are consumed via the ImageReader.
                session.setRepeatingRequest(
                    mPreviewRequestBuilder.build(),
                    object : CaptureCallback() {},
                    mBackgroundHandler
                )
            } catch (e: CameraAccessException) {
                e.printStackTrace()
            }
        }

        override fun onConfigureFailed(session: CameraCaptureSession) {}
    }
CameraX的使用和封裝
- 添加依賴
implementation "androidx.camera:camera-core:1.0.0-alpha05"
implementation "androidx.camera:camera-camera2:1.0.0-alpha05"
- Camerax的api就比較簡單了
- CameraX.bindToLifecycle
- 第一個參數(shù)LifecycleOwner,一般傳入activity就可以了
- 第二個參數(shù)UseCase障涯,可以傳入多個參數(shù),
- Preview代表預(yù)覽圖片回調(diào)
- ImageAnalysis代表數(shù)據(jù)分析回調(diào)
- setOnPreviewOutputUpdateListener,會重寫onUpdated方法，主要防止切換鏡頭報錯，做一些處理
- setAnalyzer設(shè)置分析分析器去分析和接收圖片,我們可將圖片進行回調(diào)
完整代碼
- CameraX.bindToLifecycle
/**
 * Thin wrapper around the CameraX alpha API: binds a Preview use case (into
 * [mTextureView]) and an ImageAnalysis use case (YUV planes forwarded to
 * [cameraXListener]) to the given [LifecycleOwner].
 */
class CameraXHelper(
    private var mLifecycleOwner: LifecycleOwner? = null,
    private var mTextureView: TextureView
) :
    OnPreviewOutputUpdateListener, ImageAnalysis.Analyzer {

    // Background thread whose Looper receives the analysis callbacks.
    private val mHandlerThread: HandlerThread = HandlerThread("CameraXHelper")

    // Target resolution for both use cases; defaults to the screen size.
    var width = SizeUtils.screenWidth
    var height = SizeUtils.screenHeight

    // Use the back-facing lens.
    private val currentFacing = LensFacing.BACK

    /** Binds the preview and analysis use cases; no-op without a lifecycle owner. */
    fun startCamera() {
        if (mLifecycleOwner == null) {
            return
        }
        CameraX.bindToLifecycle(mLifecycleOwner, preView, analysis)
    }

    // Preview use case; setTargetResolution sets the preview size.
    private val preView: Preview
        get() {
            val previewConfig =
                PreviewConfig.Builder().setTargetResolution(Size(width, height))
                    .setLensFacing(currentFacing).build()
            val preview = Preview(previewConfig)
            preview.onPreviewOutputUpdateListener = this
            return preview
        }

    // Analysis use case; callbacks run on mHandlerThread, keeping only the
    // latest frame so the analyzer never falls behind the camera.
    private val analysis: ImageAnalysis
        get() {
            val imageAnalysisConfig = ImageAnalysisConfig.Builder()
                .setCallbackHandler(Handler(mHandlerThread.looper))
                .setLensFacing(currentFacing)
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setTargetResolution(Size(width, height))
                .build()
            val imageAnalysis = ImageAnalysis(imageAnalysisConfig)
            imageAnalysis.analyzer = this
            return imageAnalysis
        }

    override fun onUpdated(output: PreviewOutput) {
        val surfaceTexture = output.surfaceTexture
        // Guard against crashes when switching lenses: the TextureView must be
        // detached and re-attached before it can accept a new SurfaceTexture.
        if (mTextureView.surfaceTexture !== surfaceTexture) {
            if (mTextureView.isAvailable) {
                val parent = mTextureView.parent as ViewGroup
                parent.removeView(mTextureView)
                parent.addView(mTextureView, 0)
                parent.requestLayout()
            }
            mTextureView.surfaceTexture = surfaceTexture
        }
    }

    private val lock =
        ReentrantLock()

    // Per-plane byte arrays, allocated once on the first frame and reused.
    private var y: ByteArray? = null
    private var u: ByteArray? = null
    private var v: ByteArray? = null

    override fun analyze(image: ImageProxy, rotationDegrees: Int) {
        lock.lock()
        try {
            val planes = image.planes
            // First frame: size each array to its plane's available data.
            if (y == null) {
                y = ByteArray(
                    planes[0].buffer.limit() - planes[0].buffer.position()
                )
                u = ByteArray(
                    planes[1].buffer.limit() - planes[1].buffer.position()
                )
                v = ByteArray(
                    planes[2].buffer.limit() - planes[2].buffer.position()
                )
            }
            // Copy and forward only when the frame size still matches.
            if (image.planes[0].buffer.remaining() == y!!.size) {
                planes[0].buffer[y]
                planes[1].buffer[u]
                planes[2].buffer[v]
                val stride = planes[0].rowStride
                val size = Size(image.width, image.height)
                if (cameraXListener != null) {
                    cameraXListener!!.invoke(y, u, v, size, stride)
                }
            }
        } catch (e: Exception) {
            e.printStackTrace()
        } finally {
            lock.unlock()
        }
    }

    private var cameraXListener: ((ByteArray?, ByteArray?, ByteArray?, Size, Int) -> Unit)? = null

    /** Registers the consumer of the analyzed (y, u, v, size, rowStride) frames. */
    fun setCameraXListener(cameraXListener: ((ByteArray?, ByteArray?, ByteArray?, Size, Int) -> Unit)) {
        this.cameraXListener = cameraXListener
    }

    init {
        // Start the thread that delivers the analysis callbacks.
        mHandlerThread.start()
        // FIX: removed the original `mLifecycleOwner = mLifecycleOwner`, a
        // no-op self-assignment — the constructor parameter already sets the
        // property.
    }
}