相機(jī)之使用OpenGL預(yù)覽
相機(jī)之使用OpenGL拍照
相機(jī)之使用OpenGL錄像
添加音頻
步驟
- 創(chuàng)建音頻格式 MediaFormat
- 創(chuàng)建 MediaCodec 音頻編碼器
- 初始化 AudioRecord ,并調(diào)用 startRecording() 開始錄制音頻
- 在線程中使用read(buffer, BUFFER_SIZE)方法讀取音頻
- 將讀取到的音頻數(shù)據(jù)放入 MediaCodec 的輸入緩沖區(qū)中
- 在 MediaCodec 輸出緩沖區(qū)使用 MediaMuxer 和視頻一起封裝到MP4
注意點(diǎn)
- 一定要設(shè)置時(shí)間戳bufferInfo.presentationTimeUs,否則音視頻不同步
- MediaMuxer 的 start() 和 release() 只能調(diào)用一次,而編碼的時(shí)候又必須將音頻和視頻兩個(gè)格式軌道 addTrack 進(jìn) MediaMuxer 之后,才能調(diào)用 start(),結(jié)束的時(shí)候也需要音視頻都結(jié)束編碼才能 release()。我這里采用了 CyclicBarrier 來處理,它可以在計(jì)數(shù)到達(dá) TRACK_COUNT 后,才讓線程繼續(xù)運(yùn)行,其第二個(gè)參數(shù)的作用是:最后一個(gè)到達(dá)的線程要執(zhí)行的任務(wù)
- 視頻可以通過mediaCodec.signalEndOfInputStream()結(jié)束錄制,但音頻需要mediaCodec.queueInputBuffer傳入結(jié)束標(biāo)志BUFFER_FLAG_END_OF_STREAM
視頻和音頻共用部分
/**
 * Shared logic for the audio and video recorders: owns the encoder
 * ([mediaCodec]), its muxer track index, and the drain loop that copies
 * encoded samples from the codec into [mediaMuxer].
 */
open class BaseRecorder(val mediaMuxer: MediaMuxer) {
    var isStart = false
    lateinit var mediaCodec: MediaCodec
    var trackIndex: Int = 0
    // Last presentation timestamp handed out, in microseconds.
    private var prePtsUs: Long = 0

    companion object {
        private const val TAG = "BaseRecorder"
    }

    /**
     * Returns a monotonically non-decreasing presentation timestamp in
     * microseconds, derived from System.nanoTime(). A raw clock value smaller
     * than the previously issued timestamp is clamped to the previous value so
     * MediaMuxer never sees time going backwards.
     */
    fun getPtsUs(): Long {
        // coerceAtLeast expresses the clamp directly; the original
        // `result += (prePtsUs - result)` computed the same thing obscurely.
        val result = (System.nanoTime() / 1000L).coerceAtLeast(prePtsUs)
        prePtsUs = result
        return result
    }

    /**
     * Drains the encoder's output and writes encoded samples into [mediaMuxer].
     *
     * @param endOfStream true once the caller has signalled end-of-stream to
     * the codec; then we keep draining until BUFFER_FLAG_END_OF_STREAM appears
     * instead of returning on INFO_TRY_AGAIN_LATER.
     */
    open fun writeToMuxer(endOfStream: Boolean) {
        val bufferInfo = MediaCodec.BufferInfo()
        loop@ while (true) {
            // Poll the encoder for the next output buffer (10 ms timeout).
            val status = mediaCodec.dequeueOutputBuffer(bufferInfo, 10_000)
            when (status) {
                MediaCodec.INFO_TRY_AGAIN_LATER -> {
                    // No output yet. Unless draining the tail of the stream,
                    // give up and retry on the next writeToMuxer call.
                    if (!endOfStream) {
                        break@loop
                    }
                }
                MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
                    // Reported once before the first real buffer: register
                    // this track (one audio + one video) with the muxer.
                    trackIndex = mediaMuxer.addTrack(mediaCodec.outputFormat)
                    Log.d(TAG, "writeToMuxer: currentThread=${Thread.currentThread().name}")
                    // MediaMuxer.start() may only be called once, after BOTH
                    // tracks were added; the barrier's last arriving thread
                    // performs the start (CyclicBarrier's second argument).
                    MediaRecorder.startMuxer.await()
                }
                MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> {
                    Log.d(TAG, "getCodec: INFO_OUTPUT_BUFFERS_CHANGED")
                }
                else -> {
                    // status is a valid output-buffer index.
                    val outputBuffer = mediaCodec.getOutputBuffer(status)
                    // Codec config (SPS/PPS, ADTS) already went to the muxer
                    // via the output format; never write it as a sample.
                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                        bufferInfo.size = 0
                    }
                    // Guard the nullable buffer: getOutputBuffer may return
                    // null, and writing it would crash.
                    if (bufferInfo.size != 0 && outputBuffer != null) {
                        // Stamp with a wall-clock pts so audio and video stay
                        // in sync in the muxed file.
                        bufferInfo.presentationTimeUs = getPtsUs()
                        outputBuffer.position(bufferInfo.offset)
                        outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
                        mediaMuxer.writeSampleData(trackIndex, outputBuffer, bufferInfo)
                    }
                    // Return the buffer to the codec for reuse.
                    mediaCodec.releaseOutputBuffer(status, false)
                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
                        Log.d(TAG, "writeToMuxer: BUFFER_FLAG_END_OF_STREAM break@loop")
                        // Both tracks must finish before MediaMuxer.release();
                        // the barrier's last arriving thread releases it.
                        MediaRecorder.stopMuxer.await()
                        break@loop
                    }
                }
            }
        }
    }
}
AudioRecorder
/**
 * Captures PCM from the microphone with AudioRecord, encodes it to AAC with
 * MediaCodec, and writes the result into the shared [MediaMuxer].
 */
class AudioRecorder(
    mediaMuxer: MediaMuxer
) : BaseRecorder(mediaMuxer) {

    private var recordHandler: Handler
    private lateinit var audioRecord: AudioRecord

    companion object {
        private const val TAG = "AudioRecorder"
        private const val SAMPLE_RATE_IN_HZ = 44100
        const val BIT_RATE = 64000
        private const val CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO
        private const val AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT
        private const val TIMEOUT_US = 1000000000L

        // Minimum AudioRecord buffer size for the configured format.
        val BUFFER_SIZE: Int by lazy {
            AudioRecord.getMinBufferSize(
                SAMPLE_RATE_IN_HZ,
                CHANNEL_CONFIG,
                AUDIO_FORMAT
            )
        }
    }

    init {
        // Audio format: mime type, sample rate, channel count (mono).
        val audioFormat = MediaFormat.createAudioFormat(
            MediaFormat.MIMETYPE_AUDIO_AAC,
            SAMPLE_RATE_IN_HZ, 1
        )
        audioFormat.setInteger(
            MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC
        )
        // Target bitrate for the AAC stream.
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE)
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1)
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, CHANNEL_CONFIG)
        // Create and configure the AAC encoder.
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
        mediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        // Capture/encode runs on its own HandlerThread so it never blocks
        // the caller.
        val recordThread = HandlerThread("audioThread").apply { start() }
        recordHandler = Handler(recordThread.looper)
    }

    /**
     * Starts microphone capture and the encode loop. Call [stop] to finish.
     */
    fun start() {
        audioRecord = AudioRecord(
            MediaRecorder.AudioSource.MIC,
            SAMPLE_RATE_IN_HZ,
            CHANNEL_CONFIG,
            AUDIO_FORMAT,
            BUFFER_SIZE
        )
        if (audioRecord.state != AudioRecord.STATE_INITIALIZED) {
            Log.e(TAG, "start: audioRecord init failed!")
            return
        }
        audioRecord.startRecording()
        isStart = true
        recordHandler.post {
            mediaCodec.start()
            val buffer = ByteBuffer.allocateDirect(BUFFER_SIZE)
            while (isStart) {
                // Reset position AND limit before each read. Without clear(),
                // the limit left by the previous flip() could make
                // position(readLength) throw on a larger subsequent read.
                buffer.clear()
                val readLength = audioRecord.read(buffer, BUFFER_SIZE)
                // Only forward real data: the original queued end-of-stream on
                // any readLength <= 0, prematurely ending the audio track on a
                // transient empty read or error code.
                if (readLength > 0) {
                    // AudioRecord.read(ByteBuffer, ...) does not advance the
                    // buffer position, so set it manually before flipping for
                    // the encoder to consume.
                    buffer.position(readLength)
                    buffer.flip()
                    inputDataToCodec(buffer, readLength)
                    writeToMuxer(false)
                }
            }
        }
    }

    /**
     * Queues one chunk of PCM into the encoder's input.
     *
     * @param buffer     PCM data flipped for reading; null when signalling EOS
     * @param readLength valid byte count; <= 0 queues BUFFER_FLAG_END_OF_STREAM
     */
    private fun inputDataToCodec(buffer: ByteBuffer?, readLength: Int) {
        val index = mediaCodec.dequeueInputBuffer(TIMEOUT_US)
        Log.d(TAG, "prepareDataForCodec: $index")
        // A negative index means no input buffer is available; queueing with
        // it would crash the codec (the original passed it through unchecked).
        if (index < 0) {
            return
        }
        if (readLength <= 0) {
            // No data: end the stream. Audio has no input surface, so EOS must
            // be queued explicitly (signalEndOfInputStream is surface-only).
            mediaCodec.queueInputBuffer(
                index,
                0,
                0,
                System.nanoTime() / 1000L,
                MediaCodec.BUFFER_FLAG_END_OF_STREAM
            )
        } else {
            // Copy the PCM into the codec's input buffer and submit it.
            buffer?.let { data ->
                mediaCodec.getInputBuffer(index)?.put(data)
            }
            mediaCodec.queueInputBuffer(
                index,
                0,
                readLength,
                System.nanoTime() / 1000L,
                0
            )
        }
    }

    /**
     * Drains encoded AAC into the muxer. When [endOfStream] is true, first
     * queues a zero-length end-of-stream input buffer.
     */
    override fun writeToMuxer(endOfStream: Boolean) {
        Log.e(TAG, "writeToMuxer: endOfStream=$endOfStream")
        if (endOfStream) {
            // Zero length triggers the BUFFER_FLAG_END_OF_STREAM branch.
            inputDataToCodec(null, 0)
        }
        // Base-class drain loop writes the codec output into the muxer.
        super.writeToMuxer(endOfStream)
    }

    /**
     * Stops recording, flushes the encoder, and releases all resources.
     */
    fun stop() {
        isStart = false
        recordHandler.post {
            // Signal end-of-stream and drain remaining output.
            writeToMuxer(true)
            // Release codec, recorder, and the capture thread.
            mediaCodec.stop()
            mediaCodec.release()
            audioRecord.stop()
            audioRecord.release()
            recordHandler.looper.quitSafely()
        }
    }
}
VideoRecorder
/**
 * Encodes OpenGL frames to H.264 via a MediaCodec input Surface and writes
 * the result into the shared [MediaMuxer].
 */
class VideoRecorder(
    private val context: Context,
    mediaMuxer: MediaMuxer,
    private var width: Int,
    private val height: Int,
    private val eglContext: EGLContext
) : BaseRecorder(mediaMuxer) {

    companion object {
        private const val TAG = "VideoRecorder"
        const val FRAME_RATE = 25
        const val I_FRAME_INTERVAL = 10
    }

    // EGL wrapper that renders textures onto the codec's input Surface.
    private lateinit var eglBase: EglBase
    private lateinit var recordHandler: Handler

    /**
     * Creates and starts the H.264 encoder. Frames are supplied by drawing
     * onto the codec's input Surface from a dedicated EGL context.
     */
    fun start() {
        val videoFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height)
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, calculateBitRate())
        // Bitrate modes:
        //   CQ  - no rate control, maximise image quality
        //   CBR - encoder keeps output close to the configured rate
        //   VBR - rate follows scene complexity (higher for complex frames)
        videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR)
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE)
        // Interval between I-frames (key frames).
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL)
        // Input arrives via a Surface (GraphicBuffer metadata), not YUV buffers.
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
        mediaCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        // Everything drawn on this Surface is fed straight into the encoder.
        val inputSurface = mediaCodec.createInputSurface()
        // Encode on a dedicated thread so rendering is never blocked.
        val recordThread = HandlerThread("videoThread").apply { start() }
        recordHandler = Handler(recordThread.looper)
        recordHandler.post {
            eglBase = EglBase(context, eglContext, inputSurface, width, height)
            mediaCodec.start()
        }
        isStart = true
    }

    /**
     * Heuristic bitrate: 0.25 bits per pixel per frame.
     */
    private fun calculateBitRate(): Int {
        val bitrate = (0.25f * FRAME_RATE * width * height).toInt()
        Log.d(TAG, "calculateBitRate: bitrate=$bitrate")
        return bitrate
    }

    /**
     * Encodes one frame: draws [textureId] onto the encoder's input Surface,
     * then drains the encoder into the muxer. No-op when not recording.
     */
    fun encodeFrame(textureId: Int, timestamp: Long) {
        if (!isStart) {
            return
        }
        recordHandler.post {
            // Re-check: stop() may have flipped the flag after the post.
            if (isStart) {
                Log.e(TAG, "encodeFrame: timestamp=$timestamp isStart=$isStart")
                eglBase.draw(textureId, timestamp)
                writeToMuxer(false)
            }
        }
    }

    /**
     * Drains encoded H.264 into the muxer. With a Surface input, end-of-stream
     * is signalled via signalEndOfInputStream().
     */
    override fun writeToMuxer(endOfStream: Boolean) {
        Log.e(TAG, "writeToMuxer: endOfStream=$endOfStream")
        if (endOfStream) {
            mediaCodec.signalEndOfInputStream()
        }
        // Base-class drain loop writes the codec output into the muxer.
        super.writeToMuxer(endOfStream)
    }

    /**
     * Stops recording, drains the encoder, and releases all resources.
     */
    fun stop() {
        isStart = false
        recordHandler.post {
            // Signal end-of-stream and drain remaining output.
            writeToMuxer(true)
            // Release the codec as well — the original only released EGL,
            // leaking the encoder (AudioRecorder.stop releases its codec).
            mediaCodec.stop()
            mediaCodec.release()
            eglBase.release()
            recordHandler.looper.quitSafely()
        }
    }
}
渲染器
/**
 * GLSurfaceView.Renderer that shows the camera preview through OpenGL and,
 * while recording, forwards each rendered frame to [MediaRecorder].
 */
class GlRenderer : GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    companion object {
        private const val TAG = "MyRenderer"
    }

    private var width: Int = 0
    private var height: Int = 0
    // EGL context captured from the render thread; shared with the recorder.
    private lateinit var eglContext: EGLContext
    private val glSurfaceView: GLSurfaceView
    private val context: Context
    // Camera control: opening the camera, preview sessions, release.
    private var cameraUtil: CameraUtil
    // Draws the camera texture into an FBO.
    private lateinit var fboFilter: FboFilter
    // Draws the FBO texture onto the screen.
    private lateinit var screenFilter: ScreenFilter
    private lateinit var surfaceTexture: SurfaceTexture
    private var textureId: Int = 0
    private var matrix: FloatArray = FloatArray(16)
    // Recorder; non-null only between startRecord and stopRecord.
    private var mediaRecorder: MediaRecorder? = null

    constructor(glSurfaceView: GLSurfaceView) {
        Log.d(TAG, "constructor: ")
        this.glSurfaceView = glSurfaceView
        context = glSurfaceView.context
        cameraUtil = CameraUtil(context)
        // Request an OpenGL ES 2.0 context.
        this.glSurfaceView.setEGLContextClientVersion(2)
        this.glSurfaceView.setRenderer(this)
        // Redraw only on demand: glSurfaceView.requestRender() triggers
        // onDrawFrame for each new camera frame.
        this.glSurfaceView.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
        Log.d(TAG, "constructor: end")
    }

    override fun onDrawFrame(gl: GL10?) {
        Log.d(TAG, "onDrawFrame: ")
        // Clear the previous frame.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
        // Pull the latest camera frame into the texture.
        surfaceTexture.updateTexImage()
        surfaceTexture.getTransformMatrix(matrix)
        fboFilter.setUniforms(matrix)
        // Draw the camera texture into the FBO. This MUST be a local (or a
        // different variable): assigning the field here would corrupt the
        // texture id used by the next onDrawFrame call.
        var textureId = fboFilter.onDrawFrame(textureId)
        // Draw the FBO texture to the screen.
        screenFilter.onDrawFrame(textureId)
        // If recording, encode the FBO texture as well.
        mediaRecorder?.encodeFrame(textureId, surfaceTexture.timestamp)
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        Log.d(TAG, "onSurfaceChanged: $width $height")
        GLES20.glViewport(0, 0, width, height)
        // Size the camera's output buffer to the surface.
        surfaceTexture.setDefaultBufferSize(width, height)
        // Cameras do not support odd dimensions; round down to even. Note the
        // even values are stored in the fields (used by startRecord), while
        // the raw values are passed to the filters below.
        this.width = if ((width and 1) == 1) width - 1 else width
        this.height = if ((height and 1) == 1) height - 1 else height
        fboFilter = FboFilter(context, width, height)
        screenFilter = ScreenFilter(context, width, height)
        // Capture the render thread's EGL context for the video encoder.
        eglContext = EGL14.eglGetCurrentContext()
    }

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        Log.d(TAG, "onSurfaceCreated: ")
        GLES20.glClearColor(1f, 1f, 0f, 1f)
        // Generate the texture that receives camera frames.
        val textureIds = IntArray(1)
        GLES20.glGenTextures(textureIds.size, textureIds, 0)
        textureId = textureIds[0]
        // The camera preview is rendered into this SurfaceTexture.
        surfaceTexture = SurfaceTexture(textureId)
        // onFrameAvailable fires whenever a new preview frame arrives.
        surfaceTexture.setOnFrameAvailableListener(this)
    }

    /**
     * Starts the preview: routes the camera identified by [cameraId] into
     * [surfaceTexture].
     */
    suspend fun startPreview(cameraId: String) {
        val outputs = listOf(Surface(surfaceTexture))
        cameraUtil.startPreview(cameraId, outputs)
    }

    fun stopPreview() {
        cameraUtil.release()
    }

    /**
     * A new camera frame arrived: schedule a redraw of the GLSurfaceView.
     */
    override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {
        glSurfaceView.requestRender()
    }

    /**
     * Registers a callback that receives the current frame as a Bitmap after
     * screenFilter draws it to the screen.
     */
    fun takePicture(pictureCallBack: (Bitmap) -> Unit) {
        screenFilter.setSaveFrame(pictureCallBack)
    }

    /**
     * Starts recording to [path], using the even width/height computed in
     * onSurfaceChanged and the captured EGL context.
     */
    fun startRecord(path: String) {
        mediaRecorder = MediaRecorder(
            glSurfaceView.context,
            path,
            width,
            height,
            eglContext
        )
        mediaRecorder?.start()
    }

    /**
     * Stops recording.
     */
    fun stopRecord() {
        mediaRecorder?.stop()
    }
}