
Android音视频开发学习路线,具备这些知识
摄像头相关配置参数
实时获取摄像头原始数据
NV21原始数据编码操作的线程,编码h.264格式数据
/**
 * Starts a worker thread that drains NV21 preview frames from the camera
 * queue, converts each frame to NV12 and feeds it through {@code mediaCodec}
 * to produce an H.264 elementary stream.
 *
 * <p>Codec-config output (SPS/PPS) is cached in {@code configbyte}; every
 * key frame is written out with that config prepended so the stream is
 * seekable/decodable from any key frame. All encoded output is written to
 * {@code outputStream} and forwarded to {@code onEncoderVideoListener}.
 *
 * <p>NOTE(review): {@code input} is never reset to null after a frame is
 * encoded, so the last frame is re-encoded while the queue is empty —
 * presumably intentional to keep a steady frame rate; confirm.
 */
public void startEncoderThread() {
    Thread encoderThread = new Thread(new Runnable() {
        @SuppressLint("NewApi")
        @Override
        public void run() {
            isRuning = true;
            byte[] input = null;
            long pts = 0;
            long generateIndex = 0;
            while (isRuning) {
                if (CameraSurfaceView.YUVQueue.size() > 0) {
                    input = CameraSurfaceView.YUVQueue.poll();
                    byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
                    byte[] yuv420 = new byte[m_width * m_height * 3 / 2];
                    // MediaCodec COLOR_FormatYUV420SemiPlanar expects NV12, camera delivers NV21
                    VideoYUVUtil.NV21ToNV12(input, yuv420sp, m_width, m_height);
                    // VideoYUVUtil.YUV420spRotate90Clockwise(yuv420sp, yuv420, m_width, m_height);
                    input = yuv420sp;
                }
                if (input != null) {
                    try {
                        long startMs = System.currentTimeMillis();
                        ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                        ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                        // -1: block until an input buffer is available
                        int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                        if (inputBufferIndex >= 0) {
                            pts = computePresentationTime(generateIndex);
                            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(input);
                            mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                            generateIndex += 1;
                        }
                        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                        while (outputBufferIndex >= 0) {
                            // Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "
                            //         + bufferInfo.flags + ",pts = " + bufferInfo.presentationTimeUs);
                            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                            byte[] outData = new byte[bufferInfo.size];
                            outputBuffer.get(outData);
                            // FIX: flags is a bit field — test bits instead of == 2 / == 1
                            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                // cache SPS/PPS so they can be prepended to every key frame
                                configbyte = new byte[bufferInfo.size];
                                configbyte = outData;
                            } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                                byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                outputStream.write(keyframe, 0, keyframe.length);
                                if (onEncoderVideoListener != null) {
                                    onEncoderVideoListener.encoderVideoData(keyframe, keyframe.length);
                                }
                            } else {
                                outputStream.write(outData, 0, outData.length);
                                if (onEncoderVideoListener != null) {
                                    onEncoderVideoListener.encoderVideoData(outData, outData.length);
                                }
                            }
                            mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                        }
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                } else {
                    try {
                        // no frame available yet — back off before polling again
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    });
    encoderThread.start();
}
实时采集原始PCM音频数据的线程
原始PCM数据编码AAC音频数据
/** * 编码AAC音频线程 */ public voID startAudioEncodeAAC() { audioEncoderThread = new Thread() { @OverrIDe public voID run() { try { while (!audioEncoderLoop && !Thread.interrupted()) { AudioData audioData = audioQueue.take(); byte[] buffer = audioData.audioData; if(audioData != null && encoder != null) { int size = audioData.audioData.length; //录音时间 size/ 采样率*声道数 * bits/8 recordTime += size * 1.0 / (audioSamplerate * 2 * (16 / 8));// LogUtils.d("recordTime = " + recordTime); int inputBufferindex = encoder.dequeueinputBuffer(0); if(inputBufferindex >= 0) { ByteBuffer byteBuffer = encoder.getinputBuffers()[inputBufferindex]; byteBuffer.clear(); byteBuffer.put(buffer); encoder.queueinputBuffer(inputBufferindex, 0, size, 0, 0); } int index = encoder.dequeueOutputBuffer(info, 0); while(index >= 0) { try { perpCMSize = info.size + 7; outByteBuffer = new byte[perpCMSize]; ByteBuffer byteBuffer = encoder.getoutputBuffers()[index]; byteBuffer.position(info.offset); byteBuffer.limit(info.offset + info.size); addADtsheader(outByteBuffer, perpCMSize, aacsamplerate); byteBuffer.get(outByteBuffer, 7, info.size); byteBuffer.position(info.offset); if (audioEncderListener!=null){ audioEncderListener.encoderAudioData(outByteBuffer,perpCMSize); } encoder.releaSEOutputBuffer(index, false); index = encoder.dequeueOutputBuffer(info, 0); outByteBuffer = null; } catch (Exception e) { e.printstacktrace(); } } }} } catch (InterruptedException e) { e.printstacktrace(); } } }; audioEncoderThread.start(); }编码好的h.264视频数据和AAC音频数据经过封包成数据包通过网络协议(rtmp)实时发送数据。
将接收到的视频h.264数据解码成原始数据并播放。
送入解码线程进行解码操作
H.264实时解码成原始数据并进行渲染画面
接收到的AAC音频数据送入解码音频的线程进行解码还原成原始PCM音频数据
/** * aac解码+播放 */public voID decode(byte[] buf, int offset, int length) { //输入ByteBuffer ByteBuffer[] codecinputBuffers = mDecoder.getinputBuffers(); //输出ByteBuffer ByteBuffer[] codecOutputBuffers = mDecoder.getoutputBuffers(); //等待时间,0->不等待,-1->一直等待 long kTimeOutUs = 0; try { //返回一个包含有效数据的input buffer的index,-1->不存在 int inputBufIndex = mDecoder.dequeueinputBuffer(kTimeOutUs); if (inputBufIndex >= 0) { //获取当前的ByteBuffer ByteBuffer dstBuf = codecinputBuffers[inputBufIndex]; //清空ByteBuffer dstBuf.clear(); //填充数据 dstBuf.put(buf, offset, length); //将指定index的input buffer提交给解码器 mDecoder.queueinputBuffer(inputBufIndex, 0, length, 0, 0); } //编解码器缓冲区 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); //返回一个output buffer的index,-1->不存在 int outputBufferIndex = mDecoder.dequeueOutputBuffer(info, kTimeOutUs); if (outputBufferIndex < 0) { //记录解码失败的次数 count++; } ByteBuffer outputBuffer; while (outputBufferIndex >= 0) { //获取解码后的ByteBuffer outputBuffer = codecOutputBuffers[outputBufferIndex]; //用来保存解码后的数据 byte[] outData = new byte[info.size]; outputBuffer.get(outData); //清空缓存 outputBuffer.clear(); //播放解码后的数据 mPlayer.playAudioTrack(outData, 0, info.size); //释放已经解码的buffer mDecoder.releaSEOutputBuffer(outputBufferIndex, false); //解码未解完的数据 outputBufferIndex = mDecoder.dequeueOutputBuffer(info, kTimeOutUs); } } catch (Exception e) { e.printstacktrace(); }将解码后的PCM送入AudioTrack进行播放,这个时候就有声音了。
因为音频和视频解码是分开的,所以在解码后,需要做音视频同步。通常有一些音视频同步算法。如在ffmpeg中,以音频作为参考时钟,视频用于比较当前时钟和音频时钟的差值,如果快了,就要增大延迟,以便下一帧显示的晚一些;如果慢了,就要减少延迟,加快显示下一帧。
感谢各位关注。
总结
以上是内存溢出为你收集整理的Android音视频开发图解全部内容,希望文章能够帮你解决Android音视频开发图解所遇到的程序开发问题。
如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。
欢迎分享,转载请注明来源:内存溢出
微信扫一扫
支付宝扫一扫
评论列表(0条)