Android's Audio System


    Audio System Overview

        In Android, the Audio system is responsible for audio data streaming and control, and also manages audio devices. The Audio system is divided into several layers:
        1. The upper-layer Audio interfaces provided by the media library.
        2. AudioFlinger, which acts as the hub of the Audio system.
        3. The Audio hardware abstraction layer, which provides low-level support.
        4. The Audio interfaces exposed to the upper layers through JNI and the Java framework.
        The upper-layer Audio interfaces mainly provide two kinds of functionality: playback (Track) and recording (Recorder); a minimal playback sketch follows below.
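        As a quick illustration of the playback (Track) side of this upper-layer interface, the sketch below pushes one buffer of raw PCM data through AudioTrack. It is a minimal sketch only: the class name PcmPlayer, the 8 kHz mono 16-bit format, and the pcmData parameter are assumptions made here for illustration, not part of the original post.

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    public class PcmPlayer {
        // Play one buffer of 8 kHz mono 16-bit PCM through the music stream.
        public void playPcm(byte[] pcmData) {
            int sampleRateInHz = 8000;
            int bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
            AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                    bufferSize, AudioTrack.MODE_STREAM);
            track.play();                              // start playback
            track.write(pcmData, 0, pcmData.length);   // hand the PCM data down for mixing
            track.stop();
            track.release();
        }
    }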


        The Audio part of the Media library (libmedia.so):
            frameworks/base/include/media/
            frameworks/base/media/libmedia/
        This content is compiled into the library libmedia.so, which provides the Audio interfaces.
        AudioFlinger (libaudioflinger.so):
            frameworks/base/libs/audioflinger
        This content is compiled into the library libaudioflinger.so.

        The JNI part of Audio:

            frameworks/base/core/jni
        The Java part of Audio:
            frameworks/base/media/java/android/media
        This mainly contains AudioManager and several classes of the Audio system.
        The interface of the Audio hardware abstraction layer:
            hardware/libhardware_legacy/include/hardware/

    The Audio System and Its Upper-Layer Interfaces

        Structure of the Audio system: libmedia.so provides the Audio interfaces, which are exposed both to the upper layers and to native code. libaudioflinger.so provides the implementation of these Audio interfaces. The Audio hardware abstraction layer provides the interface to the hardware, which is called by AudioFlinger. Audio exposes its interfaces to the upper layers through JNI and Java.


        The core framework of Android Audio is provided in the media library, which, facing the layers above, mainly implements the three classes AudioSystem, AudioTrack, and AudioRecorder. It also provides the IAudioFlinger class interface, from which the two interfaces IAudioTrack and IAudioRecorder can be obtained, used for audio playback and recording respectively. AudioTrack and AudioRecorder are implemented by calling IAudioTrack and IAudioRecorder respectively.


        Both AudioTrack and AudioRecorder provide control interfaces such as start, stop, and pause. The former has a write interface, used for audio playback; the latter has a read interface, used for audio recording. AudioSystem handles control of the Audio system; it mainly contains set and get interfaces and is a class facing the upper layers. A small sketch of such control-style calls at the Java level is shown below.
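        At the Java application level, this kind of control (set/get) functionality surfaces through AudioManager, which was listed above as part of the Java layer. The following is a minimal sketch, assuming only that a valid Context is available; the class name VolumeControl and the "halve the music volume" behaviour are illustrative assumptions, not the actual AudioSystem API.

    import android.content.Context;
    import android.media.AudioManager;

    public class VolumeControl {
        // Query ("get") and adjust ("set") the music stream volume through AudioManager.
        public void halveMusicVolume(Context context) {
            AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
            int max = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);  // get-style call
            am.setStreamVolume(AudioManager.STREAM_MUSIC, max / 2, 0);   // set-style call
        }
    }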

    Usage Examples

    1. AudioFactory.java (used to obtain AudioRecord and AudioTrack)

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioRecord;
    import android.media.AudioTrack;
    import android.media.MediaRecorder;

    public class AudioFactory {
        private AudioRecord audioRecord = null;
        private AudioTrack audioTrack = null;

        // Recording parameters
        private int audioSource;
        private int sampleRateInHz;
        private int channelConfig;
        private int audioFormat;
        private int bufferSizeInBytes;

        // Playback parameters
        private int streamType;
        private int mode;

        public AudioFactory() {
            init();
        }

        // Lazily create an AudioTrack for playback with the configured parameters
        public AudioTrack getAudioTrack() {
            if (audioTrack == null) {
                audioTrack = new AudioTrack(streamType, sampleRateInHz,
                        channelConfig, audioFormat, bufferSizeInBytes, mode);
            }
            return audioTrack;
        }

        // Lazily create an AudioRecord for capture with the configured parameters
        public AudioRecord getAudioRecord() {
            if (audioRecord == null) {
                audioRecord = new AudioRecord(audioSource, sampleRateInHz,
                        channelConfig, audioFormat, bufferSizeInBytes);
            }
            return audioRecord;
        }

        public int getAudioSource() { return audioSource; }
        public void setAudioSource(int audioSource) { this.audioSource = audioSource; }

        public int getSampleRateInHz() { return sampleRateInHz; }
        public void setSampleRateInHz(int sampleRateInHz) { this.sampleRateInHz = sampleRateInHz; }

        public int getChannelConfig() { return channelConfig; }
        public void setChannelConfig(int channelConfig) { this.channelConfig = channelConfig; }

        public int getAudioFormat() { return audioFormat; }
        public void setAudioFormat(int audioFormat) { this.audioFormat = audioFormat; }

        public int getBufferSizeInBytes() { return bufferSizeInBytes; }
        public void setBufferSizeInBytes(int bufferSizeInBytes) { this.bufferSizeInBytes = bufferSizeInBytes; }

        public int getStreamType() { return streamType; }
        public void setStreamType(int streamType) { this.streamType = streamType; }

        public int getMode() { return mode; }
        public void setMode(int mode) { this.mode = mode; }

        public void init() {
            // Default capture parameters: microphone source, 8 kHz, mono, 16-bit PCM
            this.audioSource = MediaRecorder.AudioSource.MIC;
            this.sampleRateInHz = 8000;
            this.channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
            this.audioFormat = AudioFormat.ENCODING_PCM_16BIT;
            this.bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz,
                    channelConfig, audioFormat);

            // Default playback parameters: music stream, streaming mode
            this.streamType = AudioManager.STREAM_MUSIC;
            this.mode = AudioTrack.MODE_STREAM;
        }
    }
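    A minimal usage sketch (an illustrative assumption, not from the original post): the two objects are obtained from the factory inside, for example, an Activity method; the RECORD_AUDIO permission and threading are left out.

        AudioFactory factory = new AudioFactory();
        AudioRecord recorder = factory.getAudioRecord(); // microphone capture, 8 kHz mono 16-bit PCM
        AudioTrack player = factory.getAudioTrack();     // STREAM_MUSIC playback in MODE_STREAM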


    2. AudioPalyer.java (receives byte arrays passed over a socket and plays them with audioTrack)

    import android.media.AudioTrack;

    public class AudioPalyer {
        private AudioTrack audioTrack;
        // private boolean isTalking;
        private AudioFactory audioFactory;

        // public AudioPalyer() {
        //     this.audioFactory = new AudioFactory();
        //     this.audioTrack = audioFactory.getAudioTrack();
        // }

        public AudioPalyer(boolean isTalking) {
            this.audioFactory = new AudioFactory();
            this.audioTrack = audioFactory.getAudioTrack();
        }

        public void play() {
            // this.isTalking = false;
            byte[] audioData = null;
            // UdpReceiver udpReceiver = new UdpReceiver();
            byte[] bytes_pkg = null;

            // Start playback
            audioTrack.play();

            // while (!MyConfig.stopReceive) {
            while (true) {
                int sizeInBytes = audioFactory.getBufferSizeInBytes();
                // Receive the audio packet (the real receive path is commented out;
                // bytes_pkg stays null until the UdpReceiver call above is restored)
                // bytes_pkg = udpReceiver.receive();
                audioData = new byte[sizeInBytes];
                audioData = record(bytes_pkg);
                // Play the received audio
                audioTrack.write(audioData, 0, sizeInBytes);
            }
        }

        // Placeholder for decoding the received packet into PCM data
        private byte[] record(byte[] audioData) {
            // TODO Auto-generated method stub
            return audioData;
        }
    }
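    A minimal usage sketch (assumed here, not from the original post): play() blocks in an endless loop, so it would normally be started on a background thread.

        final AudioPalyer player = new AudioPalyer(true);
        new Thread(new Runnable() {
            public void run() {
                player.play(); // loops forever, writing received PCM into the AudioTrack
            }
        }).start();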


    3. AudioRecorder.java (records audio and sends it)

    import android.media.AudioRecord;
    import android.media.AudioTrack;

    public class AudioRecorder {
        private AudioRecord audioRecord;
        private boolean isTalking;
        private AudioFactory audioFactory;
        private AudioTrack audioTrack;

        // public AudioRecorder() {
        //     this.audioFactory = new AudioFactory();
        //     this.audioRecord = audioFactory.getAudioRecord();
        // }

        public AudioRecorder(boolean isTalking) {
            this.audioFactory = new AudioFactory();
            this.audioRecord = audioFactory.getAudioRecord();
            this.audioTrack = audioFactory.getAudioTrack();
        }

        public void record() {
            // Start recording (and local playback for monitoring)
            this.isTalking = true;
            audioRecord.startRecording();
            audioTrack.play();
            byte[] audioData = null;
            // UdpSender udpSender = new UdpSender();

            while (this.isTalking) {
                // Read one buffer of PCM data from the microphone
                int bufferSizeInBytes = audioFactory.getBufferSizeInBytes();
                audioData = new byte[bufferSizeInBytes];
                int size = audioRecord.read(audioData, 0, bufferSizeInBytes);
                byte[] tmpBuf = new byte[size];
                System.arraycopy(audioData, 0, tmpBuf, 0, size);
                // Loop the captured data straight back into the AudioTrack
                audioTrack.write(tmpBuf, 0, tmpBuf.length);
                System.out.println("------" + audioData + "------");
                // encoding

                // Send the byte array
                // if (bytes_pkg != null) {
                //     udpSender.send(bytes_pkg);
                // }
            }
            audioTrack.stop();
            audioTrack = null;
            audioRecord.stop();
            audioRecord = null;
            audioData = null;
        }

        // Placeholder for encoding the captured PCM before sending it
        private byte[] encord(byte[] audioData) {
            // TODO Auto-generated method stub
            return audioData;
        }

        public void stop() {
            this.isTalking = false;
        }

        public boolean isTalking() { return isTalking; }

        public void setTalking(boolean isTalking) { this.isTalking = isTalking; }
    }
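    A minimal usage sketch (assumed here, not from the original post): record() also blocks until stop() clears isTalking, so it runs on its own thread and is stopped from elsewhere, for example a button handler.

        final AudioRecorder recorder = new AudioRecorder(true);
        new Thread(new Runnable() {
            public void run() {
                recorder.record(); // capture from the mic and loop it back until stop() is called
            }
        }).start();
        // ... later, from the UI thread or another component:
        recorder.stop();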


