


A broad project: streaming audio from a microphone to an Android device

I want to take audio input from a Bluetooth microphone and play it out loud in real time, like a megaphone or a simple PA system.

For this I am using the AudioRecord and AudioTrack classes, since MediaRecorder and MediaPlayer work by writing to and reading from a file, which I imagine would add latency (is that true?).

I have given up on the idea of using any of the Bluetooth classes directly, because the Android API does not support the Android device acting as the sink in a source/sink scenario (at least not yet), and doing so would require native programming against the Android libraries (is that also correct?).

Right now the app works, but the latency is far too long, and the app is supposed to play the audio back in real time.

My first question: are AudioRecord/AudioTrack the right classes for streaming audio from a Bluetooth microphone to the device speaker and playing it in real time?

If so: how can I reduce the latency so the audio from the microphone comes through in real time? The complete code is listed below, so please help yourselves:

The manifest declares the permission needed to record audio:

<uses-permission android:name="android.permission.RECORD_AUDIO" />

I have a dedicated class that extends Thread to handle the audio:

public class AudioStreamer extends Thread {

/**
 * Audio configuration parameters.
 */
private int audioSource = MediaRecorder.AudioSource.MIC;
private int sampleRate = 11025;
private int streamType = AudioManager.STREAM_MUSIC;
private int mode = AudioTrack.MODE_STREAM;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

private int channelConfigIn = AudioFormat.CHANNEL_IN_MONO;
private int channelConfigOut = AudioFormat.CHANNEL_OUT_MONO;

private int recordSize;
private int trackSize;

private AudioTrack track;
private AudioRecord recorder;

/**
 * Computes the minimum buffer sizes and creates the AudioRecord and
 * AudioTrack instances; recording/playback itself starts in run().
 */
public AudioStreamer() {
    System.out.println("New code!");

    recordSize = AudioRecord.getMinBufferSize(sampleRate,
            channelConfigIn, audioFormat);
    System.out.println("recordSize: "+recordSize);
    trackSize = AudioTrack.getMinBufferSize(sampleRate,
            channelConfigOut, audioFormat);
    System.out.println("trackSize: "+trackSize);

    recorder = new AudioRecord(audioSource, sampleRate,
            channelConfigIn, audioFormat, recordSize);

    if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
        track = new AudioTrack(streamType, sampleRate,
                channelConfigOut, audioFormat, trackSize, mode);

        if (track.getState() == AudioTrack.STATE_INITIALIZED) {
            System.out.println("Record and Track initialized");

        } else {
            System.out.println("Track != init");
        }
    } else {
        System.out.println("Recorder != init");
    }
}

/**
 * Runs the thread: continuously reads from the recorder and writes to the track.
 */
public void run() {
    recorder.startRecording();
    track.play();

    if (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING
            && track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
        System.out.println("Recorder and track playing");
    } else {
        System.out.println("Track and recorder != PLAYING");
    }

    short[] buffer = new short[recordSize];
    int audioLength = 0;

    while (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING
            && track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
        audioLength = recorder.read(buffer, 0, recordSize);
        track.write(buffer, 0, audioLength);
    }
    }
}

/**
 * sets up the AudioManager for bluetooth audio streaming
 */
public void setAudioManager(AudioManager manager) {
    manager.setMode(AudioManager.MODE_IN_COMMUNICATION);

    // set true and test
    manager.setBluetoothScoOn(true);
    manager.setSpeakerphoneOn(true);
    System.out.println("bluetoothScoOn: " + manager.isBluetoothScoOn()
            + ", bluetoothA2DP: " + manager.isBluetoothA2dpOn()
            +", speakerPhone: "+manager.isSpeakerphoneOn());

    /**
     * Start BluetoothSCO
     */
    if (manager.isBluetoothA2dpOn()) {
        manager.startBluetoothSco();
        System.out.println("BtSco started");
    }
}

/**
 * Pauses the audio stream
 */
public void pause() {
    if (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        recorder.stop();
    }

    if (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
        track.pause();
        track.flush();
    }

    if (track.getPlayState() == AudioTrack.PLAYSTATE_PAUSED
            && recorder.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
        System.out.println("Stopped");
    }
}

}

My main class provides clickable buttons that call the start/pause methods on the audio streamer:

public class MainActivity extends AppCompatActivity {
private AudioStreamer audioStreamer;
private AudioManager manager;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    System.out.println("Program running ... ");
    audioStreamer = new AudioStreamer();

    manager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioStreamer.setAudioManager(manager);
}

public void startAudioStreamer(View view) {
    audioStreamer.start();
}

public void pauseAudioStreamer(View view) {
    audioStreamer.pause();
}

}
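One detail I am aware of: a java.lang.Thread can only be started once, so after pause() the same AudioStreamer cannot be start()ed again (it throws IllegalThreadStateException). If restarting is needed, the handler would have to create a fresh instance, roughly like this:

public void startAudioStreamer(View view) {
    // A Thread cannot be restarted, so create a new streamer per run.
    audioStreamer = new AudioStreamer();
    audioStreamer.setAudioManager(manager);
    audioStreamer.start();
}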

The buttons are declared in the layout like this:

<?xml version="1.0" encoding="utf-8"?>

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"

android:id="@+id/activity_main"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context="com.android.audiorecordrevisited.MainActivity">

<Button
    android:id="@+id/startAudioStreamer"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:onClick="startAudioStreamer"
    android:text="Start AudioStreamer" />

<Button
    android:id="@+id/stopAudioStreamer"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:layout_below="@id/startAudioStreamer"
    android:onClick="pauseAudioStreamer"
    android:text="Stop Audio recording" />

 </RelativeLayout>

How can I reduce the latency of the audio stream? Am I using the right classes, or is there another/better way to approach this?
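One direction I have been looking at (not yet tested) is to keep the buffers at their minimum size but move the data in much smaller chunks, and, on API 26+, build the AudioTrack with the low-latency performance mode. A rough sketch, reusing the sampleRate, trackSize and recorder fields from the class above; the streaming flag is just a placeholder for the real loop condition:

// Sketch of a lower-latency setup (assumes API 26+; untested).
AudioTrack lowLatencyTrack = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(sampleRate)
                .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
                .build())
        .setBufferSizeInBytes(trackSize)
        .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .build();

recorder.startRecording();
lowLatencyTrack.play();

// Move audio in small chunks: 256 samples is roughly 23 ms at 11025 Hz mono.
short[] chunk = new short[256];
while (streaming) {  // "streaming" is a placeholder flag, not in the code above
    int read = recorder.read(chunk, 0, chunk.length);
    if (read > 0) {
        lowLatencyTrack.write(chunk, 0, read);
    }
}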

Also: the audio input does not seem to come from the actual Bluetooth microphone but from the Android device's built-in microphone, which is not the idea. How can I read the audio input from the Bluetooth microphone instead of the internal one?
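My current understanding (please correct me if it is wrong) is that A2DP is playback-only, so microphone input from a headset has to come over the SCO link: call startBluetoothSco() unconditionally rather than only when isBluetoothA2dpOn() is true, and only start recording once the system broadcasts that SCO is actually connected. As far as I know the SCO link is also limited to 8 kHz (16 kHz for wideband speech), so sampleRate would have to drop accordingly. A sketch of that flow inside the activity:

// Sketch (unverified): wait for the SCO link before starting the streamer.
IntentFilter filter = new IntentFilter(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
registerReceiver(new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
        if (state == AudioManager.SCO_AUDIO_STATE_CONNECTED) {
            // The headset microphone should now be the capture device.
            audioStreamer.start();
        }
    }
}, filter);

manager.setMode(AudioManager.MODE_IN_COMMUNICATION);
manager.startBluetoothSco();
manager.setBluetoothScoOn(true);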





android-aaudio, android-audiorecord