Two ways to record audio with the Android system API (MediaRecorder and AudioRecord)

Keywords: Mobile Android encoding Java Google

Preamble

Permissions, permissions, permissions: you must obtain the recording permission first — everything else can wait.

In addition, Android 10 changes recording behavior by introducing the concept of recording (audio) focus. Previously the microphone could be used by only one app at a time, and another app could use it only after the first one released it. Now focus can be grabbed: if your app does not hold the audio focus, the code may not report any error, but it will record nothing but silence.

There are two recording modes provided by Android system API: MediaRecorder and AudioRecord

MediaRecorder: the simple mode — easy to call, with only start and stop. The recorded file is already in the specified encoding format, so the system player can play it directly.

AudioRecord: the raw mode — supports pause and resume. You can obtain the recorded data in real time and process it as needed. The captured data is raw PCM, which the system player cannot play directly.

MediaRecorder

Without further ado: pass the path of the file to be saved via the constructor, then call the start and stop methods.


import android.media.MediaRecorder;
import android.os.Handler;

import java.io.File;
import java.io.IOException;

/**
 * Recording function
 */

public class MediaRecordingUtils {

    //File path
    private String filePath;

    private MediaRecorder mMediaRecorder;
    private final String TAG = "fan";
    public static final int MAX_LENGTH = 1000 * 60 * 200;// Maximum recording time, in milliseconds, 1000 * 60 * 10;

    private OnAudioStatusUpdateListener audioStatusUpdateListener;

    /**
     * File store default sdcard/record
     */
    public MediaRecordingUtils() {
    }

    public MediaRecordingUtils(String filePath) {
        this.filePath=filePath;
//        File path = new File(filePath);
//        if (!path.exists())
//            path.mkdirs();
//        this.FolderPath = filePath;
    }

    private long startTime;
    private long endTime;


    /**
     * Start recording in aac format
     * Recording file
     *
     * @return
     */
    public void startRecord() {
        // Start recording
        /* ①Initial: Instantiate the MediaRecorder object */
        if (mMediaRecorder == null)
            mMediaRecorder = new MediaRecorder();
        try {
            /* ②setAudioSource/setVedioSource */
            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);// Set up microphone
            /* ②Set the encoding of audio file: AAC / AMR? Nb / AMR? MB / default sound (waveform) sampling */
            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT);
            /*
             * ②Format the output file: three GPP / MPEG-4 / raw AMR / default three GPP (3gp format
             * ,H263 Video / ARM audio coding), MPEG-4, raw & AMR (only audio is supported and AMR & nb is required for audio coding)
             */
            mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);

//            filePath = FolderPath + DateUtil.getTimeForLong() + ".aac";
            /* ③Get ready */
            mMediaRecorder.setOutputFile(filePath);
            mMediaRecorder.setMaxDuration(MAX_LENGTH);
            mMediaRecorder.prepare();
            /* ④start */
            mMediaRecorder.start();
            // AudioRecord audioRecord.
            /* Get start time* */
            startTime = System.currentTimeMillis();
            updateMicStatus();
            ALog.e("fan", "startTime" + startTime);
        } catch (IllegalStateException e) {
            ALog.e(TAG, "call startAmr(File mRecAudioFile) failed!" + e.getMessage());
        } catch (IOException e) {
            ALog.e(TAG, "call startAmr(File mRecAudioFile) failed!" + e.getMessage());
        }
    }

    /**
     * Stop recording
     */
    public long stopRecord() {
        if (mMediaRecorder == null)
            return 0L;
        endTime = System.currentTimeMillis();

        //Some netizens said that when they call stop at 5.0 or above, they will report an error. After browsing the Google document, they found that there is a possibility of an error reported on it. If you catch an exception, just clean it up. Thank you for your feedback!
        try {
            mMediaRecorder.stop();
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;

            audioStatusUpdateListener.onStop(filePath);
            filePath = "";

        } catch (RuntimeException e) {
            try {
                mMediaRecorder.reset();
                mMediaRecorder.release();
                mMediaRecorder = null;

                File file = new File(filePath);
                if (file.exists())
                    file.delete();

                filePath = "";
            } catch (Exception e1) {

            }

        }
        return endTime - startTime;
    }

    /**
     * cancel recording
     */
    public void cancelRecord() {

        try {

            mMediaRecorder.stop();
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;

        } catch (RuntimeException e) {
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;
        }
        File file = new File(filePath);
        if (file.exists())
            file.delete();

        filePath = "";

    }

    private final Handler mHandler = new Handler();
    private Runnable mUpdateMicStatusTimer = new Runnable() {
        public void run() {
            updateMicStatus();
        }
    };


    private int BASE = 1;
    private int SPACE = 100;// Interval sampling time

    public void setOnAudioStatusUpdateListener(OnAudioStatusUpdateListener audioStatusUpdateListener) {
        this.audioStatusUpdateListener = audioStatusUpdateListener;
    }

    /**
     * Update mic status
     */
    private void updateMicStatus() {

        if (mMediaRecorder != null) {
            double ratio = (double) mMediaRecorder.getMaxAmplitude() / BASE;
            double db = 0;// Decibel
            if (ratio > 1) {
                db = 20 * Math.log10(ratio);
                if (null != audioStatusUpdateListener) {
                    audioStatusUpdateListener.onUpdate(db, System.currentTimeMillis() - startTime);
                }
            }
            mHandler.postDelayed(mUpdateMicStatusTimer, SPACE);
        }
    }

    public String getFilePath() {
        return filePath;
    }

    public interface OnAudioStatusUpdateListener {
        /**
         * Recording...
         *
         * @param db   Current sound db
         * @param time Recording duration
         */
        public void onUpdate(double db, long time);

        /**
         * Stop recording
         *
         * @param filePath Save path
         */
        public void onStop(String filePath);
    }

}

AudioRecord


/**
 * Raw audio recording built on {@code android.media.AudioRecord}.
 *
 * <p>Usage: 1 - {@link #init} (the file suffix should be {@code .pcm}),
 * 2 - {@link #startRecording}, 3 - {@link #stopRecording}. Captured data is
 * appended to the target file rather than overwriting it, which is what makes
 * pause/resume possible. The result is raw PCM and must be converted (e.g. to
 * WAV) before a normal player can play it.
 */
public class AudioRecordingUtils {


    // Audio source: the microphone (same constant MediaRecorder uses).
    private static final int mAudioSource = MediaRecorder.AudioSource.MIC;
    // 44100 Hz — documented as the rate guaranteed to work on all devices.
    private static final int mSampleRateInHz = 44100;
    // Two-channel (stereo) capture.
    private static final int mChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
    // 16-bit PCM samples: more space/CPU than 8-bit, but closer to the real signal.
    private static final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;


    private AudioRecord audioRecord = null;  // null until init() is called
    private int recordBufSize = 0;           // minimum buffer size reported by AudioRecord

    private boolean isRecording = false;

    private String saveFilePath;
    private File mRecordingFile;

    private OnAudioRecordingListener onAudioRecordingListener;

    /**
     * Creates the AudioRecord instance and (re)creates the output file.
     *
     * @param filePath                 output path (a .pcm file); deleted first if it exists
     * @param onAudioRecordingListener notified with the current volume while recording
     */
    public void init(String filePath, OnAudioRecordingListener onAudioRecordingListener) {
        this.onAudioRecordingListener = onAudioRecordingListener;
        saveFilePath = filePath;
        // Minimum buffer the hardware requires for this configuration.
        recordBufSize = AudioRecord.getMinBufferSize(mSampleRateInHz, mChannelConfig, mAudioFormat);
        audioRecord = new AudioRecord(mAudioSource, mSampleRateInHz, mChannelConfig,
                mAudioFormat, recordBufSize);

        // Start from a fresh, empty file.
        mRecordingFile = new File(saveFilePath);
        if (mRecordingFile.exists()) {
            mRecordingFile.delete();
        }
        try {
            mRecordingFile.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
            ALog.e("lu", "Error creating save audio file");
        }

    }

    /**
     * Reassembles 8 little-endian bytes into a double.
     * Assumes {@code arr} has at least 8 elements.
     */
    public static double bytes2Double(byte[] arr) {
        long value = 0;
        for (int i = 0; i < 8; i++) {
            value |= ((long) (arr[i] & 0xff)) << (8 * i);
        }
        return Double.longBitsToDouble(value);
    }

    /**
     * Starts pulling PCM data from the microphone on a worker thread and
     * appending it to the output file. No-op (with a log) if {@link #init}
     * has not completed successfully.
     */
    public void startRecording() {
        // After construction the recorder must be in STATE_INITIALIZED.
        if (audioRecord == null || audioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
            ALog.e("Initialization is not complete");
            return;
        }

        XyObservable.addTask(new XyCallBack() {  // runs run() on a worker thread
            private double volume = 0;

            @Override
            public void run() {
                isRecording = true;
                try {
                    // Append so that pause/resume accumulates into one file.
                    DataOutputStream mDataOutputStream = new DataOutputStream(
                            new BufferedOutputStream(new FileOutputStream(mRecordingFile, true)));
                    byte[] buffer = new byte[recordBufSize];
                    audioRecord.startRecording();
                    // getRecordingState() tells whether data is still being captured.
                    while (isRecording && audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
                        int bufferReadResult = audioRecord.read(buffer, 0, recordBufSize);
                        // FIX: read() returns a negative error code on failure —
                        // only consume the buffer when bytes were actually read.
                        if (bufferReadResult > 0) {
                            // FIX: bulk write instead of one write() call per byte.
                            mDataOutputStream.write(buffer, 0, bufferReadResult);
                            // FIX: volume was never updated (listener always got 0).
                            volume = averageAmplitude(buffer, bufferReadResult);
                        }
                        setFinish();  // schedules finish() on the UI thread
                    }
                    mDataOutputStream.close();
                } catch (Throwable t) {
                    ALog.e("lu", "Recording Failed");
                    stopRecording();
                }
            }

            @Override
            public void finish() {
                if (onAudioRecordingListener != null) {
                    onAudioRecordingListener.onChange(volume);
                }
            }
        });

    }

    /**
     * Mean absolute amplitude of the 16-bit little-endian PCM samples in
     * {@code buffer[0..length)}; 0 for an empty buffer.
     */
    private static double averageAmplitude(byte[] buffer, int length) {
        int samples = length / 2;
        if (samples == 0) {
            return 0;
        }
        long sum = 0;
        for (int i = 0; i + 1 < length; i += 2) {
            short sample = (short) ((buffer[i] & 0xff) | (buffer[i + 1] << 8));
            sum += Math.abs((int) sample);
        }
        return (double) sum / samples;
    }

    /**
     * Suspends recording without releasing the recorder; call
     * {@link #startRecording} again to resume (data keeps appending).
     */
    public void pauseRecording() {
        isRecording = false;
        // FIX: guard against pause before init() (previously an NPE).
        if (audioRecord != null
                && audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
            audioRecord.stop();
        }
    }

    /**
     * Stops recording and releases the AudioRecord and its native resources.
     * After this, {@link #init} must be called again before recording.
     */
    public void stopRecording() {
        isRecording = false;
        if (audioRecord != null) {
            if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
                audioRecord.stop();
            }
            if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
                audioRecord.release();
            }
            // A released AudioRecord must not be reused; force re-init.
            audioRecord = null;
        }
    }

    /** Callback receiving the current recording volume. */
    public interface OnAudioRecordingListener {
        void onChange(double volume);
    }
}

Finally, here is a utility that converts the raw PCM output to WAV format:


/**
 * Converts raw PCM audio files to WAV by prepending a standard 44-byte
 * RIFF/WAVE header (PCM format chunk) to the audio data.
 */
public class Pcm2WavUtils {

    /**
     * Converts a PCM file to a WAV file.
     *
     * @param inPcmFilePath  input PCM file path
     * @param outWavFilePath output WAV file path
     * @param sampleRate     sample rate, e.g. 44100
     * @param channels       number of channels: mono (1) or stereo (2)
     * @param bitNum         bits per sample, 8 or 16
     */
    public void convertPcm2Wav(String inPcmFilePath, String outWavFilePath, int sampleRate,
                                      int channels, int bitNum) {

        byte[] data = new byte[1024];

        // try-with-resources replaces the manual finally/close boilerplate.
        try (FileInputStream in = new FileInputStream(inPcmFilePath);
             FileOutputStream out = new FileOutputStream(outWavFilePath)) {

            // Bytes of audio per second: sampleRate * channels * bytes-per-sample.
            // (long) first to avoid int overflow for large parameter values.
            long byteRate = (long) sampleRate * channels * bitNum / 8;

            // PCM payload size.
            long totalAudioLen = in.getChannel().size();

            // RIFF chunk size: whole file minus the 8 bytes of "RIFF" + size
            // field, i.e. 44 - 8 = 36 header bytes plus the PCM payload.
            long totalDataLen = totalAudioLen + 36;

            writeWaveFileHeader(out, totalAudioLen, totalDataLen, sampleRate, channels,
                    byteRate, bitNum);

            int length;
            while ((length = in.read(data)) > 0) {
                out.write(data, 0, length);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Writes the 44-byte RIFF/WAVE header. All multi-byte fields are
     * little-endian, as the WAV format requires.
     *
     * @param out           WAV output stream
     * @param totalAudioLen size of the PCM payload in bytes
     * @param totalDataLen  RIFF chunk size (payload + 36)
     * @param sampleRate    sampling rate in Hz
     * @param channels      number of channels
     * @param byteRate      bytes of audio per second
     * @param bitNum        bits per sample, 8 or 16
     * @throws IOException if writing fails
     */
    private void writeWaveFileHeader(FileOutputStream out, long totalAudioLen,
                                            long totalDataLen, int sampleRate, int channels,
                                            long byteRate, int bitNum) throws IOException {
        byte[] header = new byte[44];
        // RIFF chunk descriptor
        header[0] = 'R';
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        // 'fmt ' sub-chunk (note the trailing space)
        header[12] = 'f';
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        // Size of the 'fmt ' chunk: always 16 for PCM.
        header[16] = 16;
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        // Audio format: 1 = uncompressed PCM.
        header[20] = 1;
        header[21] = 0;
        // Number of channels.
        header[22] = (byte) channels;
        header[23] = 0;
        // Sample rate (per channel).
        header[24] = (byte) (sampleRate & 0xff);
        header[25] = (byte) ((sampleRate >> 8) & 0xff);
        header[26] = (byte) ((sampleRate >> 16) & 0xff);
        header[27] = (byte) ((sampleRate >> 24) & 0xff);
        // Byte rate: sampleRate * channels * bitNum / 8.
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        // Block align: bytes per sample frame = channels * bitNum / 8.
        // FIX: was hard-coded to 16 bits, ignoring the bitNum parameter.
        header[32] = (byte) (channels * bitNum / 8);
        header[33] = 0;
        // Bits per sample.
        // FIX: was hard-coded to 16, producing a corrupt header for 8-bit PCM.
        header[34] = (byte) bitNum;
        header[35] = 0;
        // 'data' sub-chunk
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        out.write(header, 0, 44);
    }
}

Posted by kulin on Tue, 26 Nov 2019 13:24:12 -0800