I'm trying to develop a simple app that recognizes sound patterns. I want to record audio directly on a smartphone and visualize the live recording as a waveform. I've managed to get this working using code I found in blog posts. My implementation is as follows.

Code for the recording:

Java
package com.sound;

import android.app.Activity;
import android.widget.LinearLayout;
import android.os.Bundle;
import android.os.Environment;
import android.view.ViewGroup;
import android.widget.Button;
import android.view.View;
import android.content.Context;
import android.util.Log;
import android.media.MediaRecorder;
import android.media.MediaPlayer;

import java.io.IOException;

public class recorder extends Activity {
    private static final String LOG_TAG = "Audio_Record_Test";
    private static String mFileName = null;

    private MediaRecorder mRecorder = null;

    private MediaPlayer   mPlayer = null;

    private void onRecord(boolean start) {
        if (start) {
            startRecording();
        } else {
            stopRecording();
        }
    }

    private void onPlay(boolean start) {
        if (start) {
            startPlaying();
        } else {
            stopPlaying();
        }
    }

    private void startPlaying() {
        mPlayer = new MediaPlayer();
        try {
            mPlayer.setDataSource(mFileName);
            mPlayer.prepare();
            mPlayer.start();
        } catch (IOException e) {
            Log.e(LOG_TAG, "prepare() failed");
        }
    }

    private void stopPlaying() {
        mPlayer.release();
        mPlayer = null;
    }

    private void startRecording() {
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mRecorder.setOutputFile(mFileName);
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);

        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Log.e(LOG_TAG, "prepare() failed");
        }

        mRecorder.start();
    }

    private void stopRecording() {
        mRecorder.stop();
        mRecorder.release();
        mRecorder = null;
    }

    class RecordButton extends Button {
        boolean mStartRecording = true;

        OnClickListener clicker = new OnClickListener() {
            public void onClick(View v) {
                onRecord(mStartRecording);
                if (mStartRecording) {
                    setText("Stop recording");
                } else {
                    setText("Start recording");
                }
                mStartRecording = !mStartRecording;
            }
        };

        public RecordButton(Context ctx) {
            super(ctx);
            setText("Start recording");
            setOnClickListener(clicker);
        }
    }

    class PlayButton extends Button {
        boolean mStartPlaying = true;

        OnClickListener clicker = new OnClickListener() {
            public void onClick(View v) {
                onPlay(mStartPlaying);
                if (mStartPlaying) {
                    setText("Stop playing");
                } else {
                    setText("Start playing");
                }
                mStartPlaying = !mStartPlaying;
            }
        };

        public PlayButton(Context ctx) {
            super(ctx);
            setText("Start playing");
            setOnClickListener(clicker);
        }
    }

    public recorder() {
        mFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
        mFileName += "/audio_record_test.3gp";
    }

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);

        LinearLayout ll = new LinearLayout(this);
        RecordButton mRecordButton = new RecordButton(this);
        ll.addView(mRecordButton,
                new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        0));
        PlayButton mPlayButton = new PlayButton(this);
        ll.addView(mPlayButton,
                new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        0));
        setContentView(ll);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mRecorder != null) {
            mRecorder.release();
            mRecorder = null;
        }

        if (mPlayer != null) {
            mPlayer.release();
            mPlayer = null;
        }
    }
}
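
Note: the code above assumes the RECORD_AUDIO and WRITE_EXTERNAL_STORAGE permissions are declared in the manifest. On Android 6.0 (API 23) and later, RECORD_AUDIO must also be requested at runtime before recording starts. A minimal sketch, assuming a minSdk of 23 or higher (on older minSdk levels, ActivityCompat offers the same calls); the request code 200 is an arbitrary value I picked:

Java
// Minimal runtime-permission check before recording (API 23+).
// Request code 200 is arbitrary; handle the user's choice in
// onRequestPermissionsResult() since the request can be denied.
if (checkSelfPermission(android.Manifest.permission.RECORD_AUDIO)
        != android.content.pm.PackageManager.PERMISSION_GRANTED) {
    requestPermissions(
            new String[]{ android.Manifest.permission.RECORD_AUDIO,
                          android.Manifest.permission.WRITE_EXTERNAL_STORAGE },
            200);
}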


Code for the waveform activity:

Java
package com.sound;

import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;

// This class captures the incoming sound and draws it as a waveform
public class monitor extends Activity implements View.OnClickListener {

    int frequency = 8000; // Sampling frequency
    int blockSize = 512; // Waveform window width
    int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
    int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
    public RealDoubleFFT transformer;
    Button start;
    boolean started = false;
    RecordAudio recordTask;
    ImageView imageView;
    Bitmap bitmap;
    Canvas canvas;
    Paint paint;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Get the view from activity_monitor.xml
        setContentView(R.layout.monitor);
        Button buttonBack = (Button) findViewById(R.id.button_back2);
        buttonBack.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // Return to the home screen (home.class)
                startActivity(new Intent(monitor.this, home.class));
            }
        });

        // Locate the navigation buttons declared in monitor.xml
        Button buttonSet = (Button) findViewById(R.id.button_set);
        Button buttonCheck = (Button) findViewById(R.id.button_check);
        Button buttonRev = (Button) findViewById(R.id.button_rev);

        // Capture button clicks
        buttonSet.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // Open the settings screen (settings.class)
                startActivity(new Intent(monitor.this, settings.class));
            }
        });

        buttonCheck.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // Open the capture screen (capture.class)
                startActivity(new Intent(monitor.this, capture.class));
            }
        });

        buttonRev.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // Open the review screen (review.class)
                startActivity(new Intent(monitor.this, review.class));
            }
        });

        start = (Button) this.findViewById(R.id.button_start);
        start.setOnClickListener(this);
        transformer = new RealDoubleFFT(blockSize);
        imageView = (ImageView) this.findViewById(R.id.ImageView);
        bitmap = Bitmap.createBitmap(512, 512, Bitmap.Config.ARGB_8888); // Bitmap dimensions: width x height
        canvas = new Canvas(bitmap);
        paint = new Paint();
        paint.setColor(Color.WHITE);
        imageView.setImageBitmap(bitmap);
    }

    class RecordAudio extends AsyncTask<Void, double[], Void> {

        @Override
        protected Void doInBackground(Void... arg0) {
            try {
                int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
                AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding, bufferSize);
                short[] buffer = new short[blockSize];
                double[] toTransform = new double[blockSize];
                audioRecord.startRecording();
                started = true;
                // 'started' must remain true for the loop below to keep reading
                while (started) {
                    int bufferReadResult = audioRecord.read(buffer, 0, blockSize);
                    for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
                        // Normalize the signed 16-bit sample; full scale would be
                        // 32768.0, so dividing by 1024.0 amplifies the display
                        toTransform[i] = (double) buffer[i] / 1024.0;
                    }
                    transformer.ft(toTransform); // currently a no-op (see stub below)
                    publishProgress(toTransform);
                }
                audioRecord.stop();
                audioRecord.release();
            } catch (Throwable t) {
                t.printStackTrace();
                Log.e("AudioRecord", "Recording Failed");
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(double[]... toTransform) {
            canvas.drawColor(Color.BLACK);
            for (int i = 0; i < toTransform[0].length; i++) {
                int x = i;
                // Baseline at y = 240; the factor 80 sets the sensitivity
                // (higher values make the display more sensitive)
                int downy = (int) (240 - (toTransform[0][i] * 80));
                int upy = 240;
                canvas.drawLine(x, downy, x, upy, paint);
            }
            }
            imageView.invalidate();
            super.onProgressUpdate(toTransform);
        }
    }

    // Start/Stop button click handler
    public void onClick(View arg0) {
        if (started) {
            started = false;
            start.setText("Start");
            recordTask.cancel(true);
        } else {
            started = true;
            start.setText("Stop");
            recordTask = new RecordAudio();
            recordTask.execute();
        }
    }

    // Placeholder FFT class: ft() performs no transform, so the raw
    // time-domain waveform is what gets drawn. Swap in a real FFT
    // (e.g. ca.uol.aig.fftpack.RealDoubleFFT) for a frequency-domain view.
    public class RealDoubleFFT {
        public RealDoubleFFT(int blockSize) {
        }

        public void ft(double[] data) {
            // No-op placeholder: 'data' is passed through unchanged
        }
    }
}
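
Note that RealDoubleFFT above is only an empty placeholder, so no transform is actually applied and the raw waveform is drawn. If a frequency-domain display is wanted instead, a packaged FFT (for example the ca.uol.aig.fftpack.RealDoubleFFT class that similar blog examples use) would be the practical choice. Purely as an illustration, a naive O(n²) DFT magnitude calculation could stand in for ft(); this is a sketch, not production code:

Java
    // Illustrative only: naive O(n^2) DFT magnitude, as a stand-in for a
    // real FFT library. Overwrites the first half of 'data' with the
    // spectrum so the existing drawing code can display it; zeroes the rest.
    public void ft(double[] data) {
        int n = data.length;
        double[] magnitude = new double[n / 2];
        for (int k = 0; k < n / 2; k++) {
            double re = 0, im = 0;
            for (int t = 0; t < n; t++) {
                re += data[t] * Math.cos(2 * Math.PI * k * t / n);
                im -= data[t] * Math.sin(2 * Math.PI * k * t / n);
            }
            magnitude[k] = Math.sqrt(re * re + im * im) / n;
        }
        System.arraycopy(magnitude, 0, data, 0, n / 2);
        java.util.Arrays.fill(data, n / 2, n, 0.0);
    }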



The important and most difficult part is comparing the live recording in real time with an audio file already saved on the smartphone. The app should be able to compare the two audio streams and identify similar sound patterns in them. Can someone please help? A rough idea of the direction I've been considering is sketched below.
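
One direction I've been considering is normalized cross-correlation between short PCM frames of the live recording and the stored file. The matchScore method below is only my own rough sketch (a hypothetical helper, not from any library); robust pattern recognition would normally compare spectral features such as MFCCs rather than raw samples:

Java
    // Illustrative sketch: normalized cross-correlation between a live frame
    // and a stored reference frame. Returns a value in [-1, 1]; values near
    // 1.0 mean the waveforms are similar. Hypothetical helper, not from any
    // library.
    public static double matchScore(short[] live, short[] reference) {
        int n = Math.min(live.length, reference.length);
        double sumXY = 0, sumXX = 0, sumYY = 0;
        for (int i = 0; i < n; i++) {
            sumXY += (double) live[i] * reference[i];
            sumXX += (double) live[i] * live[i];
            sumYY += (double) reference[i] * reference[i];
        }
        if (sumXX == 0 || sumYY == 0) return 0; // silent frame
        return sumXY / Math.sqrt(sumXX * sumYY);
    }

Sliding the stored pattern over the incoming frames and flagging scores above a threshold (say 0.7, though that number is a guess and would need tuning) would give a crude detector, but I'd welcome better approaches.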

Thanks

Vaji