Android: audio recording with voice level visualization
2022-09-04 05:18:12
I need to create an Android application that records voice while displaying a voice (sound) level visualization.
I have already built a recording app, but I can't figure out how to add the sound level visualization. How can I do this?
Could someone help me with a suggestion, a sample tutorial link, or some code?
Create an XML layout like the following as activity_recording.xml.
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="180dp"
    android:layout_alignParentBottom="true"
    android:background="#231f20" >

    <ali.visualiser.VisualizerView
        android:id="@+id/visualizer"
        android:layout_width="220dp"
        android:layout_height="75dp"
        android:layout_centerHorizontal="true"
        android:layout_margin="5dp" />

    <TextView
        android:id="@+id/txtRecord"
        android:layout_width="wrap_content"
        android:layout_height="40dp"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="25dp"
        android:gravity="center"
        android:text="Start Recording"
        android:textColor="@android:color/white"
        android:textSize="30sp" />

</RelativeLayout>
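Note that the layout refers to the custom view by its fully qualified class name (ali.visualiser.VisualizerView), so the tag must match the package the view is declared in. Recording from the microphone also requires the RECORD_AUDIO permission, and this answer writes the file to external storage, which needs WRITE_EXTERNAL_STORAGE on older Android versions. A minimal sketch of the relevant manifest entries (the package name follows the example code; adjust it to your own project):

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="ali.visualiser">

    <!-- required to capture audio from the microphone -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />

    <!-- needed on older Android versions to save the recording to external storage -->
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <application>
        <!-- activities, etc. -->
    </application>
</manifest>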
Create the custom visualizer view as shown below.
package ali.visualiser;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
public class VisualizerView extends View {
    private static final int LINE_WIDTH = 1; // width of visualizer lines
    private static final int LINE_SCALE = 75; // scales visualizer lines

    private List<Float> amplitudes; // amplitudes for line lengths
    private int width; // width of this View
    private int height; // height of this View
    private Paint linePaint; // specifies line drawing characteristics

    // constructor
    public VisualizerView(Context context, AttributeSet attrs) {
        super(context, attrs); // call superclass constructor
        linePaint = new Paint(); // create Paint for lines
        linePaint.setColor(Color.GREEN); // set color to green
        linePaint.setStrokeWidth(LINE_WIDTH); // set stroke width
    }

    // called when the dimensions of the View change
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        width = w; // new width of this View
        height = h; // new height of this View
        amplitudes = new ArrayList<Float>(width / LINE_WIDTH);
    }

    // clear all amplitudes to prepare for a new visualization
    public void clear() {
        amplitudes.clear();
    }

    // add the given amplitude to the amplitudes ArrayList
    public void addAmplitude(float amplitude) {
        amplitudes.add(amplitude); // add newest to the amplitudes ArrayList

        // if the power lines completely fill the VisualizerView
        if (amplitudes.size() * LINE_WIDTH >= width) {
            amplitudes.remove(0); // remove oldest power value
        }
    }

    // draw the visualizer with scaled lines representing the amplitudes
    @Override
    public void onDraw(Canvas canvas) {
        int middle = height / 2; // get the middle of the View
        float curX = 0; // start curX at zero

        // for each item in the amplitudes ArrayList
        for (float power : amplitudes) {
            float scaledHeight = power / LINE_SCALE; // scale the power
            curX += LINE_WIDTH; // increase X by LINE_WIDTH

            // draw a line representing this item in the amplitudes ArrayList
            canvas.drawLine(curX, middle + scaledHeight / 2, curX,
                    middle - scaledHeight / 2, linePaint);
        }
    }
}
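Before wiring the view to MediaRecorder, you can sanity-check the drawing by pushing synthetic amplitudes into it from a Handler. The following is a hypothetical test snippet (not part of the original answer); it assumes the layout above and feeds random values in the 0-32767 range that getMaxAmplitude() produces. Drop it into an Activity's onCreate() to see the bars animate:

// hypothetical sanity check: animate the visualizer with random amplitudes
final VisualizerView view = (VisualizerView) findViewById(R.id.visualizer);
final Handler testHandler = new Handler();
final java.util.Random random = new java.util.Random();

testHandler.post(new Runnable() {
    @Override
    public void run() {
        view.addAmplitude(random.nextInt(32768)); // same range as getMaxAmplitude()
        view.invalidate();                        // trigger onDraw()
        testHandler.postDelayed(this, 40);        // roughly the REPEAT_INTERVAL used below
    }
});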
Create the recording activity class as shown below.
package ali.visualiser;
import java.io.File;
import java.io.IOException;
import android.app.Activity;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
public class RecordingActivity extends Activity {

    public static final String DIRECTORY_NAME_TEMP = "AudioTemp";
    public static final int REPEAT_INTERVAL = 40;

    private TextView txtRecord;
    VisualizerView visualizerView;

    private MediaRecorder recorder = null;
    File audioDirTemp;
    private boolean isRecording = false;

    private Handler handler; // Handler for updating the visualizer

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_recording);

        visualizerView = (VisualizerView) findViewById(R.id.visualizer);

        txtRecord = (TextView) findViewById(R.id.txtRecord);
        txtRecord.setOnClickListener(recordClick);

        audioDirTemp = new File(Environment.getExternalStorageDirectory(),
                DIRECTORY_NAME_TEMP);
        if (audioDirTemp.exists()) {
            deleteFilesInDir(audioDirTemp);
        } else {
            audioDirTemp.mkdirs();
        }

        // create the Handler for visualizer updates
        handler = new Handler();
    }
    OnClickListener recordClick = new OnClickListener() {

        @Override
        public void onClick(View v) {
            if (!isRecording) {
                // start recording and update the UI label
                txtRecord.setText("Stop Recording");

                recorder = new MediaRecorder();
                recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
                recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
                recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
                // the container is 3GP (THREE_GPP/AMR_NB), so use a matching extension
                recorder.setOutputFile(audioDirTemp + "/audio_file.3gp");

                OnErrorListener errorListener = null;
                recorder.setOnErrorListener(errorListener);
                OnInfoListener infoListener = null;
                recorder.setOnInfoListener(infoListener);

                try {
                    recorder.prepare();
                    recorder.start();
                    isRecording = true; // we are currently recording
                } catch (IllegalStateException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }

                handler.post(updateVisualizer);
            } else {
                txtRecord.setText("Start Recording");
                releaseRecorder();
            }
        }
    };
    private void releaseRecorder() {
        if (recorder != null) {
            isRecording = false; // stop recording
            handler.removeCallbacks(updateVisualizer);
            visualizerView.clear();
            recorder.stop();
            recorder.reset();
            recorder.release();
            recorder = null;
        }
    }
    public static boolean deleteFilesInDir(File path) {
        if (path.exists()) {
            File[] files = path.listFiles();
            if (files == null) {
                return true;
            }
            for (File file : files) {
                // only delete plain files; leave subdirectories alone
                if (!file.isDirectory()) {
                    file.delete();
                }
            }
        }
        return true;
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        releaseRecorder();
    }

    // updates the visualizer every REPEAT_INTERVAL (40) milliseconds
    Runnable updateVisualizer = new Runnable() {
        @Override
        public void run() {
            if (isRecording) // only while we are still recording
            {
                // get the current amplitude
                int x = recorder.getMaxAmplitude();
                visualizerView.addAmplitude(x); // update the VisualizerView
                visualizerView.invalidate(); // refresh the VisualizerView

                // schedule the next update in REPEAT_INTERVAL milliseconds
                handler.postDelayed(this, REPEAT_INTERVAL);
            }
        }
    };
}
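On Android 6.0 (API 23) and later, RECORD_AUDIO is a dangerous permission and must also be granted at runtime, otherwise recording will fail. A minimal check, sketched here under the assumption that androidx.core is available (the request code and helper name are illustrative, not from the original answer); call it before starting the recorder:

import android.Manifest;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

// hypothetical helper inside RecordingActivity
private static final int REQUEST_RECORD_AUDIO = 1;

private void ensureAudioPermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        // ask the user; the result is delivered to onRequestPermissionsResult()
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.RECORD_AUDIO },
                REQUEST_RECORD_AUDIO);
    }
}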
Result
Here is what it looks like: https://www.youtube.com/watch?v=BoFG6S02GH0
When it reaches the end, the animation continues as expected, erasing the beginning of the graph.
I like Ali's answer, but here is a simpler version that performs better. The real speed comes from keeping the view class's onDraw method as fast as possible: do any calculation that is not needed for drawing outside the draw loop, keep the right values stored in memory, and then hand fully populated arrays to the draw routines so the hardware can optimize drawing many lines at once.
I launch my recording activity full screen, but you can create a layout resource or add the view wherever you like (a minimal layout sketch follows the view code below).
Activity:
public class RecordingActivity extends Activity {

    private VisualizerView visualizerView;
    private MediaRecorder recorder = new MediaRecorder();
    private Handler handler = new Handler();

    final Runnable updater = new Runnable() {
        public void run() {
            handler.postDelayed(this, 1);
            int maxAmplitude = recorder.getMaxAmplitude();
            if (maxAmplitude != 0) {
                visualizerView.addAmplitude(maxAmplitude);
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_recording);
        visualizerView = (VisualizerView) findViewById(R.id.visualizer);

        try {
            recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            recorder.setOutputFile("/dev/null");
            recorder.prepare();
            recorder.start();
        } catch (IllegalStateException | IOException ignored) {
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        handler.removeCallbacks(updater);
        recorder.stop();
        recorder.reset();
        recorder.release();
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        handler.post(updater);
    }
}
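Note that this version deliberately streams the encoded audio to /dev/null, so nothing is saved; the recorder is only used as an amplitude source for the visualizer. If you also want to keep the audio, point setOutputFile at a real file instead, for example (illustrative path, using the app-specific external directory):

// illustrative alternative: keep the recording instead of discarding it
java.io.File outFile = new java.io.File(getExternalFilesDir(null), "visualized_recording.3gp");
recorder.setOutputFile(outFile.getAbsolutePath());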
View:
public class VisualizerView extends View {

    private static final int MAX_AMPLITUDE = 32767;

    private float[] amplitudes;
    private float[] vectors;
    private int insertIdx = 0;
    private Paint pointPaint;
    private Paint linePaint;
    private int width;
    private int height;

    public VisualizerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        linePaint = new Paint();
        linePaint.setColor(Color.GREEN);
        linePaint.setStrokeWidth(1);
        pointPaint = new Paint();
        pointPaint.setColor(Color.BLUE);
        pointPaint.setStrokeWidth(1);
    }

    @Override
    protected void onSizeChanged(int width, int h, int oldw, int oldh) {
        this.width = width;
        height = h;
        amplitudes = new float[this.width * 2]; // xy for each point across the width
        vectors = new float[this.width * 4]; // xxyy for each line across the width
    }

    /**
     * Modifies the draw arrays. Cycles back to zero when the amplitude samples
     * reach the screen width.
     */
    public void addAmplitude(int amplitude) {
        invalidate();
        float scaledHeight = ((float) amplitude / MAX_AMPLITUDE) * (height - 1);

        int ampIdx = insertIdx * 2;
        amplitudes[ampIdx++] = insertIdx; // x
        amplitudes[ampIdx] = scaledHeight; // y

        int vectorIdx = insertIdx * 4;
        vectors[vectorIdx++] = insertIdx; // x0
        vectors[vectorIdx++] = 0; // y0
        vectors[vectorIdx++] = insertIdx; // x1
        vectors[vectorIdx] = scaledHeight; // y1

        // insert index must stay smaller than the screen width
        insertIdx = ++insertIdx >= width ? 0 : insertIdx;
    }

    @Override
    public void onDraw(Canvas canvas) {
        canvas.drawLines(vectors, linePaint);
        canvas.drawPoints(amplitudes, pointPaint);
    }
}
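This view plugs into a layout the same way as in the first answer; below is a minimal full-screen layout sketch, assuming the class sits in the same ali.visualiser package (adjust the fully qualified tag to wherever your VisualizerView actually lives):

<?xml version="1.0" encoding="utf-8"?>
<!-- assumed package name; use your own VisualizerView's fully qualified name -->
<ali.visualiser.VisualizerView
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/visualizer"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="#231f20" />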