How do I decode and play a G711 audio stream on Android?
This is fairly old code and I've forgotten some of the details, but it should give you something to work from.
public final static String TAG = "DefaultTalkBackSession";
TalkBackSession session;
CodecLibObj obj;
private BlockingQueue<byte[]> inputQueue = new LinkedBlockingQueue<byte[]>();
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    try {
        session = new DefaultTalkBackSession("192.168.78.65", 15528);
        obj = session.codecAbilityQuery();
        if (obj != null) {
            session.startLiveListening(new TalkBackSession.OnAudioDataReceiveListener() {
                @Override
                public void onAudioDataReceived(byte[] data) {
                    Log.d(TAG, "received audio data: " + Utils.toHex(data));
                    try {
                        // Hand the raw G.711 frame to the decode thread.
                        inputQueue.put(data);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            }, obj, 1);
            session.startCall(obj, 1);
            ulaw_play();
            ulaw_record();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
@Override
protected void onDestroy() {
    super.onDestroy();
    if (session != null) {
        if (obj != null) {
            session.stopLiveListening();
            session.endCall();
        }
        ((DefaultTalkBackSession) session).stop();
    }
    ulaw_stop();
}
private void ulaw_stop() {
    // Clear the flag first so both loops exit, then interrupt the blocking take().
    ulawRunning = false;
    if (ulaw_decode_thread != null) {
        ulaw_decode_thread.interrupt();
    }
}
private void ulaw_play() {
    ulawRunning = true;
    (ulaw_decode_thread = new Thread(ulaw_decode)).start();
}
volatile boolean ulawRunning = false;
Thread ulaw_decode_thread;
/**
 * Decode thread: takes G.711 u-law frames off the queue, expands them to
 * 16-bit linear PCM and writes them to an AudioTrack.
 */
Runnable ulaw_decode = new Runnable() {
    public void run() {
        try {
            // Give the session a moment to start delivering data.
            Thread.sleep(200);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
        int samp_rate = 8000;
        int maxjitter = AudioTrack.getMinBufferSize(samp_rate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_VOICE_CALL, samp_rate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                maxjitter, AudioTrack.MODE_STREAM);
        track.play();
        try {
            while (ulawRunning) {
                byte[] dataForDecode;
                try {
                    dataForDecode = inputQueue.take();
                } catch (InterruptedException e) {
                    // Interrupted by ulaw_stop(): leave the loop.
                    break;
                }
                short[] audioData = new short[dataForDecode.length];
                // Decode: each u-law byte expands to one 16-bit PCM sample.
                G711.ulaw2linear(dataForDecode, audioData, audioData.length);
                Log.d(TAG, "audioData=" + Utils.toHex(audioData));
                // Play the decoded PCM.
                track.write(audioData, 0, audioData.length);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            track.release();
        }
    }
};
/**
 * Encode thread: reads 16-bit PCM from the microphone, compresses it to
 * G.711 u-law and sends it over the session.
 */
Runnable ulaw_encode = new Runnable() {
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
        AudioRecord record = getAudioRecord();
        int frame_size = 160; // 160 samples = 20 ms at 8 kHz
        // int frame_rate = 8000 / frame_size;
        // long frame_period = 1000 / frame_rate;
        short[] audioData = new short[frame_size];
        byte[] encodeData = new byte[frame_size];
        int num = 0;
        try {
            while (ulawRunning) {
                num = record.read(audioData, 0, frame_size);
                if (num <= 0) continue;
                // Crude attenuation so the mic signal is less likely to clip.
                calc2(audioData, 0, num);
                // PCMU (G.711 u-law) encoding: one byte per sample.
                G711.linear2ulaw(audioData, 0, encodeData, num);
                // Send the encoded frame to the peer.
                session.sendAudioData(encodeData);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            record.release();
        }
    }
};
// Halve every sample: a simple fixed attenuation applied to the mic input.
void calc2(short[] lin, int off, int len) {
    for (int i = 0; i < len; i++) {
        lin[i + off] = (short) (lin[i + off] >> 1);
    }
}
private void ulaw_record() {
    new Thread(ulaw_encode).start();
}
private AudioRecord getAudioRecord() {
    // Requires the android.permission.RECORD_AUDIO permission in the manifest.
    int samp_rate = 8000;
    int min = AudioRecord.getMinBufferSize(samp_rate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    Log.e(TAG, "min buffer size:" + min);
    AudioRecord record = new AudioRecord(
            MediaRecorder.AudioSource.MIC,  // the recording source
            samp_rate,                      // sample rate; G.711 uses 8000 Hz
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            min);
    record.startRecording();
    return record;
}
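
One more note: the G711 class used above is not part of the Android SDK and is not included in this snippet. If you don't already have an implementation, here is a rough sketch of what such a helper could look like. The method names and signatures are only inferred from the calls above (ulaw2linear(byte[], short[], int) and linear2ulaw(short[], int, byte[], int)); the conversion itself is the standard ITU-T G.711 u-law companding (bias 132, 3-bit segment, 4-bit mantissa, complemented code words).
// Rough sketch of the G711 helper assumed by the code above. Method names and
// signatures are inferred from the call sites; the algorithm is standard
// ITU-T G.711 u-law companding.
public final class G711 {
    private static final int BIAS = 0x84; // u-law bias (132)

    /** Expand len u-law bytes from src into 16-bit linear PCM samples in dst. */
    public static void ulaw2linear(byte[] src, short[] dst, int len) {
        for (int i = 0; i < len; i++) {
            int u = ~src[i] & 0xFF;            // code words are stored complemented
            int exponent = (u >> 4) & 0x07;    // 3-bit segment
            int mantissa = u & 0x0F;           // 4-bit step within the segment
            int magnitude = (((mantissa << 3) + BIAS) << exponent) - BIAS;
            dst[i] = (short) ((u & 0x80) != 0 ? -magnitude : magnitude);
        }
    }

    /** Compress len 16-bit PCM samples from src (starting at off) into u-law bytes in dst. */
    public static void linear2ulaw(short[] src, int off, byte[] dst, int len) {
        for (int i = 0; i < len; i++) {
            int pcm = src[off + i];
            int mask = 0xFF;                   // becomes 0x7F for negative samples
            if (pcm < 0) {
                pcm = -pcm;
                mask = 0x7F;
            }
            if (pcm > 32635) pcm = 32635;      // clip so adding the bias cannot overflow
            pcm += BIAS;
            int exponent = 7;                  // find the segment from the highest set bit
            for (int test = 0x4000; (pcm & test) == 0 && exponent > 0; test >>= 1) {
                exponent--;
            }
            int mantissa = (pcm >> (exponent + 3)) & 0x0F;
            dst[i] = (byte) (((exponent << 4) | mantissa) ^ mask);
        }
    }
}
Each u-law byte corresponds to exactly one 16-bit sample, which is why the decode thread allocates a short[] of the same length as the incoming frame.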