如果說你問筆者有關於 Android Streaming Audio Processing 的問題, 筆者覺得這有一定的難度. 先不講太多理論上的東西; 我把一些自己本人實做出來的提供讀者參考. 筆者的實作方式不一定是最好, 但應該是可以 work.
主要的架構如下:
...
public void onAudio(byte[] data) {
Thread t = new Thread(new audioRunnable(data));
t.setDaemon(true); t.start();
}
...
class audioRunnable implements Runnable {
private byte[] mData;
public audioRunnable(byte[] data) {
mData = new byte[data.length];
mData = Arrays.copyOf(data, data.length);
}
public void run() {
if (MainActivity.getMainActivity() != null)
MainActivity.getMainActivity().onAudio(mData);
}
}
...
public synchronized void onAudio(byte[] data){
...
track.write(chunk, 0, chunk.length);
...
}
...
public void SurfaceChanged(SurfaceHolder Hdr, int Format, int W, int H) {
...
track = new AudioTrack(AudioManager.STREAM_MUSIC, audioSample, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 8192*2, AudioTrack.MODE_STREAM);
track.play();
...
}
AudioRunnable
首先請準備 audioRunnable . 我們 implements Runnable . 在 run() 呼叫 MainActivity 的 onAudio(byte[] data) . getMainActivity() 很簡單, 如下:
// Global accessor used by worker threads (e.g. audioRunnable) to reach the
// Activity. May return null (callers in this file null-check the result).
public static MainActivity getMainActivity() { return activity; }
// Runnable that owns a private copy of one audio chunk and hands it to the
// MainActivity's synchronized onAudio() on its own thread (the AudioTrack
// write path is blocking I/O, so this must not run on the native callback
// thread).
class audioRunnable implements Runnable {
    private final byte[] mData;  // private snapshot of the chunk

    // Defensive copy: the caller may reuse its buffer after handing it over.
    // Note: Arrays.copyOf allocates the destination itself; the original's
    // extra "mData = new byte[data.length]" was immediately discarded garbage.
    public audioRunnable(byte[] data) {
        mData = Arrays.copyOf(data, data.length);
    }

    public void run() {
        if (MainActivity.getMainActivity() != null)
            MainActivity.getMainActivity().onAudio(mData);
    }
}
onAudio(byte[] data)
假設有 native 執行緒 callback 呼叫 nativeApp Object 之 onAudio() . 每一次 onAudio() 都 new 一個 audioRunnable 執行緒; 因為 Audio 是 blocking IO, 所以必需要是 new 執行緒. 每一次都 new 一個執行緒? Yes, 每一次都 new 一個執行緒. 執行緒裡面 setDaemon(true); start();
public class nativeApp {
...
// Entry point for the native layer: called once per received audio chunk.
public void onAudio(byte[] data) {
// Drop empty callbacks early.
if (data == null) return;
if (data.length == 0) return;
// One short-lived daemon thread per chunk, because AudioTrack.write() is
// blocking I/O and the native callback thread must not block.
// NOTE(review): per-chunk threads provide no ordering guarantee between
// chunks — playback order depends on the scheduler; confirm acceptable.
Thread t = new Thread(new audioRunnable(data));
t.setDaemon(true);
t.start();
}
...
}
MainActivity 裡面的 onAudio(byte[] data)
MainActivity 裡面的 onAudio() 很簡單: 就 AudioTrack Object write() ; 別忘了在 write() 之前要先
track = new AudioTrack(AudioManager.STREAM_MUSIC, audioSample, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 8192*2, AudioTrack.MODE_STREAM);
track.play();
也別忘了離開時要 CleanUp
....
if (track != null) {
track.stop(); track.release(); track = null;
}
public class MainActivity extends Activity {
...
private AudioTrack track;
private MediaCodec aCodec;
private MediaCodec.BufferInfo aCodecInfo;
...
// (重複的 'private AudioTrack track;' 宣告已移除 — 上面已宣告過)
public static int audioSample = 48000;
...
public void SurfaceChanged(SurfaceHolder Hdr, int Format, int W, int H) {
track = new AudioTrack(AudioManager.STREAM_MUSIC, audioSample, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 8192*2, AudioTrack.MODE_STREAM);
track.play();
}
...
public void onAudio(byte[] data) {
...
track.write(chunk, 0, chunk.length);
...
}
...
}
SurfaceView surfaceChanged
SurfaceView 的 surfaceChanged() 是很好 initialize AudioTrack 的地方 (下面的範例即在 surfaceChanged() 裡呼叫 MainActivity 的 SurfaceChanged() 來初始化).
// SurfaceView whose lifecycle callbacks drive AudioTrack/MediaCodec setup in
// MainActivity (via MainActivity.SurfaceChanged) and which forwards touches.
class mySurfaceView extends SurfaceView implements SurfaceHolder.Callback {
    private int surfaceWidth;   // last width reported by surfaceChanged(), 0 until then
    private int surfaceHeight;  // last height reported by surfaceChanged()
    private Surface surface;    // live Surface between surfaceCreated/Destroyed, else null

    public mySurfaceView(Context context) {
        super(context);
        surfaceWidth = surfaceHeight = 0;
        setFocusable(true);
        setFocusableInTouchMode(true);
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        MainActivity activity = MainActivity.getMainActivity();
        // Guard against touches arriving before the Activity is registered;
        // other call sites in this file already null-check getMainActivity().
        if (activity != null)
            activity.setTouches(this, ev);
        return true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder Hdr) { this.surface = Hdr.getSurface(); }

    @Override
    public void surfaceDestroyed(SurfaceHolder Hdr) { this.surface = null; }

    @Override
    public void surfaceChanged(SurfaceHolder Hdr, int Format, int W, int H) {
        surfaceWidth = W;
        surfaceHeight = H;
        this.surface = Hdr.getSurface();
        MainActivity activity = MainActivity.getMainActivity();
        setWillNotDraw(false);
        // Same null guard as onTouchEvent: the Activity may not exist yet.
        if (activity != null)
            activity.SurfaceChanged(Hdr, Format, W, H);
    }
}
MediaCodec AAC Decode
MediaCodec AAC Decoder 的設定如下:
同樣的, 也別忘了離開時要 CleanUp
if (aCodec != null) {
aCodec.flush(); aCodec.stop(); aCodec.release(); aCodec = null;
}
public void SurfaceChanged(SurfaceHolder Hdr, int Format, int W, int H) {
...
MediaFormat aformat = MediaFormat.createAudioFormat("audio/mp4a-latm", audioSample, audioChannel);
byte[] CSD0 = new byte[]{ (byte)0x11, (byte)0x90 };
aformat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
aformat.setInteger(MediaFormat.KEY_IS_ADTS, 1);
aformat.setByteBuffer("csd-0", ByteBuffer.wrap(CSD0));
aCodec = MediaCodec.createDecoderByType("audio/mp4a-latm");
aCodecInfo = new MediaCodec.BufferInfo();
try { aCodec.configure(aformat, null, null, 0), aCodec.start(); }
catch(Exception e) { Log.i("example", "onWindowFocusChanged(), NewCodec.configure throw Exception"); aCodec = null; }
track = new AudioTrack(AudioManager.STREAM_MUSIC, audioSample,
AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 8192 * 2, AudioTrack.MODE_STREAM);
track.play();
}
// Decodes one AAC chunk and writes the resulting PCM to the AudioTrack.
// synchronized: called concurrently from per-chunk worker threads
// (audioRunnable), and MediaCodec buffer queues must not be raced.
public synchronized void onAudio(byte[] data) {
    int J = 0;
    int N = 0;  // number of input buffers successfully queued this call
    if (aCodec == null) return;
    if (null == data) return;
    if (0 == data.length) return;
    ByteBuffer[] inputBuffers = null;
    try { inputBuffers = aCodec.getInputBuffers(); }
    catch (Exception e) { Log.i("example", "onAudio(), getInputBuffers() throw Exception"); return; }
    if (inputBuffers == null) return;
    try {
        J = aCodec.dequeueInputBuffer(10000);  // 10 ms timeout (microseconds)
        if (J >= 0) {
            ByteBuffer inputBuffer = inputBuffers[J];
            inputBuffer.rewind();
            inputBuffer.put(data, 0, data.length);
            aCodec.queueInputBuffer(J, 0, inputBuffer.position(), 0, 0);
            N++;
        }
    }
    catch (IllegalStateException e) { Log.i("example","onAudio(), dequeueInputBuffer throw IllegalStateException"); return; }
    catch (Exception e) { Log.i("example","onAudio(), dequeueInputBuffer throw Exception"); return; }
    if (0 == N) return;
    try {
        // BUG FIX: the original passed vCodecInfo (the *video* BufferInfo)
        // here, while reading aCodecInfo.size below — the audio BufferInfo
        // (aCodecInfo, created in SurfaceChanged) is the right one.
        J = aCodec.dequeueOutputBuffer(aCodecInfo, 30000);  // 30 ms timeout
        if (J >= 0) {
            ByteBuffer[] outputBuffers = aCodec.getOutputBuffers();
            final ByteBuffer buf = outputBuffers[J];
            byte[] chunk = new byte[aCodecInfo.size];
            buf.get(chunk);
            buf.clear();
            // Guard track: SurfaceChanged() may not have created it yet,
            // or cleanup may already have nulled it.
            if (chunk.length > 0 && track != null)
                track.write(chunk, 0, chunk.length);
            aCodec.releaseOutputBuffer(J, false);
        }
        else { Log.i("example", "onAudio(), dequeueOutputBuffer NG"); }
    }
    catch (IllegalStateException e) { Log.i("example", "onAudio(), dequeueOutputBuffer() throw IllegalStateException"); }
    catch (Exception e) { Log.i("example", "onAudio(), dequeueOutputBuffer() throw Exception"); }
}
A Framebuffer Record Simple Example on Android 4.x Using NDK (Mediacodec)
#include <unistd.h> #include <errno.h> #include <unistd.h> #include <stdio.h> #include <fcntl.h> #include <stdlib.h> #include <linux/fb.h> #include <sys/ioctl.h> #include <sys/mman.h> #include <sys/types.h> #include <stdint.h> #if (OS==17) #include <binder/IMemory.h> #else //#include <media/stagefright/MediaMuxer.h> #endif #include <media/openmax/OMX_IVCommon.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/MediaCodec.h> #include <media/stagefright/MediaErrors.h> #include <media/ICrypto.h> #include <stdlib.h> #include <unistd.h> #include <string.h> #include <stdio.h> #include <fcntl.h> #include <signal.h> #include <getopt.h> #include <sys/wait.h> #include <signal.h> #include <gui/Surface.h> #include <gui/SurfaceComposerClient.h> #include <gui/ISurfaceComposer.h> #include <gui/SurfaceComposerClient.h> #include <gui/ISurfaceComposer.h> #include <binder/IPCThreadState.h> #include <utils/Errors.h> #include <utils/Thread.h> #include <utils/Timers.h> #include <utils/Compat.h> #include <ui/PixelFormat.h> #include <ui/DisplayInfo.h> #include <media/stagefright/Utils.h> #include <media/stagefright/foundation/AString.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/MediaCodec.h> #include <media/ICrypto.h> #include <media/stagefright/AudioPlayer.h> #include <media/stagefright/CameraSource.h> #include <media/stagefright/FileSource.h> #include <media/stagefright/MediaBufferGroup.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/MediaExtractor.h> #include <media/stagefright/MPEG4Writer.h> #include <media/stagefright/OMXClient.h> #include <media/stagefright/OMXCodec.h> #include <media/MediaPlayerInterface.h> #include <EGL/egl.h> #include <EGL/eglext.h> #include <GLES2/gl2.h> #include <GLES2/gl2ext.h> 
// ===========================================================================
// fbrecord: mirror the main display into a "video/avc" MediaCodec encoder via
// a SurfaceFlinger virtual display, and dump the encoder output to
// "fbrecord.mp4".
// NOTE(review): no muxer is used — the file is a raw H.264 elementary stream,
// so the .mp4 extension is misleading; confirm intent before reuse.
// WARNING (extraction artifact): the original listing's line breaks were
// collapsed when this page was scraped. Several single-line "//" comments now
// swallow the code that followed them on the next line (e.g. "// int main("
// and "// be responsive on signal FILE *out = 0; ..."), and one wrapped
// comment leaves a dangling "ANativeWindowBuffer **anb)" fragment at the
// start of a later line. The code below is kept byte-identical; re-break the
// lines from the original post before attempting to compile.
// ===========================================================================
/////////////////////////////////////////////////////////// using namespace android; static int signalQuit = 0; static void signalHandler(int sig) { signalQuit = 1; kill(getpid(), SIGKILL); } static bool isDeviceRotated(int orientation) { return orientation != DISPLAY_ORIENTATION_0 && orientation != DISPLAY_ORIENTATION_180; } /////////////////////////////////////////////////////////// // int main(int argc, char *argv[]) { #if (OS==17) sp<ISurfaceTexture> bufferProducer; #elif (OS==18) sp<IGraphicBufferProducer> bufferProducer; #endif sp<ALooper> looper; Vector<sp<ABuffer> > inBuffers; sp<ABuffer> inBuffer; Vector<sp<ABuffer> > outBuffers; sp<ABuffer> outBuffer; status_t err; size_t bufIndex, offset, size; int64_t ptsUsec; uint32_t flags; static int kTimeout = 100000; // be responsive on signal FILE *out = 0; sp<IBinder> mainDpy; DisplayInfo mainDpyInfo; sp<ProcessState> self; sp<AMessage> format; sp<MetaData> meta; sp<SurfaceControl> surfaceControl; sp<Surface> surface; Parcel parcel; sp<SurfaceComposerClient> client; sp<MediaCodec> encoder; struct timespec clock = {0}; unsigned int firstTs = 0, newTs = 0, oldTs = 0, newFs = 0, oldFs = 0; /////////////////////////////////////////////////////////// setuid(0), setgid(0), self = ProcessState::self(), self->startThreadPool(); //DataSource::RegisterDefaultSniffers(); mainDpy = SurfaceComposerClient::getBuiltInDisplay(ISurfaceComposer::eDisplayIdMain); err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo), assert (err == NO_ERROR); looper = new ALooper, looper->setName("fbrecord_looper"), looper->start(); encoder = MediaCodec::CreateByType(looper, "video/avc", true), assert(encoder != NULL); /////////////////////////////////////////////////////////// #if (OS==17) meta = new MetaData(); meta->setInt32(kKeyWidth, mainDpyInfo.w), meta->setInt32(kKeyHeight, mainDpyInfo.h); meta->setCString(kKeyMIMEType, android::MEDIA_MIMETYPE_VIDEO_AVC), meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque); 
// OS==17 path: MetaData + parceled SurfaceControl feeds the encoder;
// OS==18 path: AMessage format + MediaCodec::createInputSurface().
convertMetaDataToMessage(meta, &format); format->setInt32("bitrate", 4000000), format->setFloat("frame-rate", mainDpyInfo.fps), format->setInt32("i-frame-interval", 2); client = new SurfaceComposerClient(); surfaceControl = client->createSurface(String8("surface"), mainDpyInfo.w, mainDpyInfo.h, PIXEL_FORMAT_OPAQUE, 0); SurfaceComposerClient::openGlobalTransaction(), surfaceControl->setLayer(0x7fffffff), SurfaceComposerClient::closeGlobalTransaction(); SurfaceControl::writeSurfaceToParcel(surfaceControl, &parcel), parcel.setDataPosition(0), surface = Surface::readFromParcel(parcel); err = native_window_set_buffer_count(surface.get(), BufferQueue::NUM_BUFFER_SLOTS); err = encoder->configure(format, surface, 0, MediaCodec::CONFIGURE_FLAG_ENCODE), assert (err == NO_ERROR); bufferProducer = surface->getSurfaceTexture(); //getProducerInterface #elif (OS==18) format = new AMessage(); format->setInt32("width", mainDpyInfo.w), format->setInt32("height", mainDpyInfo.h); format->setString("mime", android::MEDIA_MIMETYPE_VIDEO_AVC), format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque); format->setInt32("bitrate", 4000000), format->setFloat("frame-rate", mainDpyInfo.fps), format->setInt32("i-frame-interval", 2); err = encoder->configure(format, 0, 0, MediaCodec::CONFIGURE_FLAG_ENCODE);//, assert (err == NO_ERROR); err = encoder->createInputSurface(&bufferProducer), assert (err == NO_ERROR); #endif //static inline int native_window_set_buffers_geometry(struct ANativeWindow* window, int w, int h, int format) //static inline int native_window_set_buffers_format(struct ANativeWindow* window, int format) /////////////////////////////////////////////////////////// Rect layerStackRect(mainDpyInfo.w, mainDpyInfo.h); Rect displayRect(0, 0, mainDpyInfo.w, mainDpyInfo.h); sp<IBinder> dpy = SurfaceComposerClient::createDisplay(String8("fbRecorder"), false); SurfaceComposerClient::openGlobalTransaction(); //native_window_dequeue_buffer_and_wait(ANativeWindow *anw, struct 
// (next line starts with the wrapped tail of the comment above — see WARNING)
// Then: attach the encoder's producer to the virtual display, start the
// encoder, and loop dequeueing output buffers, writing them to the file and
// printing an FPS estimate every 10 frames once past frame 40.
ANativeWindowBuffer **anb) SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer); SurfaceComposerClient::setDisplayProjection(dpy, DISPLAY_ORIENTATION_0, layerStackRect, displayRect); SurfaceComposerClient::setDisplayLayerStack(dpy, 0); SurfaceComposerClient::closeGlobalTransaction(); /////////////////////////////////////////////////////////// err = encoder->start(), assert (err == NO_ERROR); err = encoder->getInputBuffers(&inBuffers), assert (err == NO_ERROR); err = encoder->getOutputBuffers(&outBuffers), assert (err == NO_ERROR); signal(SIGABRT, signalHandler), signal(SIGINT, signalHandler), signal(SIGTERM, signalHandler), signal(SIGHUP, signalHandler); if(access("fbrecord.mp4", F_OK ) != -1) unlink("fbrecord.mp4"); out = fopen("fbrecord.mp4", "w"); clock_gettime(CLOCK_REALTIME, &clock); firstTs = oldTs = newTs = clock.tv_sec * 1000 + clock.tv_nsec / 1000000; DQBUFFER: if (signalQuit) goto BAITOUT; err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, &flags, kTimeout); if (err == NO_ERROR) { if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) size = 0; if (size != 0) { /////////////////////////////////////////////////////////// inBuffer = inBuffers[bufIndex]; outBuffer = outBuffers[bufIndex]; if (out) fwrite ((const void *)outBuffer->data(), outBuffer->size(), 1, out); /////////////////////////////////////////////////////////// clock_gettime(CLOCK_REALTIME, &clock); newTs = clock.tv_sec * 1000 + clock.tv_nsec / 1000000; newFs++; if (0 == (newFs % 10) && newFs > 40) printf("AverageFps=%02.02f, CurrentFps=%02.02f \n", (float)(1000 * newFs - 1) / (float)(newTs - firstTs), (float)(1000 * (newFs - oldFs)) / (float)(newTs - oldTs)), oldFs = newFs, oldTs = newTs; err = encoder->releaseOutputBuffer(bufIndex); } } else if (err == INVALID_OPERATION) goto BAITOUT; else if (err == INFO_OUTPUT_BUFFERS_CHANGED) err = encoder->getOutputBuffers(&outBuffers), assert (err == NO_ERROR); else if (err == -EAGAIN) { } else if (err == INFO_FORMAT_CHANGED) 
// Cleanup path: stop/release the encoder, stop the looper, close the file.
{ sp<AMessage> newFormat; err = encoder->getOutputFormat(&newFormat), assert (err == NO_ERROR); assert (err == NO_ERROR); } sched_yield(); goto DQBUFFER; /////////////////////////////////////////////////////////// BAITOUT: encoder->stop(); encoder->release(); looper->stop(); if (out) fclose (out); client->dispose(); //IPCThreadState::self()->joinThreadPool(); return 0; }
An Audio Record Simple Example on Android 4.x Using NDK (Stagefright, OpenMax, Mediacodec)
#include <unistd.h>
#include <errno.h>
#include <unistd.h>
#include <stdio.h>
#include <fcntl.h>
#include <stdlib.h>
#include <linux/fb.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <stdint.h>
#include <tinyalsa/asoundlib.h>
#if (OS==17)
#include <binder/IMemory.h>
#endif
#include <media/stagefright/NuMediaExtractor.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
//#include <media/stagefright/MediaMuxer.h>
#include <media/ICrypto.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <stdio.h>
#include <fcntl.h>
#include <signal.h>
#include <getopt.h>
#include <sys/wait.h>
#include <signal.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <utils/Errors.h>
#include <utils/Thread.h>
#include <utils/Timers.h>
#include <utils/Compat.h>
#include <ui/PixelFormat.h>
#include <ui/DisplayInfo.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/ICrypto.h>
#include <media/mediarecorder.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/MediaPlayerInterface.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <system/audio.h>
///////////////////////////////////////////////////////////
using namespace android;
///////////////////////////////////////////////////////////
#include <binder/ProcessState.h>
#include <media/mediarecorder.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <system/audio.h>
#include <dlfcn.h>
#include "SineSource.h"
#include "voAAC.h"
#include "cmnMemory.h"
///////////////////////////////////////////////////////////
using namespace android;
// Function-pointer type of the voGetAACEncAPI() entry point exported by
// libstagefright.so (returns the VisualOn AAC encoder API table).
typedef int (VO_API * VOGETAUDIODECAPI) (VO_AUDIO_CODECAPI * pDecHandle);
// Size of the PCM input and AAC output staging buffers (64 KiB).
#define READ_SIZE (1024 * 64)
// Set by signalHandler(); polled by the capture loop in main().
static int signalQuit = 0;
static void signalHandler(int sig)
{
// Only request a clean shutdown; main() exits its loop on the next pass.
signalQuit = 1;
}
///////////////////////////////////////////////////////////
/*
 * tinyrecord: capture stereo 16-bit/44.1 kHz PCM from ALSA (card 0, device 1)
 * with tinyalsa and encode it to ADTS AAC using the VisualOn AAC encoder
 * inside libstagefright.so (resolved at runtime via dlopen/dlsym).
 * Frames are appended to "tinyrecord.mp3" until a termination signal arrives.
 *
 * NOTE(review): the payload is ADTS AAC, not MP3 — the ".mp3" file name is
 * kept for compatibility with the original example but is misleading.
 */
int main(int argc, char *argv[])
{
    static unsigned char outBuf[READ_SIZE] = {0};  /* encoded AAC staging */
    static unsigned char inBuf[READ_SIZE] = {0};   /* raw PCM staging */
    struct pcm_config config = {0};
    struct pcm *pcm;
    AACENC_PARAM param;
    VO_AUDIO_CODECAPI AudioAPI;
    VO_MEM_OPERATOR moper;
    VO_CODEC_INIT_USERDATA useData;
    VO_HANDLE hCodec;
    VO_CODECBUFFER inData;
    VO_CODECBUFFER outData;
    VO_AUDIO_OUTPUTINFO outInfo;
    void *handle;
    void *pfunc;
    VOGETAUDIODECAPI pGetAPI;
    int res;
    int pcm_buffer_size = 0;
    FILE *out = 0;

    /* Root is needed for direct ALSA access on stock Android. */
    setuid(0);
    setgid(0);

    /* Open the capture PCM: 2ch, 44.1 kHz, S16_LE, 4 periods of 1024 frames. */
    config.channels = 2;
    config.rate = 44100;
    config.period_size = 1024;
    config.period_count = 4;
    config.format = PCM_FORMAT_S16_LE;
    pcm = pcm_open(0, 1, PCM_IN, &config);
    assert(pcm != NULL);
    assert(pcm_is_ready(pcm));
    pcm_buffer_size = pcm_get_buffer_size(pcm);
    assert(pcm_buffer_size != 0);
    /* NOTE(review): tinyalsa's pcm_get_buffer_size() returns *frames*; the
     * MediaCodec example later in this file converts with
     * pcm_frames_to_bytes() — confirm whether that conversion belongs here
     * too before relying on chunk sizes. */

    /* Hand the VO codec our memory operators (it allocates through these). */
    moper.Alloc = cmnMemAlloc;
    moper.Copy = cmnMemCopy;
    moper.Free = cmnMemFree;
    moper.Set = cmnMemSet;
    moper.Check = cmnMemCheck;
    useData.memflag = VO_IMF_USERMEMOPERATOR;
    useData.memData = (VO_PTR)(&moper);

    /* Resolve the AAC encoder API table out of libstagefright.so. */
    handle = dlopen("libstagefright.so", RTLD_NOW);
    assert(handle != 0);
    pfunc = dlsym(handle, "voGetAACEncAPI");
    assert(pfunc != 0);
    pGetAPI = (VOGETAUDIODECAPI)pfunc;
    res = pGetAPI(&AudioAPI);
    assert(0 == res);
    res = AudioAPI.Init(&hCodec, VO_AUDIO_CodingAAC, &useData);
    assert(res >= 0);

    /* ADTS framing on; bitRate 0 selects the default formula below. */
    param.adtsUsed = 1;
    param.bitRate = 0;
    param.nChannels = 2;
    param.sampleRate = 44100;
    if (0 == param.bitRate)
    {
        int scale = 441;
        if (param.sampleRate % 8000 == 0)
            scale = 480;
        param.bitRate = 640 * param.nChannels * param.sampleRate / scale;
    }
    /* BUG FIX: the original read "¶m" — an HTML-entity mangling of "&param"
     * (&para; + "m") that cannot compile. */
    res = AudioAPI.SetParam(hCodec, VO_PID_AAC_ENCPARAM, &param);

    signal(SIGABRT, signalHandler);
    signal(SIGINT, signalHandler);
    signal(SIGTERM, signalHandler);
    signal(SIGHUP, signalHandler);

    if (access("tinyrecord.mp3", F_OK) != -1)
        unlink("tinyrecord.mp3");
    out = fopen("tinyrecord.mp3", "w");

AGAIN:
    if (signalQuit)
        goto QUIT;
    /* Blocking capture of one buffer of PCM. */
    res = pcm_read(pcm, inBuf, pcm_buffer_size);
    if (0 != res)
        goto QUIT;
    inData.Buffer = inBuf;
    inData.Length = pcm_buffer_size;
    res = AudioAPI.SetInputData(hCodec, &inData);
    outData.Buffer = outBuf;
    outData.Length = pcm_buffer_size;  /* capacity in; actual size out */
    res = AudioAPI.GetOutputData(hCodec, &outData, &outInfo);
    if (res == VO_ERR_LICENSE_ERROR)
        goto QUIT;
    assert(0 == res);
    if (out)
        /* BUG FIX: write the encoder's actual output length (outData.Length,
         * updated by GetOutputData — see the vo-aacenc sample), not the raw
         * PCM buffer size: the original padded every AAC frame with stale
         * bytes from outBuf. */
        fwrite((const void *)outBuf, outData.Length, 1, out);
    printf(".");
    goto AGAIN;

QUIT:
    if (out)
        fclose(out);
    dlclose(handle);
    pcm_close(pcm);
    printf("\n");
    return 0;
}
#if 0
// Disabled experiment: pre-MediaCodec Stagefright path. Chains a SineSource
// PCM generator into an OMXCodec encoder, then feeds the encoder into an
// OMXCodec decoder, and drains the decoder until it stops returning OK.
int main()
{
MediaBuffer *buffer;
int32_t maxInputSize = 0;
sp<AMessage> format;
OMXClient client;
sp<MetaData> meta;
sp<MediaSource> encoder;
sp<MediaSource> decoder;
// Would-be AudioSpecificConfig bytes for AAC-LC; unused on the MP3 path below.
unsigned char CSD0[] = { 0x11, 0x90 };
android::ProcessState::self()->startThreadPool();
// PCM source at 44.1 kHz, 2 channels (SineSource from the stagefright tests).
sp<MediaSource> source = new SineSource(44100, 2);
if (source->getFormat()->findInt32(kKeyMaxInputSize, &maxInputSize)) {}
meta = new MetaData;
//meta->setCString(kKeyMIMEType, android::MEDIA_MIMETYPE_AUDIO_AAC);
meta->setCString(kKeyMIMEType, android::MEDIA_MIMETYPE_AUDIO_MPEG);
/*
//unsigned char CSD0[] = { 0x11, 0x90 };
// OMX_AUDIO_AACObjectLC
CSD0[0] = ((0x02 << 3) | (index >> 1));
CSD0[1] = ((index & 0x01) << 7) | (mChannels << 3);
*/
//meta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectLC);
//CSD0[0] = ((0x02 << 3) | (0 >> 1));
//CSD0[1] = ((0 & 0x01) << 7) | (2 << 3);
//meta->setData(kKeyAACProfile, MetaData::TYPE_POINTER, CSD0, sizeof(CSD0));
meta->setInt32(kKeySampleRate, 44100);
meta->setInt32(kKeyChannelCount, 2);
meta->setInt32(kKeyMaxInputSize, maxInputSize);
CHECK_EQ(client.connect(), (status_t)OK);
printf("(%s %d) client.connect() OK, maxInputSize=%d\n", __FILE__, __LINE__, maxInputSize);
// createEncoder=true: same MetaData drives both directions of the codec pair.
encoder = OMXCodec::Create(client.interface(), meta, true, source);
if (encoder == NULL)
{
printf("(%s %d) encoder NULL\n", __FILE__, __LINE__);
client.disconnect(), IPCThreadState::self()->joinThreadPool();
return 1;
}
printf("(%s %d) encoder::Create() OK\n", __FILE__, __LINE__);
// createEncoder=false: decoder consumes the encoder's output directly.
decoder = OMXCodec::Create(client.interface(), meta, false, encoder);
//if (CHECK_EQ(decoder, NULL))
if (decoder == NULL)
{
printf("(%s %d) decoder NULL\n", __FILE__, __LINE__);
client.disconnect(), IPCThreadState::self()->joinThreadPool();
return 1;
}
printf("(%s %d) decoder::Create() OK\n", __FILE__, __LINE__);
// Starting the decoder pulls the whole chain (decoder <- encoder <- source).
CHECK_EQ(decoder->start(), (status_t)OK);
printf("(%s %d) decoder->start() OK\n", __FILE__, __LINE__);
// Drain loop: read() blocks for each decoded buffer; release every buffer.
while (decoder->read(&buffer) == OK)
{
printf(".");
buffer->release();
buffer = NULL;
}
CHECK_EQ(decoder->stop(), (status_t)OK);
printf("(%s %d) decoder->stop() OK\n", __FILE__, __LINE__);
client.disconnect(), IPCThreadState::self()->joinThreadPool();
return 0;
}
#endif
#if 0
// Shutdown flag polled by the capture loop in the (disabled) main() below.
static int signalQuit = 0;
static void signalHandler(int sig)
{
signalQuit = 1;
// NOTE(review): SIGKILL here forcibly terminates the process immediately,
// so the BAITOUT cleanup path below it rarely runs — confirm intent.
kill(getpid(), SIGKILL);
}
///////////////////////////////////////////////////////////
//
// Disabled experiment: capture PCM with tinyalsa and encode to AAC
// ("audio/mp4a-latm") through the native MediaCodec C++ API, writing the
// encoder output to "tinyrecord.mp3" (actually raw AAC — name is misleading).
int main(int argc, char *argv[])
{
int QNUM = 0;
// NOTE(review): outIndex/inIndex are size_t, so the "inIndex >= 0" and
// "outIndex >= 0" checks below are always true; on a failed dequeue the
// stale/wrapped index (-1 => SIZE_MAX) could index the buffer Vector out of
// range. The commented-out "err == NO_ERROR" variants were the safe form.
size_t outIndex;
size_t inIndex;
int64_t presentationTimeUs = 0;
int64_t ptsUsec;
sp<ABuffer> inBuffer;
Vector<sp<ABuffer> > inBuffers;
Vector<sp<ABuffer> > outBuffers;
sp<ABuffer> outBuffer;
sp<AMessage> format;
sp<ALooper> looper;
sp<MediaCodec> codec;
const char mime[] = "audio/mp4a-latm";
//const char mime[] = "audio/mpeg";
//AString mime;
struct pcm_config config = {0};
struct pcm *pcm;
status_t err = UNKNOWN_ERROR;
size_t offset, size;
uint32_t flags;
FILE *out = 0;
//unsigned char CSD0[] = { 0x11, 0x90 };
AString str;
// Root for direct ALSA access; binder thread pool for MediaCodec IPC.
setuid(0), setgid(0);
sp<ProcessState> self = ProcessState::self();
self->startThreadPool();
DataSource::RegisterDefaultSniffers();
// Capture PCM: card 0, device 1, 2ch S16_LE @ 44.1 kHz.
config.channels = 2, config.rate = 44100, config.period_size = 1024, config.period_count = 4;
config.format = PCM_FORMAT_S16_LE;
pcm = pcm_open(0, 1, PCM_IN, &config);
assert(pcm != NULL), assert(pcm_is_ready(pcm));
//size = pcm_get_buffer_size(pcm), assert(size != 0), printf("(%s %d) size = %d\n", __FILE__, __LINE__, size);
#if 1
// Encoder format: AAC-LC, 44.1 kHz stereo, 256 kbit/s.
format = new AMessage;
format->clear();
format->setString("mime", mime);
//byte[] CSD0 = new byte[]{ (byte)0x11, (byte)0x90 };
format->setInt32("aac-profile", 0x00000002);
format->setInt32("sample-rate", 44100);
format->setInt32("bitrate", 256000);
format->setInt32("channel-count", 2);
format->setInt32("max-input-size", 32768);
#else
// audio/mpeg
sp<MetaData> meta;
sp<NuMediaExtractor> extractor = new NuMediaExtractor;
extractor->setDataSource("audio.mp3");
//extractor = MediaExtractor::Create(new FileSource("audio.mp3"));
assert(extractor != NULL), assert(extractor->countTracks() > 0);
err = extractor->selectTrack(0);
err = extractor->getTrackFormat(0, &format);
//meta = extractor->getTrackMetaData(0), assert(meta != NULL);
//convertMetaDataToMessage(meta, &format);
/*
audioFormat = new MediaFormat();
audioFormat.setString(MediaFormat.KEY_MIME, AUDIO_MIME_TYPE);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
*/
#endif
looper = new ALooper;
looper->setName("tinyrecord_looper");
looper->start();
//codec = MediaCodec::CreateByType(looper, mime.c_str(), true);
// NOTE(review): "assert(codec > 0)" compares an sp<> against 0; the clearer
// check would be codec != NULL.
codec = MediaCodec::CreateByType(looper, mime, true), assert(codec > 0);
err = codec->configure(format, 0, 0, MediaCodec::CONFIGURE_FLAG_ENCODE), assert (err == NO_ERROR);
err = codec->start(), assert (err == NO_ERROR);
err = codec->getInputBuffers(&inBuffers), assert (err == NO_ERROR);
err = codec->getOutputBuffers(&outBuffers), assert (err == NO_ERROR);
signal(SIGABRT, signalHandler);
signal(SIGINT, signalHandler);
signal(SIGTERM, signalHandler);
signal(SIGHUP, signalHandler);
if(access("tinyrecord.mp3", F_OK ) != -1)
unlink("tinyrecord.mp3");
out = fopen("tinyrecord.mp3", "w");
DQBUFFER:
// Main loop: one pcm_read -> queueInputBuffer, then one dequeueOutputBuffer
// -> fwrite per pass; progress characters ('.', '0', 'x', '*', 'o') show
// which branch each pass took.
if (signalQuit) goto BAITOUT;
QNUM = 0, inIndex = -1;
err = codec->dequeueInputBuffer(&inIndex);
//if (inIndex >= 0 && err == NO_ERROR)
if (inIndex >= 0)
{
inBuffer = inBuffers[inIndex];
//size = pcm_get_buffer_size(pcm);
size = pcm_frames_to_bytes(pcm, pcm_get_buffer_size(pcm));
if (size)
{
offset = 0, flags = 0;
if (signalQuit) flags |= MediaCodec::BufferFlags::BUFFER_FLAG_EOS;
inBuffer->setRange(offset, size);
///////////////////////////////////////////////////////////
// Blocking
int ret = pcm_read(pcm, inBuffer->data(), size);
if (ret == 0)
{
//err = codec->queueInputBuffer(inIndex, offset, size, presentationTimeUs++, flags);
err = codec->queueInputBuffer(inIndex, offset, size, 0, flags);
QNUM++;
//printf("(%s %d) queueInputBuffer=%d\n", __FILE__, __LINE__, err);
}
else
printf("(%s %d) pcm_read()=%d\n", __FILE__, __LINE__, ret);
}
}
else
printf("(%s %d) DBG\n", __FILE__, __LINE__);
//MEDIA_MIMETYPE_AUDIO_AMR_WB
if (QNUM)
{
outIndex = -1;
err = codec->dequeueOutputBuffer(&outIndex, &offset, &size, &ptsUsec, &flags);
if (outIndex >= 0 && err == NO_ERROR)
//if (outIndex >= 0)
{
outBuffer = outBuffers[outIndex];
if (out)
{
if (outBuffer->size() > 0)
{
fwrite ((const void *)outBuffer->data(), outBuffer->size(), 1, out);
printf(".");
}
else printf("0");
}
else printf("x");
codec->releaseOutputBuffer(outIndex);
if (flags & MediaCodec::BufferFlags::BUFFER_FLAG_EOS)
goto BAITOUT;
}
else
printf("*");
//printf("(%s %d) dequeueOutputBuffer fail\n", __FILE__, __LINE__);
}
else printf("o");
sched_yield();
goto DQBUFFER;
BAITOUT:
// Cleanup: close capture and file, stop looper and codec, park on binder.
if (pcm) pcm_close(pcm);
if (out) fclose(out);
looper->stop();
codec->stop();
codec->release();
IPCThreadState::self()->joinThreadPool();
return 0;
}
#endif
http://artistehsu.pixnet.net/blog/post/355768539
Email: jasonc@mail2000.com.tw