Prepare the following Native.cpp and Native.h; in this header, you need to declare the nativeCallback() and nativeStart() functions.
// Native.h
#ifndef _NATIVE_H_
#define _NATIVE_H_

#include <jni.h>

/////////////////////////////////////////////////////////////////////////////
// com.example.androidservice/.MyService
#define CLASS  com_example_androidservice_MyService
#define _CLASS "com/example/androidservice/MyService"

#define NAME3(CLASS3, FUNC3) Java_ ## CLASS3 ## _ ## native ## FUNC3
#define NAME2(CLASS2, FUNC2) NAME3(CLASS2, FUNC2)
#define NAME(FUNC)           NAME2(CLASS, FUNC)
/////////////////////////////////////////////////////////////////////////////
#define nativeCallback NAME(Callback)
#define nativeStart    NAME(Start)

#if defined(__cplusplus) || defined(__CPLUSPLUS__)
extern "C" {
#endif

JNIEXPORT void nativeCallback(JNIEnv *env, jobject obj);
JNIEXPORT jint nativeStart(JNIEnv *env, jobject obj);

#if defined(__cplusplus) || defined(__CPLUSPLUS__)
}
#endif

#endif //_NATIVE_H_
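With CLASS defined as com_example_androidservice_MyService, the NAME() macros paste the tokens together following the JNI naming convention: nativeCallback expands to Java_com_example_androidservice_MyService_nativeCallback and nativeStart to Java_com_example_androidservice_MyService_nativeStart, which are exactly the symbols the VM looks up for the native methods declared in MyService.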
// Native.cpp
#include <sys/time.h>
#include <sys/resource.h>
#include <time.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/sysinfo.h>
#include <pthread.h>
#include <signal.h>
#include <sched.h>
#include <jni.h>
#include <android/log.h>
#include "Native.h"

void nativeCallback(JNIEnv* env, jobject jobj)
{
}

int nativeStart(JNIEnv* env, jobject jobj)
{
    return 0;
}
Build
Refer to the Android Studio 2.2.1 External Tools (ndkBuild) configuration.
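For reference, ndk-build reads a jni/Android.mk; a minimal sketch is shown below, assuming the module is named Native so that it matches System.loadLibrary("Native") in MyService.java (the file layout and names are assumptions, adjust them to your project):

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE    := Native        # builds libNative.so for System.loadLibrary("Native")
LOCAL_SRC_FILES := Native.cpp
LOCAL_LDLIBS    := -llog         # needed for <android/log.h>
include $(BUILD_SHARED_LIBRARY)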
Global Variables in Native.cpp
These include nativeObj, nativeClass, and cachedJVM; the others are used only by a specific native thread.
...
static jobject nativeObj;
static jclass  nativeClass;
JavaVM *cachedJVM = 0;
...
static JNIEnv    *PortEnv = 0;
static pthread_t  P = 0;
...
JNI_OnLoad
JNI_OnLoad() initializes the cachedJVM global variable.
jint JNI_OnLoad(JavaVM *jvm, void *reserved)
{
    JNIEnv *env = 0;

    cachedJVM = jvm;
    if (jvm->GetEnv((void **)&env, JNI_VERSION_1_6) != JNI_OK)
        return -1;
    return JNI_VERSION_1_6;
}
GetJniEnv
The GetJniEnv() routine is responsible for returning the JNIEnv of the calling native thread. Note that because we use cachedJVM->AttachCurrentThread(), cachedJVM->DetachCurrentThread() must be invoked before the thread exits.
JNIEnv *GetJniEnv()
{
    JNIEnv *env = NULL;
    jint ret;

    if (NULL == cachedJVM)
        return NULL;

    ret = cachedJVM->GetEnv((void**)&env, JNI_VERSION_1_6);
    switch (ret) {
    case JNI_OK:
        return env;
    case JNI_EDETACHED:
        if (cachedJVM->AttachCurrentThread(&env, NULL) < 0)
            return NULL;
        return env;
    case JNI_EVERSION:
    default:
        return NULL;
    }
}
nativeCallback
The nativeCallback() routine initializes the nativeObj and nativeClass global variables. Note that we use env->NewGlobalRef(); its counterpart, env->DeleteGlobalRef(), must be called once the references are no longer needed.
void nativeCallback(JNIEnv* env, jobject jobj)
{
    jclass storeclassID;

    nativeObj = env->NewGlobalRef(jobj);

    storeclassID = env->FindClass(_CLASS);
    if (env->ExceptionCheck() == JNI_TRUE)
        env->ExceptionDescribe();

    nativeClass = (jclass)env->NewGlobalRef(storeclassID);
    if (env->ExceptionCheck() == JNI_TRUE)
        env->ExceptionDescribe();

    // void DeleteGlobalRef(jobject gref);
}
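As a sketch only (this helper is not part of the original code), the counterpart cleanup might look like the following, called after the native threads have stopped, for example when the service is destroyed:

// Hypothetical helper: release the global references created in nativeCallback().
static void nativeRelease(JNIEnv *env)
{
    if (nativeObj) {
        env->DeleteGlobalRef(nativeObj);
        nativeObj = NULL;
    }
    if (nativeClass) {
        env->DeleteGlobalRef(nativeClass);
        nativeClass = NULL;
    }
}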
Create Native Thread Using nativeStart()
int nativeStart(JNIEnv* env, jobject jobj)
{
    if (0 == P)
        pthread_create(&P, NULL, PortThread, 0);
    return 0;
}
Native Thread Invokes CallVoidMethod()
Before the native thread can use CallVoidMethod(), we need the following:
1. The Java environment (JNIEnv) for the given thread, which we store in the PortEnv global variable.
2. The Java class, kept in the global variable nativeClass and initialized in nativeCallback().
3. The class instance, kept in the global variable nativeObj and initialized in nativeCallback().
4. The jmethodID, obtained with PortEnv->GetMethodID().
5. Finally, invoke PortEnv->CallVoidMethod().
static void *PortThread(void *param)
{
    jmethodID cachedmethodID;

    PortEnv = GetJniEnv();
    cachedmethodID = PortEnv->GetMethodID(nativeClass, "onNative", "(II)V");
    ...
        for (;;) {
            ...
            PortEnv->CallVoidMethod(nativeObj, cachedmethodID, 1, codes[i].code);
            ...
        }
    }
    ...
    if (PortEnv) {
        if (cachedJVM)
            cachedJVM->DetachCurrentThread();
    }
    return 0;
}
The Two Signatures Used with CallVoidMethod()
1. public synchronized void onNative(int action, int keycode); its signature is "(II)V".
2. public void onVideo(byte[] data); its signature is "([B)V". Note also the usage of NewByteArray, SetByteArrayRegion, and DeleteLocalRef.
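In JNI signature notation, I stands for int, [B for byte[], and the trailing V for a void return, so "(II)V" describes onNative(int, int) and "([B)V" describes onVideo(byte[]). The VideoThread fragment below illustrates the second case: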
static void *VideoThread(void *param)
{
    ...
    jmethodID cachedmethodID = vEnv->GetMethodID(nativeClass, "onVideo", "([B)V");
    ...
    jbyteArray bs = vEnv->NewByteArray(bsize);
    if (vEnv->ExceptionCheck())
        vEnv->ExceptionClear();
    else {
        if (bs) {
            vEnv->SetByteArrayRegion(bs, (jsize)0, (jsize)bsize, (jbyte *)(pointer + sizeof(size_t)));
            if (vEnv->ExceptionCheck())
                vEnv->ExceptionClear();
            else {
                vEnv->CallVoidMethod(nativeObj, cachedmethodID, bs);
                if (vEnv->ExceptionCheck())
                    vEnv->ExceptionClear();
                vEnv->DeleteLocalRef(bs);
                if (vEnv->ExceptionCheck())
                    vEnv->ExceptionClear();
            }
        }
    }
    ...
}
Demonstration
MyService.java
In MyService.java, what you have to do is:
1. load the native library with System.loadLibrary()
2. declare the native methods nativeCallback() and nativeStart()
3. implement the onNative() callback (and, if you use the video path, onVideo()).
public class MyService extends Service {

    static {
        System.loadLibrary("Native");
    }

    private native void nativeCallback();
    private native int  nativeStart();

    ...

    public synchronized void onNative(int action, int keycode) {
        ...
    }

    ...
}
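The elided parts would typically invoke nativeCallback() and then nativeStart() once the service is up, for example from onCreate() or onStartCommand(), so that the global references are registered before the native thread begins delivering events to onNative().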
A Simple Framebuffer Recording Example on Android 4.x Using the NDK
#include <unistd.h>
#include <errno.h>
#include <stdio.h>
#include <fcntl.h>
#include <stdlib.h>
#include <linux/fb.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <stdint.h>
#if (OS==17)
#include <binder/IMemory.h>
#else
//#include <media/stagefright/MediaMuxer.h>
#endif
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/ICrypto.h>
#include <string.h>
#include <signal.h>
#include <getopt.h>
#include <sys/wait.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Thread.h>
#include <utils/Timers.h>
#include <utils/Compat.h>
#include <ui/PixelFormat.h>
#include <ui/DisplayInfo.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/MediaPlayerInterface.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
///////////////////////////////////////////////////////////
using namespace android;

static int signalQuit = 0;

static void signalHandler(int sig)
{
    signalQuit = 1;
    kill(getpid(), SIGKILL);
}

static bool isDeviceRotated(int orientation)
{
    return orientation != DISPLAY_ORIENTATION_0 && orientation != DISPLAY_ORIENTATION_180;
}

///////////////////////////////////////////////////////////
//
int main(int argc, char *argv[])
{
#if (OS==17)
    sp<ISurfaceTexture> bufferProducer;
#elif (OS==18)
    sp<IGraphicBufferProducer> bufferProducer;
#endif
    sp<ALooper> looper;
    Vector<sp<ABuffer> > inBuffers;
    sp<ABuffer> inBuffer;
    Vector<sp<ABuffer> > outBuffers;
    sp<ABuffer> outBuffer;
    status_t err;
    size_t bufIndex, offset, size;
    int64_t ptsUsec;
    uint32_t flags;
    static int kTimeout = 100000; // be responsive on signal
    FILE *out = 0;
    sp<IBinder> mainDpy;
    DisplayInfo mainDpyInfo;
    sp<ProcessState> self;
    sp<AMessage> format;
    sp<MetaData> meta;
    sp<SurfaceControl> surfaceControl;
    sp<Surface> surface;
    Parcel parcel;
    sp<SurfaceComposerClient> client;
    sp<MediaCodec> encoder;
    struct timespec clock = {0};
    unsigned int firstTs = 0, newTs = 0, oldTs = 0, newFs = 0, oldFs = 0;

    ///////////////////////////////////////////////////////////
    setuid(0), setgid(0), self = ProcessState::self(), self->startThreadPool();
    //DataSource::RegisterDefaultSniffers();
    mainDpy = SurfaceComposerClient::getBuiltInDisplay(ISurfaceComposer::eDisplayIdMain);
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo), assert (err == NO_ERROR);
    looper = new ALooper, looper->setName("fbrecord_looper"), looper->start();
    encoder = MediaCodec::CreateByType(looper, "video/avc", true), assert(encoder != NULL);

    ///////////////////////////////////////////////////////////
#if (OS==17)
    meta = new MetaData();
    meta->setInt32(kKeyWidth, mainDpyInfo.w), meta->setInt32(kKeyHeight, mainDpyInfo.h);
    meta->setCString(kKeyMIMEType, android::MEDIA_MIMETYPE_VIDEO_AVC), meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
    convertMetaDataToMessage(meta, &format);
    format->setInt32("bitrate", 4000000), format->setFloat("frame-rate", mainDpyInfo.fps), format->setInt32("i-frame-interval", 2);
    client = new SurfaceComposerClient();
    surfaceControl = client->createSurface(String8("surface"), mainDpyInfo.w, mainDpyInfo.h, PIXEL_FORMAT_OPAQUE, 0);
    SurfaceComposerClient::openGlobalTransaction(), surfaceControl->setLayer(0x7fffffff), SurfaceComposerClient::closeGlobalTransaction();
    SurfaceControl::writeSurfaceToParcel(surfaceControl, &parcel), parcel.setDataPosition(0), surface = Surface::readFromParcel(parcel);
    err = native_window_set_buffer_count(surface.get(), BufferQueue::NUM_BUFFER_SLOTS);
    err = encoder->configure(format, surface, 0, MediaCodec::CONFIGURE_FLAG_ENCODE), assert (err == NO_ERROR);
    bufferProducer = surface->getSurfaceTexture(); //getProducerInterface
#elif (OS==18)
    format = new AMessage();
    format->setInt32("width", mainDpyInfo.w), format->setInt32("height", mainDpyInfo.h);
    format->setString("mime", android::MEDIA_MIMETYPE_VIDEO_AVC), format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", 4000000), format->setFloat("frame-rate", mainDpyInfo.fps), format->setInt32("i-frame-interval", 2);
    err = encoder->configure(format, 0, 0, MediaCodec::CONFIGURE_FLAG_ENCODE);//, assert (err == NO_ERROR);
    err = encoder->createInputSurface(&bufferProducer), assert (err == NO_ERROR);
#endif
    //static inline int native_window_set_buffers_geometry(struct ANativeWindow* window, int w, int h, int format)
    //static inline int native_window_set_buffers_format(struct ANativeWindow* window, int format)

    ///////////////////////////////////////////////////////////
    Rect layerStackRect(mainDpyInfo.w, mainDpyInfo.h);
    Rect displayRect(0, 0, mainDpyInfo.w, mainDpyInfo.h);
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(String8("fbRecorder"), false);
    SurfaceComposerClient::openGlobalTransaction();
    //native_window_dequeue_buffer_and_wait(ANativeWindow *anw, struct ANativeWindowBuffer **anb)
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    SurfaceComposerClient::setDisplayProjection(dpy, DISPLAY_ORIENTATION_0, layerStackRect, displayRect);
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);
    SurfaceComposerClient::closeGlobalTransaction();

    ///////////////////////////////////////////////////////////
    err = encoder->start(), assert (err == NO_ERROR);
    err = encoder->getInputBuffers(&inBuffers), assert (err == NO_ERROR);
    err = encoder->getOutputBuffers(&outBuffers), assert (err == NO_ERROR);
    signal(SIGABRT, signalHandler), signal(SIGINT, signalHandler), signal(SIGTERM, signalHandler), signal(SIGHUP, signalHandler);
    if (access("fbrecord.mp4", F_OK) != -1)
        unlink("fbrecord.mp4");
    out = fopen("fbrecord.mp4", "w");
    clock_gettime(CLOCK_REALTIME, &clock);
    firstTs = oldTs = newTs = clock.tv_sec * 1000 + clock.tv_nsec / 1000000;

DQBUFFER:
    if (signalQuit)
        goto BAITOUT;
    err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, &flags, kTimeout);
    if (err == NO_ERROR) {
        if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0)
            size = 0;
        if (size != 0) {
            ///////////////////////////////////////////////////////////
            inBuffer = inBuffers[bufIndex];
            outBuffer = outBuffers[bufIndex];
            if (out)
                fwrite((const void *)outBuffer->data(), outBuffer->size(), 1, out);
            ///////////////////////////////////////////////////////////
            clock_gettime(CLOCK_REALTIME, &clock);
            newTs = clock.tv_sec * 1000 + clock.tv_nsec / 1000000;
            newFs++;
            if (0 == (newFs % 10) && newFs > 40)
                printf("AverageFps=%02.02f, CurrentFps=%02.02f \n",
                       (float)(1000 * newFs - 1) / (float)(newTs - firstTs),
                       (float)(1000 * (newFs - oldFs)) / (float)(newTs - oldTs)),
                oldFs = newFs, oldTs = newTs;
            err = encoder->releaseOutputBuffer(bufIndex);
        }
    } else if (err == INVALID_OPERATION)
        goto BAITOUT;
    else if (err == INFO_OUTPUT_BUFFERS_CHANGED)
        err = encoder->getOutputBuffers(&outBuffers), assert (err == NO_ERROR);
    else if (err == -EAGAIN) {
        // no output buffer available yet
    } else if (err == INFO_FORMAT_CHANGED) {
        sp<AMessage> newFormat;
        err = encoder->getOutputFormat(&newFormat), assert (err == NO_ERROR);
    }
    sched_yield();
    goto DQBUFFER;

    ///////////////////////////////////////////////////////////
BAITOUT:
    encoder->stop();
    encoder->release();
    looper->stop();
    if (out)
        fclose(out);
    client->dispose();
    //IPCThreadState::self()->joinThreadPool();
    return 0;
}
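Note that the encoder output buffers are written with fwrite() and no MediaMuxer/MPEG4Writer is involved, so despite its name fbrecord.mp4 contains a raw H.264 (AVC) elementary stream rather than an MP4 container; a tool such as ffmpeg can be used afterwards to wrap it into a playable MP4.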
Email: jasonc@mail2000.com.tw