The complete example lives in the sample folder of the Android NDK; the low-level code is backed up here.
looper.h
#ifndef NATIVE_CODEC_LOOPER_H
#define NATIVE_CODEC_LOOPER_H

#include <pthread.h>
#include <semaphore.h>

struct loopermessage;

class looper {
public:
    looper();
    ~looper();

    void post(int what, void *data, bool flush = false);
    void quit();

    virtual void handle(int what, void *data);

private:
    void addmsg(loopermessage *msg, bool flush);
    static void* trampoline(void* p);
    void loop();

    loopermessage *head;
    pthread_t worker;
    sem_t headwriteprotect;
    sem_t headdataavailable;
    bool running;
};

#endif  // NATIVE_CODEC_LOOPER_H
looper.cpp
#include "looper.h"
#include <assert.h>
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <limits.h>
#include <semaphore.h>
// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
#include <android/log.h>
#define TAG "NativeCodec-looper"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
struct loopermessage;
typedef struct loopermessage loopermessage;
struct loopermessage {
int what;
void *obj;
loopermessage *next;
bool quit;
};
void* looper::trampoline(void* p) {
    ((looper*)p)->loop();
    return NULL;
}

looper::looper() {
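    // The two semaphores implement a tiny producer/consumer queue:
    // headwriteprotect acts as a binary lock around the linked list,
    // headdataavailable counts messages waiting for the worker thread.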
    head = NULL;  // the message list starts out empty
    sem_init(&headdataavailable, 0, 0);
    sem_init(&headwriteprotect, 0, 1);

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_create(&worker, &attr, trampoline, this);
    running = true;
}
looper::~looper() {
    if (running) {
        LOGV("Looper deleted while still running. Some messages will not be processed");
        quit();
    }
}
void looper::post(int what, void *data, bool flush) {
    loopermessage *msg = new loopermessage();
    msg->what = what;
    msg->obj = data;
    msg->next = NULL;
    msg->quit = false;
    addmsg(msg, flush);
}
void looper::addmsg(loopermessage *msg, bool flush) {
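    // Producer side, called on the poster's thread: append msg to the tail of the
    // list, or first drop every queued-but-unprocessed message when flush is set.
    // Flushed messages leave extra posts on headdataavailable; loop() tolerates
    // that by skipping iterations where the list turns out to be empty.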
    sem_wait(&headwriteprotect);
    loopermessage *h = head;

    if (flush) {
        while (h) {
            loopermessage *next = h->next;
            delete h;
            h = next;
        }
        h = NULL;
    }
    if (h) {
        while (h->next) {
            h = h->next;
        }
        h->next = msg;
    } else {
        head = msg;
    }
    LOGV("post msg %d", msg->what);
    sem_post(&headwriteprotect);
    sem_post(&headdataavailable);
}
void looper::loop() {
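    // Consumer side, running on the worker thread created in the constructor:
    // block until a message is available, pop the head under the lock, then
    // call handle() outside the lock so new messages can be posted meanwhile.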
    while (true) {
        // wait for available message
        sem_wait(&headdataavailable);

        // get next available message
        sem_wait(&headwriteprotect);
        loopermessage *msg = head;
        if (msg == NULL) {
            LOGV("no msg");
            sem_post(&headwriteprotect);
            continue;
        }
        head = msg->next;
        sem_post(&headwriteprotect);

        if (msg->quit) {
            LOGV("quitting");
            delete msg;
            return;
        }
        LOGV("processing msg %d", msg->what);
        handle(msg->what, msg->obj);
        delete msg;
    }
}
void looper::quit() {
    LOGV("quit");
    loopermessage *msg = new loopermessage();
    msg->what = 0;
    msg->obj = NULL;
    msg->next = NULL;
    msg->quit = true;
    addmsg(msg, false);

    void *retval;
    pthread_join(worker, &retval);
    sem_destroy(&headdataavailable);
    sem_destroy(&headwriteprotect);
    running = false;
}
void looper::handle(int what, void* obj) {
    LOGV("dropping msg %d %p", what, obj);
}
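To make the looper API concrete, here is a minimal usage sketch. It is not part of the NDK sample; the printlooper class, kMsgHello constant and example() function are made up for illustration. You subclass looper, override handle(), and post messages from any thread; they are delivered one by one on the worker thread.

#include "looper.h"
#include <android/log.h>

enum { kMsgHello = 1 };

class printlooper : public looper {
    // Called on the looper's worker thread for every posted message.
    virtual void handle(int what, void *data) {
        __android_log_print(ANDROID_LOG_VERBOSE, "printlooper",
                            "got msg %d, data %p", what, data);
    }
};

void example() {
    printlooper *l = new printlooper();  // worker thread starts in the constructor
    l->post(kMsgHello, NULL);            // queued, handled asynchronously
    l->quit();                           // drains messages posted before it, then joins the worker
    delete l;
}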
Now the main example, the JNI code itself:
/* This is a JNI example where we use native methods to play video
 * using the native AMedia* APIs.
 * See the corresponding Java source file located at:
 *
 *   src/com/example/nativecodec/NativeMedia.java
 *
 * In this example we use assert() for "impossible" error conditions,
 * and explicit handling and recovery for more likely error conditions.
 */
#include <assert.h>
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <limits.h>
#include "looper.h"
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaExtractor.h"
// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
#include <android/log.h>
#define TAG "NativeCodec"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
// for native window JNI
#include <android/native_window_jni.h>
typedef struct {
    int fd;
    ANativeWindow* window;
    AMediaExtractor* ex;
    AMediaCodec *codec;
    int64_t renderstart;
    bool sawInputEOS;
    bool sawOutputEOS;
    bool isPlaying;
    bool renderonce;
} workerdata;

workerdata data = {-1, NULL, NULL, NULL, 0, false, false, false, false};
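// A single global instance, shared by the JNI entry points below and by the
// looper worker thread that does the actual decoding.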
enum {
    kMsgCodecBuffer,   // do one unit of decode work (see doCodecWork)
    kMsgPause,         // pause playback
    kMsgResume,        // resume playback
    kMsgPauseAck,      // no-op message used to flush pending kMsgCodecBuffer messages
    kMsgDecodeDone,    // stop and release the codec and extractor
    kMsgSeek,          // seek back to the beginning of the stream
};
class mylooper : public looper {
    virtual void handle(int what, void* obj);
};

static mylooper *mlooper = NULL;
int64_t systemnanotime() {
    timespec now;
    clock_gettime(CLOCK_MONOTONIC, &now);
    return now.tv_sec * 1000000000LL + now.tv_nsec;
}
void doCodecWork(workerdata *d) {
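    // One step of the decode loop: feed at most one sample from the extractor
    // into the codec, drain at most one output buffer (sleeping so it is released
    // to the surface roughly at its presentation time), and finally re-post
    // kMsgCodecBuffer so the looper keeps calling us until both sides hit EOS.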
    ssize_t bufidx = -1;
    if (!d->sawInputEOS) {
        bufidx = AMediaCodec_dequeueInputBuffer(d->codec, 2000);
        LOGV("input buffer %zd", bufidx);
        if (bufidx >= 0) {
            size_t bufsize;
            uint8_t *buf = AMediaCodec_getInputBuffer(d->codec, bufidx, &bufsize);
            ssize_t sampleSize = AMediaExtractor_readSampleData(d->ex, buf, bufsize);
            if (sampleSize < 0) {
                sampleSize = 0;
                d->sawInputEOS = true;
                LOGV("EOS");
            }
            int64_t presentationTimeUs = AMediaExtractor_getSampleTime(d->ex);

            AMediaCodec_queueInputBuffer(d->codec, bufidx, 0, sampleSize, presentationTimeUs,
                    d->sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
            AMediaExtractor_advance(d->ex);
        }
    }

    if (!d->sawOutputEOS) {
        AMediaCodecBufferInfo info;
        ssize_t status = AMediaCodec_dequeueOutputBuffer(d->codec, &info, 0);
        if (status >= 0) {
            if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
                LOGV("output EOS");
                d->sawOutputEOS = true;
            }
            int64_t presentationNano = info.presentationTimeUs * 1000;
            if (d->renderstart < 0) {
                d->renderstart = systemnanotime() - presentationNano;
            }
            int64_t delay = (d->renderstart + presentationNano) - systemnanotime();
            if (delay > 0) {
                usleep(delay / 1000);
            }
            AMediaCodec_releaseOutputBuffer(d->codec, status, info.size != 0);
            if (d->renderonce) {
                d->renderonce = false;
                return;
            }
        } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
            LOGV("output buffers changed");
        } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
            AMediaFormat *format = NULL;
            format = AMediaCodec_getOutputFormat(d->codec);
            LOGV("format changed to: %s", AMediaFormat_toString(format));
            AMediaFormat_delete(format);
        } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
            LOGV("no output buffer right now");
        } else {
            LOGV("unexpected info code: %zd", status);
        }
    }

    if (!d->sawInputEOS || !d->sawOutputEOS) {
        mlooper->post(kMsgCodecBuffer, d);
    }
}
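// Runs on the looper's worker thread, so the blocking AMedia* calls below never
// stall the UI thread that posted the message.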
void mylooper::handle(int what, void* obj) {
    switch (what) {
        case kMsgCodecBuffer:
            doCodecWork((workerdata*)obj);
            break;

        case kMsgDecodeDone:
        {
            workerdata *d = (workerdata*)obj;
            AMediaCodec_stop(d->codec);
            AMediaCodec_delete(d->codec);
            AMediaExtractor_delete(d->ex);
            d->sawInputEOS = true;
            d->sawOutputEOS = true;
        }
        break;

        case kMsgSeek:
        {
            workerdata *d = (workerdata*)obj;
            AMediaExtractor_seekTo(d->ex, 0, AMEDIAEXTRACTOR_SEEK_NEXT_SYNC);
            AMediaCodec_flush(d->codec);
            d->renderstart = -1;
            d->sawInputEOS = false;
            d->sawOutputEOS = false;
            if (!d->isPlaying) {
                d->renderonce = true;
                post(kMsgCodecBuffer, d);
            }
            LOGV("seeked");
        }
        break;

        case kMsgPause:
        {
            workerdata *d = (workerdata*)obj;
            if (d->isPlaying) {
                // flush all outstanding codecbuffer messages with a no-op message
                d->isPlaying = false;
                post(kMsgPauseAck, NULL, true);
            }
        }
        break;

        case kMsgResume:
        {
            workerdata *d = (workerdata*)obj;
            if (!d->isPlaying) {
                d->renderstart = -1;
                d->isPlaying = true;
                post(kMsgCodecBuffer, d);
            }
        }
        break;
    }
}
extern "C" {
jboolean Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jstring filename)
{
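    // Open the file passed in from Java, set up an extractor plus a video decoder
    // bound to the surface stored earlier by setSurface, then hand the decode
    // loop over to the looper's worker thread.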
LOGV("@@@ create");
// convert Java string to UTF-8
const char *utf8 = env->GetStringUTFChars(filename, NULL);
LOGV("opening %s", utf8);
int fd = open(utf8, O_RDONLY);
env->ReleaseStringUTFChars(filename, utf8);
if (fd < 0) {
LOGV("failed: %d (%s)", fd, strerror(errno));
return JNI_FALSE;
}
data.fd = fd;
workerdata *d = &data;
AMediaExtractor *ex = AMediaExtractor_new();
media_status_t err = AMediaExtractor_setDataSourceFd(ex, d->fd, 0 , LONG_MAX);
close(d->fd);
if (err != AMEDIA_OK) {
LOGV("setDataSource error: %d", err);
return JNI_FALSE;
}
int numtracks = AMediaExtractor_getTrackCount(ex);
AMediaCodec *codec = NULL;
LOGV("input has %d tracks", numtracks);
for (int i = 0; i < numtracks; i++) {
AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i);
const char *s = AMediaFormat_toString(format);
LOGV("track %d format: %s", i, s);
const char *mime;
if (!AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime)) {
LOGV("no mime type");
return JNI_FALSE;
} else if (!strncmp(mime, "video/", 6)) {
// Omitting most error handling for clarity.
// Production code should check for errors.
AMediaExtractor_selectTrack(ex, i);
codec = AMediaCodec_createDecoderByType(mime);
AMediaCodec_configure(codec, format, d->window, NULL, 0);
d->ex = ex;
d->codec = codec;
d->renderstart = -1;
d->sawInputEOS = false;
d->sawOutputEOS = false;
d->isPlaying = false;
d->renderonce = true;
AMediaCodec_start(codec);
}
AMediaFormat_delete(format);
}
mlooper = new mylooper();
mlooper->post(kMsgCodecBuffer, d);
return JNI_TRUE;
}
// set the playing state for the streaming media player
void Java_com_example_nativecodec_NativeCodec_setPlayingStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jboolean isPlaying)
{
    LOGV("@@@ playpause: %d", isPlaying);
    if (mlooper) {
        if (isPlaying) {
            mlooper->post(kMsgResume, &data);
        } else {
            mlooper->post(kMsgPause, &data);
        }
    }
}
// shut down the native media system
void Java_com_example_nativecodec_NativeCodec_shutdown(JNIEnv* env, jclass clazz)
{
    LOGV("@@@ shutdown");
    if (mlooper) {
        mlooper->post(kMsgDecodeDone, &data, true /* flush */);
        mlooper->quit();
        delete mlooper;
        mlooper = NULL;
    }
    if (data.window) {
        ANativeWindow_release(data.window);
        data.window = NULL;
    }
}
// set the surface
void Java_com_example_nativecodec_NativeCodec_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
    // obtain a native window from a Java surface
    if (data.window) {
        ANativeWindow_release(data.window);
        data.window = NULL;
    }
    data.window = ANativeWindow_fromSurface(env, surface);
    LOGV("@@@ setsurface %p", data.window);
}
// rewind the streaming media player
void Java_com_example_nativecodec_NativeCodec_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
{
    LOGV("@@@ rewind");
    if (mlooper) {  // guard against rewind being called before the player exists
        mlooper->post(kMsgSeek, &data);
    }
}
}