diff --git a/doc/README.md b/doc/README.md
new file mode 100644
index 0000000..d98d442
--- /dev/null
+++ b/doc/README.md
@@ -0,0 +1 @@
+# doc
diff --git a/livestream-to-earn-android/app/.gitignore b/livestream-to-earn-android/app/.gitignore
new file mode 100644
index 0000000..796b96d
--- /dev/null
+++ b/livestream-to-earn-android/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/livestream-to-earn-android/app/broadcaster/.gitignore b/livestream-to-earn-android/app/broadcaster/.gitignore
new file mode 100644
index 0000000..796b96d
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/livestream-to-earn-android/app/broadcaster/build.gradle b/livestream-to-earn-android/app/broadcaster/build.gradle
new file mode 100644
index 0000000..8818128
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/build.gradle
@@ -0,0 +1,33 @@
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 26
+ buildToolsVersion "26.0.2"
+
+ defaultConfig {
+ minSdkVersion 26
+ targetSdkVersion 26
+ versionCode 1
+ versionName "1.0"
+
+ testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+
+ }
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+ }
+ }
+}
+
+dependencies {
+ implementation fileTree(include: ['*.jar'], dir: 'libs')
+ androidTestImplementation('com.android.support.test.espresso:espresso-core:2.2.2', {
+ exclude group: 'com.android.support', module: 'support-annotations'
+ })
+ implementation 'net.butterflytv.utils:rtmp-client:3.1.0'
+ implementation 'com.android.support:support-annotations:25.3.1'
+ implementation 'com.android.support:design:25.3.1'
+ testImplementation 'junit:junit:4.12'
+}
diff --git a/livestream-to-earn-android/app/broadcaster/consumer-rules.pro b/livestream-to-earn-android/app/broadcaster/consumer-rules.pro
new file mode 100644
index 0000000..e69de29
diff --git a/livestream-to-earn-android/app/broadcaster/proguard-rules.pro b/livestream-to-earn-android/app/broadcaster/proguard-rules.pro
new file mode 100644
index 0000000..0f4b2a5
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/proguard-rules.pro
@@ -0,0 +1,17 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in /Users/mekya/Library/Android/sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
diff --git a/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt b/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt
new file mode 100644
index 0000000..94cc6e9
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt
@@ -0,0 +1,24 @@
+package com.psudoanon.broadcaster
+
+import androidx.test.platform.app.InstrumentationRegistry
+import androidx.test.ext.junit.runners.AndroidJUnit4
+
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.Assert.*
+
+/**
+ * Instrumented test, which will execute on an Android device.
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+@RunWith(AndroidJUnit4::class)
+class ExampleInstrumentedTest {
+ @Test
+ fun useAppContext() {
+ // Context of the app under test.
+ val appContext = InstrumentationRegistry.getInstrumentation().targetContext
+ assertEquals("com.psudoanon.broadcaster.test", appContext.packageName)
+ }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml b/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..3aac298
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.psudoanon.broadcaster">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.INTERNET" />
+
+</manifest>
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java
new file mode 100644
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java
+package com.psudoanon.broadcaster;
+
+/**
+ * Handles camera operation requests from other threads. Necessary because the
+ * Camera must only be accessed from one thread.
+ *
+ * The object is created on the UI thread, and all handlers run there. Messages are
+ * sent from other threads, using sendMessage().
+ */
+public class CameraHandler extends Handler {
+ private static final String TAG = CameraHandler.class.getSimpleName();
+
+ public static final int MSG_SET_SURFACE_TEXTURE = 0;
+
+
+ public interface ICameraViewer {
+
+ void handleSetSurfaceTexture(SurfaceTexture st);
+ }
+
+
+ // Weak reference to the Activity; only access this from the UI thread.
+ private WeakReference<ICameraViewer> mWeakCameraViewer;
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraSurfaceRenderer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraSurfaceRenderer.java
new file mode 100644
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraSurfaceRenderer.java
+package com.psudoanon.broadcaster;
+
+/**
+ * Renderer object for our GLSurfaceView.
+ *
+ * Do not call any methods here directly from another thread -- use the
+ * GLSurfaceView#queueEvent() call.
+ */
+public class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
+ private static final String TAG = CameraSurfaceRenderer.class.getSimpleName();
+ private static final boolean VERBOSE = false;
+
+ private static final int RECORDING_OFF = 0;
+ private static final int RECORDING_ON = 1;
+ private static final int RECORDING_RESUMED = 2;
+ private static final int RECORDER_CONFIG_CHANGED = 3;
+
+ private CameraHandler mCameraHandler;
+ private TextureMovieEncoder mVideoEncoder;
+
+ private FullFrameRect mFullScreen;
+
+ private final float[] mSTMatrix = new float[16];
+ private int mTextureId;
+
+ private SurfaceTexture mSurfaceTexture;
+ private boolean mRecordingEnabled;
+ private int mRecordingStatus;
+ private int mFrameCount;
+
+ // width/height of the incoming camera preview frames
+ private boolean mIncomingSizeUpdated;
+ private int mIncomingWidth;
+ private int mIncomingHeight;
+ private IMediaMuxer mWriterHandler;
+ private long mRecordingStartTime;
+ private int bitrate;
+ private int frameRate = 25;
+
+ /**
+ * Constructs CameraSurfaceRenderer.
+ *
+ * @param cameraHandler Handler for communicating with UI thread
+ * @param movieEncoder video encoder object
+ */
+ public CameraSurfaceRenderer(CameraHandler cameraHandler,
+ TextureMovieEncoder movieEncoder) {
+ mCameraHandler = cameraHandler;
+ mVideoEncoder = movieEncoder;
+
+
+ mTextureId = -1;
+
+ mRecordingStatus = -1;
+ mRecordingEnabled = false;
+ mFrameCount = -1;
+
+ mIncomingSizeUpdated = false;
+ mIncomingWidth = mIncomingHeight = -1;
+
+ }
+
+ private Texture2dProgram.ProgramType mEffectType = Texture2dProgram.ProgramType.TEXTURE_EXT ;
+
+ public void setEffect(Texture2dProgram.ProgramType effectType) {
+ this.mEffectType = effectType;
+ }
+
+ /**
+ * Notifies the renderer thread that the activity is pausing.
+ *
+ * For best results, call this *after* disabling Camera preview.
+ */
+ public void notifyPausing() {
+ if (mSurfaceTexture != null) {
+ Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
+ mSurfaceTexture.release();
+ mSurfaceTexture = null;
+ }
+ if (mFullScreen != null) {
+ mFullScreen.release(false); // assume the GLSurfaceView EGL context is about
+ mFullScreen = null; // to be destroyed
+ }
+ mIncomingWidth = mIncomingHeight = -1;
+ }
+
+ /**
+ * Notifies the renderer that we want to stop or start recording.
+ */
+ public void startRecording(long recordingStartTime) {
+ Log.d(TAG, "startRecording: was " + mRecordingEnabled + " now true");
+ mRecordingEnabled = true;
+ mRecordingStartTime = recordingStartTime;
+ }
+
+ public void stopRecording(){
+ mRecordingEnabled = false;
+ }
+
+ /**
+ * Records the size of the incoming camera preview frames.
+ *
+ * It's not clear whether this is guaranteed to execute before or after onSurfaceCreated(),
+ * so we assume it could go either way. (Fortunately they both run on the same thread,
+ * so we at least know that they won't execute concurrently.)
+ */
+ public void setCameraPreviewSize(int width, int height) {
+ Log.d(TAG, "setCameraPreviewSize");
+ mIncomingWidth = width;
+ mIncomingHeight = height;
+ mIncomingSizeUpdated = true;
+ if (mIncomingHeight >= 720) {
+ bitrate = 850000;
+ } else if (mIncomingHeight >= 480) {
+ bitrate = 550000;
+ } else if (mIncomingHeight >= 360) {
+ bitrate = 450000;
+ } else if (mIncomingHeight >= 288) {
+ bitrate = 350000;
+ } else if (mIncomingHeight >= 240) {
+ bitrate = 250000;
+ } else //if (mIncomingHeight >= 144)
+ {
+ bitrate = 100000;
+ }
+ }
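For reference, the ladder above maps the incoming preview height to an H.264 bitrate. A minimal usage sketch (the `renderer` variable is a hypothetical CameraSurfaceRenderer instance):

```java
// Hypothetical usage of the bitrate ladder above.
renderer.setCameraPreviewSize(1280, 720); // height >= 720 -> 850 kbps
renderer.setCameraPreviewSize(640, 480);  // height >= 480 -> 550 kbps
renderer.setCameraPreviewSize(320, 240);  // height >= 240 -> 250 kbps
int kbps = renderer.getBitrate() / 1000;  // 250 after the last call
```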
+
+ public int getBitrate() {
+ return bitrate;
+ }
+
+ public void setBitrate(int bitrate) {
+ this.bitrate = bitrate;
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+ Log.d(TAG, "onSurfaceCreated");
+
+ // We're starting up or coming back. Either way we've got a new EGLContext that will
+ // need to be shared with the video encoder, so figure out if a recording is already
+ // in progress.
+ mRecordingEnabled = mVideoEncoder.isRecording();
+ if (mRecordingEnabled) {
+ mRecordingStatus = RECORDING_RESUMED;
+ } else {
+ mRecordingStatus = RECORDING_OFF;
+ }
+
+ // Set up the texture blitter that will be used for on-screen display. This
+ // is *not* applied to the recording, because that uses a separate shader.
+ mFullScreen = new FullFrameRect(
+ new Texture2dProgram(mEffectType));
+
+ mVideoEncoder.setEffect(mEffectType);
+
+ mTextureId = mFullScreen.createTextureObject();
+
+ // Create a SurfaceTexture, with an external texture, in this EGL context. We don't
+ // have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame
+ // available messages will arrive on the main thread.
+ mSurfaceTexture = new SurfaceTexture(mTextureId);
+
+ System.out.println("//Tell the UI thread to enable the camera preview.");
+ mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
+ CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
+ }
+
+ public void setFrameRate(int frameRate) {
+ this.frameRate = frameRate;
+ if (mVideoEncoder != null) {
+ mVideoEncoder.setFrameRate(frameRate);
+ }
+ }
+
+ public int getFrameRate() {
+ return mVideoEncoder != null ? mVideoEncoder.getFrameRate() : 0;
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 unused, int width, int height) {
+ Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
+ GLES20.glViewport(0, 0, width, height);
+ }
+
+ @Override
+ public void onDrawFrame(GL10 unused) {
+ if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureId);
+ boolean showBox = false;
+
+
+ // Latch the latest frame. If there isn't anything new, we'll just re-use whatever
+ // was there before.
+ mSurfaceTexture.updateTexImage();
+
+ // If the recording state is changing, take care of it here. Ideally we wouldn't
+ // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView
+ // makes it hard to do elsewhere.
+ if (mRecordingEnabled) {
+
+ switch (mRecordingStatus) {
+ case RECORDING_OFF:
+ Log.d(TAG, "START recording bitrate: " +bitrate);
+ {
+
+ // start recording
+ boolean started = mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
+ mWriterHandler, mIncomingWidth, mIncomingHeight, bitrate, frameRate, EGL14.eglGetCurrentContext(), mEffectType),
+ mRecordingStartTime);
+ if (started) {
+ mRecordingStatus = RECORDING_ON;
+ }
+ else {
+ mRecordingStatus = RECORDING_OFF;
+ }
+ }
+ break;
+ case RECORDER_CONFIG_CHANGED:
+ mVideoEncoder.releaseRecording();
+ mRecordingStatus = RECORDING_OFF;
+ break;
+ case RECORDING_RESUMED:
+ Log.d(TAG, "RESUME recording");
+ mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
+ mRecordingStatus = RECORDING_ON;
+ break;
+ case RECORDING_ON:
+ // yay
+ break;
+ default:
+ throw new RuntimeException("unknown status " + mRecordingStatus);
+ }
+ } else {
+ switch (mRecordingStatus) {
+ case RECORDING_ON:
+ case RECORDING_RESUMED:
+ // stop recording
+ Log.d(TAG, "STOP recording");
+ mVideoEncoder.stopRecording();
+ mRecordingStatus = RECORDING_OFF;
+ break;
+ case RECORDING_OFF:
+ // yay
+ break;
+ default:
+ throw new RuntimeException("unknown status " + mRecordingStatus);
+ }
+ }
+
+ // Set the video encoder's texture name. We only need to do this once, but in the
+ // current implementation it has to happen after the video encoder is started, so
+ // we just do it here.
+ //
+ // TODO: be less lame.
+ mVideoEncoder.setTextureId(mTextureId);
+
+ // Tell the video encoder thread that a new frame is available.
+ // This will be ignored if we're not actually recording.
+ mVideoEncoder.frameAvailable(mSurfaceTexture);
+
+ if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
+ // Texture size isn't set yet. This is only used for the filters, but to be
+ // safe we can just skip drawing while we wait for the various races to resolve.
+ // (This seems to happen if you toggle the screen off/on with power button.)
+ Log.i(TAG, "Drawing before incoming texture size set; skipping");
+ return;
+ }
+
+ if (mIncomingSizeUpdated) {
+ mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
+ mIncomingSizeUpdated = false;
+ }
+
+ // Draw the video frame.
+ mSurfaceTexture.getTransformMatrix(mSTMatrix);
+ mFullScreen.drawFrame(mTextureId, mSTMatrix);
+
+ // Draw a flashing box if we're recording. This only appears on screen.
+ /* showBox = (mRecordingStatus == RECORDING_ON);
+ if (showBox && (++mFrameCount & 0x04) == 0) {
+ drawBox();
+ }
+ */
+ }
+
+ /**
+ * Draws a red box in the corner.
+ */
+ private void drawBox() {
+ GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+ GLES20.glScissor(0, 0, 100, 100);
+ GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+ }
+
+ public void setOptions(IMediaMuxer writerHandler) {
+ mWriterHandler = writerHandler;
+ }
+
+ // this function should be called after incoming width and height changed
+ public void recorderConfigChanged() {
+ // pay attention to this function, it causes throwing an exception in some circumstance when
+ // it is called in recording state
+ mRecordingStatus = RECORDER_CONFIG_CHANGED;
+ }
+
+ public List
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
new file mode 100644
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
+package com.psudoanon.broadcaster.encoder;
+
+/**
+ * Encode a movie from frames rendered from an external texture image.
+ *
+ * The object wraps an encoder running on a dedicated thread. The various control messages
+ * may be sent from arbitrary threads (typically the app UI thread). The encoder thread
+ * manages both sides of the encoder (feeding and draining); the only external input is
+ * the GL texture.
+ *
+ * The design is complicated slightly by the need to create an EGL context that shares state
+ * with a view that gets restarted if (say) the device orientation changes. When the view
+ * in question is a GLSurfaceView, we don't have full control over the EGL context creation
+ * on that side, so we have to bend a bit backwards here.
+ *
+ * To use:
+ * - create a TextureMovieEncoder object
+ * - create an EncoderConfig
+ * - call TextureMovieEncoder#startRecording() with the config
+ * - call TextureMovieEncoder#setTextureId() with the texture object that receives frames
+ * - for each frame, after latching it with SurfaceTexture#updateTexImage(),
+ *   call TextureMovieEncoder#frameAvailable()
+ */
+public class TextureMovieEncoder implements Runnable {
+ private static final String TAG = TextureMovieEncoder.class.getSimpleName();
+
+ /**
+ * Encoder configuration.
+ *
+ * Width, height and the shared EGL context are final, so they can be passed between
+ * threads without explicit synchronization; frame rate and bit rate are public and
+ * mutable so they can be adjusted after construction (see setFrameRate()/setBitrate()).
+ *
+ * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
+ * with reasonable defaults for those and bit rate.
+ */
+ public static class EncoderConfig {
+ final int mWidth;
+ final int mHeight;
+ final EGLContext mEglContext;
+ final IMediaMuxer writerHandler;
+ final Texture2dProgram.ProgramType mProgramType;
+ public int mFrameRate;
+ public int mBitRate;
+
+ public EncoderConfig(IMediaMuxer handler, int width, int height, int bitRate, int frameRate,
+ EGLContext sharedEglContext, Texture2dProgram.ProgramType programType) {
+ writerHandler = handler;
+ mWidth = width;
+ mHeight = height;
+ mBitRate = bitRate;
+ mEglContext = sharedEglContext;
+ mProgramType = programType;
+ mFrameRate = frameRate;
+ }
+
+ }
+
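A sketch of how a config is built and handed to the encoder, mirroring the call in CameraSurfaceRenderer.onDrawFrame(); the `muxer` instance and the numeric values are placeholders, and the call must run on a thread with a current EGL context:

```java
// Sketch only: 'muxer' stands in for a concrete IMediaMuxer implementation.
TextureMovieEncoder encoder = new TextureMovieEncoder();
TextureMovieEncoder.EncoderConfig config = new TextureMovieEncoder.EncoderConfig(
        muxer, 1280, 720, 850000, 25,
        EGL14.eglGetCurrentContext(), // share the GLSurfaceView's current context
        Texture2dProgram.ProgramType.TEXTURE_EXT);
boolean started = encoder.startRecording(config, System.currentTimeMillis());
```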
+ /**
+ * Tells the video recorder to start recording. (Call from non-encoder thread.)
+ *
+ * Creates a new thread, which will create an encoder using the provided configuration.
+ *
+ * Returns after the recorder thread has started and is ready to accept Messages. The
+ * encoder may not yet be fully configured.
+ */
+ public boolean startRecording(EncoderConfig config, long mRecordingStartTime) {
+ Log.d(TAG, "Encoder: startRecording()");
+ synchronized (mReadyFence) {
+ if (mRunning) {
+ Log.w(TAG, "Encoder thread already running");
+ return false;
+ }
+ this.mRecordingStartTime = mRecordingStartTime;
+ mRunning = true;
+ new Thread(this, "TextureMovieEncoder").start();
+ while (!mReady) {
+ try {
+ mReadyFence.wait();
+ } catch (InterruptedException ie) {
+ // ignore
+ }
+ }
+ }
+
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
+ return true;
+ }
+
+
+ public void releaseRecording() {
+ if (mHandler != null) {
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_RELEASE_RECORDING));
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
+ }
+ }
+
+ /**
+ * Tells the video recorder to stop recording. (Call from non-encoder thread.)
+ *
+ * Returns immediately; the encoder/muxer may not yet be finished creating the movie.
+ *
+ * TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
+ * so we can provide reasonable status UI (and let the caller know that movie encoding
+ * has completed).
+ */
+ public void stopRecording() {
+ if (mHandler != null) {
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
+ }
+ // We don't know when these will actually finish (or even start). We don't want to
+ // delay the UI thread though, so we return immediately.
+ }
+
+ /**
+ * Returns true if recording has been started.
+ */
+ public boolean isRecording() {
+ synchronized (mReadyFence) {
+ return mRunning;
+ }
+ }
+
+ /**
+ * Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.)
+ */
+ public void updateSharedContext(EGLContext sharedContext) {
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
+ }
+
+ /**
+ * Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
+ *
+ * This function sends a message and returns immediately. This isn't sufficient -- we
+ * don't want the caller to latch a new frame until we're done with this one -- but we
+ * can get away with it so long as the input frame rate is reasonable and the encoder
+ * thread doesn't stall.
+ *
+ * TODO: either block here until the texture has been rendered onto the encoder surface,
+ * or have a separate "block if still busy" method that the caller can execute immediately
+ * before it calls updateTexImage(). The latter is preferred because we don't want to
+ * stall the caller while this thread does work.
+ */
+ public void frameAvailable(SurfaceTexture st) {
+ synchronized (mReadyFence) {
+ if (!mReady) {
+ return;
+ }
+ }
+
+ if (mHandler == null) {
+ return;
+ }
+
+ float[] transform = new float[16]; // TODO - avoid alloc every frame
+ st.getTransformMatrix(transform);
+ /*
+ long timestamp = st.getTimestamp();
+ if (timestamp == 0) {
+ // Seeing this after device is toggled off/on with power button. The
+ // first frame back has a zero timestamp.
+ //
+ // MPEG4Writer thinks this is cause to abort() in native code, so it's very
+ // important that we just ignore the frame.
+ Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
+ return;
+ }
+ */
+
+ long frameTime = System.currentTimeMillis();
+ if (mVideoEncoder != null && (frameTime - mLastFrameTime) >= getFrameInterval())
+ {
+ Log.d(TAG, " get frame interval :" + getFrameInterval());
+ // throttle: forward at most one frame per frame interval, so the
+ // effective output rate is the configured frame rate or less
+ long timestamp = (frameTime - mRecordingStartTime)
+ * 1000000; // convert milliseconds to nanoseconds
+ mLastFrameTime = frameTime;
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
+ (int) (timestamp >> 32), (int) timestamp, transform));
+ }
+ }
+
+ private long getFrameInterval() {
+ return 1000 / mEncoderConfig.mFrameRate;
+ }
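Spelled out, the throttle in frameAvailable() works like this (a sketch with assumed values; `lastFrameTimeMs` stands in for mLastFrameTime):

```java
long intervalMs = 1000 / 25;  // 40 ms between frames at 25 fps (integer division)
long now = System.currentTimeMillis();
if (now - lastFrameTimeMs >= intervalMs) {
    // forward this frame to the encoder thread; otherwise drop it
    lastFrameTimeMs = now;
}
```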
+
+ public void setFrameRate(int framerate) {
+ if (mEncoderConfig != null) {
+ mEncoderConfig.mFrameRate = framerate;
+ }
+ }
+
+ public void setBitrate(int bitrate) {
+ if (mEncoderConfig != null) {
+ mEncoderConfig.mBitRate = bitrate;
+ }
+ }
+
+ public void setIframeInterval(int seconds) {
+ mIframeInterval = seconds;
+ }
+
+ public int getFrameRate() {
+ return mEncoderConfig != null ? mEncoderConfig.mFrameRate : 0;
+ }
+
+ /**
+ * Tells the video recorder what texture name to use. This is the external texture that
+ * we're receiving camera previews in. (Call from non-encoder thread.)
+ *
+ * TODO: do something less clumsy
+ */
+ public void setTextureId(int id) {
+ synchronized (mReadyFence) {
+ if (!mReady) {
+ return;
+ }
+ }
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
+ }
+
+ public void setEffect(Texture2dProgram.ProgramType programType) {
+ synchronized (mReadyFence) {
+ if (!mReady) {
+ return;
+ }
+ }
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_CHANGE_EFFECT, 0, 0, programType));
+ }
+
+ /**
+ * Encoder thread entry point. Establishes Looper/Handler and waits for messages.
+ *
+ * @see Thread#run()
+ */
+ @Override
+ public void run() {
+ // Establish a Looper for this thread, and define a Handler for it.
+ Looper.prepare();
+ synchronized (mReadyFence) {
+ mHandler = new EncoderHandler(this);
+ mReady = true;
+ mReadyFence.notify();
+ }
+ Looper.loop();
+
+ Log.d(TAG, "Encoder thread exiting");
+ synchronized (mReadyFence) {
+ mReady = mRunning = false;
+ mHandler = null;
+ }
+ }
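The startRecording()/run() pair above uses a classic ready-fence handshake: the caller blocks until the encoder thread has a Looper and a Handler. Reduced to its core (a generic sketch, not this class's exact fields):

```java
final Object fence = new Object();
final Handler[] handlerBox = new Handler[1];

new Thread(new Runnable() {
    @Override public void run() {
        Looper.prepare();
        synchronized (fence) {
            handlerBox[0] = new Handler(Looper.myLooper());
            fence.notify(); // wake the caller: messages can be accepted now
        }
        Looper.loop();      // runs until quit() is called on the Looper
    }
}, "EncoderThread").start();

synchronized (fence) {
    while (handlerBox[0] == null) {
        try { fence.wait(); } catch (InterruptedException ignored) { }
    }
}
// handlerBox[0] is now safe to post messages to.
```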
+
+
+ /**
+ * Handles encoder state change requests. The handler is created on the encoder thread.
+ */
+ private static class EncoderHandler extends Handler {
+
+
+ private WeakReference<TextureMovieEncoder> mWeakEncoder;
+ }
+
+ /**
+ * Handles notification of an available frame.
+ *
+ * The texture is rendered onto the encoder's input surface, along with a moving
+ * box (just because we can).
+ *
+ * @param transform The texture transform, from SurfaceTexture.
+ * @param timestampNanos The frame's timestamp, from SurfaceTexture.
+ */
+ private void handleFrameAvailable(float[] transform, long timestampNanos) {
+ if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
+ if (mFullScreen != null) {
+ mVideoEncoder.drainEncoder(false);
+ mFullScreen.drawFrame(mTextureId, transform);
+
+ // drawBox(mFrameNum++);
+
+ mInputWindowSurface.setPresentationTime(timestampNanos);
+ mInputWindowSurface.swapBuffers();
+ }
+ }
+
+ /**
+ * Handles a request to stop encoding.
+ */
+ private void handleStopRecording(boolean stopMuxer) {
+ Log.d(TAG, "handleStopRecording");
+ mVideoEncoder.drainEncoder(true);
+ releaseEncoder();
+ if (stopMuxer) {
+ mVideoEncoder.stopMuxer();
+ }
+ }
+
+ /**
+ * Sets the texture name that SurfaceTexture will use when frames are received.
+ */
+ private void handleSetTexture(int id) {
+ //Log.d(TAG, "handleSetTexture " + id);
+ mTextureId = id;
+ }
+
+ /**
+ * Tears down the EGL surface and context we've been using to feed the MediaCodec input
+ * surface, and replaces it with a new one that shares with the new context.
+ *
+ * This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
+ * that got torn down) and we need to hook up with the new one.
+ */
+ private void handleUpdateSharedContext(EGLContext newSharedContext) {
+ Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
+
+ // Release the EGLSurface and EGLContext.
+ mInputWindowSurface.releaseEglSurface();
+ mFullScreen.release(false);
+ mEglCore.release();
+
+ // Create a new EGLContext and recreate the window surface.
+ mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
+ mInputWindowSurface.recreate(mEglCore);
+ mInputWindowSurface.makeCurrent();
+
+ // Create new programs and such for the new context.
+ mFullScreen = new FullFrameRect(
+ new Texture2dProgram(mProgramType));
+ }
+
+ private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate, int frameRate,
+ IMediaMuxer writerHandle, Texture2dProgram.ProgramType programType)
+ throws IllegalStateException
+ {
+ try {
+ mVideoEncoder = new VideoEncoderCore(width, height, bitRate, frameRate, mIframeInterval, writerHandle);
+
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
+ mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
+ mInputWindowSurface.makeCurrent();
+
+ mProgramType = programType;
+ mFullScreen = new FullFrameRect(
+ new Texture2dProgram(programType));
+
+ }
+
+ private void releaseEncoder() {
+ mVideoEncoder.release();
+ if (mInputWindowSurface != null) {
+ mInputWindowSurface.release();
+ mInputWindowSurface = null;
+ }
+ if (mFullScreen != null) {
+ mFullScreen.release(false);
+ mFullScreen = null;
+ }
+ if (mEglCore != null) {
+ mEglCore.release();
+ mEglCore = null;
+ }
+ }
+
+ /**
+ * Draws a box, with position offset.
+ */
+ private void drawBox(int posn) {
+ final int width = mInputWindowSurface.getWidth();
+ int xpos = (posn * 4) % (width - 50);
+ GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+ GLES20.glScissor(xpos, 0, 100, 100);
+ GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+ }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java
new file mode 100644
index 0000000..2809afc
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.util.Log;
+import android.view.Surface;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import com.psudoanon.broadcaster.network.IMediaMuxer;
+
+/**
+ * This class wraps up the core components used for surface-input video encoding.
+ *
+ * Once created, frames are fed to the input surface. Remember to provide the presentation
+ * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
+ * producer side doesn't get backed up.
+ *
+ * This class is not thread-safe, with one exception: it is valid to use the input surface
+ * on one thread, and drain the output on a different thread.
+ */
+public class VideoEncoderCore {
+ private static final String TAG = VideoEncoderCore.class.getSimpleName();
+ private static final boolean VERBOSE = false;
+
+ // TODO: these ought to be configurable as well
+ private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
+ //private int frameRate = 20; // 20fps
+ private static final int IFRAME_INTERVAL = 2; // 2 seconds between I-frames
+ private IMediaMuxer mWriterHandler;
+
+ private Surface mInputSurface;
+ private MediaCodec mEncoder;
+ private MediaCodec.BufferInfo mBufferInfo;
+ private boolean mMuxerStarted;
+ private Map<byte[], Integer> reservedBuffers = new HashMap<>();
+
+ /**
+ * Extracts all pending data from the encoder and forwards it to the muxer.
+ *
+ * If endOfStream is not set, this returns when there is no more data to drain. If it
+ * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
+ * Calling this with endOfStream set should be done once, right before stopping the muxer.
+ *
+ * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
+ * not recording audio.
+ */
+ public void drainEncoder(boolean endOfStream) {
+ if (mEncoder == null) {
+ return;
+ }
+ final int TIMEOUT_USEC = 10000;
+ if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
+
+ if (endOfStream) {
+ if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
+ mEncoder.signalEndOfInputStream();
+ }
+
+ ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
+ while (true) {
+ int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
+ if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ // no output available yet
+ if (!endOfStream) {
+ break; // out of while
+ } else {
+ if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
+ }
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ // not expected for an encoder
+ encoderOutputBuffers = mEncoder.getOutputBuffers();
+ Log.d("VideoEncoder", "INFO_OUTPUT_BUFFERS_CHANGED");
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // should happen before receiving buffers, and should only happen once
+ if (mMuxerStarted) {
+ throw new RuntimeException("format changed twice");
+ }
+ MediaFormat newFormat = mEncoder.getOutputFormat();
+ ByteBuffer sps = newFormat.getByteBuffer("csd-0");
+ ByteBuffer pps = newFormat.getByteBuffer("csd-1");
+ byte[] config = new byte[sps.limit() + pps.limit()];
+ sps.get(config, 0, sps.limit());
+ pps.get(config, sps.limit(), pps.limit());
+
+ mWriterHandler.writeVideo(config, config.length, 0);
+
+ Log.d(TAG, "encoder output format changed: " + newFormat);
+
+ mMuxerStarted = true;
+ } else if (encoderStatus < 0) {
+ Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
+ encoderStatus);
+ // let's ignore it
+
+ } else {
+ ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
+ if (encodedData == null) {
+ throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
+ " was null");
+ }
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ // The codec config data was pulled out and fed to the muxer when we got
+ // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
+ if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
+ mBufferInfo.size = 0;
+ }
+
+ if (mBufferInfo.size != 0) {
+ if (!mMuxerStarted) {
+ throw new RuntimeException("muxer hasn't started");
+ }
+
+ // adjust the ByteBuffer values to match BufferInfo (not needed?)
+ encodedData.position(mBufferInfo.offset);
+ encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
+
+ long presentationTimeInMillis = mBufferInfo.presentationTimeUs / 1000; // convert to milliseconds
+ // Divide in a long first, then cast to int. Casting before dividing, as in
+ // "int t = (int) mBufferInfo.presentationTimeUs / 1000;", overflows once the
+ // value exceeds Integer.MAX_VALUE microseconds (about 35 minutes) and the
+ // presentation time goes negative.
+ int presentationTime = (int) presentationTimeInMillis;
+ byte[] data = getBuffer(mBufferInfo.size, mWriterHandler.getLastVideoFrameTimeStamp(), presentationTime);
+ encodedData.get(data, 0, mBufferInfo.size);
+ encodedData.position(mBufferInfo.offset);
+
+ mWriterHandler.writeVideo(data, mBufferInfo.size, presentationTime);
+ }
+ mEncoder.releaseOutputBuffer(encoderStatus, false);
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (!endOfStream) {
+ Log.w(TAG, "reached end of stream unexpectedly");
+ } else {
+ if (VERBOSE) Log.d(TAG, "end of stream reached");
+ }
+ reservedBuffers.clear();
+ break; // out of while
+ }
+ }
+ }
+ }
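The overflow the presentation-time comment above warns about, as concrete arithmetic (values assumed for illustration):

```java
long us = 36L * 60 * 1000000;   // 36 minutes of presentation time, in microseconds
int wrong = (int) us / 1000;    // cast binds first: (int) 2160000000L is negative
int right = (int) (us / 1000);  // divide first: 2,160,000 ms fits comfortably in an int
```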
+
+ private byte[] getBuffer(int size, int lastSentFrameTimestamp, int currentTimeStamp)
+ {
+ /*
+ * How it works: each byte array is stored in reservedBuffers together with its
+ * timestamp. When a new buffer is needed, stored timestamps are compared against
+ * the muxer's last written frame timestamp; any buffer older than that has already
+ * been written to the network, so it can be reused instead of allocating a new one.
+ */
+ Iterator
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java
new file mode 100644
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java
+package com.psudoanon.broadcaster.encoder.gles;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Base class for stuff we like to draw.
+ */
+public class Drawable2d {
+ private static final int SIZEOF_FLOAT = 4;
+
+ /**
+ * Simple square, specified as a triangle strip. The square is centered on (0,0)
+ * and has a size of 1x1.
+ *
+ * Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding).
+ */
+ private static final float RECTANGLE_COORDS[] = {
+ -0.5f, -0.5f, // 0 bottom left
+ 0.5f, -0.5f, // 1 bottom right
+ -0.5f, 0.5f, // 2 top left
+ 0.5f, 0.5f, // 3 top right
+ };
+ private static final float RECTANGLE_TEX_COORDS[] = {
+ 0.0f, 1.0f, // 0 bottom left
+ 1.0f, 1.0f, // 1 bottom right
+ 0.0f, 0.0f, // 2 top left
+ 1.0f, 0.0f // 3 top right
+ };
+ private static final FloatBuffer RECTANGLE_BUF =
+ GlUtil.createFloatBuffer(RECTANGLE_COORDS);
+ private static final FloatBuffer RECTANGLE_TEX_BUF =
+ GlUtil.createFloatBuffer(RECTANGLE_TEX_COORDS);
+
+ /**
+ * A "full" square, extending from -1 to +1 in both dimensions. When the model/view/projection
+ * matrix is identity, this will exactly cover the viewport.
+ *
+ * The texture coordinates are Y-inverted relative to RECTANGLE. (This seems to work out
+ * right with external textures from SurfaceTexture.)
+ */
+ private static final float FULL_RECTANGLE_COORDS[] = {
+ -1.0f, -1.0f, // 0 bottom left
+ 1.0f, -1.0f, // 1 bottom right
+ -1.0f, 1.0f, // 2 top left
+ 1.0f, 1.0f, // 3 top right
+ };
+ private static final float FULL_RECTANGLE_TEX_COORDS[] = {
+ 0.0f, 0.0f, // 0 bottom left
+ 1.0f, 0.0f, // 1 bottom right
+ 0.0f, 1.0f, // 2 top left
+ 1.0f, 1.0f // 3 top right
+ };
+ private static final FloatBuffer FULL_RECTANGLE_BUF =
+ GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS);
+ private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+ GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS);
+
+
+ private FloatBuffer mVertexArray;
+ private FloatBuffer mTexCoordArray;
+ private int mVertexCount;
+ private int mCoordsPerVertex;
+ private int mVertexStride;
+ private int mTexCoordStride;
+ private Prefab mPrefab;
+
+ /**
+ * Enum values for constructor.
+ */
+ public enum Prefab {
+ TRIANGLE, RECTANGLE, FULL_RECTANGLE
+ }
+
+ /**
+ * Prepares a drawable from a "pre-fabricated" shape definition.
+ *
+ * Does no EGL/GL operations, so this can be done at any time.
+ */
+ public Drawable2d(Prefab shape) {
+ switch (shape) {
+ case TRIANGLE:
+ mVertexArray = TRIANGLE_BUF;
+ mTexCoordArray = TRIANGLE_TEX_BUF;
+ mCoordsPerVertex = 2;
+ mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
+ mVertexCount = TRIANGLE_COORDS.length / mCoordsPerVertex;
+ break;
+ case RECTANGLE:
+ mVertexArray = RECTANGLE_BUF;
+ mTexCoordArray = RECTANGLE_TEX_BUF;
+ mCoordsPerVertex = 2;
+ mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
+ mVertexCount = RECTANGLE_COORDS.length / mCoordsPerVertex;
+ break;
+ case FULL_RECTANGLE:
+ mVertexArray = FULL_RECTANGLE_BUF;
+ mTexCoordArray = FULL_RECTANGLE_TEX_BUF;
+ mCoordsPerVertex = 2;
+ mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
+ mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex;
+ break;
+ default:
+ throw new RuntimeException("Unknown shape " + shape);
+ }
+ mTexCoordStride = 2 * SIZEOF_FLOAT;
+ mPrefab = shape;
+ }
+
+ /**
+ * Returns the array of vertices.
+ *
+ * To avoid allocations, this returns internal state. The caller must not modify it.
+ */
+ public FloatBuffer getVertexArray() {
+ return mVertexArray;
+ }
+
+ /**
+ * Returns the array of texture coordinates.
+ *
+ * To avoid allocations, this returns internal state. The caller must not modify it.
+ */
+ public FloatBuffer getTexCoordArray() {
+ return mTexCoordArray;
+ }
+
+ /**
+ * Returns the number of vertices stored in the vertex array.
+ */
+ public int getVertexCount() {
+ return mVertexCount;
+ }
+
+ /**
+ * Returns the width, in bytes, of the data for each vertex.
+ */
+ public int getVertexStride() {
+ return mVertexStride;
+ }
+
+ /**
+ * Returns the width, in bytes, of the data for each texture coordinate.
+ */
+ public int getTexCoordStride() {
+ return mTexCoordStride;
+ }
+
+ /**
+ * Returns the number of position coordinates per vertex. This will be 2 or 3.
+ */
+ public int getCoordsPerVertex() {
+ return mCoordsPerVertex;
+ }
+
+ @Override
+ public String toString() {
+ if (mPrefab != null) {
+ return "[Drawable2d: " + mPrefab + "]";
+ } else {
+ return "[Drawable2d: ...]";
+ }
+ }
+}
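A small usage sketch for Drawable2d; note the getters hand back shared internal buffers that callers must treat as read-only:

```java
Drawable2d rect = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
FloatBuffer vertices = rect.getVertexArray(); // shared internal state: do not modify
int count = rect.getVertexCount();            // 4 vertices for FULL_RECTANGLE
int stride = rect.getVertexStride();          // 2 coords * 4 bytes = 8
```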
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java
new file mode 100644
index 0000000..56b1704
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java
@@ -0,0 +1,372 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.util.Log;
+import android.view.Surface;
+
+/**
+ * Core EGL state (display, context, config).
+ *
+ * The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
+ */
+public final class EglCore {
+ private static final String TAG = GlUtil.TAG;
+
+ /**
+ * Constructor flag: surface must be recordable. This discourages EGL from using a
+ * pixel format that cannot be converted efficiently to something usable by the video
+ * encoder.
+ */
+ public static final int FLAG_RECORDABLE = 0x01;
+
+ /**
+ * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
+ * flag, GLES2 is used.
+ */
+ public static final int FLAG_TRY_GLES3 = 0x02;
+
+ // Android-specific extension.
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLConfig mEGLConfig = null;
+ private int mGlVersion = -1;
+
+
+ /**
+ * Prepares EGL display and context.
+ *
+ * Equivalent to EglCore(null, 0).
+ */
+ public EglCore() {
+ this(null, 0);
+ }
+
+ /**
+ * Prepares EGL display and context.
+ *
+ * @param sharedContext The context to share, or null if sharing is not desired.
+ * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
+ */
+ public EglCore(EGLContext sharedContext, int flags) {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("EGL already set up");
+ }
+
+ if (sharedContext == null) {
+ sharedContext = EGL14.EGL_NO_CONTEXT;
+ }
+
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+
+ // Try to get a GLES3 context, if requested.
+ if ((flags & FLAG_TRY_GLES3) != 0) {
+ //Log.d(TAG, "Trying GLES 3");
+ EGLConfig config = getConfig(flags, 3);
+ if (config != null) {
+ int[] attrib3_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
+ EGL14.EGL_NONE
+ };
+ EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+ attrib3_list, 0);
+
+ if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
+ //Log.d(TAG, "Got GLES 3 config");
+ mEGLConfig = config;
+ mEGLContext = context;
+ mGlVersion = 3;
+ }
+ }
+ }
+ if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
+ //Log.d(TAG, "Trying GLES 2");
+ EGLConfig config = getConfig(flags, 2);
+ if (config == null) {
+ throw new RuntimeException("Unable to find a suitable EGLConfig");
+ }
+ int[] attrib2_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+ attrib2_list, 0);
+ checkEglError("eglCreateContext");
+ mEGLConfig = config;
+ mEGLContext = context;
+ mGlVersion = 2;
+ }
+
+ // Confirm with query.
+ int[] values = new int[1];
+ EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
+ values, 0);
+ Log.d(TAG, "EGLContext created, client version " + values[0]);
+ }
+
+ /**
+ * Finds a suitable EGLConfig.
+ *
+ * @param flags Bit flags from constructor.
+ * @param version Must be 2 or 3.
+ */
+ private EGLConfig getConfig(int flags, int version) {
+ int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
+ if (version >= 3) {
+ renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
+ }
+
+ // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
+ // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
+ // when reading into a GL_RGBA buffer.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_ALPHA_SIZE, 8,
+ //EGL14.EGL_DEPTH_SIZE, 16,
+ //EGL14.EGL_STENCIL_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, renderableType,
+ EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
+ EGL14.EGL_NONE
+ };
+ if ((flags & FLAG_RECORDABLE) != 0) {
+ attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
+ attribList[attribList.length - 2] = 1;
+ }
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
+ return null;
+ }
+ return configs[0];
+ }
+
+ /**
+ * Discards all resources held by this class, notably the EGL context. This must be
+ * called from the thread where the context was created.
+ *
+ * On completion, no context will be current.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ // Android is unusual in that it uses a reference-counted EGLDisplay. So for
+ // every eglInitialize() we need an eglTerminate().
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLConfig = null;
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ // We're limited here -- finalizers don't run on the thread that holds
+ // the EGL state, so if a surface or context is still current on another
+ // thread we can't fully release it here. Exceptions thrown from here
+ // are quietly discarded. Complain in the log file.
+ Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
+ release();
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
+ * still current in a context.
+ */
+ public void releaseSurface(EGLSurface eglSurface) {
+ EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Creates an EGL surface associated with a Surface.
+ *
+ * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
+ */
+ public EGLSurface createWindowSurface(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new RuntimeException("invalid surface: " + surface);
+ }
+
+ // Create a window surface, and attach it to the Surface we received.
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ return eglSurface;
+ }
+
+ /**
+ * Creates an EGL surface associated with an offscreen buffer.
+ */
+ public EGLSurface createOffscreenSurface(int width, int height) {
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE
+ };
+ EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
+ surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ return eglSurface;
+ }
+
+ /**
+ * Makes our EGL context current, using the supplied surface for both "draw" and "read".
+ */
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ // called makeCurrent() before create?
+ Log.d(TAG, "NOTE: makeCurrent w/o display");
+ }
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
+ */
+ public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ // called makeCurrent() before create?
+ Log.d(TAG, "NOTE: makeCurrent w/o display");
+ }
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent(draw,read) failed");
+ }
+ }
+
+ /**
+ * Makes no context current.
+ */
+ public void makeNothingCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers(EGLSurface eglSurface) {
+ return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
+ }
+
+ /**
+ * Returns true if our context and the specified surface are current.
+ */
+ public boolean isCurrent(EGLSurface eglSurface) {
+ return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
+ eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
+ }
+
+ /**
+ * Performs a simple surface query.
+ */
+ public int querySurface(EGLSurface eglSurface, int what) {
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
+ return value[0];
+ }
+
+ /**
+ * Queries a string value.
+ */
+ public String queryString(int what) {
+ return EGL14.eglQueryString(mEGLDisplay, what);
+ }
+
+ /**
+ * Returns the GLES version this context is configured for (currently 2 or 3).
+ */
+ public int getGlVersion() {
+ return mGlVersion;
+ }
+
+ /**
+ * Writes the current display, context, and surface to the log.
+ */
+ public static void logCurrent(String msg) {
+ EGLDisplay display;
+ EGLContext context;
+ EGLSurface surface;
+
+ display = EGL14.eglGetCurrentDisplay();
+ context = EGL14.eglGetCurrentContext();
+ surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
+ Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", activity=" + context +
+ ", surface=" + surface);
+ }
+
+ /**
+ * Checks for EGL errors. Throws an exception if an error has been raised.
+ */
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
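A typical EglCore lifecycle, assembled from the methods above (a sketch; the surface size and flags are arbitrary):

```java
EglCore eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
EGLSurface surface = eglCore.createOffscreenSurface(640, 480);
eglCore.makeCurrent(surface);
// ... issue GLES calls on this thread ...
eglCore.makeNothingCurrent();
eglCore.releaseSurface(surface);
eglCore.release(); // must run on the thread that created the context
```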
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java
new file mode 100644
index 0000000..b5339d1
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.graphics.Bitmap;
+import android.opengl.EGL14;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Common base class for EGL surfaces.
+ *
+ * There can be multiple surfaces associated with a single context.
+ */
+public class EglSurfaceBase {
+ protected static final String TAG = GlUtil.TAG;
+
+ // EglCore object we're associated with. It may be associated with multiple surfaces.
+ protected EglCore mEglCore;
+
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+ private int mWidth = -1;
+ private int mHeight = -1;
+
+ protected EglSurfaceBase(EglCore eglCore) {
+ mEglCore = eglCore;
+ }
+
+ /**
+ * Creates a window surface.
+ *
+ * @param surface May be a Surface or SurfaceTexture.
+ */
+ public void createWindowSurface(Object surface) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglCore.createWindowSurface(surface);
+
+ // Don't cache width/height here, because the size of the underlying surface can change
+ // out from under us (see e.g. HardwareScalerActivity).
+ //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+ //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+ }
+
+ /**
+ * Creates an off-screen surface.
+ */
+ public void createOffscreenSurface(int width, int height) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglCore.createOffscreenSurface(width, height);
+ mWidth = width;
+ mHeight = height;
+ }
+
+ /**
+ * Returns the surface's width, in pixels.
+ *
+ * If this is called on a window surface, and the underlying surface is in the process
+ * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
+ * callback). The size should match after the next buffer swap.
+ */
+ public int getWidth() {
+ if (mWidth < 0) {
+ return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+ } else {
+ return mWidth;
+ }
+ }
+
+ /**
+ * Returns the surface's height, in pixels.
+ */
+ public int getHeight() {
+ if (mHeight < 0) {
+ return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+ } else {
+ return mHeight;
+ }
+ }
+
+ /**
+ * Release the EGL surface.
+ */
+ public void releaseEglSurface() {
+ mEglCore.releaseSurface(mEGLSurface);
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+ mWidth = mHeight = -1;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ mEglCore.makeCurrent(mEGLSurface);
+ }
+
+ /**
+ * Makes our EGL context and surface current for drawing, using the supplied surface
+ * for reading.
+ */
+ public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
+ mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers() {
+ boolean result = mEglCore.swapBuffers(mEGLSurface);
+ if (!result) {
+ Log.d(TAG, "WARNING: swapBuffers() failed");
+ }
+ return result;
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL.
+ *
+ * @param nsecs Timestamp, in nanoseconds.
+ */
+ public void setPresentationTime(long nsecs) {
+ mEglCore.setPresentationTime(mEGLSurface, nsecs);
+ }
+
+ /**
+ * Saves the EGL surface to a file.
+ *
+ * Expects that this object's EGL surface is current.
+ */
+ public void saveFrame(File file) throws IOException {
+ if (!mEglCore.isCurrent(mEGLSurface)) {
+ throw new RuntimeException("Expected EGL activity/surface is not current");
+ }
+
+ // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
+ // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
+ // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
+ // Bitmap "copy pixels" method wants the same format GL provides.
+ //
+ // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
+ // here often.
+ //
+ // Making this even more interesting is the upside-down nature of GL, which means
+ // our output will look upside down relative to what appears on screen if the
+ // typical GL conventions are used.
+
+ String filename = file.toString();
+
+ int width = getWidth();
+ int height = getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ GLES20.glReadPixels(0, 0, width, height,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+ GlUtil.checkGlError("glReadPixels");
+ buf.rewind();
+
+ BufferedOutputStream bos = null;
+ try {
+ bos = new BufferedOutputStream(new FileOutputStream(filename));
+ Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bmp.copyPixelsFromBuffer(buf);
+ bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
+ bmp.recycle();
+ } finally {
+ if (bos != null) bos.close();
+ }
+ Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+ }
+}
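saveFrame() reads the draw buffer of the current surface, so it should run after drawing and before the buffer swap publishes the frame. A sketch (`windowSurface` and `outputDir` are placeholders):

```java
windowSurface.makeCurrent();
// ... render the frame ...
windowSurface.saveFrame(new File(outputDir, "frame.png")); // glReadPixels on the back buffer
windowSurface.swapBuffers();
```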
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java
new file mode 100644
index 0000000..55dc138
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+/**
+ * This class essentially represents a viewport-sized sprite that will be rendered with
+ * a texture, usually from an external source like the camera or video decoder.
+ */
+public class FullFrameRect {
+ private final Drawable2d mRectDrawable = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
+ private Texture2dProgram mProgram;
+
+ /**
+ * Prepares the object.
+ *
+ * @param program The program to use. FullFrameRect takes ownership, and will release
+ * the program when no longer needed.
+ */
+ public FullFrameRect(Texture2dProgram program) {
+ mProgram = program;
+ }
+
+ /**
+ * Releases resources.
+ *
+ * This must be called with the appropriate EGL context current (i.e. the one that was
+ * current when the constructor was called). If we're about to destroy the EGL context,
+ * there's no value in having the caller make it current just to do this cleanup, so you
+ * can pass a flag that will tell this function to skip any EGL-context-specific cleanup.
+ */
+ public void release(boolean doEglCleanup) {
+ if (mProgram != null) {
+ if (doEglCleanup) {
+ mProgram.release();
+ }
+ mProgram = null;
+ }
+ }
+
+ /**
+ * Returns the program currently in use.
+ */
+ public Texture2dProgram getProgram() {
+ return mProgram;
+ }
+
+ /**
+ * Changes the program. The previous program will be released.
+ *
+     * The appropriate EGL context must be current.
+ */
+ public void changeProgram(Texture2dProgram program) {
+ mProgram.release();
+ mProgram = program;
+ }
+
+ /**
+ * Creates a texture object suitable for use with drawFrame().
+ */
+ public int createTextureObject() {
+ return mProgram.createTextureObject();
+ }
+
+ /**
+ * Draws a viewport-filling rect, texturing it with the specified texture object.
+ */
+ public void drawFrame(int textureId, float[] texMatrix) {
+ // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport.
+ mProgram.draw(GlUtil.IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0,
+ mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(),
+ mRectDrawable.getVertexStride(),
+ texMatrix, mRectDrawable.getTexCoordArray(), textureId,
+ mRectDrawable.getTexCoordStride());
+ }
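+
+    // Hedged caller-side sketch (object names are ours): the typical per-frame loop when
+    // the texture is fed by a SurfaceTexture.
+    //   surfaceTexture.updateTexImage();
+    //   surfaceTexture.getTransformMatrix(texMatrix);   // texMatrix is a float[16]
+    //   fullFrameRect.drawFrame(textureId, texMatrix);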
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java
new file mode 100644
index 0000000..b946b3e
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.opengl.GLES20;
+import android.opengl.GLES30;
+import android.opengl.Matrix;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL utility functions.
+ */
+public class GlUtil {
+ public static final String TAG = "Grafika";
+
+ /** Identity matrix for general use. Don't modify or life will get weird. */
+ public static final float[] IDENTITY_MATRIX;
+ static {
+ IDENTITY_MATRIX = new float[16];
+ Matrix.setIdentityM(IDENTITY_MATRIX, 0);
+ }
+
+ private static final int SIZEOF_FLOAT = 4;
+
+
+ private GlUtil() {} // do not instantiate
+
+ /**
+ * Creates a new program from the supplied vertex and fragment shaders.
+ *
+ * @return A handle to the program, or 0 on failure.
+ */
+ public static int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+        if (program == 0) {
+            // bail out early; attaching shaders to an invalid handle only raises GL errors
+            Log.e(TAG, "Could not create program");
+            return 0;
+        }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+
+ /**
+ * Compiles the provided shader source.
+ *
+ * @return A handle to the shader, or 0 on failure.
+ */
+ public static int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+
+ /**
+ * Checks to see if a GLES error has been raised.
+ */
+ public static void checkGlError(String op) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ String msg = op + ": glError 0x" + Integer.toHexString(error);
+ Log.e(TAG, msg);
+ throw new RuntimeException(msg);
+ }
+ }
+
+ /**
+ * Checks to see if the location we obtained is valid. GLES returns -1 if a label
+ * could not be found, but does not set the GL error.
+ *
+ * Throws a RuntimeException if the location is invalid.
+ */
+ public static void checkLocation(int location, String label) {
+ if (location < 0) {
+ throw new RuntimeException("Unable to locate '" + label + "' in program");
+ }
+ }
+
+ /**
+ * Creates a texture from raw data.
+ *
+ * @param data Image data, in a "direct" ByteBuffer.
+ * @param width Texture width, in pixels (not bytes).
+ * @param height Texture height, in pixels.
+ * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
+ * @return Handle to texture.
+ */
+ public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
+ int[] textureHandles = new int[1];
+ int textureHandle;
+
+ GLES20.glGenTextures(1, textureHandles, 0);
+ textureHandle = textureHandles[0];
+ GlUtil.checkGlError("glGenTextures");
+
+ // Bind the texture handle to the 2D texture target.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
+
+ // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
+ // is smaller or larger than the source image.
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GlUtil.checkGlError("loadImageTexture");
+
+ // Load the data from the buffer into the texture handle.
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
+ width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
+ GlUtil.checkGlError("loadImageTexture");
+
+ return textureHandle;
+ }
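+
+    // Hedged usage sketch (buffer contents are ours): uploading a tiny RGBA image.
+    //   ByteBuffer px = ByteBuffer.allocateDirect(2 * 2 * 4).order(ByteOrder.nativeOrder());
+    //   px.put(new byte[2 * 2 * 4]).position(0);   // 2x2 transparent-black placeholder
+    //   int tex = createImageTexture(px, 2, 2, GLES20.GL_RGBA);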
+
+ /**
+ * Allocates a direct float buffer, and populates it with the float array data.
+ */
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
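+
+    // Example (coordinate values are ours): a full-viewport triangle strip fits in one call.
+    //   FloatBuffer fullRect = createFloatBuffer(new float[] {
+    //           -1.0f, -1.0f,   1.0f, -1.0f,   -1.0f, 1.0f,   1.0f, 1.0f });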
+
+ /**
+ * Writes GL version info to the log.
+ */
+ public static void logVersionInfo() {
+ Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR));
+ Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
+ Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));
+
+        if (false) {    // intentionally disabled; flip to true to also log the numeric GLES 3.x version
+ int[] values = new int[1];
+ GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
+ int majorVersion = values[0];
+ GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
+ int minorVersion = values[0];
+ if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
+ Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
+ }
+ }
+ }
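+
+    // Hedged usage sketch (shader strings and method name are ours, not part of the
+    // original sources): a minimal vertex/fragment pair run through createProgram().
+    // A GL context must be current on the calling thread.
+    public static int createPassthroughProgram() {
+        String vertexSource =
+                "attribute vec4 aPosition;\n" +
+                "void main() {\n" +
+                "    gl_Position = aPosition;\n" +
+                "}\n";
+        String fragmentSource =
+                "precision mediump float;\n" +
+                "void main() {\n" +
+                "    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
+                "}\n";
+        return createProgram(vertexSource, fragmentSource);   // 0 on failure, as above
+    }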
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java
new file mode 100644
index 0000000..688a056
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java
@@ -0,0 +1,435 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GL program and supporting functions for textured 2D shapes.
+ */
+public class Texture2dProgram {
+ private static final String TAG = GlUtil.TAG;
+
+ public enum ProgramType {
+ TEXTURE_2D, TEXTURE_EXT, TEXTURE_EXT_BW, TEXTURE_EXT_FILT,
+        TEXTURE_EXT_SEPIA, TEXTURE_EXT_CROSSPROCESS, TEXTURE_EXT_POSTERIZE, TEXTURE_EXT_GRAYSCALE
+ }
+
+    /**
+     * Releases the program.
+     *
+     * The appropriate EGL context must be current (i.e. the one that was used to create
+     * the program).
+     */
+ public void release() {
+ Log.d(TAG, "deleting program " + mProgramHandle);
+ GLES20.glDeleteProgram(mProgramHandle);
+ mProgramHandle = -1;
+ }
+
+ /**
+ * Returns the program type.
+ */
+ public ProgramType getProgramType() {
+ return mProgramType;
+ }
+
+ /**
+ * Creates a texture object suitable for use with this program.
+ *
+ * On exit, the texture will be bound.
+ */
+ public int createTextureObject() {
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ GlUtil.checkGlError("glGenTextures");
+
+ int texId = textures[0];
+ GLES20.glBindTexture(mTextureTarget, texId);
+ GlUtil.checkGlError("glBindTexture " + texId);
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GlUtil.checkGlError("glTexParameter");
+
+ return texId;
+ }
+
+ /**
+ * Configures the convolution filter values.
+ *
+ * @param values Normalized filter values; must be KERNEL_SIZE elements.
+ */
+ public void setKernel(float[] values, float colorAdj) {
+ if (values.length != KERNEL_SIZE) {
+ throw new IllegalArgumentException("Kernel size is " + values.length +
+ " vs. " + KERNEL_SIZE);
+ }
+ System.arraycopy(values, 0, mKernel, 0, KERNEL_SIZE);
+ mColorAdjust = colorAdj;
+ //Log.d(TAG, "filt kernel: " + Arrays.toString(mKernel) + ", adj=" + colorAdj);
+ }
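+
+    // Hedged example (kernel values are ours; assumes KERNEL_SIZE == 9 as in the Grafika
+    // original): a 3x3 sharpen kernel, rows listed top to bottom, with no color adjustment.
+    //   setKernel(new float[] {
+    //            0f, -1f,  0f,
+    //           -1f,  5f, -1f,
+    //            0f, -1f,  0f }, 0f);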
+
+ /**
+ * Sets the size of the texture. This is used to find adjacent texels when filtering.
+ */
+ public void setTexSize(int width, int height) {
+ float rw = 1.0f / width;
+ float rh = 1.0f / height;
+
+ // Don't need to create a new array here, but it's syntactically convenient.
+ mTexOffset = new float[] {
+ -rw, -rh, 0f, -rh, rw, -rh,
+ -rw, 0f, 0f, 0f, rw, 0f,
+ -rw, rh, 0f, rh, rw, rh
+ };
+ //Log.d(TAG, "filt size: " + width + "x" + height + ": " + Arrays.toString(mTexOffset));
+ }
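+
+    // Worked example: setTexSize(1280, 720) stores offsets of +/-1/1280 horizontally and
+    // +/-1/720 vertically, i.e. the eight neighboring texels plus the center texel.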
+
+ /**
+ * Issues the draw call. Does the full setup on every call.
+ *
+ * @param mvpMatrix The 4x4 projection matrix.
+ * @param vertexBuffer Buffer with vertex position data.
+ * @param firstVertex Index of first vertex to use in vertexBuffer.
+ * @param vertexCount Number of vertices in vertexBuffer.
+ * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2).
+     * @param vertexStride Width, in bytes, of the position data for each vertex (often
+     *     coordsPerVertex * sizeof(float)).
+ * @param texMatrix A 4x4 transformation matrix for texture coords. (Primarily intended
+ * for use with SurfaceTexture.)
+ * @param texBuffer Buffer with vertex texture data.
+     * @param textureId Handle of the texture object to bind and draw with.
+     * @param texStride Width, in bytes, of the texture data for each vertex.
+ */
+ public void draw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex,
+ int vertexCount, int coordsPerVertex, int vertexStride,
+ float[] texMatrix, FloatBuffer texBuffer, int textureId, int texStride) {
+ GlUtil.checkGlError("draw start");
+
+ // Select the program.
+ GLES20.glUseProgram(mProgramHandle);
+ GlUtil.checkGlError("glUseProgram");
+
+ // Set the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(mTextureTarget, textureId);
+
+ // Copy the model / view / projection matrix over.
+ GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0);
+ GlUtil.checkGlError("glUniformMatrix4fv");
+
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, texMatrix, 0);
+ GlUtil.checkGlError("glUniformMatrix4fv");
+
+ // Enable the "aPosition" vertex attribute.
+ GLES20.glEnableVertexAttribArray(maPositionLoc);
+ GlUtil.checkGlError("glEnableVertexAttribArray");
+
+ // Connect vertexBuffer to "aPosition".
+ GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex,
+ GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
+ GlUtil.checkGlError("glVertexAttribPointer");
+
+ // Enable the "aTextureCoord" vertex attribute.
+ GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
+ GlUtil.checkGlError("glEnableVertexAttribArray");
+
+ // Connect texBuffer to "aTextureCoord".
+ GLES20.glVertexAttribPointer(maTextureCoordLoc, 2,
+ GLES20.GL_FLOAT, false, texStride, texBuffer);
+ GlUtil.checkGlError("glVertexAttribPointer");
+
+ // Populate the convolution kernel, if present.
+ if (muKernelLoc >= 0) {
+ GLES20.glUniform1fv(muKernelLoc, KERNEL_SIZE, mKernel, 0);
+ GLES20.glUniform2fv(muTexOffsetLoc, KERNEL_SIZE, mTexOffset, 0);
+ GLES20.glUniform1f(muColorAdjustLoc, mColorAdjust);
+ }
+
+ // Draw the rect.
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount);
+ GlUtil.checkGlError("glDrawArrays");
+
+ // Done -- disable vertex array, texture, and program.
+ GLES20.glDisableVertexAttribArray(maPositionLoc);
+ GLES20.glDisableVertexAttribArray(maTextureCoordLoc);
+ GLES20.glBindTexture(mTextureTarget, 0);
+ GLES20.glUseProgram(0);
+ }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java
new file mode 100644
index 0000000..cefe54f
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+
+/**
+ * Recordable EGL window surface.
+ *
+ * It's good practice to explicitly release() the surface, preferably from a "finally" block.
+ */
+public class WindowSurface extends EglSurfaceBase {
+ private Surface mSurface;
+ private boolean mReleaseSurface;
+
+ /**
+ * Associates an EGL surface with the native window surface.
+ *
+ * Set releaseSurface to true if you want the Surface to be released when release() is
+ * called. This is convenient, but can interfere with framework classes that expect to
+ * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
+ * surfaceDestroyed() callback won't fire).
+ */
+ public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
+ super(eglCore);
+ createWindowSurface(surface);
+ mSurface = surface;
+ mReleaseSurface = releaseSurface;
+ }
+
+ /**
+ * Associates an EGL surface with the SurfaceTexture.
+ */
+ public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
+ super(eglCore);
+ createWindowSurface(surfaceTexture);
+ }
+
+ /**
+ * Releases any resources associated with the EGL surface (and, if configured to do so,
+ * with the Surface as well).
+ *
+     * Does not require that the surface's EGL context be current.
+ */
+ public void release() {
+ releaseEglSurface();
+ if (mSurface != null) {
+ if (mReleaseSurface) {
+ mSurface.release();
+ }
+ mSurface = null;
+ }
+ }
+
+ /**
+     * Recreate the EGLSurface, using the new EglCore. The caller should have already
+ * freed the old EGLSurface with releaseEglSurface().
+ *
+ * This is useful when we want to update the EGLSurface associated with a Surface.
+ * For example, if we want to share with a different EGLContext, which can only
+     * be done by tearing down and recreating the context. (That's handled by the caller;
+     * this just creates a new EGLSurface for the Surface we were handed earlier.)
+     *
+     * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
+     * context somewhere, the create call will fail with complaints from the Surface
+ * about already being connected.
+ */
+ public void recreate(EglCore newEglCore) {
+ if (mSurface == null) {
+ throw new RuntimeException("not yet implemented for SurfaceTexture");
+ }
+        mEglCore = newEglCore;          // switch to new context
+ createWindowSurface(mSurface); // create new surface
+ }
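+
+    // Hedged caller-side sketch (object names are ours): render to the window surface,
+    // then present. makeCurrent() and swapBuffers() come from EglSurfaceBase.
+    //   WindowSurface ws = new WindowSurface(eglCore, surface, false);
+    //   ws.makeCurrent();
+    //   // ... issue GLES draw calls ...
+    //   ws.swapBuffers();
+    //   ws.release();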
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java
new file mode 100644
index 0000000..a49927e
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java
@@ -0,0 +1,39 @@
+package com.psudoanon.broadcaster.network;
+
+/**
+ * Created by faraklit on 03.03.2016.
+ */
+public interface IMediaMuxer {
+
+ int SEND_AUDIO = 0;
+ int SEND_VIDEO = 1;
+ int STOP_STREAMING = 2;
+
+ /**
+ *
+ * @return the last audio frame timestamp in milliseconds
+ */
+ int getLastAudioFrameTimeStamp();
+
+
+ /**
+ *
+ * @return the last video frame timestamp in milliseconds
+ */
+ int getLastVideoFrameTimeStamp();
+
+
+ boolean open(String uri);
+
+ boolean isConnected();
+
+ void writeAudio(byte[] data, int size, int presentationTime);
+
+ void writeVideo(byte[] data, int length, int presentationTime);
+
+ void stopMuxer();
+
+ int getFrameCountInQueue();
+
+ int getVideoFrameCountInQueue();
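+
+    // Hedged usage note (ours): a typical caller opens the uri, verifies isConnected(),
+    // feeds writeAudio()/writeVideo() with non-decreasing millisecond timestamps, and
+    // finally calls stopMuxer() to flush the queues and close the connection.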
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java
new file mode 100644
index 0000000..d413294
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java
@@ -0,0 +1,342 @@
+package com.psudoanon.broadcaster.network;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+
+import net.butterflytv.rtmp_client.RTMPMuxer;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+
+/**
+ * Created by faraklit on 09.02.2016.
+ */
+public class RTMPStreamer extends Handler implements IMediaMuxer {
+
+
+ private static final boolean DEBUG = false;
+ private static final String TAG = RTMPStreamer.class.getSimpleName();
+ RTMPMuxer rtmpMuxer = new RTMPMuxer();
+
+ public int frameCount;
+ public int result = 0;
+ private int lastVideoFrameTimeStamp;
+ private int lastAudioFrameTimeStamp;
+ private int mLastReceivedVideoFrameTimeStamp = -1;
+ private int mLastReceivedAudioFrameTimeStamp = -1;
+ private int lastSentFrameTimeStamp = -1;
+ private Object frameSynchronized = new Object();
+ private boolean isConnected = false;
+
+ public class Frame {
+ byte[] data;
+ int timestamp;
+ int length;
+
+ public Frame(byte[] data, int length, int timestamp) {
+ this.data = data;
+ this.length = length;
+ this.timestamp = timestamp;
+ }
+ }
+
+    private ArrayList<Frame> audioFrameList = new ArrayList<>();
+    private ArrayList<Frame> videoFrameList = new ArrayList<>();
+
+
+ public RTMPStreamer(Looper looper) {
+ super(looper);
+ mLastReceivedVideoFrameTimeStamp = -1;
+ mLastReceivedAudioFrameTimeStamp = -1;
+ lastSentFrameTimeStamp = -1;
+ }
+
+ public int getLastReceivedVideoFrameTimeStamp() {
+ return mLastReceivedVideoFrameTimeStamp;
+ }
+
+ public int getLastReceivedAudioFrameTimeStamp() {
+ return mLastReceivedAudioFrameTimeStamp;
+ }
+
+ public int getLastSentFrameTimeStamp() {
+ return lastSentFrameTimeStamp;
+ }
+
+ /**
+ *
+     * @param url the RTMP url of the stream
+     * @return true if the connection is opened successfully, false otherwise
+     */
+ public boolean open(String url) {
+ frameCount = 0;
+ lastVideoFrameTimeStamp = 0;
+ lastAudioFrameTimeStamp = 0;
+ mLastReceivedVideoFrameTimeStamp = -1;
+ mLastReceivedAudioFrameTimeStamp = -1;
+ lastSentFrameTimeStamp = -1;
+ isConnected = false;
+ int result = rtmpMuxer.open(url, 0, 0);
+
+ if (result > 0) {
+ // file_open("/mnt/sdcard/stream.flv" + (int) Math.random() * 1000);
+ // writeFLVHeader(true, true);
+ isConnected = true;
+ }
+ return isConnected;
+ }
+
+ public void close() {
+ Log.i(TAG, "close rtmp connection");
+ isConnected = false;
+ rtmpMuxer.close();
+ }
+
+ /**
+     * It is critically important to send the frames in timestamp order.
+     * If an audio packet's timestamp is earlier than an already-sent video packet's
+     * timestamp, the server may close the connection, so the packets are ordered
+     * by timestamp below before sending.
+     *
+     * @param msg the message carrying the frame data, its length, and its timestamp
+ */
+ @Override
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case SEND_AUDIO: {
+ /**
+ * msg.obj aac data,
+ * msg.arg1 length of the data
+ * msg.arg2 timestamp
+ */
+
+ if ((msg.arg2 >= mLastReceivedAudioFrameTimeStamp) && (msg.arg1 > 0)) {
+                    // some initial frames (decoder params) may have timestamps equal to previous ones.
+                    // Queue the packet only if its timestamp is not older than the last received frame;
+                    // otherwise discard it, since out-of-order packets make the RTMP connection drop entirely.
+ mLastReceivedAudioFrameTimeStamp = msg.arg2;
+ audioFrameList.add(new Frame((byte[]) msg.obj, msg.arg1, msg.arg2));
+ }
+ else {
+ Log.w(TAG, "discarding audio packet because time stamp is older than last packet or data lenth equal to zero");
+ }
+ sendFrames();
+ }
+ break;
+ case SEND_VIDEO: {
+
+ /**
+ * msg.obj h264 nal unit,
+ * msg.arg1 length of the data
+ * msg.arg2 timestamp
+ */
+ if ((msg.arg2 >= mLastReceivedVideoFrameTimeStamp) && (msg.arg1 > 0)) {
+                    // some initial frames (decoder params) may have timestamps equal to previous ones.
+                    // Queue the packet only if its timestamp is not older than the last received frame;
+                    // otherwise discard it, since out-of-order packets make the RTMP connection drop entirely.
+ mLastReceivedVideoFrameTimeStamp = msg.arg2;
+ videoFrameList.add(new Frame((byte[]) msg.obj, msg.arg1, msg.arg2));
+ }
+ else {
+
+ Log.w(TAG, "discarding videp packet because time stamp is older than last packet or data lenth equal to zero");
+ }
+ sendFrames();
+ }
+ break;
+ case STOP_STREAMING:
+                finishFrames();
+ close();
+ break;
+ }
+
+
+
+ }
+
+    private void finishFrames() {
+ int videoFrameListSize, audioFrameListSize;
+ do {
+ sendFrames();
+
+ videoFrameListSize = videoFrameList.size();
+ audioFrameListSize = audioFrameList.size();
+            // keep draining until at least one of the frame lists is exhausted
+ } while ((videoFrameListSize > 0) && (audioFrameListSize > 0));
+
+ if (videoFrameListSize > 0) {
+            // send all the video frames remaining in the list
+ sendVideoFrames(videoFrameList.get(videoFrameListSize - 1).timestamp);
+ }
+ else if (audioFrameListSize > 0) {
+            // send all the audio frames remaining in the list
+ sendAudioFrames(audioFrameList.get(audioFrameListSize - 1).timestamp);
+ }
+
+ }
+
+ private void sendFrames() {
+        // This is a simple merge step. We do not know the audio or video frame timestamps
+        // in advance and they are not deterministic, so we send the audio frames whose
+        // timestamps are not later than the first queued video frame, and then the video
+        // frames whose timestamps are not later than the first remaining audio frame.
+ int listSize = videoFrameList.size();
+ if (listSize > 0) {
+ sendAudioFrames(videoFrameList.get(0).timestamp);
+ }
+
+ listSize = audioFrameList.size();
+ if (listSize > 0) {
+ sendVideoFrames(audioFrameList.get(0).timestamp);
+ }
+ }
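+
+    // Worked example of the merge, assuming queued video timestamps [40, 80] and audio
+    // timestamps [20, 60, 100]: the first pass sends audio 20 (<= 40) and video 40 (<= 60);
+    // the next pass sends audio 60 (<= 80) and video 80 (<= 100); audio 100 stays queued
+    // until another video frame arrives or finishFrames() drains the remainder.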
+
+ private void sendAudioFrames(int timestamp) {
+        Iterator<Frame> iterator = audioFrameList.iterator();
+ while (iterator.hasNext())
+ {
+ Frame audioFrame = iterator.next();
+ if (audioFrame.timestamp <= timestamp)
+ {
+                // only send the frame if it is not older than the last sent frame;
+                // in some cases audio and video frame timestamps may be equal
+ if (audioFrame.timestamp >= lastSentFrameTimeStamp) {
+ if (audioFrame.timestamp == lastSentFrameTimeStamp) {
+ audioFrame.timestamp++;
+ }
+ if (isConnected) {
+ int result = rtmpMuxer.writeAudio(audioFrame.data, 0, audioFrame.length, audioFrame.timestamp);
+
+ if (DEBUG) {
+ Log.d(TAG, "send audio result: " + result + " time:" + audioFrame.timestamp + " length:" + audioFrame.length);
+ }
+
+ if (result < 0) {
+ close();
+ }
+ }
+ lastAudioFrameTimeStamp = audioFrame.timestamp;
+ lastSentFrameTimeStamp = audioFrame.timestamp;
+ synchronized (frameSynchronized) {
+ frameCount--;
+ }
+ }
+ iterator.remove();
+ }
+ else {
+                // this audio frame's timestamp is newer than the limit;
+                // it will be sent in a later pass, so break the loop
+ break;
+ }
+ }
+ }
+
+ private void sendVideoFrames(int timestamp) {
+        Iterator<Frame> iterator = videoFrameList.iterator();
+ while (iterator.hasNext()) {
+ Frame frame = iterator.next();
+ if ((frame.timestamp <= timestamp))
+ {
+                // only send the frame if it is not older than the last sent frame;
+                // in some cases audio and video frame timestamps may be equal
+ if (frame.timestamp >= lastSentFrameTimeStamp) {
+ if (frame.timestamp == lastSentFrameTimeStamp) {
+ frame.timestamp++;
+ }
+ if (isConnected) {
+ int result = rtmpMuxer.writeVideo(frame.data, 0, frame.length, frame.timestamp);
+ if (DEBUG) {
+ Log.d(TAG, "send video result: " + result + " time:" + frame.timestamp + " length:" + frame.length);
+ }
+ if (result < 0) {
+ close();
+ }
+ }
+ lastVideoFrameTimeStamp = frame.timestamp;
+ lastSentFrameTimeStamp = frame.timestamp;
+ synchronized (frameSynchronized) {
+ frameCount--;
+ }
+ }
+
+ iterator.remove();
+ }
+ else {
+                // this video frame's timestamp is newer than the limit;
+                // break the loop, it will be sent in a later pass
+ break;
+ }
+ }
+ }
+
+ public int getLastAudioFrameTimeStamp() {
+ return lastAudioFrameTimeStamp;
+ }
+
+ public int getLastVideoFrameTimeStamp() {
+ return lastVideoFrameTimeStamp;
+ }
+
+ public void writeFLVHeader(boolean hasAudio, boolean hasVideo) {
+ rtmpMuxer.write_flv_header(hasAudio, hasVideo);
+ }
+
+ public void file_open(String s) {
+ rtmpMuxer.file_open(s);
+ }
+
+
+ public void file_close() {
+ rtmpMuxer.file_close();
+ }
+
+ public boolean isConnected() {
+ return isConnected;
+ }
+
+ @Override
+ public void writeAudio(byte[] data, int size, int presentationTime) {
+ Message message = obtainMessage(IMediaMuxer.SEND_AUDIO, data);
+ message.arg1 = size;
+ message.arg2 = presentationTime;
+ sendMessage(message);
+ synchronized (frameSynchronized) {
+ frameCount++;
+ }
+ if (DEBUG) Log.d(TAG, "writeAudio size: " + size + " time:" + presentationTime);
+ }
+
+ @Override
+ public void writeVideo(byte[] data, int length, int presentationTime) {
+ Message message = obtainMessage(IMediaMuxer.SEND_VIDEO, data);
+ message.arg1 = length;
+ message.arg2 = presentationTime;
+ sendMessage(message);
+ synchronized (frameSynchronized) {
+ frameCount++;
+ }
+
+ if (DEBUG) Log.d(TAG, "writeVideo size: " + length + " time:" + presentationTime);
+ }
+
+ @Override
+ public void stopMuxer() {
+ sendEmptyMessage(RTMPStreamer.STOP_STREAMING);
+ }
+
+ @Override
+ public int getFrameCountInQueue() {
+ synchronized (frameSynchronized) {
+ return frameCount;
+ }
+ }
+
+ public int getVideoFrameCountInQueue() {
+ synchronized (frameSynchronized) {
+ return videoFrameList.size();
+ }
+ }
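+
+    // Hedged usage sketch (the stream URL is a placeholder): run the handler on its own
+    // looper thread so network writes stay off the UI thread.
+    //   HandlerThread thread = new HandlerThread("rtmp-streamer");
+    //   thread.start();
+    //   RTMPStreamer streamer = new RTMPStreamer(thread.getLooper());
+    //   if (streamer.open("rtmp://example.com/live/streamKey")) {
+    //       streamer.writeVideo(nalUnit, nalUnit.length, presentationTimeMs);
+    //   }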
+}
+
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java
new file mode 100644
index 0000000..8a78db6
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java
@@ -0,0 +1,18 @@
+package com.psudoanon.broadcaster.utils;
+
+import java.io.Serializable;
+
+/**
+ * Created by mekya on 28/03/2017.
+ */
+
+public class Resolution implements Serializable
+{
+ public final int width;
+ public final int height;
+
+ public Resolution(int width, int height) {
+ this.width = width;
+ this.height = height;
+ }
+}
\ No newline at end of file
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java
new file mode 100644
index 0000000..2742f5d
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java
@@ -0,0 +1,69 @@
+package com.psudoanon.broadcaster.utils;
+
+import android.content.Context;
+import android.content.SharedPreferences;
+
+
+public class Utils {
+
+ public static final String APP_SHARED_PREFERENCES = "applicationDetails";
+ private static final String DOES_ENCODER_WORKS = Utils.class.getName() + ".DOES_ENCODER_WORKS";
+
+
+ public static final int ENCODER_NOT_TESTED = -1;
+ public static final int ENCODER_WORKS = 1;
+ public static final int ENCODER_NOT_WORKS = 0;
+
+
+ //public static final String SHARED_PREFERENCE_FIRST_INSTALLATION="FIRST_INSTALLATION";
+ private static SharedPreferences sharedPreference = null;
+
+
+
+ public static String getDurationString(int seconds) {
+
+        if (seconds < 0 || seconds > 2000000) {
+            // there is a codec problem and the duration is not set correctly, so display a meaningful string
+            seconds = 0;
+        }
+ int hours = seconds / 3600;
+ int minutes = (seconds % 3600) / 60;
+ seconds = seconds % 60;
+
+ if(hours == 0)
+ return twoDigitString(minutes) + " : " + twoDigitString(seconds);
+ else
+ return twoDigitString(hours) + " : " + twoDigitString(minutes) + " : " + twoDigitString(seconds);
+ }
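+
+    // Examples: getDurationString(75) returns "01 : 15";
+    // getDurationString(3661) returns "01 : 01 : 01".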
+
+ public static String twoDigitString(int number) {
+
+ if (number == 0) {
+ return "00";
+ }
+
+ if (number / 10 == 0) {
+ return "0" + number;
+ }
+
+ return String.valueOf(number);
+ }
+
+ public static SharedPreferences getDefaultSharedPreferences(Context context) {
+ if (sharedPreference == null) {
+ sharedPreference = context.getSharedPreferences(APP_SHARED_PREFERENCES, Context.MODE_PRIVATE);
+ }
+ return sharedPreference;
+ }
+
+
+ public static int doesEncoderWorks(Context context) {
+ return getDefaultSharedPreferences(context).getInt(DOES_ENCODER_WORKS, ENCODER_NOT_TESTED);
+ }
+
+ public static void setEncoderWorks(Context context, boolean works) {
+ SharedPreferences sharedPreferences = getDefaultSharedPreferences(context);
+ SharedPreferences.Editor editor = sharedPreferences.edit();
+ editor.putInt(DOES_ENCODER_WORKS, works ? ENCODER_WORKS : ENCODER_NOT_WORKS);
+ editor.apply();
+ }
+
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml b/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml
new file mode 100644
index 0000000..b41a3a6
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml
@@ -0,0 +1,22 @@
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
new file mode 100644
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
+/**
+ * Encode a movie from frames rendered from an external texture image.
+ *
+ * The object wraps an encoder running on a dedicated thread; control messages may be
+ * sent from arbitrary threads (typically the app UI thread).
+ *
+ * TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
+ */
+public class TextureMovieEncoder implements Runnable {
+ private static final String TAG = TextureMovieEncoder.class.getSimpleName();
+ private static final boolean VERBOSE = false;
+
+ private static final int MSG_START_RECORDING = 0;
+ private static final int MSG_STOP_RECORDING = 1;
+ private static final int MSG_FRAME_AVAILABLE = 2;
+ private static final int MSG_SET_TEXTURE_ID = 3;
+ private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
+ private static final int MSG_QUIT = 5;
+ private static final int MSG_RELEASE_RECORDING = 6;
+ private static final int MSG_CHANGE_EFFECT = 7;
+
+ // ----- accessed exclusively by encoder thread -----
+ private WindowSurface mInputWindowSurface;
+ private EglCore mEglCore;
+ private FullFrameRect mFullScreen;
+ private int mTextureId;
+ private int mFrameNum;
+ private VideoEncoderCore mVideoEncoder;
+
+ // ----- accessed by multiple threads -----
+ private volatile EncoderHandler mHandler;
+
+ private Object mReadyFence = new Object(); // guards ready/running
+ private boolean mReady;
+ private boolean mRunning;
+ private long mRecordingStartTime;
+ private long mLastFrameTime = 0;
+ private Texture2dProgram.ProgramType mProgramType;
+ private EncoderConfig mEncoderConfig;
+ private int mIframeInterval = 2;
+
+ /**
+ * Encoder configuration.
+ *