diff --git a/doc/README.md b/doc/README.md new file mode 100644 index 0000000..d98d442 --- /dev/null +++ b/doc/README.md @@ -0,0 +1 @@ +# doc diff --git a/livestream-to-earn-android/app/.gitignore b/livestream-to-earn-android/app/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/livestream-to-earn-android/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/livestream-to-earn-android/app/broadcaster/.gitignore b/livestream-to-earn-android/app/broadcaster/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/.gitignore @@ -0,0 +1 @@ +/build diff --git a/livestream-to-earn-android/app/broadcaster/build.gradle b/livestream-to-earn-android/app/broadcaster/build.gradle new file mode 100644 index 0000000..8818128 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/build.gradle @@ -0,0 +1,33 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 26 + buildToolsVersion "25.0.1" + + defaultConfig { + minSdkVersion 26 + targetSdkVersion 26 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } +} + +dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', { + exclude group: 'com.android.support', module: 'support-annotations' + }) + implementation 'net.butterflytv.utils:rtmp-client:3.1.0' + implementation 'com.android.support:support-annotations:25.3.1' + implementation 'com.android.support:design:25.3.1' + testCompile 'junit:junit:4.12' +} diff --git a/livestream-to-earn-android/app/broadcaster/consumer-rules.pro b/livestream-to-earn-android/app/broadcaster/consumer-rules.pro new file mode 100644 index 0000000..e69de29 diff --git a/livestream-to-earn-android/app/broadcaster/proguard-rules.pro b/livestream-to-earn-android/app/broadcaster/proguard-rules.pro new file mode 100644 index 0000000..0f4b2a5 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/proguard-rules.pro @@ -0,0 +1,17 @@ +# Add project specific ProGuard rules here. +# By default, the flags in this file are appended to flags specified +# in /Users/mekya/Library/Android/sdk/tools/proguard/proguard-android.txt +# You can edit the include path and order by changing the proguardFiles +# directive in build.gradle. 
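Note: the net.butterflytv.utils:rtmp-client artifact declared in the build.gradle above is the native RTMP muxer this module streams through (the RTMPStreamer class later in this diff wraps it). A rough usage sketch follows; the class and method names come from that library's documentation and are an assumption here, not something this diff shows — verify against version 3.1.0:

    import net.butterflytv.rtmp_client.RTMPMuxer;

    public class RtmpSmokeTest {
        // Assumed API of the rtmp-client dependency; check against 3.1.0.
        public static void stream(byte[] encodedFrame, int timestampMs) {
            RTMPMuxer muxer = new RTMPMuxer();
            muxer.open("rtmp://SERVER_ADDRESS/APP_NAME/STREAM_NAME", 720, 1280);
            if (muxer.isConnected()) {
                // encodedFrame: one encoded H.264 frame; timestamp in ms from stream start
                muxer.writeVideo(encodedFrame, 0, encodedFrame.length, timestampMs);
            }
            muxer.close();
        }
    }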
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
diff --git a/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt b/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt
new file mode 100644
index 0000000..94cc6e9
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/androidTest/java/com/psudoanon/broadcaster/ExampleInstrumentedTest.kt
@@ -0,0 +1,24 @@
+package com.psudoanon.broadcaster
+
+import androidx.test.platform.app.InstrumentationRegistry
+import androidx.test.ext.junit.runners.AndroidJUnit4
+
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.Assert.*
+
+/**
+ * Instrumented test, which will execute on an Android device.
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+@RunWith(AndroidJUnit4::class)
+class ExampleInstrumentedTest {
+    @Test
+    fun useAppContext() {
+        // Context of the app under test.
+        val appContext = InstrumentationRegistry.getInstrumentation().targetContext
+        assertEquals("com.psudoanon.broadcaster.test", appContext.packageName)
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml b/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..3aac298
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.psudoanon.broadcaster">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.INTERNET" />
+</manifest>
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/AudioRecorderThread.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/AudioRecorderThread.java
new file mode 100644
index 0000000..c0e3ede
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/AudioRecorderThread.java
@@ -0,0 +1,86 @@
+package com.psudoanon.broadcaster;
+
+import android.media.AudioFormat;
+import android.media.MediaRecorder;
+import android.os.Message;
+import android.util.Log;
+
+import com.psudoanon.broadcaster.encoder.AudioHandler;
+
+/**
+ * Created by mekya on 28/03/2017.
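Note on AudioRecorderThread, which this hunk opens: it sizes its buffers from AudioRecord.getMinBufferSize() and then rotates through a pool of 1000 byte arrays, so a chunk is not overwritten while the AudioHandler is still encoding it. The core capture step in isolation — a minimal sketch with the same parameters (44100 Hz mono PCM-16), error handling omitted:

    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;

    class CaptureSketch {
        /** One blocking mic read; requires the RECORD_AUDIO runtime permission. */
        static int readOnce(byte[] out) {
            int bufferSize = AudioRecord.getMinBufferSize(44100,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    44100, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            recorder.startRecording();
            int read = recorder.read(out, 0, out.length); // blocks until filled
            recorder.stop();
            recorder.release();
            return read;
        }
    }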
+ */ + +class AudioRecorderThread extends Thread { + + private static final String TAG = AudioRecorderThread.class.getSimpleName(); + private final int mSampleRate; + private final long startTime; + private volatile boolean stopThread = false; + + private android.media.AudioRecord audioRecord; + private AudioHandler audioHandler; + + public AudioRecorderThread(int sampleRate, long recordStartTime, AudioHandler audioHandler) { + this.mSampleRate = sampleRate; + this.startTime = recordStartTime; + this.audioHandler = audioHandler; + } + + + @Override + public void run() { + //Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO); + + int bufferSize = android.media.AudioRecord + .getMinBufferSize(mSampleRate, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT); + byte[][] audioData; + int bufferReadResult; + + audioRecord = new android.media.AudioRecord(MediaRecorder.AudioSource.MIC, + mSampleRate, AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT, bufferSize); + + // divide byte buffersize to 2 to make it short buffer + audioData = new byte[1000][bufferSize]; + + audioRecord.startRecording(); + + int i = 0; + byte[] data; + while ((bufferReadResult = audioRecord.read(audioData[i], 0, audioData[i].length)) > 0) { + + data = audioData[i]; + + Message msg = Message.obtain(audioHandler, AudioHandler.RECORD_AUDIO, data); + msg.arg1 = bufferReadResult; + msg.arg2 = (int)(System.currentTimeMillis() - startTime); + audioHandler.sendMessage(msg); + + + i++; + if (i == 1000) { + i = 0; + } + if (stopThread) { + break; + } + } + + Log.d(TAG, "AudioThread Finished, release audioRecord"); + + } + + public void stopAudioRecording() { + + if (audioRecord != null && audioRecord.getRecordingState() == android.media.AudioRecord.RECORDSTATE_RECORDING) { + stopThread = true; + audioRecord.stop(); + audioRecord.release(); + audioRecord = null; + } + } + +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java new file mode 100644 index 0000000..b36ea11 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraHandler.java @@ -0,0 +1,68 @@ +package com.psudoanon.broadcaster; + +/** + * Created by faraklit on 17.02.2016. + */ + +import android.graphics.SurfaceTexture; +import android.os.Handler; +import android.os.Message; +import android.util.Log; + + +import java.lang.ref.WeakReference; + +/** + * Handles camera operation requests from other threads. Necessary because the Camera + * must only be accessed from one thread. + *
<p>
+ * The object is created on the UI thread, and all handlers run there. Messages are + * sent from other threads, using sendMessage(). + */ +public class CameraHandler extends Handler { + private static final String TAG = CameraHandler.class.getSimpleName(); + + public static final int MSG_SET_SURFACE_TEXTURE = 0; + + + public interface ICameraViewer { + + void handleSetSurfaceTexture(SurfaceTexture st); + } + + + // Weak reference to the Activity; only access this from the UI thread. + private WeakReference cameraViewerWeakReference; + + public CameraHandler(ICameraViewer cameraViewer) { + cameraViewerWeakReference = new WeakReference(cameraViewer); + } + + /** + * Drop the reference to the activity. Useful as a paranoid measure to ensure that + * attempts to access a stale Activity through a handler are caught. + */ + public void invalidateHandler() { + cameraViewerWeakReference.clear(); + } + + @Override // runs on UI thread + public void handleMessage(Message inputMessage) { + int what = inputMessage.what; + Log.d(TAG, "CameraHandler [" + this + "]: what=" + what); + + ICameraViewer cameraViewer = cameraViewerWeakReference.get(); + if (cameraViewer == null) { + Log.w(TAG, "CameraHandler.handleMessage: cameraViewer is null"); + return; + } + + switch (what) { + case MSG_SET_SURFACE_TEXTURE: + cameraViewer.handleSetSurfaceTexture((SurfaceTexture) inputMessage.obj); + break; + default: + throw new RuntimeException("unknown msg " + what); + } + } +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraProxy.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraProxy.java new file mode 100644 index 0000000..a158d5b --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/CameraProxy.java @@ -0,0 +1,228 @@ +package com.psudoanon.broadcaster; + +/** + * Created by faraklit on 13.06.2016. 
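Note: CameraHandler above uses the standard leak-avoidance pattern for Handlers — messages queued on the Looper can outlive the Activity, so the handler holds the viewer only through a WeakReference (the declaration above lost its type parameter to formatting; it is WeakReference&lt;ICameraViewer&gt;) and drops stale messages. The pattern in isolation, with illustrative names:

    import android.os.Handler;
    import android.os.Message;
    import java.lang.ref.WeakReference;

    class SafeHandler extends Handler {
        interface Viewer { void show(Object payload); }

        private final WeakReference<Viewer> ref; // no strong ref -> no Activity leak

        SafeHandler(Viewer viewer) { ref = new WeakReference<>(viewer); }

        @Override public void handleMessage(Message msg) {
            Viewer viewer = ref.get();
            if (viewer == null) return; // Activity gone; drop the stale message
            viewer.show(msg.obj);
        }
    }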
+ */ + +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.os.ConditionVariable; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Looper; +import android.os.Message; +import android.util.Log; +import android.view.SurfaceHolder; + +import java.io.IOException; + +public class CameraProxy { + private static final String TAG = "CameraProxy"; + + private static final int RELEASE = 1; + private static final int AUTOFOCUS = 2; + private static final int CANCEL_AUTOFOCUS = 3; + private static final int SET_PREVIEW_CALLBACK_WITH_BUFFER = 4; + private static final int SET_PARAMETERS = 5; + private static final int START_SMOOTH_ZOOM = 6; + private static final int ADD_CALLBACK_BUFFER = 7; + private static final int SET_ERROR_CALLBACK = 8; + private static final int SET_PREVIEW_DISPLAY = 9; + private static final int START_PREVIEW = 10; + private static final int STOP_PREVIEW = 11; + private static final int OPEN_CAMERA = 12; + private static final int SET_DISPLAY_ORIENTATION = 13; + private static final int SET_PREVIEW_TEXTURE = 14; + private final HandlerThread ht; + + private Camera _camera; + private final CameraHandler _handler; + private final ConditionVariable _signal = new ConditionVariable(); + private volatile Camera.Parameters _parameters; + private boolean released = false; + + public CameraProxy(int cameraId) { + ht = new HandlerThread("Camera Proxy Thread"); + ht.start(); + + _handler = new CameraHandler(ht.getLooper()); + _signal.close(); + _handler.obtainMessage(OPEN_CAMERA, cameraId, 0).sendToTarget(); + _signal.block(); + if (_camera != null) { + _handler.obtainMessage(SET_ERROR_CALLBACK, new ErrorCallback()).sendToTarget(); + } + } + + public boolean isCameraAvailable() { + return _camera != null && !isReleased(); + } + + public void release() { + released = true; + _signal.close(); + _handler.sendEmptyMessage(RELEASE); + _signal.block(); + ht.quitSafely(); + + } + + public void autoFocus(Camera.AutoFocusCallback callback) { + _handler.obtainMessage(AUTOFOCUS, callback).sendToTarget(); + } + + public void cancelAutoFocus() { + _handler.sendEmptyMessage(CANCEL_AUTOFOCUS); + } + + public void setPreviewCallbackWithBuffer(Camera.PreviewCallback callback) { + _handler.obtainMessage(SET_PREVIEW_CALLBACK_WITH_BUFFER, callback).sendToTarget(); + } + + public Camera.Parameters getParameters() { + return _parameters; + } + + public void setParameters(Camera.Parameters parameters) { + _parameters = parameters; + _handler.obtainMessage(SET_PARAMETERS, parameters).sendToTarget(); + } + + public void startSmoothZoom(int level) { + _handler.obtainMessage(START_SMOOTH_ZOOM, level, 0).sendToTarget(); + } + + public void addCallbackBuffer(byte[] buffer) { + _handler.obtainMessage(ADD_CALLBACK_BUFFER, buffer).sendToTarget(); + } + + public void setPreviewDisplay(SurfaceHolder holder) { + _signal.close(); + _handler.obtainMessage(SET_PREVIEW_DISPLAY, holder).sendToTarget(); + _signal.block(); + } + + public void startPreview() { + _handler.sendEmptyMessage(START_PREVIEW); + } + + public void stopPreview() { + _signal.close(); + _handler.sendEmptyMessage(STOP_PREVIEW); + _signal.block(); + } + + public void setDisplayOrientation(int displayOrientation) { + _handler.obtainMessage(SET_DISPLAY_ORIENTATION, displayOrientation, 0).sendToTarget(); + } + + public void setPreviewTexture(SurfaceTexture previewTexture) { + _handler.obtainMessage(SET_PREVIEW_TEXTURE, previewTexture).sendToTarget(); + } + + public boolean isReleased() { + return 
released; + } + + + private class CameraHandler extends Handler { + public CameraHandler(Looper looper) { + super(looper); + } + + @Override + public void handleMessage(final Message msg) { + try { + switch (msg.what) { + case OPEN_CAMERA: + _camera = Camera.open(msg.arg1); + _parameters = _camera.getParameters(); + break; + + case SET_DISPLAY_ORIENTATION: + _camera.setDisplayOrientation(msg.arg1); + break; + + case RELEASE: + _camera.release(); + break; + + case AUTOFOCUS: + _camera.autoFocus((Camera.AutoFocusCallback)msg.obj); + break; + + case CANCEL_AUTOFOCUS: + _camera.cancelAutoFocus(); + break; + + case SET_PREVIEW_TEXTURE: + _camera.setPreviewTexture((SurfaceTexture) msg.obj); + break; + + case SET_PARAMETERS: + _camera.setParameters((Camera.Parameters)msg.obj); + break; + + case START_SMOOTH_ZOOM: + _camera.startSmoothZoom(msg.arg1); + break; + + case ADD_CALLBACK_BUFFER: + _camera.addCallbackBuffer((byte[])msg.obj); + break; + + case SET_ERROR_CALLBACK: + _camera.setErrorCallback((Camera.ErrorCallback)msg.obj); + break; + + case SET_PREVIEW_DISPLAY: + _camera.setPreviewDisplay((SurfaceHolder)msg.obj); + break; + + case START_PREVIEW: + _camera.startPreview(); + break; + + case STOP_PREVIEW: + _camera.stopPreview(); + break; + + default: + Log.e(TAG, "Invalid message: " + msg.what); + break; + } + } + catch (RuntimeException e) { + handleException(msg, e); + } + catch (IOException e) { + handleException(msg, new RuntimeException(e.getMessage(), e)); + } + + _signal.open(); + } + + private void handleException(Message msg, RuntimeException e) { + Log.e(TAG, "Camera operation failed", e); + + if (msg.what != RELEASE && _camera != null) { + try { + released = true; + _camera.release(); + } + catch (Exception e2) { + Log.e(TAG, "Failed to release camera on error", e); + } + } + + // throw e; + } + } + + private static class ErrorCallback implements Camera.ErrorCallback { + @Override + public void onError(int error, Camera camera) { + Log.e(TAG, "Got camera error callback. error=" + error); + } + } +} \ No newline at end of file diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/ILiveVideoBroadcaster.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/ILiveVideoBroadcaster.java new file mode 100644 index 0000000..6b56a62 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/ILiveVideoBroadcaster.java @@ -0,0 +1,111 @@ +package com.psudoanon.broadcaster; + +import android.app.Activity; +import android.opengl.GLSurfaceView; + +import java.util.ArrayList; + +import com.psudoanon.broadcaster.utils.Resolution; + +/** + * Created by mekya on 29/03/2017. 
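Note: CameraProxy above serializes every Camera call onto one HandlerThread; the _signal.close()/sendMessage/_signal.block() sequence turns selected calls (open, release, setPreviewDisplay, stopPreview) into synchronous round-trips, because the inner handler calls _signal.open() after handling each message. The mechanism in isolation:

    import android.os.ConditionVariable;
    import android.os.Handler;
    import android.os.HandlerThread;

    class SyncCall {
        private final HandlerThread worker = new HandlerThread("camera-worker");
        private final Handler handler;
        private final ConditionVariable done = new ConditionVariable();

        SyncCall() {
            worker.start();
            handler = new Handler(worker.getLooper());
        }

        /** Runs task on the worker thread and blocks the caller until it finishes. */
        void runBlocking(final Runnable task) {
            done.close();              // arm the gate before posting
            handler.post(new Runnable() {
                @Override public void run() {
                    task.run();        // e.g. touch the Camera here, on this thread only
                    done.open();       // release the waiting caller
                }
            });
            done.block();
        }
    }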
+ */
+
+public interface ILiveVideoBroadcaster {
+
+    /**
+     * Initializes the video broadcaster
+     * @param activity
+     *  the activity which is using this service
+     * @param gLView
+     *  the GLSurfaceView which is used to render camera view
+     */
+    void init(Activity activity, GLSurfaceView gLView);
+
+    /**
+     * Checks whether camera and microphone permissions are granted
+     * @return true if permissions are granted
+     * false if permissions are not granted
+     */
+    boolean isPermissionGranted();
+
+    /**
+     * Requests the missing permissions
+     * Camera and microphone permissions are required
+     */
+    void requestPermission();
+
+    /**
+     * Opens the camera in another thread and renders the camera view on the GLSurfaceView
+     * @param cameraId specifies which camera to open
+     * can be
+     * Camera.CameraInfo.CAMERA_FACING_BACK, Camera.CameraInfo.CAMERA_FACING_FRONT;
+     *
+     */
+    void openCamera(int cameraId);
+
+    /**
+     * Changes the camera:
+     * if the active camera is the back camera, releases the back camera and
+     * opens the front camera; it behaves the same way for the front camera
+     */
+    void changeCamera();
+
+
+    /**
+     * Enables or disables adaptive streaming
+     *
+     * @param enable if true, adaptive streaming is enabled; defaults to false
+     */
+    void setAdaptiveStreaming(boolean enable);
+
+    /**
+     * Sets the resolution of the active camera
+     * @param size
+     */
+    void setResolution(Resolution size);
+
+
+    /**
+     * @return the supported preview sizes of the active camera
+     */
+    ArrayList<Resolution> getPreviewSizeList();
+
+    /**
+     *
+     * @return current preview size of the active camera
+     */
+    Resolution getPreviewSize();
+
+    /**
+     * Sets the display orientation of the camera for portrait or landscape orientation
+     */
+    void setDisplayOrientation();
+
+    /**
+     * Pauses and releases the camera; it is safe to call this function in onPause of the activity
+     */
+    void pause();
+
+    /**
+     *
+     * @return true if broadcasting is active and the app is connected to the server
+     * false if it is not connected or the connection is dropped
+     */
+    boolean isConnected();
+
+    /**
+     * Starts broadcasting to the specified url
+     * @param url the rtmp url which should be in the form rtmp://SERVER_ADDRESS/APP_NAME/STREAM_NAME
+     * @return true if it starts broadcasting successfully,
+     * false if something is wrong and it cannot start
+     */
+    boolean startBroadcasting(String url);
+
+    /**
+     * Stops broadcasting to the server
+     */
+    void stopBroadcasting();
+
+
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/LiveVideoBroadcaster.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/LiveVideoBroadcaster.java
new file mode 100644
index 0000000..be2ac7b
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/LiveVideoBroadcaster.java
@@ -0,0 +1,837 @@
+package com.psudoanon.broadcaster;
+
+import android.Manifest;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.Service;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.net.ConnectivityManager;
+import android.net.NetworkInfo;
+import android.opengl.GLSurfaceView;
+import android.os.AsyncTask;
+import android.os.Binder;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.Process;
+import
android.util.Log; +import android.view.Surface; +import android.view.View; + +import androidx.annotation.Nullable; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import com.google.android.material.snackbar.Snackbar; +import com.psudoanon.broadcaster.encoder.AudioHandler; +import com.psudoanon.broadcaster.encoder.CameraSurfaceRenderer; +import com.psudoanon.broadcaster.encoder.TextureMovieEncoder; +import com.psudoanon.broadcaster.encoder.VideoEncoderCore; +import com.psudoanon.broadcaster.network.IMediaMuxer; +import com.psudoanon.broadcaster.network.RTMPStreamer; +import com.psudoanon.broadcaster.utils.Resolution; +import com.psudoanon.broadcaster.utils.Utils; + + +/** + * Created by mekya on 28/03/2017. + */ + +public class LiveVideoBroadcaster extends Service implements ILiveVideoBroadcaster, CameraHandler.ICameraViewer, SurfaceTexture.OnFrameAvailableListener { + + private static final String TAG = LiveVideoBroadcaster.class.getSimpleName(); + private volatile static CameraProxy sCameraProxy; + private IMediaMuxer mRtmpStreamer; + private AudioRecorderThread audioThread; + private boolean isRecording = false; + private GLSurfaceView mGLView; + private CameraSurfaceRenderer mRenderer; + private CameraHandler mCameraHandler; + private AudioHandler audioHandler; + private Activity context; + private volatile static boolean sCameraReleased; + private ArrayList choosenPreviewsSizeList; + private final IBinder mBinder = new LocalBinder(); + private int currentCameraId= Camera.CameraInfo.CAMERA_FACING_BACK; + + private int frameRate = 20; + public static final int PERMISSIONS_REQUEST = 8954; + + public final static int SAMPLE_AUDIO_RATE_IN_HZ = 44100; + private static TextureMovieEncoder sVideoEncoder = new TextureMovieEncoder(); + private Resolution previewSize; + private AlertDialog mAlertDialog; + private HandlerThread mRtmpHandlerThread; + private HandlerThread audioHandlerThread; + private ConnectivityManager connectivityManager; + private boolean adaptiveStreamingEnabled = false; + private Timer adaptiveStreamingTimer = null; + private int mAudioBitrate = 64000; + + public boolean isConnected() { + if (mRtmpStreamer != null) { + return mRtmpStreamer.isConnected(); + } + return false; + } + + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + mGLView.requestRender(); + } + + public void pause() { + + + if (mAlertDialog != null && mAlertDialog.isShowing()) { + mAlertDialog.dismiss(); + } + + //first making mGLView GONE is important otherwise + //camera function is called after release exception may be thrown + //especially in htc one x 4.4.2 + mGLView.setVisibility(View.GONE); + stopBroadcasting(); + + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + // Tell the renderer that it's about to be paused so it can clean up. 
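Note: LiveVideoBroadcaster is a bound Service; a client reaches the ILiveVideoBroadcaster interface through the LocalBinder defined a little further down in this file. A hypothetical client sketch (ClientActivity and its glView field are illustrative, not part of this diff; assumes the usual android.content/android.os imports):

    public class ClientActivity extends Activity {
        private GLSurfaceView glView;           // set up in onCreate
        private ILiveVideoBroadcaster broadcaster;

        private final ServiceConnection connection = new ServiceConnection() {
            @Override public void onServiceConnected(ComponentName name, IBinder binder) {
                broadcaster = ((LiveVideoBroadcaster.LocalBinder) binder).getService();
                broadcaster.init(ClientActivity.this, glView);
                broadcaster.openCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
            }
            @Override public void onServiceDisconnected(ComponentName name) {
                broadcaster = null;
            }
        };

        @Override protected void onStart() {
            super.onStart();
            bindService(new Intent(this, LiveVideoBroadcaster.class),
                    connection, Context.BIND_AUTO_CREATE);
        }
    }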
+ mRenderer.notifyPausing(); + if (!sCameraReleased /*|| context.equals(sCurrentActivity.get())*/) { + releaseCamera(); + } + } + }); + mGLView.onPause(); + mGLView.setOnTouchListener(null); + + } + + public void setDisplayOrientation() { + if (sCameraProxy != null) { + + sCameraProxy.setDisplayOrientation(getCameraDisplayOrientation()); + if (!isConnected()) { + setRendererPreviewSize(); + } + } + } + + public ArrayList getPreviewSizeList() { + return choosenPreviewsSizeList; + } + + public Resolution getPreviewSize() { + return previewSize; + } + + public class LocalBinder extends Binder { + public ILiveVideoBroadcaster getService() { + // Return this instance of LocalService so clients can call public methods + return LiveVideoBroadcaster.this; + } + } + + @Override + public void onCreate() { + super.onCreate(); + + + } + + @Override + public void onDestroy() { + audioHandlerThread.quitSafely(); + mRtmpHandlerThread.quitSafely(); + mCameraHandler.invalidateHandler(); + super.onDestroy(); + } + + public void init(Activity activity, GLSurfaceView glView) { + try { + audioHandlerThread = new HandlerThread("AudioHandlerThread", Process.THREAD_PRIORITY_AUDIO); + audioHandlerThread.start(); + audioHandler = new AudioHandler(audioHandlerThread.getLooper()); + mCameraHandler = new CameraHandler(this); + this.context = activity; + + // Define a handler that receives camera-control messages from other threads. All calls + // to Camera must be made on the same thread. Note we create this before the renderer + // thread, so we know the fully-constructed object will be visible. + mRenderer = new CameraSurfaceRenderer(mCameraHandler, sVideoEncoder); + mGLView = glView; + mGLView.setRenderer(mRenderer); + mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + + mRtmpHandlerThread = new HandlerThread("RtmpStreamerThread"); //, Process.THREAD_PRIORITY_BACKGROUND); + mRtmpHandlerThread.start(); + mRtmpStreamer = new RTMPStreamer(mRtmpHandlerThread.getLooper()); + + connectivityManager = (ConnectivityManager) this.getSystemService( + Context.CONNECTIVITY_SERVICE); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public boolean hasConnection() { + NetworkInfo activeNetwork = connectivityManager.getActiveNetworkInfo(); + if (activeNetwork != null && activeNetwork.isConnected()) { + return true; + } + return false; + } + + public boolean startBroadcasting(String rtmpUrl) { + + isRecording = false; + + if (sCameraProxy == null || sCameraProxy.isReleased()) { + Log.w(TAG, "Camera should be opened before calling this function"); + return false; + } + + if (!hasConnection()) { + Log.w(TAG, "There is no active network connection"); + } + + + if (Utils.doesEncoderWorks(context) != Utils.ENCODER_WORKS) { + Log.w(TAG, "This device does not have hardware encoder"); + Snackbar.make(mGLView, R.string.not_eligible_for_broadcast, Snackbar.LENGTH_LONG).show(); + return false; + } + + try { + // rtmpURL = "rtmp://a.rtmp.youtube.com/live2/"; + // streamURL = "raqk-ppy4-0p33-7phc"; + boolean result = mRtmpStreamer.open(rtmpUrl); + if (result) { + final long recordStartTime = System.currentTimeMillis(); + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + mRenderer.setOptions(mRtmpStreamer); + setRendererPreviewSize(); + // notify the renderer that we want to change the encoder's state + mRenderer.startRecording(recordStartTime); + } + }); + + + int minBufferSize = AudioRecord + .getMinBufferSize(SAMPLE_AUDIO_RATE_IN_HZ, + AudioFormat.CHANNEL_IN_MONO, + 
AudioFormat.ENCODING_PCM_16BIT); + + audioHandler.startAudioEncoder(mRtmpStreamer, SAMPLE_AUDIO_RATE_IN_HZ, minBufferSize, mAudioBitrate); + + audioThread = new AudioRecorderThread(SAMPLE_AUDIO_RATE_IN_HZ, recordStartTime, audioHandler); + audioThread.start(); + isRecording = true; + + if (adaptiveStreamingEnabled) { + adaptiveStreamingTimer = new Timer(); + adaptiveStreamingTimer.schedule(new TimerTask() { + public int previousFrameCount; + public int frameQueueIncreased; + @Override + public void run() { + + + int frameCountInQueue = mRtmpStreamer.getVideoFrameCountInQueue(); + Log.d(TAG, "video frameCountInQueue : " + frameCountInQueue); + if (frameCountInQueue > previousFrameCount) { + frameQueueIncreased++; + } + else { + frameQueueIncreased--; + } + previousFrameCount = frameCountInQueue; + + if (frameQueueIncreased > 10) { + //decrease bitrate + System.out.println("decrease bitrate"); + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + int frameRate = mRenderer.getFrameRate(); + if (frameRate >= 13) { + frameRate -= 3; + mRenderer.setFrameRate(frameRate); + } + else { + int bitrate = mRenderer.getBitrate(); + if (bitrate > 200000) { //200kbit + bitrate -= 100000; + mRenderer.setBitrate(bitrate); + // notify the renderer that we want to change the encoder's state + mRenderer.recorderConfigChanged(); + } + } + } + }); + frameQueueIncreased = 0; + + } + + if (frameQueueIncreased < -10) { + //increase bitrate + System.out.println("//increase bitrate"); + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + int frameRate = mRenderer.getFrameRate(); + if (frameRate <= 27) { + frameRate += 3; + mRenderer.setFrameRate(frameRate); + } + else { + int bitrate = mRenderer.getBitrate(); + if (bitrate < 2000000) { //2Mbit + bitrate += 100000; + mRenderer.setBitrate(bitrate); + // notify the renderer that we want to change the encoder's state + mRenderer.recorderConfigChanged(); + } + } + } + }); + + frameQueueIncreased = 0; + } + + + + } + }, 0, 500); + } + } + + } + catch (Exception e) { + e.printStackTrace(); + } + return isRecording; + } + + + public void stopBroadcasting() { + if (isRecording) { + + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + // notify the renderer that we want to change the encoder's state + mRenderer.stopRecording(); + } + }); + if (adaptiveStreamingTimer != null) { + adaptiveStreamingTimer.cancel(); + adaptiveStreamingTimer = null; + } + + if (audioThread != null) { + audioThread.stopAudioRecording(); + } + + if (audioHandler != null) { + audioHandler.sendEmptyMessage(AudioHandler.END_OF_STREAM); + } + + int i = 0; + while (sVideoEncoder.isRecording()) { + try { + Thread.sleep(50); + } catch (InterruptedException e) { + e.printStackTrace(); + } + if (i>5) { + //timeout 250ms + //force stop recording + sVideoEncoder.stopRecording(); + break; + } + i++; + } + } + + } + + public void setResolution(Resolution size) { + Camera.Parameters parameters = sCameraProxy.getParameters(); + parameters.setPreviewSize(size.width, size.height); + parameters.setRecordingHint(true); + System.out.println("set resolution stop preview"); + sCameraProxy.stopPreview(); + sCameraProxy.setParameters(parameters); + sCameraProxy.startPreview(); + previewSize = size; + setRendererPreviewSize(); + } + + public void setAudioBitrate(int bitrate) { + mAudioBitrate = bitrate; + } + + public void setVideoBitrate(int bitrate) { + mRenderer.setBitrate(bitrate); + } + + public void setIFrameIntervalSeconds(int seconds) { + 
sVideoEncoder.setIframeInterval(seconds); + } + + private void setRendererPreviewSize() + { + int rotation = context.getWindowManager().getDefaultDisplay() + .getRotation(); + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) { + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + mRenderer.setCameraPreviewSize(previewSize.height, previewSize.width); + } + }); + } + else { + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + mRenderer.setCameraPreviewSize(previewSize.width, previewSize.height); + } + }); + } + } + + @Override + public void handleSetSurfaceTexture(SurfaceTexture st) { + if (sCameraProxy != null && !context.isFinishing() && st != null) { + { + st.setOnFrameAvailableListener(this); + sCameraProxy.stopPreview(); + sCameraProxy.setPreviewTexture(st); + sCameraProxy.startPreview(); + } + } + } + + public void startFlash() { + Camera.Parameters parameters = sCameraProxy.getParameters(); + if (parameters != null) { + parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH); + setCameraParameters(parameters); + } + } + + public void stopFlash() { + Camera.Parameters parameters = sCameraProxy.getParameters(); + if (parameters != null) { + parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); + setCameraParameters(parameters); + } + } + + public void openCamera(int cameraId) { + //check permission + if (!isPermissionGranted()) + { + requestPermission(); + return; + } + + if(cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT && + !getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) { + //if fron camera is requested but not found, then open the back camera + cameraId = Camera.CameraInfo.CAMERA_FACING_BACK; + } + + + currentCameraId = cameraId; + mGLView.setVisibility(View.GONE); + new AsyncTask() { + + @Override + protected void onPreExecute() { + } + + @Override + protected Camera.Parameters doInBackground(Integer... 
params) { + + Camera.Parameters parameters = null; + sCameraReleased = false; + System.out.println("--- releaseCamera call in doInBackground --- "); + releaseCamera(); + try { + int tryCount = 0; + do { + sCameraProxy = new CameraProxy(params[0]); + if (sCameraProxy.isCameraAvailable()) { + break; + } + Thread.sleep(1000); + tryCount++; + } while (tryCount <= 3); + if (sCameraProxy.isCameraAvailable()) { + System.out.println("--- camera opened --- "); + parameters = sCameraProxy.getParameters(); + if (parameters != null) { + setCameraParameters(parameters); + + if (Utils.doesEncoderWorks(context) == Utils.ENCODER_NOT_TESTED) + { + boolean encoderWorks = VideoEncoderCore.doesEncoderWork(previewSize.width, previewSize.height, 300000, 20); + Utils.setEncoderWorks(context, encoderWorks); + } + } + } + else { + sCameraProxy = null; + } + Log.d(TAG, "onResume complete: " + this); + + } catch (Exception e) { + e.printStackTrace(); + } + + return parameters; + } + + @Override + protected void onPostExecute(Camera.Parameters parameters) { + if (context.isFinishing()) { + releaseCamera(); + } + else if (sCameraProxy != null && parameters != null) { + mGLView.setVisibility(View.VISIBLE); + mGLView.onResume(); + //mGLView.setAlpha(0.7f); + setRendererPreviewSize(); + + if (Utils.doesEncoderWorks(context) != Utils.ENCODER_WORKS) { + showEncoderNotExistDialog(); + } + + + + } + else { + Snackbar.make(mGLView, R.string.camera_not_running_properly, Snackbar.LENGTH_LONG) + .show(); + } + + } + }.execute(currentCameraId); + } + + private void releaseCamera() { + try { + if (sCameraProxy != null) { + System.out.println("releaseCamera stop preview"); + sCameraProxy.release(); + sCameraProxy = null; + sCameraReleased = true; + System.out.println("-- camera released --"); + } + } catch (Exception ex) { + ex.printStackTrace(); + } + } + + + @Override + public void setAdaptiveStreaming(boolean enable) { + this.adaptiveStreamingEnabled = enable; + } + + private int setCameraParameters(Camera.Parameters parameters) { + + List previewSizeList = parameters.getSupportedPreviewSizes(); + Collections.sort(previewSizeList, new Comparator() { + + @Override + public int compare(Camera.Size lhs, Camera.Size rhs) { + if (lhs.height == rhs.height) { + return lhs.width == rhs.width ? 0 : (lhs.width > rhs.width ? 
1 : -1); + } else if (lhs.height > rhs.height) { + return 1; + } + return -1; + } + }); + + int preferredHeight = 720; + + choosenPreviewsSizeList = new ArrayList<>(); + + int diff = Integer.MAX_VALUE; + Resolution choosenSize = null; + for (int i = 0; i < previewSizeList.size(); i++) { + Camera.Size size = previewSizeList.get(i); + + if ((size.width % 16 == 0) && (size.height % 16 == 0)) { + Resolution resolutionSize = new Resolution(size.width, size.height); + choosenPreviewsSizeList.add(resolutionSize); + int currentDiff = Math.abs(size.height - preferredHeight); + if (currentDiff < diff) { + diff = currentDiff; + choosenSize = resolutionSize; + } + } + } + + int[] requestedFrameRate = new int[]{frameRate * 1000, frameRate * 1000}; + int[] bestFps = findBestFrameRate(parameters.getSupportedPreviewFpsRange(), requestedFrameRate); + parameters.setPreviewFpsRange(bestFps[0], bestFps[1]); + + int len = choosenPreviewsSizeList.size(); + int resolutionIndex = len-1; + + if (choosenSize != null) { + resolutionIndex = choosenPreviewsSizeList.indexOf(choosenSize); + } + + + if (resolutionIndex >=0) { + Resolution size = choosenPreviewsSizeList.get(resolutionIndex); + parameters.setPreviewSize(size.width, size.height); + parameters.setRecordingHint(true); + } + if (parameters.getSupportedFocusModes().contains( + Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } + sCameraProxy.setDisplayOrientation(getCameraDisplayOrientation()); + + + if (parameters.isVideoStabilizationSupported()) { + parameters.setVideoStabilization(true); + } + + //sCameraDevice.setParameters(parameters); + sCameraProxy.setParameters(parameters); + Camera.Size size = parameters.getPreviewSize(); + this.previewSize = new Resolution(size.width, size.height); + + return len; + } + + + public boolean isPermissionGranted() { + boolean cameraPermissionGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) + == PackageManager.PERMISSION_GRANTED; + + boolean microPhonePermissionGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) + == PackageManager.PERMISSION_GRANTED; + + return cameraPermissionGranted && microPhonePermissionGranted; + } + + public void requestPermission() { + + boolean cameraPermissionGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) + == PackageManager.PERMISSION_GRANTED; + + boolean microPhonePermissionGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) + == PackageManager.PERMISSION_GRANTED; + + + final List permissionList = new ArrayList(); + if (!cameraPermissionGranted) { + permissionList.add(Manifest.permission.CAMERA); + } + if (!microPhonePermissionGranted) { + permissionList.add(Manifest.permission.RECORD_AUDIO); + } + if (permissionList.size() > 0 ) + { + if (ActivityCompat.shouldShowRequestPermissionRationale(context, + Manifest.permission.CAMERA)) { + mAlertDialog = new AlertDialog.Builder(context) + .setTitle(R.string.permission) + .setMessage(getString(R.string.camera_permission_is_required)) + .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { + public void onClick(DialogInterface dialog, int which) { + String[] permissionArray = permissionList.toArray(new String[permissionList.size()]); + ActivityCompat.requestPermissions(context, + permissionArray, + PERMISSIONS_REQUEST); + } + }) + .show(); + } + else if (ActivityCompat.shouldShowRequestPermissionRationale(context, + 
Manifest.permission.RECORD_AUDIO)) { + mAlertDialog = new AlertDialog.Builder(context) + .setMessage(getString(R.string.microphone_permission_is_required)) + .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { + public void onClick(DialogInterface dialog, int which) { + String[] permissionArray = permissionList.toArray(new String[permissionList.size()]); + ActivityCompat.requestPermissions(context, + permissionArray, + PERMISSIONS_REQUEST); + } + }) + .show(); + + } + else { + String[] permissionArray = permissionList.toArray(new String[permissionList.size()]); + ActivityCompat.requestPermissions(context, + permissionArray, + PERMISSIONS_REQUEST); + } + + } + } + + public int[] findBestFrameRate(List frameRateList, int[] requestedFrameRate) { + int[] bestRate = frameRateList.get(0); + int requestedAverage = (requestedFrameRate[0] + requestedFrameRate[1]) / 2; + int bestRateAverage = (bestRate[0] + bestRate[1]) / 2; + + int size = frameRateList.size(); + for (int i=1; i < size; i++) { + int[] rate = frameRateList.get(i); + + int rateAverage = (rate[0] + rate[1]) / 2; + + + if (Math.abs(requestedAverage - bestRateAverage) >= Math.abs(requestedAverage - rateAverage)) { + + if ((Math.abs(requestedFrameRate[0] - rate[0]) <= + Math.abs(requestedFrameRate[0] - bestRate[0])) || + (Math.abs(requestedFrameRate[1] - rate[1]) <= + Math.abs(requestedFrameRate[1] - bestRate[1]))) { + bestRate = rate; + bestRateAverage = rateAverage; + } + } + } + + return bestRate; + } + + public void showEncoderNotExistDialog() { + mAlertDialog = new AlertDialog.Builder(context) + //.setTitle("") + .setMessage(R.string.not_eligible_for_broadcast) + .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { + public void onClick(DialogInterface dialog, int which) { + } + }) + .show(); + } + + public int getCameraDisplayOrientation() { + Camera.CameraInfo info = + new Camera.CameraInfo(); + Camera.getCameraInfo(currentCameraId, info); + int rotation = context.getWindowManager().getDefaultDisplay() + .getRotation(); + int degrees = 0; + switch (rotation) { + case Surface.ROTATION_0: degrees = 0; break; + case Surface.ROTATION_90: degrees = 90; break; + case Surface.ROTATION_180: degrees = 180; break; + case Surface.ROTATION_270: degrees = 270; break; + } + + int result; + if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { + result = (info.orientation + degrees) % 360; + result = (360 - result) % 360; // compensate the mirror + } else { // back-facing + result = (info.orientation - degrees + 360) % 360; + } + return result; + } + + public void changeCamera() { + if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) { + Snackbar.make(mGLView, R.string.only_one_camera_exists, Snackbar.LENGTH_LONG).show(); + return; + } + if (sCameraProxy == null) { + Snackbar.make(mGLView, R.string.first_call_open_camera, Snackbar.LENGTH_LONG).show(); + return; + } + + //swap the id of the camera to be used + if(currentCameraId == Camera.CameraInfo.CAMERA_FACING_BACK){ + currentCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT; + } + else { + currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK; + } + + new AsyncTask() { + + @Override + protected void onPreExecute() { + super.onPreExecute(); + mGLView.queueEvent(new Runnable() { + @Override + public void run() { + // Tell the renderer that it's about to be paused so it can clean up. 
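Note: a worked example for findBestFrameRate() above, because the endpoint guard is easy to misread — the closest average alone is not enough to win:

    // Camera preview fps ranges are in fps*1000 units; request a fixed 20 fps.
    int[] requested = {20000, 20000};
    int[][] supported = {{15000, 15000}, {24000, 24000}, {15000, 30000}};
    // pass 1: {24000,24000} (avg 24000) replaces {15000,15000} (avg 15000):
    //         its average is closer to 20000 and so are its endpoints.
    // pass 2: {15000,30000} (avg 22500) has the closest average of all, but
    //         neither endpoint beats the incumbent's 24000/24000, so the
    //         endpoint check rejects it.
    // findBestFrameRate(Arrays.asList(supported), requested) -> {24000, 24000}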
+                        mRenderer.notifyPausing();
+                    }
+                });
+                mGLView.onPause();
+                mGLView.setOnTouchListener(null);
+            }
+
+            @Override
+            protected Camera.Parameters doInBackground(Void... voids) {
+                releaseCamera();
+                try {
+                    sCameraProxy = new CameraProxy(currentCameraId);
+                    Camera.Parameters parameters = sCameraProxy.getParameters();
+                    if (parameters != null) {
+                        setCameraParameters(parameters);
+                        return parameters;
+                    }
+                }
+                catch (Exception e) {
+                    e.printStackTrace();
+                }
+                return null;
+            }
+
+            @Override
+            protected void onPostExecute(Camera.Parameters parameters) {
+                super.onPostExecute(parameters);
+                if (parameters != null) {
+                    mGLView.onResume();
+                    setRendererPreviewSize();
+                }
+                else {
+                    Snackbar.make(mGLView, R.string.camera_not_running_properly, Snackbar.LENGTH_LONG)
+                            .show();
+                }
+
+            }
+        }.execute();
+    }
+
+    @Nullable
+    @Override
+    public IBinder onBind(Intent intent) {
+        return mBinder;
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/OnEventListener.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/OnEventListener.java
new file mode 100644
index 0000000..b113a7f
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/OnEventListener.java
@@ -0,0 +1,7 @@
+package com.psudoanon.broadcaster;
+
+
+public interface OnEventListener<T> {
+    public void onSuccess(T object);
+    public void onFailure(Exception e);
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioEncoder.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioEncoder.java
new file mode 100644
index 0000000..6087371
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioEncoder.java
@@ -0,0 +1,242 @@
+package com.psudoanon.broadcaster.encoder;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import com.psudoanon.broadcaster.network.IMediaMuxer;
+
+/**
+ * Created by faraklit on 03.02.2016.
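Note: the adaptive-streaming TimerTask in LiveVideoBroadcaster.startBroadcasting() above boils down to a watermark controller on the muxer's outbound video queue. Isolated, with the same constants (sampled every 500 ms; on a downward verdict the renderer drops the frame rate in steps of 3 down to 10 fps, then the bitrate in steps of 100 kbps down to 200 kbps, and the reverse to probe back up toward 30 fps / 2 Mbps):

    class QueueWatermark {
        private int previousCount, trend;

        /** @return +1 to step quality up, -1 to step it down, 0 to hold. */
        int onSample(int framesQueued) {
            trend += (framesQueued > previousCount) ? 1 : -1;
            previousCount = framesQueued;
            if (trend > 10)  { trend = 0; return -1; } // queue keeps growing: back off
            if (trend < -10) { trend = 0; return +1; } // queue keeps draining: probe up
            return 0;
        }
    }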
+ */ +public class AudioEncoder extends Thread { + + final int TIMEOUT_USEC = 10000; + + private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm"; + private ByteBuffer[] mAudioInputBuffers; + private ByteBuffer[] mAudioOutputBuffers; + private MediaCodec mAudioEncoder; + private IMediaMuxer mMuxerHandler; + private Map reservedBuffers = new HashMap(); + private static int roundTimes; + private static long roundOffset; + + + /** + * + * @param sampleRate recommended setting is 44100 + * @param channelCount recommended setting is 1 + * @param bitrate recommended setting is 64000 + * @return + */ + public boolean startAudioEncoder(int sampleRate, int channelCount, int bitrate, int maxInputSize, IMediaMuxer muxerHandler) { + mMuxerHandler = muxerHandler; + MediaFormat audioFormat = MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, sampleRate, channelCount); + audioFormat.setInteger( + MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); + audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate); + audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize); + + + try { + mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE); + mAudioEncoder.configure( + audioFormat, + null /* surface */, + null /* crypto */, + MediaCodec.CONFIGURE_FLAG_ENCODE); + + mAudioEncoder.start(); + mAudioInputBuffers = mAudioEncoder.getInputBuffers(); + mAudioOutputBuffers = mAudioEncoder.getOutputBuffers(); + start(); + return true; + } catch (IOException | IllegalStateException e) { + e.printStackTrace(); + mAudioEncoder = null; + } + return false; + } + /* + * @param data + * @param pts presentation time stamp in microseconds + */ + public void encodeAudio(byte[] data, int length, long pts) { + if (mAudioEncoder == null) { + return; + } + int bufferRemaining; + + for (int i = 0; i < 3 ; i++) { + int inputBufferId = mAudioEncoder.dequeueInputBuffer(TIMEOUT_USEC); + + if (inputBufferId >= 0) { + ByteBuffer inputBuf = mAudioInputBuffers[inputBufferId]; + inputBuf.clear(); + bufferRemaining = inputBuf.remaining(); + if (bufferRemaining < length) { + inputBuf.put(data, 0, bufferRemaining); + } else { + inputBuf.put(data, 0, length); + } + + //length equals to inputbuffer position + mAudioEncoder.queueInputBuffer(inputBufferId, 0, inputBuf.position(), pts, 0); + break; + } + try { + Thread.sleep(100); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + + public void stopEncoding() + { + //////////////////////// Stop Signal For Audio Encoder /////////////////////////// + for (int i = 0; i < 3; i++) { + if (mAudioEncoder == null) { + break; + } + + int inputBufferId = mAudioEncoder.dequeueInputBuffer(TIMEOUT_USEC); + + if (inputBufferId >= 0) { + mAudioEncoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + break; + } + try { + Thread.sleep(500); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + private static long previousPresentationTimeUs; + public static long getUnsignedInt(long x) { + return x & 0xffffffffL; + } + + public void run() { + //Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO); + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + roundTimes = 0; + roundOffset = 0; + previousPresentationTimeUs = 0; + + for (;;) { + /******************** AUDIO **************************/ + int outputBufferId = mAudioEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (outputBufferId >= 0) { + // mVideoOutputBuffers[outputBufferId] is ready to be processed or rendered. 
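Note: encodeAudio() above is the classic synchronous MediaCodec input loop, written against the pre-API-21 getInputBuffers() array. The same step with the modern accessor, reduced to a single attempt — a sketch, whereas the real method retries up to three times with 100 ms sleeps:

    import android.media.MediaCodec;
    import java.nio.ByteBuffer;

    class FeedSketch {
        static void feed(MediaCodec codec, byte[] pcm, int length, long ptsUs) {
            int id = codec.dequeueInputBuffer(10_000); // timeout in microseconds
            if (id < 0) return;                        // encoder busy; caller may retry
            ByteBuffer in = codec.getInputBuffer(id);  // API 21+ replacement for the array
            in.clear();
            int n = Math.min(in.remaining(), length);  // truncate, like the code above
            in.put(pcm, 0, n);
            codec.queueInputBuffer(id, 0, n, ptsUs, 0);
        }
    }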
+ ByteBuffer encodedData = mAudioOutputBuffers[outputBufferId]; + if (encodedData == null) { + System.out.println(" encoded data null audio"); + continue; + } + + encodedData.position(info.offset); + encodedData.limit(info.offset + info.size); + // first packet is 2 byte audio specific config + + //there is a bug in audio encoder- it starts to give negative values after integer max size is exceeded + //so getUnSignedInt need to be used + if (previousPresentationTimeUs < 0 && info.presentationTimeUs > 0) { + roundTimes++; + roundOffset = roundTimes * 4294967296L; + } + + long presentationTimeInMillis = (roundOffset + getUnsignedInt(info.presentationTimeUs)) / 1000; //convert it to milliseconds + //first it should be divided to 1000 and assign value to a long + //then cast it to int - + // Otherwise after about 35 minutes(exceeds integer max size) presentationTime will be negative + //in this assignment int presentationTime = (int)info.presentationTimeUs/1000; //convert it to milliseconds + int presentationTime = (int)presentationTimeInMillis; + + byte[] data = getBuffer(info.size, mMuxerHandler.getLastAudioFrameTimeStamp(), presentationTime); + + encodedData.get(data, 0, info.size); + encodedData.position(info.offset); + + mMuxerHandler.writeAudio(data, info.size, presentationTime); + + previousPresentationTimeUs = info.presentationTimeUs; + + mAudioEncoder.releaseOutputBuffer(outputBufferId, false); + } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + mAudioOutputBuffers = mAudioEncoder.getOutputBuffers(); + } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + } + else if (outputBufferId == MediaCodec.INFO_TRY_AGAIN_LATER) { + + } + + if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + //end of stream + // do not break here, let video break the loop + reservedBuffers.clear(); + break; + } + else { + + } + } + release(); + } + + public byte[] getBuffer(int size, int lastSentFrameTimestamp, int currentTimeStamp) + { + /** + * how does it work? 
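Note: the presentationTimeUs wrap handling above, with concrete numbers. The encoder's timestamp behaves like a 32-bit counter: a signed reading goes negative after Integer.MAX_VALUE microseconds (~35.8 min), and getUnsignedInt() plus the per-wrap offset of 4294967296 us recovers the real elapsed time:

    public class WrapDemo {
        public static void main(String[] args) {
            long signedReading = -2_000_000L;              // ~71.5 min into the stream
            long unsigned = signedReading & 0xffffffffL;   // 4,292,967,296 us
            long roundOffset = 0;                          // no completed 2^32-us wrap yet
            System.out.println((roundOffset + unsigned) / 1000 + " ms"); // 4292967 ms
        }
    }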
+         * we put byte arrays with their timestamp values into a hash map
+         * when there is a new output buffer array, we check the last frame timestamp of the mediamuxer
+         * if the byte buffer timestamp is less than the last frame timestamp of the mediamuxer,
+         * it means that we can use that byte buffer again because it has already been written to the network
+         */
+        Iterator<Map.Entry<Integer, byte[]>> iterator = reservedBuffers.entrySet().iterator();
+
+        while (iterator.hasNext()) {
+            Map.Entry<Integer, byte[]> next = iterator.next();
+            if (next.getKey() <= lastSentFrameTimestamp)
+            {
+                // it means this frame is sent
+                byte[] value = (byte[]) next.getValue();
+                iterator.remove();
+                if (value.length >= size)
+                {
+                    reservedBuffers.put(currentTimeStamp, value);
+                    return value;
+                }
+                // if the byte array length is not bigger than the requested size,
+                // we give this array to the soft hands of the GC
+            }
+        }
+
+        // no eligible buffer found, create a new byte array
+        byte[] data = new byte[size];
+        reservedBuffers.put(currentTimeStamp, data);
+        return data;
+    }
+
+    private void release()
+    {
+        try {
+            if (mAudioEncoder != null) {
+                mAudioEncoder.stop();
+                mAudioEncoder.release();
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioHandler.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioHandler.java
new file mode 100644
index 0000000..965773c
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/AudioHandler.java
@@ -0,0 +1,80 @@
+package com.psudoanon.broadcaster.encoder;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+import com.psudoanon.broadcaster.network.IMediaMuxer;
+
+/**
+ * Created by mekya on 28/03/2017.
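Note on AudioHandler, which this hunk opens: its contract with AudioRecorderThread (earlier in this diff) is carried entirely in the Message fields — obj is the PCM chunk, arg1 the valid byte count, arg2 the capture time in milliseconds since recording start (multiplied by 1000 before reaching encodeAudio, which expects microseconds). The producer side, as a sketch mirroring the recorder thread's code:

    static void postChunk(AudioHandler audioHandler, byte[] pcmChunk,
                          int bytesRead, long recordStartTime) {
        Message msg = Message.obtain(audioHandler, AudioHandler.RECORD_AUDIO, pcmChunk);
        msg.arg1 = bytesRead;                                            // valid bytes in pcmChunk
        msg.arg2 = (int) (System.currentTimeMillis() - recordStartTime); // capture time, ms
        audioHandler.sendMessage(msg);
    }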
+ */ +public class AudioHandler extends Handler { + + public static final int RECORD_AUDIO = 0; + public static final int END_OF_STREAM = 2; + + private AudioEncoder audioEncoder = null; + + public AudioEncoder getAudioEncoder() { + return audioEncoder; + } + + public AudioHandler(Looper looper) { + super(looper); + + } + + public boolean startAudioEncoder(IMediaMuxer muxerHandler, int sampleRate, int bufferSize) { + boolean result = false; + + audioEncoder = new AudioEncoder(); + try { + result = audioEncoder.startAudioEncoder(sampleRate, 1, 64000, bufferSize, muxerHandler); + } catch (Exception e) { + e.printStackTrace(); + audioEncoder = null; + } + return result; + } + + public boolean startAudioEncoder(IMediaMuxer muxerHandler, int sampleRate, int bufferSize, int bitrate) { + boolean result = false; + + audioEncoder = new AudioEncoder(); + try { + result = audioEncoder.startAudioEncoder(sampleRate, 1, bitrate, bufferSize, muxerHandler); + } catch (Exception e) { + e.printStackTrace(); + audioEncoder = null; + } + return result; + } + + @Override + public void handleMessage(Message msg) { + if (audioEncoder == null) { + return; + } + + + + switch (msg.what) { + case END_OF_STREAM: + if (audioEncoder.getState() == Thread.State.RUNNABLE) { + Log.d("audio handler", "stop audio encoding..."); + audioEncoder.stopEncoding(); + removeMessages(RECORD_AUDIO); + } + break; + case RECORD_AUDIO: + /* msg.obj is the byte array buffer + * msg.arg1 is the length of the byte array + * msg.arg2 is the timestamp of frame in milliseconds + */ + audioEncoder.encodeAudio((byte[]) msg.obj, msg.arg1, msg.arg2 * 1000); + break; + } + } +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/CameraSurfaceRenderer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/CameraSurfaceRenderer.java new file mode 100644 index 0000000..2c90403 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/CameraSurfaceRenderer.java @@ -0,0 +1,338 @@ +package com.psudoanon.broadcaster.encoder; + +/** + * Created by faraklit on 17.02.2016. + */ + +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.util.Log; + + +import java.util.List; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import com.psudoanon.broadcaster.CameraHandler; +import com.psudoanon.broadcaster.encoder.gles.FullFrameRect; +import com.psudoanon.broadcaster.encoder.gles.Texture2dProgram; +import com.psudoanon.broadcaster.network.IMediaMuxer; + +/** + * Renderer object for our GLSurfaceView. + *
<p>
+ * Do not call any methods here directly from another thread -- use the + * GLSurfaceView#queueEvent() call. + */ +public class CameraSurfaceRenderer implements GLSurfaceView.Renderer { + private static final String TAG = CameraSurfaceRenderer.class.getSimpleName(); + private static final boolean VERBOSE = false; + + private static final int RECORDING_OFF = 0; + private static final int RECORDING_ON = 1; + private static final int RECORDING_RESUMED = 2; + private static final int RECORDER_CONFIG_CHANGED = 3; + + private CameraHandler mCameraHandler; + private TextureMovieEncoder mVideoEncoder; + + private FullFrameRect mFullScreen; + + private final float[] mSTMatrix = new float[16]; + private int mTextureId; + + private SurfaceTexture mSurfaceTexture; + private boolean mRecordingEnabled; + private int mRecordingStatus; + private int mFrameCount; + + // width/height of the incoming camera preview frames + private boolean mIncomingSizeUpdated; + private int mIncomingWidth; + private int mIncomingHeight; + private IMediaMuxer mWriterHandler; + private long mRecordingStartTime; + private int bitrate; + private int frameRate = 25; + + /** + * Constructs CameraSurfaceRenderer. + *
<p>
+ * @param cameraHandler Handler for communicating with UI thread + * @param movieEncoder video encoder object + */ + public CameraSurfaceRenderer(CameraHandler cameraHandler, + TextureMovieEncoder movieEncoder) { + mCameraHandler = cameraHandler; + mVideoEncoder = movieEncoder; + + + mTextureId = -1; + + mRecordingStatus = -1; + mRecordingEnabled = false; + mFrameCount = -1; + + mIncomingSizeUpdated = false; + mIncomingWidth = mIncomingHeight = -1; + + } + + private Texture2dProgram.ProgramType mEffectType = Texture2dProgram.ProgramType.TEXTURE_EXT ; + + public void setEffect(Texture2dProgram.ProgramType effectType) { + this.mEffectType = effectType; + } + + /** + * Notifies the renderer thread that the activity is pausing. + *
<p>
+ * For best results, call this *after* disabling Camera preview. + */ + public void notifyPausing() { + if (mSurfaceTexture != null) { + Log.d(TAG, "renderer pausing -- releasing SurfaceTexture"); + mSurfaceTexture.release(); + mSurfaceTexture = null; + } + if (mFullScreen != null) { + mFullScreen.release(false); // assume the GLSurfaceView EGL activity is about + mFullScreen = null; // to be destroyed + } + mIncomingWidth = mIncomingHeight = -1; + } + + /** + * Notifies the renderer that we want to stop or start recording. + */ + public void startRecording(long recordingStartTime) { + mRecordingEnabled = true; + Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + mRecordingEnabled); + //if (isRecording) + { + mRecordingStartTime = recordingStartTime; + } + } + + public void stopRecording(){ + mRecordingEnabled = false; + } + + /** + * Records the size of the incoming camera preview frames. + *
<p>
+ * It's not clear whether this is guaranteed to execute before or after onSurfaceCreated(), + * so we assume it could go either way. (Fortunately they both run on the same thread, + * so we at least know that they won't execute concurrently.) + */ + public void setCameraPreviewSize(int width, int height) { + Log.d(TAG, "setCameraPreviewSize"); + mIncomingWidth = width; + mIncomingHeight = height; + mIncomingSizeUpdated = true; + if (mIncomingHeight >= 720) { + bitrate = 850000; + } else if (mIncomingHeight >= 480) { + bitrate = 550000; + } else if (mIncomingHeight >= 360) { + bitrate = 450000; + } else if (mIncomingHeight >= 288) { + bitrate = 350000; + } else if (mIncomingHeight >= 240) { + bitrate = 250000; + } else //if (mIncomingHeight >= 144) + { + bitrate = 100000; + } + } + + public int getBitrate() { + return bitrate; + } + + public void setBitrate(int bitrate) { + this.bitrate = bitrate; + } + + @Override + public void onSurfaceCreated(GL10 unused, EGLConfig config) { + Log.d(TAG, "onSurfaceCreated"); + + // We're starting up or coming back. Either way we've got a new EGLContext that will + // need to be shared with the video encoder, so figure out if a recording is already + // in progress. + mRecordingEnabled = mVideoEncoder.isRecording(); + if (mRecordingEnabled) { + mRecordingStatus = RECORDING_RESUMED; + } else { + mRecordingStatus = RECORDING_OFF; + } + + // Set up the texture blitter that will be used for on-screen display. This + // is *not* applied to the recording, because that uses a separate shader. + mFullScreen = new FullFrameRect( + new Texture2dProgram(mEffectType)); + + mVideoEncoder.setEffect(mEffectType); + + mTextureId = mFullScreen.createTextureObject(); + + // Create a SurfaceTexture, with an external texture, in this EGL activity. We don't + // have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame + // available messages will arrive on the main thread. + mSurfaceTexture = new SurfaceTexture(mTextureId); + + System.out.println("//Tell the UI thread to enable the camera preview."); + mCameraHandler.sendMessage(mCameraHandler.obtainMessage( + CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture)); + } + + public void setFrameRate(int frameRate) { + this.frameRate = frameRate; + if (mVideoEncoder != null) { + mVideoEncoder.setFrameRate(frameRate); + } + } + + public int getFrameRate() { + return mVideoEncoder != null ? mVideoEncoder.getFrameRate() : 0; + } + + @Override + public void onSurfaceChanged(GL10 unused, int width, int height) { + Log.d(TAG, "onSurfaceChanged " + width + "x" + height); + GLES20.glViewport(0, 0, width, height); + } + + @Override + public void onDrawFrame(GL10 unused) { + if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureId); + boolean showBox = false; + + + // Latch the latest frame. If there isn't anything new, we'll just re-use whatever + // was there before. + mSurfaceTexture.updateTexImage(); + + // If the recording state is changing, take care of it here. Ideally we wouldn't + // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView + // makes it hard to do elsewhere. 
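Note: the default bitrate chosen in setCameraPreviewSize() above, isolated for quick reference (any later setBitrate() call overrides it):

    // Same thresholds as above: the bitrate is picked from the preview height.
    static int defaultBitrateFor(int previewHeight) {
        if (previewHeight >= 720) return 850_000;
        if (previewHeight >= 480) return 550_000;
        if (previewHeight >= 360) return 450_000;
        if (previewHeight >= 288) return 350_000;
        if (previewHeight >= 240) return 250_000;
        return 100_000;                        // anything smaller (e.g. 144p)
    }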
+ if (mRecordingEnabled) { + + switch (mRecordingStatus) { + case RECORDING_OFF: + Log.d(TAG, "START recording bitrate: " +bitrate); + { + + // start recording + boolean started = mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig( + mWriterHandler, mIncomingWidth, mIncomingHeight, bitrate, frameRate, EGL14.eglGetCurrentContext(), mEffectType), + mRecordingStartTime); + if (started) { + mRecordingStatus = RECORDING_ON; + } + else { + mRecordingStatus = RECORDING_OFF; + } + } + break; + case RECORDER_CONFIG_CHANGED: + mVideoEncoder.releaseRecording(); + mRecordingStatus = RECORDING_OFF; + break; + case RECORDING_RESUMED: + Log.d(TAG, "RESUME recording"); + mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext()); + mRecordingStatus = RECORDING_ON; + break; + case RECORDING_ON: + // yay + break; + default: + throw new RuntimeException("unknown status " + mRecordingStatus); + } + } else { + switch (mRecordingStatus) { + case RECORDING_ON: + case RECORDING_RESUMED: + // stop recording + Log.d(TAG, "STOP recording"); + mVideoEncoder.stopRecording(); + mRecordingStatus = RECORDING_OFF; + break; + case RECORDING_OFF: + // yay + break; + default: + throw new RuntimeException("unknown status " + mRecordingStatus); + } + } + + // Set the video encoder's texture name. We only need to do this once, but in the + // current implementation it has to happen after the video encoder is started, so + // we just do it here. + // + // TODO: be less lame. + mVideoEncoder.setTextureId(mTextureId); + + // Tell the video encoder thread that a new frame is available. + // This will be ignored if we're not actually recording. + mVideoEncoder.frameAvailable(mSurfaceTexture); + + if (mIncomingWidth <= 0 || mIncomingHeight <= 0) { + // Texture size isn't set yet. This is only used for the filters, but to be + // safe we can just skip drawing while we wait for the various races to resolve. + // (This seems to happen if you toggle the screen off/on with power button.) + Log.i(TAG, "Drawing before incoming texture size set; skipping"); + return; + } + + if (mIncomingSizeUpdated) { + mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight); + mIncomingSizeUpdated = false; + } + + // Draw the video frame. + mSurfaceTexture.getTransformMatrix(mSTMatrix); + mFullScreen.drawFrame(mTextureId, mSTMatrix); + + // Draw a flashing box if we're recording. This only appears on screen. + /* showBox = (mRecordingStatus == RECORDING_ON); + if (showBox && (++mFrameCount & 0x04) == 0) { + drawBox(); + } + */ + } + + /** + * Draws a red box in the corner. 
+ */
+    private void drawBox() {
+        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+        GLES20.glScissor(0, 0, 100, 100);
+        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+    }
+
+    public void setOptions(IMediaMuxer writerHandler) {
+        mWriterHandler = writerHandler;
+    }
+
+    // Call this after the incoming width and height have changed.
+    public void recorderConfigChanged() {
+        // Note: calling this while recording is in progress can throw an exception
+        // in some circumstances.
+        mRecordingStatus = RECORDER_CONFIG_CHANGED;
+    }
+
+    public List getEffectList() {
+        return Texture2dProgram.EFFECTS;
+    }
+
+}
+
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
new file mode 100644
index 0000000..5331c4f
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/TextureMovieEncoder.java
@@ -0,0 +1,517 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGLContext;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+
+import com.psudoanon.broadcaster.encoder.gles.EglCore;
+import com.psudoanon.broadcaster.encoder.gles.FullFrameRect;
+import com.psudoanon.broadcaster.encoder.gles.Texture2dProgram;
+import com.psudoanon.broadcaster.encoder.gles.WindowSurface;
+import com.psudoanon.broadcaster.network.IMediaMuxer;
+
+/**
+ * Encode a movie from frames rendered from an external texture image.
+ *

+ * The object wraps an encoder running on a dedicated thread. The various control messages + * may be sent from arbitrary threads (typically the app UI thread). The encoder thread + * manages both sides of the encoder (feeding and draining); the only external input is + * the GL texture. + *

+ * The design is complicated slightly by the need to create an EGL context that shares state
+ * with a view that gets restarted if (say) the device orientation changes.  When the view
+ * in question is a GLSurfaceView, we don't have full control over the EGL context creation
+ * on that side, so we have to bend a bit backwards here.
+ *

+ * To use: + *
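+ * A plausible call sequence, reconstructed from the methods defined below (a sketch,
+ * not verbatim from the original docs):
+ * <pre>
+ *     TextureMovieEncoder encoder = new TextureMovieEncoder();
+ *     encoder.startRecording(config, startTimeMs);    // see EncoderConfig below
+ *     encoder.setTextureId(textureId);
+ *     // for each frame, after SurfaceTexture#updateTexImage():
+ *     encoder.frameAvailable(surfaceTexture);
+ *     // when done:
+ *     encoder.stopRecording();
+ * </pre>
+ *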

+ * + * TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases. + */ +public class TextureMovieEncoder implements Runnable { + private static final String TAG = TextureMovieEncoder.class.getSimpleName(); + private static final boolean VERBOSE = false; + + private static final int MSG_START_RECORDING = 0; + private static final int MSG_STOP_RECORDING = 1; + private static final int MSG_FRAME_AVAILABLE = 2; + private static final int MSG_SET_TEXTURE_ID = 3; + private static final int MSG_UPDATE_SHARED_CONTEXT = 4; + private static final int MSG_QUIT = 5; + private static final int MSG_RELEASE_RECORDING = 6; + private static final int MSG_CHANGE_EFFECT = 7; + + // ----- accessed exclusively by encoder thread ----- + private WindowSurface mInputWindowSurface; + private EglCore mEglCore; + private FullFrameRect mFullScreen; + private int mTextureId; + private int mFrameNum; + private VideoEncoderCore mVideoEncoder; + + // ----- accessed by multiple threads ----- + private volatile EncoderHandler mHandler; + + private Object mReadyFence = new Object(); // guards ready/running + private boolean mReady; + private boolean mRunning; + private long mRecordingStartTime; + private long mLastFrameTime = 0; + private Texture2dProgram.ProgramType mProgramType; + private EncoderConfig mEncoderConfig; + private int mIframeInterval = 2; + + /** + * Encoder configuration. + *

+ * Object is mostly immutable, so it can usually be passed between threads without
+ * explicit synchronization.  Note that mFrameRate and mBitRate are mutable in this
+ * version, so don't change them while another thread may be reading them.
+ *

+ * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern + * with reasonable defaults for those and bit rate. + */ + public static class EncoderConfig { + final int mWidth; + final int mHeight; + final EGLContext mEglContext; + final IMediaMuxer writerHandler; + final Texture2dProgram.ProgramType mProgramType; + public int mFrameRate; + public int mBitRate; + + public EncoderConfig(IMediaMuxer handler, int width, int height, int bitRate, int frameRate, + EGLContext sharedEglContext, Texture2dProgram.ProgramType programType) { + writerHandler = handler; + mWidth = width; + mHeight = height; + mBitRate = bitRate; + mEglContext = sharedEglContext; + mProgramType = programType; + mFrameRate = frameRate; + } + + } + + /** + * Tells the video recorder to start recording. (Call from non-encoder thread.) + *

+ * Creates a new thread, which will create an encoder using the provided configuration. + *
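+ * A hedged example of building a config and starting the encoder thread; the muxer
+ * handle and the numbers are placeholders, and ProgramType.TEXTURE_EXT is assumed to
+ * be one of the program types this project's Texture2dProgram defines:
+ * <pre>
+ *     TextureMovieEncoder.EncoderConfig config = new TextureMovieEncoder.EncoderConfig(
+ *             muxer,                  // an IMediaMuxer implementation
+ *             1280, 720,              // encoded size
+ *             850 * 1000,             // bit rate, bps
+ *             20,                     // frame rate, fps
+ *             EGL14.eglGetCurrentContext(),
+ *             Texture2dProgram.ProgramType.TEXTURE_EXT);
+ *     boolean started = encoder.startRecording(config, System.currentTimeMillis());
+ * </pre>
+ *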

+ * Returns after the recorder thread has started and is ready to accept Messages. The + * encoder may not yet be fully configured. + */ + public boolean startRecording(EncoderConfig config, long mRecordingStartTime) { + Log.d(TAG, "Encoder: startRecording()"); + synchronized (mReadyFence) { + if (mRunning) { + Log.w(TAG, "Encoder thread already running"); + return false; + } + this.mRecordingStartTime = mRecordingStartTime; + mRunning = true; + new Thread(this, "TextureMovieEncoder").start(); + while (!mReady) { + try { + mReadyFence.wait(); + } catch (InterruptedException ie) { + // ignore + } + } + } + + mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config)); + return true; + } + + + public void releaseRecording() { + if (mHandler != null) { + mHandler.sendMessage(mHandler.obtainMessage(MSG_RELEASE_RECORDING)); + mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT)); + } + } + + /** + * Tells the video recorder to stop recording. (Call from non-encoder thread.) + *

+ * Returns immediately; the encoder/muxer may not yet be finished creating the movie. + *

+ * TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down + * so we can provide reasonable status UI (and let the caller know that movie encoding + * has completed). + */ + public void stopRecording() { + if (mHandler != null) { + mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING)); + mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT)); + } + // We don't know when these will actually finish (or even start). We don't want to + // delay the UI thread though, so we return immediately. + } + + /** + * Returns true if recording has been started. + */ + public boolean isRecording() { + synchronized (mReadyFence) { + return mRunning; + } + } + + /** + * Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.) + */ + public void updateSharedContext(EGLContext sharedContext) { + mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext)); + } + + /** + * Tells the video recorder that a new frame is available. (Call from non-encoder thread.) + *

+ * This function sends a message and returns immediately. This isn't sufficient -- we + * don't want the caller to latch a new frame until we're done with this one -- but we + * can get away with it so long as the input frame rate is reasonable and the encoder + * thread doesn't stall. + *
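+ * The 64-bit timestamp is split across the two 32-bit Message args and rebuilt in the
+ * handler; a minimal illustration of the round trip used in this file:
+ * <pre>
+ *     long ts = 123_456_789_012L;
+ *     int hi = (int) (ts >> 32);
+ *     int lo = (int) ts;
+ *     long back = (((long) hi) << 32) | (((long) lo) & 0xffffffffL);   // back == ts
+ * </pre>
+ *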

+ * TODO: either block here until the texture has been rendered onto the encoder surface,
+ * or have a separate "block if still busy" method that the caller can execute immediately
+ * before it calls updateTexImage().  The latter is preferred because we don't want to
+ * stall the caller while this thread does work.
+ */
+    public void frameAvailable(SurfaceTexture st) {
+        synchronized (mReadyFence) {
+            if (!mReady) {
+                return;
+            }
+        }
+
+        if (mHandler == null) {
+            return;
+        }
+
+        float[] transform = new float[16];      // TODO - avoid alloc every frame
+        st.getTransformMatrix(transform);
+        /*
+        long timestamp = st.getTimestamp();
+        if (timestamp == 0) {
+            // Seeing this after device is toggled off/on with power button.  The
+            // first frame back has a zero timestamp.
+            //
+            // MPEG4Writer thinks this is cause to abort() in native code, so it's very
+            // important that we just ignore the frame.
+            Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
+            return;
+        }
+        */
+
+        long frameTime = System.currentTimeMillis();
+        if (mVideoEncoder != null && (frameTime - mLastFrameTime) >= getFrameInterval())
+        {
+            Log.d(TAG, "frame interval: " + getFrameInterval());
+            // Throttle to one frame per frame interval; e.g. a 50 ms interval
+            // means 20 fps or less.
+            long timestamp = (frameTime - mRecordingStartTime)
+                    * 1000000; // convert to nanoseconds
+            mLastFrameTime = frameTime;
+            mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
+                    (int) (timestamp >> 32), (int) timestamp, transform));
+        }
+    }
+
+    private long getFrameInterval() {
+        return 1000 / mEncoderConfig.mFrameRate;
+    }
+
+    public void setFrameRate(int framerate) {
+        if (mEncoderConfig != null) {
+            mEncoderConfig.mFrameRate = framerate;
+        }
+    }
+
+    public void setBitrate(int bitrate) {
+        if (mEncoderConfig != null) {
+            mEncoderConfig.mBitRate = bitrate;
+        }
+    }
+
+    public void setIframeInterval(int seconds) {
+        mIframeInterval = seconds;
+    }
+
+    public int getFrameRate() {
+        return mEncoderConfig != null ? mEncoderConfig.mFrameRate : 0;
+    }
+
+ /**
+ * Tells the video recorder what texture name to use.  This is the external texture that
+ * we're receiving camera previews in.  (Call from non-encoder thread.)
+ *

+ * TODO: do something less clumsy + */ + public void setTextureId(int id) { + synchronized (mReadyFence) { + if (!mReady) { + return; + } + } + mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null)); + } + + public void setEffect(Texture2dProgram.ProgramType programType) { + synchronized (mReadyFence) { + if (!mReady) { + return; + } + } + mHandler.sendMessage(mHandler.obtainMessage(MSG_CHANGE_EFFECT, 0, 0, programType)); + } + + /** + * Encoder thread entry point. Establishes Looper/Handler and waits for messages. + *
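+ * The thread body below follows the standard Looper pattern; stripped to its skeleton
+ * (a sketch, not the exact code):
+ * <pre>
+ *     Looper.prepare();                   // give this thread a message queue
+ *     mHandler = new EncoderHandler(this);
+ *     // signal startRecording() that we're ready, then block in the loop
+ *     Looper.loop();                      // runs until MSG_QUIT calls Looper#quit()
+ * </pre>
+ *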

+ * @see Thread#run()
+ */
+    @Override
+    public void run() {
+        // Establish a Looper for this thread, and define a Handler for it.
+        Looper.prepare();
+        synchronized (mReadyFence) {
+            mHandler = new EncoderHandler(this);
+            mReady = true;
+            mReadyFence.notify();
+        }
+        Looper.loop();
+
+        Log.d(TAG, "Encoder thread exiting");
+        synchronized (mReadyFence) {
+            mReady = mRunning = false;
+            mHandler = null;
+        }
+    }
+
+ /**
+ * Handles encoder state change requests.  The handler is created on the encoder thread.
+ */
+    private static class EncoderHandler extends Handler {
+
+        private WeakReference<TextureMovieEncoder> mWeakEncoder;
+
+        public EncoderHandler(TextureMovieEncoder encoder) {
+            mWeakEncoder = new WeakReference<>(encoder);
+        }
+
+        @Override  // runs on encoder thread
+        public void handleMessage(Message inputMessage) {
+            int what = inputMessage.what;
+            Object obj = inputMessage.obj;
+
+            TextureMovieEncoder encoder = mWeakEncoder.get();
+            if (encoder == null) {
+                Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
+                return;
+            }
+
+            switch (what) {
+                case MSG_START_RECORDING:
+                    encoder.handleStartRecording((EncoderConfig) obj);
+                    break;
+                case MSG_STOP_RECORDING:
+                    encoder.handleStopRecording(true);
+                    break;
+                case MSG_RELEASE_RECORDING:
+                    encoder.handleStopRecording(false);
+                    break;
+                case MSG_FRAME_AVAILABLE:
+                    long timestamp = (((long) inputMessage.arg1) << 32) |
+                            (((long) inputMessage.arg2) & 0xffffffffL);
+                    encoder.handleFrameAvailable((float[]) obj, timestamp);
+                    break;
+                case MSG_SET_TEXTURE_ID:
+                    encoder.handleSetTexture(inputMessage.arg1);
+                    break;
+                case MSG_UPDATE_SHARED_CONTEXT:
+                    encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
+                    break;
+                case MSG_CHANGE_EFFECT:
+                    encoder.changeEffect((Texture2dProgram.ProgramType) inputMessage.obj);
+                    break;
+                case MSG_QUIT:
+                    Log.d(TAG, "looper quit requested");
+                    Looper.myLooper().quit();
+                    break;
+                default:
+                    throw new RuntimeException("Unhandled msg what=" + what);
+            }
+        }
+    }
+
+    private void changeEffect(Texture2dProgram.ProgramType type) {
+        if (mFullScreen != null) {
+            //// TODO: 25.04.2016 try with true parameter
+            mFullScreen.release(false);
+        }
+        mFullScreen = new FullFrameRect(
+                new Texture2dProgram(type));
+        mProgramType = type;
+    }
+
+ /**
+ * Starts recording.
+ */
+    private void handleStartRecording(EncoderConfig config) {
+        Log.d(TAG, "handleStartRecording " + config);
+        this.mEncoderConfig = config;
+        mFrameNum = 0;
+        prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate, config.mFrameRate,
+                config.writerHandler, config.mProgramType);
+    }
+
+ /**
+ * Handles notification of an available frame.
+ *

+ * The texture is rendered onto the encoder's input surface, along with a moving + * box (just because we can). + *
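+ * The per-frame ordering in the body below matters: drain the codec's output first so
+ * its input surface doesn't back up, then draw, then set the timestamp and swap.  In
+ * outline:
+ * <pre>
+ *     mVideoEncoder.drainEncoder(false);
+ *     mFullScreen.drawFrame(mTextureId, transform);
+ *     mInputWindowSurface.setPresentationTime(timestampNanos);
+ *     mInputWindowSurface.swapBuffers();
+ * </pre>
+ *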

+ * @param transform The texture transform, from SurfaceTexture.
+ * @param timestampNanos The frame's timestamp, from SurfaceTexture.
+ */
+    private void handleFrameAvailable(float[] transform, long timestampNanos) {
+        if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
+        if (mFullScreen != null) {
+            mVideoEncoder.drainEncoder(false);
+            mFullScreen.drawFrame(mTextureId, transform);
+
+            // drawBox(mFrameNum++);
+
+            mInputWindowSurface.setPresentationTime(timestampNanos);
+            mInputWindowSurface.swapBuffers();
+        }
+    }
+
+ /**
+ * Handles a request to stop encoding.
+ */
+    private void handleStopRecording(boolean stopMuxer) {
+        Log.d(TAG, "handleStopRecording");
+        mVideoEncoder.drainEncoder(true);
+        releaseEncoder();
+        if (stopMuxer) {
+            mVideoEncoder.stopMuxer();
+        }
+    }
+
+ /**
+ * Sets the texture name that SurfaceTexture will use when frames are received.
+ */
+    private void handleSetTexture(int id) {
+        //Log.d(TAG, "handleSetTexture " + id);
+        mTextureId = id;
+    }
+
+ /**
+ * Tears down the EGL surface and context we've been using to feed the MediaCodec input
+ * surface, and replaces it with a new one that shares with the new context.
+ *

+ * This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
+ * that got torn down) and we need to hook up with the new one.
+ */
+    private void handleUpdateSharedContext(EGLContext newSharedContext) {
+        Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
+
+        // Release the EGLSurface and EGLContext.
+        mInputWindowSurface.releaseEglSurface();
+        mFullScreen.release(false);
+        mEglCore.release();
+
+        // Create a new EGLContext and recreate the window surface.
+        mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
+        mInputWindowSurface.recreate(mEglCore);
+        mInputWindowSurface.makeCurrent();
+
+        // Create new programs and such for the new context.
+        mFullScreen = new FullFrameRect(
+                new Texture2dProgram(mProgramType));
+    }
+
+    private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate, int frameRate,
+                                IMediaMuxer writerHandle, Texture2dProgram.ProgramType programType)
+            throws IllegalStateException
+    {
+        try {
+            mVideoEncoder = new VideoEncoderCore(width, height, bitRate, frameRate, mIframeInterval, writerHandle);
+        } catch (IOException ioe) {
+            throw new RuntimeException(ioe);
+        }
+        mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
+        mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
+        mInputWindowSurface.makeCurrent();
+
+        mProgramType = programType;
+        mFullScreen = new FullFrameRect(
+                new Texture2dProgram(programType));
+    }
+
+    private void releaseEncoder() {
+        mVideoEncoder.release();
+        if (mInputWindowSurface != null) {
+            mInputWindowSurface.release();
+            mInputWindowSurface = null;
+        }
+        if (mFullScreen != null) {
+            mFullScreen.release(false);
+            mFullScreen = null;
+        }
+        if (mEglCore != null) {
+            mEglCore.release();
+            mEglCore = null;
+        }
+    }
+
+ /**
+ * Draws a box, with position offset.
+ */
+    private void drawBox(int posn) {
+        final int width = mInputWindowSurface.getWidth();
+        int xpos = (posn * 4) % (width - 50);
+        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+        GLES20.glScissor(xpos, 0, 100, 100);
+        GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java
new file mode 100644
index 0000000..2809afc
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/VideoEncoderCore.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.psudoanon.broadcaster.encoder; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.util.Log; +import android.view.Surface; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import com.psudoanon.broadcaster.network.IMediaMuxer; + +/** + * This class wraps up the core components used for surface-input video encoding. + *

+ * Once created, frames are fed to the input surface. Remember to provide the presentation + * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the + * producer side doesn't get backed up. + *
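+ * A hedged construction sketch (the IMediaMuxer instance is a placeholder; the
+ * constructor throws IOException):
+ * <pre>
+ *     VideoEncoderCore core = new VideoEncoderCore(1280, 720, 850 * 1000, 20, 2, muxer);
+ *     Surface input = core.getInputSurface();   // wrap with an EGL window surface
+ *     // per frame: render to the surface, then
+ *     core.drainEncoder(false);
+ * </pre>
+ *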

+ * This class is not thread-safe, with one exception: it is valid to use the input surface
+ * on one thread, and drain the output on a different thread.
+ */
+public class VideoEncoderCore {
+    private static final String TAG = VideoEncoderCore.class.getSimpleName();
+    private static final boolean VERBOSE = false;
+
+    // TODO: these ought to be configurable as well
+    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
+    //private int frameRate = 20; // 20fps
+    private static final int IFRAME_INTERVAL = 2;           // 2 seconds between I-frames
+    private IMediaMuxer mWriterHandler;
+
+    private Surface mInputSurface;
+    private MediaCodec mEncoder;
+    private MediaCodec.BufferInfo mBufferInfo;
+    private boolean mMuxerStarted;
+    private Map<Integer, byte[]> reservedBuffers = new HashMap<>();
+
+ /**
+ * Configures encoder and muxer state, and prepares the input Surface.
+ */
+    public VideoEncoderCore(int width, int height, int bitRate, int frameRate, int iframeInterval, IMediaMuxer writerHandler)
+            throws IOException {
+        mBufferInfo = new MediaCodec.BufferInfo();
+
+        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
+
+        // this.frameRate = frameRate;
+
+        // Set some properties.  Failing to specify some of these can cause the MediaCodec
+        // configure() call to throw an unhelpful exception.
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iframeInterval);
+
+        if (VERBOSE) Log.d(TAG, "format: " + format);
+
+        // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
+        // we can use for input and wrap it with a class that handles the EGL work.
+        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
+        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+        mInputSurface = mEncoder.createInputSurface();
+        mEncoder.start();
+
+        mWriterHandler = writerHandler;
+    }
+
+    public static boolean doesEncoderWork(int width, int height, int bitRate, int frameRate) {
+
+        boolean success = false;
+        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
+
+        // Set some properties.  Failing to specify some of these can cause the MediaCodec
+        // configure() call to throw an unhelpful exception.
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+        if (VERBOSE) Log.d(TAG, "format: " + format);
+
+        // Create a MediaCodec encoder and configure it with our format; if creating the
+        // input surface succeeds, the encoder is usable.
+        MediaCodec encoder = null;
+        try {
+            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
+            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            encoder.createInputSurface();
+            success = true;
+        } catch (IOException e) {
+            e.printStackTrace();
+        } catch (IllegalStateException e) {
+            e.printStackTrace();
+        } finally {
+            if (encoder != null) {
+                encoder.release();
+            }
+        }
+
+        return success;
+    }
+
+ /**
+ * Returns the encoder's input surface.
+ */
+    public Surface getInputSurface() {
+        return mInputSurface;
+    }
+
+ /**
+ * Releases encoder resources.
+ */ + public void release() { + if (VERBOSE) Log.d(TAG, "releasing encoder objects"); + if (mEncoder != null) { + mEncoder.stop(); + mEncoder.release(); + mEncoder = null; + } + } + + /** + * Extracts all pending data from the encoder and forwards it to the muxer. + *

+ * If endOfStream is not set, this returns when there is no more data to drain.  If it
+ * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
+ * Calling this with endOfStream set should be done once, right before stopping the muxer.
+ *
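+ * The shutdown path used elsewhere in this module (see TextureMovieEncoder's
+ * handleStopRecording) is essentially:
+ * <pre>
+ *     drainEncoder(true);    // send EOS, then drain until EOS comes back out
+ *     release();             // stop and release the MediaCodec
+ *     stopMuxer();           // optional, depending on the stop/release variant
+ * </pre>
+ *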

+ * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're + * not recording audio. + */ + public void drainEncoder(boolean endOfStream) { + if (mEncoder == null) { + return; + } + final int TIMEOUT_USEC = 10000; + if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")"); + + if (endOfStream) { + if (VERBOSE) Log.d(TAG, "sending EOS to encoder"); + mEncoder.signalEndOfInputStream(); + } + + ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers(); + while (true) { + int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (!endOfStream) { + break; // out of while + } else { + if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS"); + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = mEncoder.getOutputBuffers(); + Log.d("VideoEncoder", "INFO_OUTPUT_BUFFERS_CHANGED"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // should happen before receiving buffers, and should only happen once + if (mMuxerStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mEncoder.getOutputFormat(); + ByteBuffer sps = newFormat.getByteBuffer("csd-0"); + ByteBuffer pps = newFormat.getByteBuffer("csd-1"); + byte[] config = new byte[sps.limit() + pps.limit()]; + sps.get(config, 0, sps.limit()); + pps.get(config, sps.limit(), pps.limit()); + + mWriterHandler.writeVideo(config, config.length, 0); + + Log.d(TAG, "encoder output format changed: " + newFormat); + + mMuxerStarted = true; + } else if (encoderStatus < 0) { + Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + + encoderStatus); + // let's ignore it + + } else { + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + encoderStatus + + " was null"); + } + + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // The codec config data was pulled out and fed to the muxer when we got + // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. + if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG"); + mBufferInfo.size = 0; + } + + if (mBufferInfo.size != 0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + + // adjust the ByteBuffer values to match BufferInfo (not needed?) 
+                    encodedData.position(mBufferInfo.offset);
+                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
+
+                    long presentationTimeInMillis = mBufferInfo.presentationTimeUs / 1000; // convert to milliseconds
+                    // Divide to milliseconds in long arithmetic first, then cast to int.
+                    // Otherwise, after about 35 minutes presentationTimeUs exceeds
+                    // Integer.MAX_VALUE and a direct
+                    // int presentationTime = (int) mBufferInfo.presentationTimeUs / 1000;
+                    // would go negative.
+                    int presentationTime = (int) presentationTimeInMillis;
+                    byte[] data = getBuffer(mBufferInfo.size, mWriterHandler.getLastVideoFrameTimeStamp(), presentationTime);
+                    encodedData.get(data, 0, mBufferInfo.size);
+                    encodedData.position(mBufferInfo.offset);
+
+                    mWriterHandler.writeVideo(data, mBufferInfo.size, presentationTime);
+                }
+                mEncoder.releaseOutputBuffer(encoderStatus, false);
+
+                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                    if (!endOfStream) {
+                        Log.w(TAG, "reached end of stream unexpectedly");
+                    } else {
+                        if (VERBOSE) Log.d(TAG, "end of stream reached");
+                    }
+                    reservedBuffers.clear();
+                    break;      // out of while
+                }
+            }
+        }
+    }
+
+    private byte[] getBuffer(int size, int lastSentFrameTimestamp, int currentTimeStamp)
+    {
+        /*
+         * How does it work?  We put byte arrays into a map keyed by their timestamp.
+         * When a new output buffer array is needed, we check the muxer's last written
+         * frame timestamp; any buffer whose timestamp is less than or equal to it has
+         * already been written to the network, so it can be reused.
+         */
+        Iterator<Map.Entry<Integer, byte[]>> iterator = reservedBuffers.entrySet().iterator();
+
+        while (iterator.hasNext()) {
+            Map.Entry<Integer, byte[]> next = iterator.next();
+            if (next.getKey() <= lastSentFrameTimestamp)
+            {
+                // it means this frame has been sent
+                byte[] value = next.getValue();
+                iterator.remove();
+                if (value.length >= size)
+                {
+                    reservedBuffers.put(currentTimeStamp, value);
+                    return value;
+                }
+                // if the byte array is not big enough for the requested size,
+                // we leave it to the GC
+            }
+        }
+
+        // no eligible buffer found, create a new byte array
+        byte[] data = new byte[size];
+        reservedBuffers.put(currentTimeStamp, data);
+        return data;
+    }
+
+    public void stopMuxer() {
+        if (mWriterHandler != null) {
+            mWriterHandler.stopMuxer();
+            mWriterHandler = null;
+        }
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java
new file mode 100644
index 0000000..abf9795
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Drawable2d.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Base class for stuff we like to draw.
+ */
+public class Drawable2d {
+    private static final int SIZEOF_FLOAT = 4;
+
+ /**
+ * Simple equilateral triangle (1.0 per side).  Centered on (0,0).
+ */
+    private static final float TRIANGLE_COORDS[] = {
+         0.0f,  0.577350269f,   // 0 top
+        -0.5f, -0.288675135f,   // 1 bottom left
+         0.5f, -0.288675135f    // 2 bottom right
+    };
+    private static final float TRIANGLE_TEX_COORDS[] = {
+        0.5f, 0.0f,     // 0 top center
+        0.0f, 1.0f,     // 1 bottom left
+        1.0f, 1.0f,     // 2 bottom right
+    };
+    private static final FloatBuffer TRIANGLE_BUF =
+            GlUtil.createFloatBuffer(TRIANGLE_COORDS);
+    private static final FloatBuffer TRIANGLE_TEX_BUF =
+            GlUtil.createFloatBuffer(TRIANGLE_TEX_COORDS);
+
+ /**
+ * Simple square, specified as a triangle strip.  The square is centered on (0,0) and has
+ * a size of 1x1.
+ *

+ * Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding).
+ */
+    private static final float RECTANGLE_COORDS[] = {
+        -0.5f, -0.5f,   // 0 bottom left
+         0.5f, -0.5f,   // 1 bottom right
+        -0.5f,  0.5f,   // 2 top left
+         0.5f,  0.5f,   // 3 top right
+    };
+    private static final float RECTANGLE_TEX_COORDS[] = {
+        0.0f, 1.0f,     // 0 bottom left
+        1.0f, 1.0f,     // 1 bottom right
+        0.0f, 0.0f,     // 2 top left
+        1.0f, 0.0f      // 3 top right
+    };
+    private static final FloatBuffer RECTANGLE_BUF =
+            GlUtil.createFloatBuffer(RECTANGLE_COORDS);
+    private static final FloatBuffer RECTANGLE_TEX_BUF =
+            GlUtil.createFloatBuffer(RECTANGLE_TEX_COORDS);
+
+ /**
+ * A "full" square, extending from -1 to +1 in both dimensions.  When the model/view/projection
+ * matrix is identity, this will exactly cover the viewport.
+ *

+ * The texture coordinates are Y-inverted relative to RECTANGLE.  (This seems to work out
+ * right with external textures from SurfaceTexture.)
+ */
+    private static final float FULL_RECTANGLE_COORDS[] = {
+        -1.0f, -1.0f,   // 0 bottom left
+         1.0f, -1.0f,   // 1 bottom right
+        -1.0f,  1.0f,   // 2 top left
+         1.0f,  1.0f,   // 3 top right
+    };
+    private static final float FULL_RECTANGLE_TEX_COORDS[] = {
+        0.0f, 0.0f,     // 0 bottom left
+        1.0f, 0.0f,     // 1 bottom right
+        0.0f, 1.0f,     // 2 top left
+        1.0f, 1.0f      // 3 top right
+    };
+    private static final FloatBuffer FULL_RECTANGLE_BUF =
+            GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS);
+    private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+            GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS);
+
+    private FloatBuffer mVertexArray;
+    private FloatBuffer mTexCoordArray;
+    private int mVertexCount;
+    private int mCoordsPerVertex;
+    private int mVertexStride;
+    private int mTexCoordStride;
+    private Prefab mPrefab;
+
+ /**
+ * Enum values for constructor.
+ */
+    public enum Prefab {
+        TRIANGLE, RECTANGLE, FULL_RECTANGLE
+    }
+
+ /**
+ * Prepares a drawable from a "pre-fabricated" shape definition.
+ *
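+ * For instance (a tiny usage sketch):
+ * <pre>
+ *     Drawable2d quad = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
+ *     quad.getVertexCount();      // 4 (triangle strip)
+ *     quad.getCoordsPerVertex();  // 2
+ * </pre>
+ *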

+ * Does no EGL/GL operations, so this can be done at any time. + */ + public Drawable2d(Prefab shape) { + switch (shape) { + case TRIANGLE: + mVertexArray = TRIANGLE_BUF; + mTexCoordArray = TRIANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = TRIANGLE_COORDS.length / mCoordsPerVertex; + break; + case RECTANGLE: + mVertexArray = RECTANGLE_BUF; + mTexCoordArray = RECTANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = RECTANGLE_COORDS.length / mCoordsPerVertex; + break; + case FULL_RECTANGLE: + mVertexArray = FULL_RECTANGLE_BUF; + mTexCoordArray = FULL_RECTANGLE_TEX_BUF; + mCoordsPerVertex = 2; + mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; + mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex; + break; + default: + throw new RuntimeException("Unknown shape " + shape); + } + mTexCoordStride = 2 * SIZEOF_FLOAT; + mPrefab = shape; + } + + /** + * Returns the array of vertices. + *

+ * To avoid allocations, this returns internal state. The caller must not modify it. + */ + public FloatBuffer getVertexArray() { + return mVertexArray; + } + + /** + * Returns the array of texture coordinates. + *

+ * To avoid allocations, this returns internal state.  The caller must not modify it.
+ */
+    public FloatBuffer getTexCoordArray() {
+        return mTexCoordArray;
+    }
+
+ /**
+ * Returns the number of vertices stored in the vertex array.
+ */
+    public int getVertexCount() {
+        return mVertexCount;
+    }
+
+ /**
+ * Returns the width, in bytes, of the data for each vertex.
+ */
+    public int getVertexStride() {
+        return mVertexStride;
+    }
+
+ /**
+ * Returns the width, in bytes, of the data for each texture coordinate.
+ */
+    public int getTexCoordStride() {
+        return mTexCoordStride;
+    }
+
+ /**
+ * Returns the number of position coordinates per vertex.  This will be 2 or 3.
+ */
+    public int getCoordsPerVertex() {
+        return mCoordsPerVertex;
+    }
+
+    @Override
+    public String toString() {
+        if (mPrefab != null) {
+            return "[Drawable2d: " + mPrefab + "]";
+        } else {
+            return "[Drawable2d: ...]";
+        }
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java
new file mode 100644
index 0000000..56b1704
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglCore.java
@@ -0,0 +1,372 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.util.Log;
+import android.view.Surface;
+
+/**
+ * Core EGL state (display, context, config).
+ *

+ * The EGLContext must only be attached to one thread at a time.  This class is not
+ * thread-safe.
+ */
+public final class EglCore {
+    private static final String TAG = GlUtil.TAG;
+
+ /**
+ * Constructor flag: surface must be recordable.  This discourages EGL from using a
+ * pixel format that cannot be converted efficiently to something usable by the video
+ * encoder.
+ */
+    public static final int FLAG_RECORDABLE = 0x01;
+
+ /**
+ * Constructor flag: ask for GLES3, fall back to GLES2 if not available.  Without this
+ * flag, GLES2 is used.
+ */
+    public static final int FLAG_TRY_GLES3 = 0x02;
+
+    // Android-specific extension.
+    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+    private EGLConfig mEGLConfig = null;
+    private int mGlVersion = -1;
+
+ /**
+ * Prepares EGL display and context.
+ *

+ * Equivalent to EglCore(null, 0).
+ */
+    public EglCore() {
+        this(null, 0);
+    }
+
+ /**
+ * Prepares EGL display and context.
+ *
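+ * A typical recording setup, matching how this class is used elsewhere in this module:
+ * <pre>
+ *     EglCore eglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
+ *     EGLSurface surface = eglCore.createWindowSurface(encoderInputSurface);
+ *     eglCore.makeCurrent(surface);
+ * </pre>
+ *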

+ * @param sharedContext The context to share, or null if sharing is not desired.
+ * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
+ */
+    public EglCore(EGLContext sharedContext, int flags) {
+        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("EGL already set up");
+        }
+
+        if (sharedContext == null) {
+            sharedContext = EGL14.EGL_NO_CONTEXT;
+        }
+
+        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("unable to get EGL14 display");
+        }
+        int[] version = new int[2];
+        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+            mEGLDisplay = null;
+            throw new RuntimeException("unable to initialize EGL14");
+        }
+
+        // Try to get a GLES3 context, if requested.
+        if ((flags & FLAG_TRY_GLES3) != 0) {
+            //Log.d(TAG, "Trying GLES 3");
+            EGLConfig config = getConfig(flags, 3);
+            if (config != null) {
+                int[] attrib3_list = {
+                        EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
+                        EGL14.EGL_NONE
+                };
+                EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+                        attrib3_list, 0);
+
+                if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
+                    //Log.d(TAG, "Got GLES 3 config");
+                    mEGLConfig = config;
+                    mEGLContext = context;
+                    mGlVersion = 3;
+                }
+            }
+        }
+        if (mEGLContext == EGL14.EGL_NO_CONTEXT) {  // GLES 2 only, or GLES 3 attempt failed
+            //Log.d(TAG, "Trying GLES 2");
+            EGLConfig config = getConfig(flags, 2);
+            if (config == null) {
+                throw new RuntimeException("Unable to find a suitable EGLConfig");
+            }
+            int[] attrib2_list = {
+                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+                    EGL14.EGL_NONE
+            };
+            EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+                    attrib2_list, 0);
+            checkEglError("eglCreateContext");
+            mEGLConfig = config;
+            mEGLContext = context;
+            mGlVersion = 2;
+        }
+
+        // Confirm with query.
+        int[] values = new int[1];
+        EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
+                values, 0);
+        Log.d(TAG, "EGLContext created, client version " + values[0]);
+    }
+
+ /**
+ * Finds a suitable EGLConfig.
+ *
+ * @param flags Bit flags from constructor.
+ * @param version Must be 2 or 3.
+ */
+    private EGLConfig getConfig(int flags, int version) {
+        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
+        if (version >= 3) {
+            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
+        }
+
+        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
+        // doesn't really help.  It can also lead to a huge performance hit on glReadPixels()
+        // when reading into a GL_RGBA buffer.
+        int[] attribList = {
+                EGL14.EGL_RED_SIZE, 8,
+                EGL14.EGL_GREEN_SIZE, 8,
+                EGL14.EGL_BLUE_SIZE, 8,
+                EGL14.EGL_ALPHA_SIZE, 8,
+                //EGL14.EGL_DEPTH_SIZE, 16,
+                //EGL14.EGL_STENCIL_SIZE, 8,
+                EGL14.EGL_RENDERABLE_TYPE, renderableType,
+                EGL14.EGL_NONE, 0,      // placeholder for recordable [@-3]
+                EGL14.EGL_NONE
+        };
+        if ((flags & FLAG_RECORDABLE) != 0) {
+            attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
+            attribList[attribList.length - 2] = 1;
+        }
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] numConfigs = new int[1];
+        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+                numConfigs, 0)) {
+            Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
+            return null;
+        }
+        return configs[0];
+    }
+
+ /**
+ * Discards all resources held by this class, notably the EGL context.  This must be
+ * called from the thread where the context was created.
+ *

+ * On completion, no context will be current.
+ */
+    public void release() {
+        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+            // Android is unusual in that it uses a reference-counted EGLDisplay.  So for
+            // every eglInitialize() we need an eglTerminate().
+            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+                    EGL14.EGL_NO_CONTEXT);
+            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+            EGL14.eglReleaseThread();
+            EGL14.eglTerminate(mEGLDisplay);
+        }
+
+        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+        mEGLContext = EGL14.EGL_NO_CONTEXT;
+        mEGLConfig = null;
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+                // We're limited here -- finalizers don't run on the thread that holds
+                // the EGL state, so if a surface or context is still current on another
+                // thread we can't fully release it here.  Exceptions thrown from here
+                // are quietly discarded.  Complain in the log file.
+                Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
+                release();
+            }
+        } finally {
+            super.finalize();
+        }
+    }
+
+ /**
+ * Destroys the specified surface.  Note the EGLSurface won't actually be destroyed if it's
+ * still current in a context.
+ */
+    public void releaseSurface(EGLSurface eglSurface) {
+        EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
+    }
+
+ /**
+ * Creates an EGL surface associated with a Surface.
+ *

+ * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
+ */
+    public EGLSurface createWindowSurface(Object surface) {
+        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+            throw new RuntimeException("invalid surface: " + surface);
+        }
+
+        // Create a window surface, and attach it to the Surface we received.
+        int[] surfaceAttribs = {
+                EGL14.EGL_NONE
+        };
+        EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
+                surfaceAttribs, 0);
+        checkEglError("eglCreateWindowSurface");
+        if (eglSurface == null) {
+            throw new RuntimeException("surface was null");
+        }
+        return eglSurface;
+    }
+
+ /**
+ * Creates an EGL surface associated with an offscreen buffer.
+ */
+    public EGLSurface createOffscreenSurface(int width, int height) {
+        int[] surfaceAttribs = {
+                EGL14.EGL_WIDTH, width,
+                EGL14.EGL_HEIGHT, height,
+                EGL14.EGL_NONE
+        };
+        EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
+                surfaceAttribs, 0);
+        checkEglError("eglCreatePbufferSurface");
+        if (eglSurface == null) {
+            throw new RuntimeException("surface was null");
+        }
+        return eglSurface;
+    }
+
+ /**
+ * Makes our EGL context current, using the supplied surface for both "draw" and "read".
+ */
+    public void makeCurrent(EGLSurface eglSurface) {
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            // called makeCurrent() before create?
+            Log.d(TAG, "NOTE: makeCurrent w/o display");
+        }
+        if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
+            throw new RuntimeException("eglMakeCurrent failed");
+        }
+    }
+
+ /**
+ * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
+ */
+    public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            // called makeCurrent() before create?
+            Log.d(TAG, "NOTE: makeCurrent w/o display");
+        }
+        if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
+            throw new RuntimeException("eglMakeCurrent(draw,read) failed");
+        }
+    }
+
+ /**
+ * Makes no context current.
+ */
+    public void makeNothingCurrent() {
+        if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+                EGL14.EGL_NO_CONTEXT)) {
+            throw new RuntimeException("eglMakeCurrent failed");
+        }
+    }
+
+ /**
+ * Calls eglSwapBuffers.  Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+    public boolean swapBuffers(EGLSurface eglSurface) {
+        return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
+    }
+
+ /**
+ * Sends the presentation time stamp to EGL.  Time is expressed in nanoseconds.
+ */
+    public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
+        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
+    }
+
+ /**
+ * Returns true if our context and the specified surface are current.
+ */
+    public boolean isCurrent(EGLSurface eglSurface) {
+        return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
+                eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
+    }
+
+ /**
+ * Performs a simple surface query.
+ */
+    public int querySurface(EGLSurface eglSurface, int what) {
+        int[] value = new int[1];
+        EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
+        return value[0];
+    }
+
+ /**
+ * Queries a string value.
+ */
+    public String queryString(int what) {
+        return EGL14.eglQueryString(mEGLDisplay, what);
+    }
+
+ /**
+ * Returns the GLES version this context is configured for (currently 2 or 3).
+ */
+    public int getGlVersion() {
+        return mGlVersion;
+    }
+
+ /**
+ * Writes the current display, context, and surface to the log.
+ */
+    public static void logCurrent(String msg) {
+        EGLDisplay display;
+        EGLContext context;
+        EGLSurface surface;
+
+        display = EGL14.eglGetCurrentDisplay();
+        context = EGL14.eglGetCurrentContext();
+        surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
+        Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context
+                + ", surface=" + surface);
+    }
+
+ /**
+ * Checks for EGL errors.  Throws an exception if an error has been raised.
+ */
+    private void checkEglError(String msg) {
+        int error;
+        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+        }
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java
new file mode 100644
index 0000000..b5339d1
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/EglSurfaceBase.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.graphics.Bitmap;
+import android.opengl.EGL14;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Common base class for EGL surfaces.
+ *

+ * There can be multiple surfaces associated with a single context.
+ */
+public class EglSurfaceBase {
+    protected static final String TAG = GlUtil.TAG;
+
+    // EglCore object we're associated with.  It may be associated with multiple surfaces.
+    protected EglCore mEglCore;
+
+    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+    private int mWidth = -1;
+    private int mHeight = -1;
+
+    protected EglSurfaceBase(EglCore eglCore) {
+        mEglCore = eglCore;
+    }
+
+ /**
+ * Creates a window surface.
+ *

+ * @param surface May be a Surface or SurfaceTexture. + */ + public void createWindowSurface(Object surface) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createWindowSurface(surface); + + // Don't cache width/height here, because the size of the underlying surface can change + // out from under us (see e.g. HardwareScalerActivity). + //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); + //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } + + /** + * Creates an off-screen surface. + */ + public void createOffscreenSurface(int width, int height) { + if (mEGLSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + mEGLSurface = mEglCore.createOffscreenSurface(width, height); + mWidth = width; + mHeight = height; + } + + /** + * Returns the surface's width, in pixels. + *

+ * If this is called on a window surface, and the underlying surface is in the process
+ * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
+ * callback).  The size should match after the next buffer swap.
+ */
+    public int getWidth() {
+        if (mWidth < 0) {
+            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+        } else {
+            return mWidth;
+        }
+    }
+
+ /**
+ * Returns the surface's height, in pixels.
+ */
+    public int getHeight() {
+        if (mHeight < 0) {
+            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+        } else {
+            return mHeight;
+        }
+    }
+
+ /**
+ * Release the EGL surface.
+ */
+    public void releaseEglSurface() {
+        mEglCore.releaseSurface(mEGLSurface);
+        mEGLSurface = EGL14.EGL_NO_SURFACE;
+        mWidth = mHeight = -1;
+    }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+    public void makeCurrent() {
+        mEglCore.makeCurrent(mEGLSurface);
+    }
+
+ /**
+ * Makes our EGL context and surface current for drawing, using the supplied surface
+ * for reading.
+ */
+    public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
+        mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
+    }
+
+ /**
+ * Calls eglSwapBuffers.  Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+    public boolean swapBuffers() {
+        boolean result = mEglCore.swapBuffers(mEGLSurface);
+        if (!result) {
+            Log.d(TAG, "WARNING: swapBuffers() failed");
+        }
+        return result;
+    }
+
+ /**
+ * Sends the presentation time stamp to EGL.
+ *
+ * @param nsecs Timestamp, in nanoseconds.
+ */
+    public void setPresentationTime(long nsecs) {
+        mEglCore.setPresentationTime(mEGLSurface, nsecs);
+    }
+
+ /**
+ * Saves the EGL surface to a file.
+ *
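+ * Hypothetical usage (the output directory is a placeholder, and the method throws
+ * IOException):
+ * <pre>
+ *     windowSurface.makeCurrent();
+ *     windowSurface.saveFrame(new File(outputDir, "frame.png"));
+ * </pre>
+ *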

+ * Expects that this object's EGL surface is current.
+ */
+    public void saveFrame(File file) throws IOException {
+        if (!mEglCore.isCurrent(mEGLSurface)) {
+            throw new RuntimeException("Expected EGL context/surface is not current");
+        }
+
+        // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
+        // data (i.e. a byte of red, followed by a byte of green...).  While the Bitmap
+        // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
+        // Bitmap "copy pixels" method wants the same format GL provides.
+        //
+        // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
+        // here often.
+        //
+        // Making this even more interesting is the upside-down nature of GL, which means
+        // our output will look upside down relative to what appears on screen if the
+        // typical GL conventions are used.
+
+        String filename = file.toString();
+
+        int width = getWidth();
+        int height = getHeight();
+        ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+        buf.order(ByteOrder.LITTLE_ENDIAN);
+        GLES20.glReadPixels(0, 0, width, height,
+                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+        GlUtil.checkGlError("glReadPixels");
+        buf.rewind();
+
+        BufferedOutputStream bos = null;
+        try {
+            bos = new BufferedOutputStream(new FileOutputStream(filename));
+            Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+            bmp.copyPixelsFromBuffer(buf);
+            bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
+            bmp.recycle();
+        } finally {
+            if (bos != null) bos.close();
+        }
+        Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java
new file mode 100644
index 0000000..55dc138
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/FullFrameRect.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+/**
+ * This class essentially represents a viewport-sized sprite that will be rendered with
+ * a texture, usually from an external source like the camera or video decoder.
+ */
+public class FullFrameRect {
+    private final Drawable2d mRectDrawable = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
+    private Texture2dProgram mProgram;
+
+ /**
+ * Prepares the object.
+ *
+ * @param program The program to use.  FullFrameRect takes ownership, and will release
+ *     the program when no longer needed.
+ */
+    public FullFrameRect(Texture2dProgram program) {
+        mProgram = program;
+    }
+
+ /**
+ * Releases resources.
+ *

+ * This must be called with the appropriate EGL context current (i.e. the one that was
+ * current when the constructor was called).  If we're about to destroy the EGL context,
+ * there's no value in having the caller make it current just to do this cleanup, so you
+ * can pass a flag that will tell this function to skip any EGL-context-specific cleanup.
+ */
+    public void release(boolean doEglCleanup) {
+        if (mProgram != null) {
+            if (doEglCleanup) {
+                mProgram.release();
+            }
+            mProgram = null;
+        }
+    }
+
+ /**
+ * Returns the program currently in use.
+ */
+    public Texture2dProgram getProgram() {
+        return mProgram;
+    }
+
+ /**
+ * Changes the program.  The previous program will be released.
+ *

+ * The appropriate EGL activity must be current. + */ + public void changeProgram(Texture2dProgram program) { + mProgram.release(); + mProgram = program; + } + + /** + * Creates a texture object suitable for use with drawFrame(). + */ + public int createTextureObject() { + return mProgram.createTextureObject(); + } + + /** + * Draws a viewport-filling rect, texturing it with the specified texture object. + */ + public void drawFrame(int textureId, float[] texMatrix) { + // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. + mProgram.draw(GlUtil.IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0, + mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(), + mRectDrawable.getVertexStride(), + texMatrix, mRectDrawable.getTexCoordArray(), textureId, + mRectDrawable.getTexCoordStride()); + } +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java new file mode 100644 index 0000000..b946b3e --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/GlUtil.java @@ -0,0 +1,195 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.psudoanon.broadcaster.encoder.gles; + +import android.opengl.GLES20; +import android.opengl.GLES30; +import android.opengl.Matrix; +import android.util.Log; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; + +/** + * Some OpenGL utility functions. + */ +public class GlUtil { + public static final String TAG = "Grafika"; + + /** Identity matrix for general use. Don't modify or life will get weird. */ + public static final float[] IDENTITY_MATRIX; + static { + IDENTITY_MATRIX = new float[16]; + Matrix.setIdentityM(IDENTITY_MATRIX, 0); + } + + private static final int SIZEOF_FLOAT = 4; + + + private GlUtil() {} // do not instantiate + + /** + * Creates a new program from the supplied vertex and fragment shaders. + * + * @return A handle to the program, or 0 on failure. 
+ */ + public static int createProgram(String vertexSource, String fragmentSource) { + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); + if (vertexShader == 0) { + return 0; + } + int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); + if (pixelShader == 0) { + return 0; + } + + int program = GLES20.glCreateProgram(); + checkGlError("glCreateProgram"); + if (program == 0) { + Log.e(TAG, "Could not create program"); + } + GLES20.glAttachShader(program, vertexShader); + checkGlError("glAttachShader"); + GLES20.glAttachShader(program, pixelShader); + checkGlError("glAttachShader"); + GLES20.glLinkProgram(program); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + Log.e(TAG, "Could not link program: "); + Log.e(TAG, GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); + program = 0; + } + return program; + } + + /** + * Compiles the provided shader source. + * + * @return A handle to the shader, or 0 on failure. + */ + public static int loadShader(int shaderType, String source) { + int shader = GLES20.glCreateShader(shaderType); + checkGlError("glCreateShader type=" + shaderType); + GLES20.glShaderSource(shader, source); + GLES20.glCompileShader(shader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e(TAG, "Could not compile shader " + shaderType + ":"); + Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); + GLES20.glDeleteShader(shader); + shader = 0; + } + return shader; + } + + /** + * Checks to see if a GLES error has been raised. + */ + public static void checkGlError(String op) { + int error = GLES20.glGetError(); + if (error != GLES20.GL_NO_ERROR) { + String msg = op + ": glError 0x" + Integer.toHexString(error); + Log.e(TAG, msg); + throw new RuntimeException(msg); + } + } + + /** + * Checks to see if the location we obtained is valid. GLES returns -1 if a label + * could not be found, but does not set the GL error. + *
+ * Throws a RuntimeException if the location is invalid. + */ + public static void checkLocation(int location, String label) { + if (location < 0) { + throw new RuntimeException("Unable to locate '" + label + "' in program"); + } + } + + /** + * Creates a texture from raw data. + * + * @param data Image data, in a "direct" ByteBuffer. + * @param width Texture width, in pixels (not bytes). + * @param height Texture height, in pixels. + * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA). + * @return Handle to texture. + */ + public static int createImageTexture(ByteBuffer data, int width, int height, int format) { + int[] textureHandles = new int[1]; + int textureHandle; + + GLES20.glGenTextures(1, textureHandles, 0); + textureHandle = textureHandles[0]; + GlUtil.checkGlError("glGenTextures"); + + // Bind the texture handle to the 2D texture target. + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle); + + // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering + // is smaller or larger than the source image. + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, + GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, + GLES20.GL_LINEAR); + GlUtil.checkGlError("loadImageTexture"); + + // Load the data from the buffer into the texture handle. + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format, + width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data); + GlUtil.checkGlError("loadImageTexture"); + + return textureHandle; + } + + /** + * Allocates a direct float buffer, and populates it with the float array data. + */ + public static FloatBuffer createFloatBuffer(float[] coords) { + // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. + ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT); + bb.order(ByteOrder.nativeOrder()); + FloatBuffer fb = bb.asFloatBuffer(); + fb.put(coords); + fb.position(0); + return fb; + } + + /** + * Writes GL version info to the log. + */ + public static void logVersionInfo() { + Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR)); + Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER)); + Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION)); + + if (false) { + int[] values = new int[1]; + GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0); + int majorVersion = values[0]; + GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0); + int minorVersion = values[0]; + if (GLES30.glGetError() == GLES30.GL_NO_ERROR) { + Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion); + } + } + } +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java new file mode 100644 index 0000000..688a056 --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/Texture2dProgram.java @@ -0,0 +1,435 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.psudoanon.broadcaster.encoder.gles;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GL program and supporting functions for textured 2D shapes.
+ */
+public class Texture2dProgram {
+    private static final String TAG = GlUtil.TAG;
+
+    public enum ProgramType {
+        TEXTURE_2D, TEXTURE_EXT, TEXTURE_EXT_BW, TEXTURE_EXT_FILT,
+        TEXTURE_EXT_SEPIA, TEXTURE_EXT_CROSSPROCESS, TEXTURE_EXT_POSTERIZE, TEXTURE_EXT_GRAYSCALE
+    }
+
+    public static final List<ProgramType> EFFECTS = new ArrayList<>();
+    static {
+        EFFECTS.add(ProgramType.TEXTURE_EXT);
+        // EFFECTS.add(ProgramType.TEXTURE_EXT_BW);
+        EFFECTS.add(ProgramType.TEXTURE_EXT_CROSSPROCESS);
+        EFFECTS.add(ProgramType.TEXTURE_EXT_POSTERIZE);
+        EFFECTS.add(ProgramType.TEXTURE_EXT_SEPIA);
+        EFFECTS.add(ProgramType.TEXTURE_EXT_GRAYSCALE);
+        // EFFECTS.add(ProgramType.TEXTURE_EXT_CONTRAST);
+    }
+
+    // Simple vertex shader, used for all programs.
+    private static final String VERTEX_SHADER =
+            "uniform mat4 uMVPMatrix;\n" +
+            "uniform mat4 uTexMatrix;\n" +
+            "attribute vec4 aPosition;\n" +
+            "attribute vec4 aTextureCoord;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "    gl_Position = uMVPMatrix * aPosition;\n" +
+            "    vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n" +
+            "}\n";
+
+    // Simple fragment shader for use with "normal" 2D textures.
+    private static final String FRAGMENT_SHADER_2D =
+            "precision mediump float;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "uniform sampler2D sTexture;\n" +
+            "void main() {\n" +
+            "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+            "}\n";
+
+    // Simple fragment shader for use with external 2D textures (e.g. what we get from
+    // SurfaceTexture).
+    private static final String FRAGMENT_SHADER_EXT =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "void main() {\n" +
+            "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+            "}\n";
+
+    // Fragment shader that converts color to black & white with a simple transformation.
+    private static final String FRAGMENT_SHADER_EXT_BW =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "void main() {\n" +
+            "    vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
+            "    float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
+            "    gl_FragColor = vec4(color, color, color, 1.0);\n" +
+            "}\n";
+
+
+    // Fragment shader that applies a sepia-tone transformation.
+    private static final String FRAGMENT_SHADER_EXT_SEPIA =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "void main() {\n" +
+            "    vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
+            "    gl_FragColor = vec4(tc.x, tc.y, tc.z, 1.0);\n" +
+            "    gl_FragColor.r = dot(tc, vec4(.393, .769, .189, 0));\n" +
+            "    gl_FragColor.g = dot(tc, vec4(.349, .686, .168, 0));\n" +
+            "    gl_FragColor.b = dot(tc, vec4(.272, .534, .131, 0));\n" +
+            "}\n";
+
+    private static final String FRAGMENT_SHADER_EXT_POSTERIZE =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "    vec3 color = texture2D(sTexture, vTextureCoord).rgb;\n" +
+            "    color = pow(color, vec3(0.6, 0.6, 0.6));\n" +
+            "    color = color * 8.0;\n" +
+            "    color = floor(color);\n" +
+            "    color = color / 8.0;\n" +
+            "    color = pow(color, vec3(1.0/0.6));\n" +
+            "    gl_FragColor = vec4(color, 1.0);\n" +
+            "}\n";
+
+    private static final String FRAGMENT_SHADER_EXT_GRAY_SCALE =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "    vec4 color = texture2D(sTexture, vTextureCoord);\n" +
+            "    float y = dot(color, vec4(0.299, 0.587, 0.114, 0));\n" +
+            "    gl_FragColor = vec4(y, y, y, color.a);\n" +
+            "}\n";
+
+
+    private static final String FRAGMENT_SHADER_EXT_CROSSPROCESS =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "    vec4 color = texture2D(sTexture, vTextureCoord);\n" +
+            "    vec3 ncolor = vec3(0.0, 0.0, 0.0);\n" +
+            "    float value;\n" +
+            "    if (color.r < 0.5) {\n" +
+            "        value = color.r;\n" +
+            "    } else {\n" +
+            "        value = 1.0 - color.r;\n" +
+            "    }\n" +
+            "    float red = 4.0 * value * value * value;\n" +
+            "    if (color.r < 0.5) {\n" +
+            "        ncolor.r = red;\n" +
+            "    } else {\n" +
+            "        ncolor.r = 1.0 - red;\n" +
+            "    }\n" +
+            "    if (color.g < 0.5) {\n" +
+            "        value = color.g;\n" +
+            "    } else {\n" +
+            "        value = 1.0 - color.g;\n" +
+            "    }\n" +
+            "    float green = 2.0 * value * value;\n" +
+            "    if (color.g < 0.5) {\n" +
+            "        ncolor.g = green;\n" +
+            "    } else {\n" +
+            "        ncolor.g = 1.0 - green;\n" +
+            "    }\n" +
+            "    ncolor.b = color.b * 0.5 + 0.25;\n" +
+            "    gl_FragColor = vec4(ncolor.rgb, color.a);\n" +
+            "}\n";
+
+
+    // Fragment shader with a convolution filter.  The upper-left half will be drawn normally,
+    // the lower-right half will have the filter applied, and a thin red line will be drawn
+    // at the border.
+    //
+    // This is not optimized for performance.  Some things that might make this faster:
+    // - Remove the conditionals.  They're used to present a half & half view with a red
+    //   stripe across the middle, but that's only useful for a demo.
+    // - Unroll the loop.  Ideally the compiler does this for you when it's beneficial.
+    // - Bake the filter kernel into the shader, instead of passing it through a uniform
+    //   array.  That, combined with loop unrolling, should reduce memory accesses.
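+    //
+    // Example usage of the filter program (an illustrative sketch, not part of the
+    // original file; previewWidth and previewHeight stand in for caller-supplied values):
+    //
+    //   Texture2dProgram program = new Texture2dProgram(ProgramType.TEXTURE_EXT_FILT);
+    //   program.setKernel(new float[] {0f, -1f, 0f, -1f, 5f, -1f, 0f, -1f, 0f}, 0f);  // 3x3 sharpen
+    //   program.setTexSize(previewWidth, previewHeight);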
+    public static final int KERNEL_SIZE = 9;
+    private static final String FRAGMENT_SHADER_EXT_FILT =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "#define KERNEL_SIZE " + KERNEL_SIZE + "\n" +
+            "precision highp float;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "uniform float uKernel[KERNEL_SIZE];\n" +
+            "uniform vec2 uTexOffset[KERNEL_SIZE];\n" +
+            "uniform float uColorAdjust;\n" +
+            "void main() {\n" +
+            "    int i = 0;\n" +
+            "    vec4 sum = vec4(0.0);\n" +
+            "    if (vTextureCoord.x < vTextureCoord.y - 0.005) {\n" +
+            "        for (i = 0; i < KERNEL_SIZE; i++) {\n" +
+            "            vec4 texc = texture2D(sTexture, vTextureCoord + uTexOffset[i]);\n" +
+            "            sum += texc * uKernel[i];\n" +
+            "        }\n" +
+            "        sum += uColorAdjust;\n" +
+            "    } else if (vTextureCoord.x > vTextureCoord.y + 0.005) {\n" +
+            "        sum = texture2D(sTexture, vTextureCoord);\n" +
+            "    } else {\n" +
+            "        sum.r = 1.0;\n" +
+            "    }\n" +
+            "    gl_FragColor = sum;\n" +
+            "}\n";
+
+    private ProgramType mProgramType;
+
+    // Handles to the GL program and various components of it.
+    private int mProgramHandle;
+    private int muMVPMatrixLoc;
+    private int muTexMatrixLoc;
+    private int muKernelLoc;
+    private int muTexOffsetLoc;
+    private int muColorAdjustLoc;
+    private int maPositionLoc;
+    private int maTextureCoordLoc;
+
+    private int mTextureTarget;
+
+    private float[] mKernel = new float[KERNEL_SIZE];
+    private float[] mTexOffset;
+    private float mColorAdjust;
+
+
+    /**
+     * Prepares the program in the current EGL context.
+     */
+    public Texture2dProgram(ProgramType programType) {
+        mProgramType = programType;
+
+        switch (programType) {
+            case TEXTURE_2D:
+                mTextureTarget = GLES20.GL_TEXTURE_2D;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_2D);
+                break;
+            case TEXTURE_EXT:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT);
+                break;
+            case TEXTURE_EXT_BW:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_BW);
+                break;
+            case TEXTURE_EXT_GRAYSCALE:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_GRAY_SCALE);
+                break;
+            case TEXTURE_EXT_SEPIA:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_SEPIA);
+                break;
+            case TEXTURE_EXT_POSTERIZE:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_POSTERIZE);
+                break;
+            case TEXTURE_EXT_CROSSPROCESS:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_CROSSPROCESS);
+                break;
+            case TEXTURE_EXT_FILT:
+                mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+                mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_FILT);
+                break;
+            default:
+                throw new RuntimeException("Unhandled type " + programType);
+        }
+        if (mProgramHandle == 0) {
+            throw new RuntimeException("Unable to create program");
+        }
+        Log.d(TAG, "Created program " + mProgramHandle + " (" + programType + ")");
+
+        // get locations of attributes and uniforms
+
+        maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
+        GlUtil.checkLocation(maPositionLoc, "aPosition");
+        maTextureCoordLoc = GLES20.glGetAttribLocation(mProgramHandle, "aTextureCoord");
+        GlUtil.checkLocation(maTextureCoordLoc, "aTextureCoord");
+        muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix");
+        GlUtil.checkLocation(muMVPMatrixLoc, "uMVPMatrix");
+        muTexMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexMatrix");
+        GlUtil.checkLocation(muTexMatrixLoc, "uTexMatrix");
+        muKernelLoc = GLES20.glGetUniformLocation(mProgramHandle, "uKernel");
+        if (muKernelLoc < 0) {
+            // no kernel in this one
+            muKernelLoc = -1;
+            muTexOffsetLoc = -1;
+            muColorAdjustLoc = -1;
+        } else {
+            // has kernel, must also have tex offset and color adj
+            muTexOffsetLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexOffset");
+            GlUtil.checkLocation(muTexOffsetLoc, "uTexOffset");
+            muColorAdjustLoc = GLES20.glGetUniformLocation(mProgramHandle, "uColorAdjust");
+            GlUtil.checkLocation(muColorAdjustLoc, "uColorAdjust");
+
+            // initialize default values
+            setKernel(new float[] {0f, 0f, 0f, 0f, 1f, 0f, 0f, 0f, 0f}, 0f);
+            setTexSize(256, 256);
+        }
+    }
+
+    /**
+     * Releases the program.
+     *
+     * The appropriate EGL context must be current (i.e. the one that was used to create
+     * the program).
+     */
+    public void release() {
+        Log.d(TAG, "deleting program " + mProgramHandle);
+        GLES20.glDeleteProgram(mProgramHandle);
+        mProgramHandle = -1;
+    }
+
+    /**
+     * Returns the program type.
+     */
+    public ProgramType getProgramType() {
+        return mProgramType;
+    }
+
+    /**
+     * Creates a texture object suitable for use with this program.
+     *
+ * On exit, the texture will be bound. + */ + public int createTextureObject() { + int[] textures = new int[1]; + GLES20.glGenTextures(1, textures, 0); + GlUtil.checkGlError("glGenTextures"); + + int texId = textures[0]; + GLES20.glBindTexture(mTextureTarget, texId); + GlUtil.checkGlError("glBindTexture " + texId); + + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, + GLES20.GL_NEAREST); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, + GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, + GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, + GLES20.GL_CLAMP_TO_EDGE); + GlUtil.checkGlError("glTexParameter"); + + return texId; + } + + /** + * Configures the convolution filter values. + * + * @param values Normalized filter values; must be KERNEL_SIZE elements. + */ + public void setKernel(float[] values, float colorAdj) { + if (values.length != KERNEL_SIZE) { + throw new IllegalArgumentException("Kernel size is " + values.length + + " vs. " + KERNEL_SIZE); + } + System.arraycopy(values, 0, mKernel, 0, KERNEL_SIZE); + mColorAdjust = colorAdj; + //Log.d(TAG, "filt kernel: " + Arrays.toString(mKernel) + ", adj=" + colorAdj); + } + + /** + * Sets the size of the texture. This is used to find adjacent texels when filtering. + */ + public void setTexSize(int width, int height) { + float rw = 1.0f / width; + float rh = 1.0f / height; + + // Don't need to create a new array here, but it's syntactically convenient. + mTexOffset = new float[] { + -rw, -rh, 0f, -rh, rw, -rh, + -rw, 0f, 0f, 0f, rw, 0f, + -rw, rh, 0f, rh, rw, rh + }; + //Log.d(TAG, "filt size: " + width + "x" + height + ": " + Arrays.toString(mTexOffset)); + } + + /** + * Issues the draw call. Does the full setup on every call. + * + * @param mvpMatrix The 4x4 projection matrix. + * @param vertexBuffer Buffer with vertex position data. + * @param firstVertex Index of first vertex to use in vertexBuffer. + * @param vertexCount Number of vertices in vertexBuffer. + * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2). + * @param vertexStride Width, in bytes, of the position data for each vertex (often + * vertexCount * sizeof(float)). + * @param texMatrix A 4x4 transformation matrix for texture coords. (Primarily intended + * for use with SurfaceTexture.) + * @param texBuffer Buffer with vertex texture data. + * @param texStride Width, in bytes, of the texture data for each vertex. + */ + public void draw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex, + int vertexCount, int coordsPerVertex, int vertexStride, + float[] texMatrix, FloatBuffer texBuffer, int textureId, int texStride) { + GlUtil.checkGlError("draw start"); + + // Select the program. + GLES20.glUseProgram(mProgramHandle); + GlUtil.checkGlError("glUseProgram"); + + // Set the texture. + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(mTextureTarget, textureId); + + // Copy the model / view / projection matrix over. + GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0); + GlUtil.checkGlError("glUniformMatrix4fv"); + + // Copy the texture transformation matrix over. + GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, texMatrix, 0); + GlUtil.checkGlError("glUniformMatrix4fv"); + + // Enable the "aPosition" vertex attribute. 
+ GLES20.glEnableVertexAttribArray(maPositionLoc); + GlUtil.checkGlError("glEnableVertexAttribArray"); + + // Connect vertexBuffer to "aPosition". + GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex, + GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); + GlUtil.checkGlError("glVertexAttribPointer"); + + // Enable the "aTextureCoord" vertex attribute. + GLES20.glEnableVertexAttribArray(maTextureCoordLoc); + GlUtil.checkGlError("glEnableVertexAttribArray"); + + // Connect texBuffer to "aTextureCoord". + GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, + GLES20.GL_FLOAT, false, texStride, texBuffer); + GlUtil.checkGlError("glVertexAttribPointer"); + + // Populate the convolution kernel, if present. + if (muKernelLoc >= 0) { + GLES20.glUniform1fv(muKernelLoc, KERNEL_SIZE, mKernel, 0); + GLES20.glUniform2fv(muTexOffsetLoc, KERNEL_SIZE, mTexOffset, 0); + GLES20.glUniform1f(muColorAdjustLoc, mColorAdjust); + } + + // Draw the rect. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount); + GlUtil.checkGlError("glDrawArrays"); + + // Done -- disable vertex array, texture, and program. + GLES20.glDisableVertexAttribArray(maPositionLoc); + GLES20.glDisableVertexAttribArray(maTextureCoordLoc); + GLES20.glBindTexture(mTextureTarget, 0); + GLES20.glUseProgram(0); + } +} diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java new file mode 100644 index 0000000..cefe54f --- /dev/null +++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/encoder/gles/WindowSurface.java @@ -0,0 +1,90 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.psudoanon.broadcaster.encoder.gles; + +import android.graphics.SurfaceTexture; +import android.view.Surface; + +/** + * Recordable EGL window surface. + *
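+ * Wraps the Surface (or SurfaceTexture) the EGL surface is created from, so that
+ * both can be released in one place.
+ *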
+ * It's good practice to explicitly release() the surface, preferably from a "finally" block. + */ +public class WindowSurface extends EglSurfaceBase { + private Surface mSurface; + private boolean mReleaseSurface; + + /** + * Associates an EGL surface with the native window surface. + *
+ * Set releaseSurface to true if you want the Surface to be released when release() is + * called. This is convenient, but can interfere with framework classes that expect to + * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the + * surfaceDestroyed() callback won't fire). + */ + public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) { + super(eglCore); + createWindowSurface(surface); + mSurface = surface; + mReleaseSurface = releaseSurface; + } + + /** + * Associates an EGL surface with the SurfaceTexture. + */ + public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) { + super(eglCore); + createWindowSurface(surfaceTexture); + } + + /** + * Releases any resources associated with the EGL surface (and, if configured to do so, + * with the Surface as well). + *
+     * Does not require that the surface's EGL context be current.
+     */
+    public void release() {
+        releaseEglSurface();
+        if (mSurface != null) {
+            if (mReleaseSurface) {
+                mSurface.release();
+            }
+            mSurface = null;
+        }
+    }
+
+    /**
+     * Recreate the EGLSurface, using the new EglBase.  The caller should have already
+     * freed the old EGLSurface with releaseEglSurface().
+     *
+     * This is useful when we want to update the EGLSurface associated with a Surface.
+     * For example, if we want to share with a different EGLContext, which can only
+     * be done by tearing down and recreating the context.  (That's handled by the caller;
+     * this just creates a new EGLSurface for the Surface we were handed earlier.)
+     *
+     * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
+     * context somewhere, the create call will fail with complaints from the Surface
+     * about already being connected.
+     */
+    public void recreate(EglCore newEglCore) {
+        if (mSurface == null) {
+            throw new RuntimeException("not yet implemented for SurfaceTexture");
+        }
+        mEglCore = newEglCore;          // switch to new context
+        createWindowSurface(mSurface);  // create new surface
+    }
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java
new file mode 100644
index 0000000..a49927e
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/IMediaMuxer.java
@@ -0,0 +1,39 @@
+package com.psudoanon.broadcaster.network;
+
+/**
+ * Created by faraklit on 03.03.2016.
+ */
+public interface IMediaMuxer {
+
+    int SEND_AUDIO = 0;
+    int SEND_VIDEO = 1;
+    int STOP_STREAMING = 2;
+
+    /**
+     * @return the last audio frame timestamp in milliseconds
+     */
+    int getLastAudioFrameTimeStamp();
+
+
+    /**
+     * @return the last video frame timestamp in milliseconds
+     */
+    int getLastVideoFrameTimeStamp();
+
+
+    boolean open(String uri);
+
+    boolean isConnected();
+
+    void writeAudio(byte[] data, int size, int presentationTime);
+
+    void writeVideo(byte[] data, int length, int presentationTime);
+
+    void stopMuxer();
+
+    int getFrameCountInQueue();
+
+    int getVideoFrameCountInQueue();
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java
new file mode 100644
index 0000000..d413294
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/network/RTMPStreamer.java
@@ -0,0 +1,342 @@
+package com.psudoanon.broadcaster.network;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+
+import net.butterflytv.rtmp_client.RTMPMuxer;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+
+/**
+ * Created by faraklit on 09.02.2016.
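+ *
+ * Queues encoded audio and video frames, interleaves them by timestamp on the
+ * supplied Looper's thread, and writes them to the RTMP endpoint via RTMPMuxer.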
+ */
+public class RTMPStreamer extends Handler implements IMediaMuxer {
+
+
+    private static final boolean DEBUG = false;
+    private static final String TAG = RTMPStreamer.class.getSimpleName();
+    RTMPMuxer rtmpMuxer = new RTMPMuxer();
+
+    public int frameCount;
+    public int result = 0;
+    private int lastVideoFrameTimeStamp;
+    private int lastAudioFrameTimeStamp;
+    private int mLastReceivedVideoFrameTimeStamp = -1;
+    private int mLastReceivedAudioFrameTimeStamp = -1;
+    private int lastSentFrameTimeStamp = -1;
+    private Object frameSynchronized = new Object();
+    private boolean isConnected = false;
+
+    public class Frame {
+        byte[] data;
+        int timestamp;
+        int length;
+
+        public Frame(byte[] data, int length, int timestamp) {
+            this.data = data;
+            this.length = length;
+            this.timestamp = timestamp;
+        }
+    }
+
+    private ArrayList<Frame> audioFrameList = new ArrayList<>();
+    private ArrayList<Frame> videoFrameList = new ArrayList<>();
+
+
+    public RTMPStreamer(Looper looper) {
+        super(looper);
+        mLastReceivedVideoFrameTimeStamp = -1;
+        mLastReceivedAudioFrameTimeStamp = -1;
+        lastSentFrameTimeStamp = -1;
+    }
+
+    public int getLastReceivedVideoFrameTimeStamp() {
+        return mLastReceivedVideoFrameTimeStamp;
+    }
+
+    public int getLastReceivedAudioFrameTimeStamp() {
+        return mLastReceivedAudioFrameTimeStamp;
+    }
+
+    public int getLastSentFrameTimeStamp() {
+        return lastSentFrameTimeStamp;
+    }
+
+    /**
+     * @param url the RTMP url of the stream
+     */
+    public boolean open(String url) {
+        frameCount = 0;
+        lastVideoFrameTimeStamp = 0;
+        lastAudioFrameTimeStamp = 0;
+        mLastReceivedVideoFrameTimeStamp = -1;
+        mLastReceivedAudioFrameTimeStamp = -1;
+        lastSentFrameTimeStamp = -1;
+        isConnected = false;
+        int result = rtmpMuxer.open(url, 0, 0);
+
+        if (result > 0) {
+            // file_open("/mnt/sdcard/stream.flv" + (int) Math.random() * 1000);
+            // writeFLVHeader(true, true);
+            isConnected = true;
+        }
+        return isConnected;
+    }
+
+    public void close() {
+        Log.i(TAG, "close rtmp connection");
+        isConnected = false;
+        rtmpMuxer.close();
+    }
+
+    /**
+     * It is critically important to send the frames in time order.
+     * If an audio packet's timestamp is earlier than an already-sent video packet's
+     * timestamp, the server may close the connection, so the packets are ordered
+     * by timestamp below before they are written.
+     *
+     * @param msg
+     */
+    @Override
+    public void handleMessage(Message msg) {
+        switch (msg.what) {
+            case SEND_AUDIO: {
+                /**
+                 * msg.obj  aac data,
+                 * msg.arg1 length of the data
+                 * msg.arg2 timestamp
+                 */
+                if ((msg.arg2 >= mLastReceivedAudioFrameTimeStamp) && (msg.arg1 > 0)) {
+                    // some initial frames (decoder params) may be equal to previous ones,
+                    // so add the packet if its timestamp is not older than the last frame;
+                    // otherwise discard it. If we don't discard it, the RTMP connection drops entirely.
+                    mLastReceivedAudioFrameTimeStamp = msg.arg2;
+                    audioFrameList.add(new Frame((byte[]) msg.obj, msg.arg1, msg.arg2));
+                }
+                else {
+                    Log.w(TAG, "discarding audio packet because its timestamp is older than the last packet or its data length is zero");
+                }
+                sendFrames();
+            }
+            break;
+            case SEND_VIDEO: {
+                /**
+                 * msg.obj  h264 nal unit,
+                 * msg.arg1 length of the data
+                 * msg.arg2 timestamp
+                 */
+                if ((msg.arg2 >= mLastReceivedVideoFrameTimeStamp) && (msg.arg1 > 0)) {
+                    // some initial frames (decoder params) may be equal to previous ones,
+                    // so add the packet if its timestamp is not older than the last frame;
+                    // otherwise discard it. If we don't discard it, the RTMP connection drops entirely.
+                    mLastReceivedVideoFrameTimeStamp = msg.arg2;
+                    videoFrameList.add(new Frame((byte[]) msg.obj, msg.arg1, msg.arg2));
+                }
+                else {
+                    Log.w(TAG, "discarding video packet because its timestamp is older than the last packet or its data length is zero");
+                }
+                sendFrames();
+            }
+            break;
+            case STOP_STREAMING:
+                finishFrames();
+                close();
+                break;
+        }
+    }
+
+    private void finishFrames()
+    {
+        int videoFrameListSize, audioFrameListSize;
+        do {
+            sendFrames();
+
+            videoFrameListSize = videoFrameList.size();
+            audioFrameListSize = audioFrameList.size();
+            // loop until at least one of the frame lists is exhausted
+        } while ((videoFrameListSize > 0) && (audioFrameListSize > 0));
+
+        if (videoFrameListSize > 0) {
+            // send all video frames remaining in the list
+            sendVideoFrames(videoFrameList.get(videoFrameListSize - 1).timestamp);
+        }
+        else if (audioFrameListSize > 0) {
+            // send all audio frames remaining in the list
+            sendAudioFrames(audioFrameList.get(audioFrameListSize - 1).timestamp);
+        }
+    }
+
+    private void sendFrames() {
+        // this is a simple sorting pass: audio and video frame timestamps are not known
+        // in advance and are not deterministic, so send the audio frames whose timestamps
+        // precede the first queued video frame, and the video frames whose timestamps
+        // precede the first queued audio frame
+        int listSize = videoFrameList.size();
+        if (listSize > 0) {
+            sendAudioFrames(videoFrameList.get(0).timestamp);
+        }
+
+        listSize = audioFrameList.size();
+        if (listSize > 0) {
+            sendVideoFrames(audioFrameList.get(0).timestamp);
+        }
+    }
+
+    private void sendAudioFrames(int timestamp) {
+        Iterator<Frame> iterator = audioFrameList.iterator();
+        while (iterator.hasNext())
+        {
+            Frame audioFrame = iterator.next();
+            if (audioFrame.timestamp <= timestamp)
+            {
+                // only send the frame if its timestamp is not older than the last sent
+                // timestamp; in some cases audio and video frame timestamps may be equal
+                if (audioFrame.timestamp >= lastSentFrameTimeStamp) {
+                    if (audioFrame.timestamp == lastSentFrameTimeStamp) {
+                        audioFrame.timestamp++;
+                    }
+                    if (isConnected) {
+                        int result = rtmpMuxer.writeAudio(audioFrame.data, 0, audioFrame.length, audioFrame.timestamp);
+
+                        if (DEBUG) {
+                            Log.d(TAG, "send audio result: " + result + " time:" + audioFrame.timestamp + " length:" + audioFrame.length);
+                        }
+
+                        if (result < 0) {
+                            close();
+                        }
+                    }
+                    lastAudioFrameTimeStamp = audioFrame.timestamp;
+                    lastSentFrameTimeStamp = audioFrame.timestamp;
+                    synchronized (frameSynchronized) {
+                        frameCount--;
+                    }
+                }
+                iterator.remove();
+            }
+            else {
+                // this audio frame is newer than the cutoff timestamp;
+                // it will be sent later, so break the loop
+                break;
+            }
+        }
+    }
+
+    private void sendVideoFrames(int timestamp) {
+        Iterator<Frame> iterator = videoFrameList.iterator();
+        while (iterator.hasNext()) {
+            Frame frame = iterator.next();
+            if ((frame.timestamp <= timestamp))
+            {
+                // only send the frame if its timestamp is not older than the last sent
+                // timestamp; in some cases audio and video frame timestamps may be equal
+                if (frame.timestamp >= lastSentFrameTimeStamp) {
+                    if (frame.timestamp == lastSentFrameTimeStamp) {
+                        frame.timestamp++;
+                    }
+                    if (isConnected) {
+                        int result = rtmpMuxer.writeVideo(frame.data, 0, frame.length, frame.timestamp);
+                        if (DEBUG) {
+                            Log.d(TAG, "send video result: " + result + " time:" + frame.timestamp + " length:" + frame.length);
+                        }
+                        if (result < 0) {
+                            close();
+                        }
+                    }
+                    lastVideoFrameTimeStamp = frame.timestamp;
+                    lastSentFrameTimeStamp = frame.timestamp;
+                    synchronized (frameSynchronized) {
+                        frameCount--;
+                    }
+                }
+
+                iterator.remove();
+            }
+            else {
+                // this video frame is newer than the cutoff timestamp;
+                // it will be sent later, so break the loop
+                break;
+            }
+        }
+    }
+
+    public int getLastAudioFrameTimeStamp() {
+        return lastAudioFrameTimeStamp;
+    }
+
+    public int getLastVideoFrameTimeStamp() {
+        return lastVideoFrameTimeStamp;
+    }
+
+    public void writeFLVHeader(boolean hasAudio, boolean hasVideo) {
+        rtmpMuxer.write_flv_header(hasAudio, hasVideo);
+    }
+
+    public void file_open(String s) {
+        rtmpMuxer.file_open(s);
+    }
+
+
+    public void file_close() {
+        rtmpMuxer.file_close();
+    }
+
+    public boolean isConnected() {
+        return isConnected;
+    }
+
+    @Override
+    public void writeAudio(byte[] data, int size, int presentationTime) {
+        Message message = obtainMessage(IMediaMuxer.SEND_AUDIO, data);
+        message.arg1 = size;
+        message.arg2 = presentationTime;
+        sendMessage(message);
+        synchronized (frameSynchronized) {
+            frameCount++;
+        }
+        if (DEBUG) Log.d(TAG, "writeAudio size: " + size + " time:" + presentationTime);
+    }
+
+    @Override
+    public void writeVideo(byte[] data, int length, int presentationTime) {
+        Message message = obtainMessage(IMediaMuxer.SEND_VIDEO, data);
+        message.arg1 = length;
+        message.arg2 = presentationTime;
+        sendMessage(message);
+        synchronized (frameSynchronized) {
+            frameCount++;
+        }
+
+        if (DEBUG) Log.d(TAG, "writeVideo size: " + length + " time:" + presentationTime);
+    }
+
+    @Override
+    public void stopMuxer() {
+        sendEmptyMessage(RTMPStreamer.STOP_STREAMING);
+    }
+
+    @Override
+    public int getFrameCountInQueue() {
+        synchronized (frameSynchronized) {
+            return frameCount;
+        }
+    }
+
+    public int getVideoFrameCountInQueue() {
+        synchronized (frameSynchronized) {
+            return videoFrameList.size();
+        }
+    }
+}
+
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java
new file mode 100644
index 0000000..8a78db6
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Resolution.java
@@ -0,0 +1,18 @@
+package com.psudoanon.broadcaster.utils;
+
+import java.io.Serializable;
+
+/**
+ * Created by mekya on 28/03/2017.
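+ *
+ * Immutable, serializable width/height pair.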
+ */
+
+public class Resolution implements Serializable
+{
+    public final int width;
+    public final int height;
+
+    public Resolution(int width, int height) {
+        this.width = width;
+        this.height = height;
+    }
+}
\ No newline at end of file
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java
new file mode 100644
index 0000000..2742f5d
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/java/com/psudoanon/broadcaster/utils/Utils.java
@@ -0,0 +1,69 @@
+package com.psudoanon.broadcaster.utils;
+
+import android.content.Context;
+import android.content.SharedPreferences;
+
+
+public class Utils {
+
+    public static final String APP_SHARED_PREFERENCES = "applicationDetails";
+    private static final String DOES_ENCODER_WORKS = Utils.class.getName() + ".DOES_ENCODER_WORKS";
+
+
+    public static final int ENCODER_NOT_TESTED = -1;
+    public static final int ENCODER_WORKS = 1;
+    public static final int ENCODER_NOT_WORKS = 0;
+
+
+    //public static final String SHARED_PREFERENCE_FIRST_INSTALLATION="FIRST_INSTALLATION";
+    private static SharedPreferences sharedPreference = null;
+
+
+
+    public static String getDurationString(int seconds) {
+
+        if (seconds < 0 || seconds > 2000000) // there is a codec problem and the duration is not set correctly, so display a meaningful string
+            seconds = 0;
+        int hours = seconds / 3600;
+        int minutes = (seconds % 3600) / 60;
+        seconds = seconds % 60;
+
+        if (hours == 0)
+            return twoDigitString(minutes) + " : " + twoDigitString(seconds);
+        else
+            return twoDigitString(hours) + " : " + twoDigitString(minutes) + " : " + twoDigitString(seconds);
+    }
+
+    public static String twoDigitString(int number) {
+
+        if (number == 0) {
+            return "00";
+        }
+
+        if (number / 10 == 0) {
+            return "0" + number;
+        }
+
+        return String.valueOf(number);
+    }
+
+    public static SharedPreferences getDefaultSharedPreferences(Context context) {
+        if (sharedPreference == null) {
+            sharedPreference = context.getSharedPreferences(APP_SHARED_PREFERENCES, Context.MODE_PRIVATE);
+        }
+        return sharedPreference;
+    }
+
+
+    public static int doesEncoderWorks(Context context) {
+        return getDefaultSharedPreferences(context).getInt(DOES_ENCODER_WORKS, ENCODER_NOT_TESTED);
+    }
+
+    public static void setEncoderWorks(Context context, boolean works) {
+        SharedPreferences sharedPreferences = getDefaultSharedPreferences(context);
+        SharedPreferences.Editor editor = sharedPreferences.edit();
+        editor.putInt(DOES_ENCODER_WORKS, works ? ENCODER_WORKS : ENCODER_NOT_WORKS);
+        editor.apply();
+    }
+
+}
diff --git a/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml b/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml
new file mode 100644
index 0000000..b41a3a6
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/main/res/values/strings.xml
@@ -0,0 +1,22 @@
+
+    LiveVideoPublishSDK
+    On Air
+    No camera exists on your device
+    Camera is not running properly
+    Restart
+    Your previous broadcast is still sending packets due to slow internet speed
+    Oops, this shouldn\'t have happened. Please report this status to the library developer
+    Connection to the media server is lost
+    Network speed is low. Latency is increasing.
+    Your device is not eligible to broadcast. There is no hardware encoder.
+    You\'re not connected to any network. Please connect to the Internet
+    Stop
+    Camera and Microphone permission is required to run this app
+    Permission
+    App does not work without permissions. Please grant the Camera and Record Audio permissions in app settings
+    Call the openCamera function first
+    Only one camera exists on this device
+    Camera permission is required to run this app
+    Microphone permission is required to run this app
+
diff --git a/livestream-to-earn-android/app/broadcaster/src/test/java/com/psudoanon/broadcaster/ExampleUnitTest.kt b/livestream-to-earn-android/app/broadcaster/src/test/java/com/psudoanon/broadcaster/ExampleUnitTest.kt
new file mode 100644
index 0000000..f9acf27
--- /dev/null
+++ b/livestream-to-earn-android/app/broadcaster/src/test/java/com/psudoanon/broadcaster/ExampleUnitTest.kt
@@ -0,0 +1,17 @@
+package com.psudoanon.broadcaster
+
+import org.junit.Test
+
+import org.junit.Assert.*
+
+/**
+ * Example local unit test, which will execute on the development machine (host).
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+class ExampleUnitTest {
+    @Test
+    fun addition_isCorrect() {
+        assertEquals(4, 2 + 2)
+    }
+}
diff --git a/livestream-to-earn-android/app/build.gradle b/livestream-to-earn-android/app/build.gradle
new file mode 100644
index 0000000..fd56e28
--- /dev/null
+++ b/livestream-to-earn-android/app/build.gradle
@@ -0,0 +1,48 @@
+apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
+apply plugin: 'kotlin-android-extensions'
+
+android {
+    compileSdkVersion 29
+    buildToolsVersion "29.0.2"
+    ndkVersion "20.1.5948944"
+
+
+    defaultConfig {
+        applicationId "com.psudoanon.livestreamtoearn"
+        minSdkVersion 26
+        targetSdkVersion 29
+        versionCode 1
+        versionName "1.0"
+
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+    compileOptions {
+        sourceCompatibility = 1.8
+        targetCompatibility = 1.8
+    }
+
+}
+
+dependencies {
+    implementation fileTree(dir: 'libs', include: ['*.jar'])
+    implementation project(path: ':app:broadcaster')
+    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+    implementation 'androidx.appcompat:appcompat:1.1.0'
+    implementation 'androidx.core:core-ktx:1.2.0'
+    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+    implementation 'com.android.support:support-compat:28.0.0'
+    implementation 'org.web3j:core-android:2.2.1'
+    implementation 'io.github.novacrypto:BIP39:2019.01.27'
+    implementation 'androidx.legacy:legacy-support-v4:1.0.0'
+    testImplementation 'junit:junit:4.12'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
+}
diff --git a/livestream-to-earn-android/app/proguard-rules.pro b/livestream-to-earn-android/app/proguard-rules.pro
new file mode 100644
index 0000000..f1b4245
--- /dev/null
+++ b/livestream-to-earn-android/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/livestream-to-earn-android/app/src/androidTest/java/com/psudoanon/livestreamtoearn/ExampleInstrumentedTest.kt b/livestream-to-earn-android/app/src/androidTest/java/com/psudoanon/livestreamtoearn/ExampleInstrumentedTest.kt new file mode 100644 index 0000000..19da626 --- /dev/null +++ b/livestream-to-earn-android/app/src/androidTest/java/com/psudoanon/livestreamtoearn/ExampleInstrumentedTest.kt @@ -0,0 +1,24 @@ +package com.psudoanon.livestreamtoearn + +import androidx.test.platform.app.InstrumentationRegistry +import androidx.test.ext.junit.runners.AndroidJUnit4 + +import org.junit.Test +import org.junit.runner.RunWith + +import org.junit.Assert.* + +/** + * Instrumented test, which will execute on an Android device. + * + * See [testing documentation](http://d.android.com/tools/testing). + */ +@RunWith(AndroidJUnit4::class) +class ExampleInstrumentedTest { + @Test + fun useAppContext() { + // Context of the app under test. + val appContext = InstrumentationRegistry.getInstrumentation().targetContext + assertEquals("com.psudoanon.livestreamtoearn", appContext.packageName) + } +} diff --git a/livestream-to-earn-android/app/src/main/AndroidManifest.xml b/livestream-to-earn-android/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000..ca8cc00 --- /dev/null +++ b/livestream-to-earn-android/app/src/main/AndroidManifest.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/MainActivity.kt b/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/MainActivity.kt new file mode 100644 index 0000000..e064a83 --- /dev/null +++ b/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/MainActivity.kt @@ -0,0 +1,23 @@ +package com.psudoanon.livestreamtoearn + +import android.annotation.SuppressLint +import android.content.Intent +import android.content.pm.ActivityInfo +import androidx.appcompat.app.AppCompatActivity +import android.os.Bundle +import android.view.WindowManager + +class MainActivity : AppCompatActivity() { + + @SuppressLint("SourceLockedOrientationActivity") + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + setContentView(R.layout.activity_main) + + +// this.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE + + val intent = Intent(this, VideoBroadcastActivity::class.java) + startActivity(intent) + } +} diff --git a/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/VideoBroadcastActivity.kt b/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/VideoBroadcastActivity.kt new file mode 100644 index 0000000..0f2a980 --- /dev/null +++ b/livestream-to-earn-android/app/src/main/java/com/psudoanon/livestreamtoearn/VideoBroadcastActivity.kt @@ -0,0 +1,276 @@ +package com.psudoanon.livestreamtoearn + +import android.annotation.SuppressLint +import android.app.Notification +import android.app.NotificationChannel +import android.app.NotificationManager +import android.content.* +import android.content.pm.ActivityInfo +import android.hardware.Camera +import android.icu.text.IDNA +import android.opengl.GLSurfaceView +import android.os.* +import androidx.appcompat.app.AppCompatActivity +import android.view.View +import android.view.Window +import android.view.WindowManager +import android.widget.* +import androidx.core.app.NotificationCompat +import androidx.core.app.NotificationManagerCompat +import androidx.fragment.app.Fragment +import com.psudoanon.broadcaster.LiveVideoBroadcaster +import com.psudoanon.broadcaster.OnEventListener +import io.github.novacrypto.bip39.MnemonicGenerator +import io.github.novacrypto.bip39.Words +import io.github.novacrypto.bip39.wordlists.English +import io.github.novacrypto.hashing.Sha256.sha256 +import kotlinx.android.synthetic.main.activity_video_broadcast.* +import org.web3j.crypto.* +import org.web3j.protocol.Web3j +import org.web3j.protocol.Web3jFactory +import org.web3j.protocol.core.DefaultBlockParameter +import org.web3j.protocol.core.DefaultBlockParameterName +import org.web3j.protocol.http.HttpService +import org.web3j.protocol.rx.Web3jRx +import org.web3j.utils.Convert +import rx.internal.util.ActionSubscriber +import rx.schedulers.Schedulers +import java.io.File +import java.io.FileOutputStream +import java.io.ObjectOutputStream +import java.lang.Exception +import java.math.BigInteger +import java.util.* +import kotlin.random.Random +import kotlin.system.exitProcess + +class VideoBroadcastActivity : AppCompatActivity(), View.OnClickListener { + + private var live = false + private var flashOn = false + + private lateinit var wallet: WalletFile + private lateinit var mGLView: GLSurfaceView + private lateinit var mBalanceView: TextView + private lateinit var mWeb3Connection: Web3j + + private var privateKeyHex: String? = null + private var address: String? = null + private var keyPair: ECKeyPair? = null + private var privateKeyDec: BigInteger? = null + private var mLiveVideoBroadcaster: LiveVideoBroadcaster? 
= null + + private val INFURA_URL = "https://mainnet.infura.io/v3/" + private val RTMP_BASE_URL = "rtmp://192.168.1.151:1935/0x%s" + private val PUBLIC_ADDRESS_KEY = "public_address" + private val PRIVATE_HEX_KEY = "private" + private val WALLET_PASSWORD = "changeme" + private val N_CHANNEL_ID = "LSTE_NOTIFICATION_CHANNEL" + private val N_CHANNEL_NAME = "Livestream to Earn" + private val N_CHANNEL_DESC = "Livestream to Earn Notification Channel" + private val DESIRED_AUDIO_BITRATE = 128 * 1024 + private val DESIRED_VIDEO_BITRATE = 1000 * 1024 + private val I_FRAME_INTERVAL_SEC = 1 + private val TIMER_INTERVAL = (30 * 1000).toLong() // Fetch balance every 30 seconds + + private val mConnection = object : ServiceConnection { + override fun onServiceDisconnected(name: ComponentName?) { + mLiveVideoBroadcaster = null + } + + override fun onServiceConnected(name: ComponentName?, service: IBinder?) { + val binder = service as LiveVideoBroadcaster.LocalBinder + + if (mLiveVideoBroadcaster == null) { + mLiveVideoBroadcaster = binder.service as LiveVideoBroadcaster + mLiveVideoBroadcaster!!.init(this@VideoBroadcastActivity, mGLView) + + mLiveVideoBroadcaster!!.setVideoBitrate(DESIRED_VIDEO_BITRATE) + mLiveVideoBroadcaster!!.setAudioBitrate(DESIRED_AUDIO_BITRATE) + mLiveVideoBroadcaster!!.setIFrameIntervalSeconds(I_FRAME_INTERVAL_SEC) + + // mLiveVideoBroadcaster!!.setAdaptiveStreaming(true) + } + + mLiveVideoBroadcaster!!.openCamera(Camera.CameraInfo.CAMERA_FACING_BACK) + } + } + + private val mHandler: Handler = object : Handler(Looper.getMainLooper()) { + val UPDATE_TEXT = 1 + + override fun handleMessage(msg: Message) { + when (msg.what) { + UPDATE_TEXT -> { + try { + val balance = mWeb3Connection.ethGetBalance("0x"+address, DefaultBlockParameterName.LATEST).send() + val ethBalance = Convert.fromWei(balance.balance.toString(), Convert.Unit.ETHER) + + runOnUiThread { + mBalanceView.text = String.format("Balance: %s", ethBalance.toString()) + } + } catch (e: Exception) { + runOnUiThread { + mBalanceView.text = "Unable to fetch wallet balance" + } + } + } + } + } + } + + @SuppressLint("SourceLockedOrientationActivity") + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + + this.setupNotificationChannel() + + this.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE + this.supportActionBar?.hide() + this.window.setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN) + window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON) + setContentView(R.layout.activity_video_broadcast) + + mGLView = findViewById(R.id.cameraPerview).also { it.setOnClickListener(this) } + mGLView.setEGLContextClientVersion(2) + + mBalanceView = findViewById(R.id.balanceView) + + mBalanceView.text = "Fetching balance..." 
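+        // Web3jFactory.build() is the entry point of the web3j 2.x API matching the
+        // org.web3j:core-android:2.2.1 dependency; web3j 3.x+ renamed it to Web3j.build().
+        // Note that the blocking ethGetBalance call in mHandler stays off the main thread:
+        // the Timer below invokes dispatchMessage() directly on its own thread instead of
+        // posting to the main looper.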
+ + mWeb3Connection = Web3jFactory.build(HttpService(INFURA_URL)) + + if (getPrivateKey() == null || getWalletAddress() == null) { + keyPair = Keys.createEcKeyPair() + privateKeyDec = keyPair?.privateKey + privateKeyHex = privateKeyDec?.toString(16) + wallet = Wallet.createLight(WALLET_PASSWORD, keyPair) + address = wallet.address + + val sharedPreferences = getPreferences(Context.MODE_PRIVATE) + + with (sharedPreferences.edit()) { + putString(PRIVATE_HEX_KEY, privateKeyHex) + putString(PUBLIC_ADDRESS_KEY, address) + commit() + } + } else { + address = getWalletAddress() + privateKeyHex = getPrivateKey() + } + + val timer = Timer() + + timer.scheduleAtFixedRate(object : TimerTask() { + override fun run() { + mHandler.dispatchMessage(mHandler.obtainMessage(1)) + } + + }, 0, TIMER_INTERVAL) + } + + override fun onResume() { + super.onResume() + Handler().postDelayed({ + if (!live) { + toggleBroadcasting() + toggleFlash() + } + }, 1000) + } + + override fun onStart() { + super.onStart() + Intent(this, LiveVideoBroadcaster::class.java).also { intent -> bindService(intent, mConnection, Context.BIND_AUTO_CREATE) } + } + + override fun onStop() { + super.onStop() + unbindService(mConnection) + } + + override fun onClick(v: View?) { + when (v?.id) { + R.id.cameraPerview -> { + if (live) { + mLiveVideoBroadcaster!!.stopBroadcasting() + } + copyPrivateKeyToClipboard() + finishAffinity() + exitProcess(0) + } + } + } + + private fun setupNotificationChannel() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + val channel = NotificationChannel(N_CHANNEL_ID, N_CHANNEL_NAME, NotificationManager.IMPORTANCE_DEFAULT).apply { + description = N_CHANNEL_DESC + } + + val notificationManager: NotificationManager = getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager + notificationManager.createNotificationChannel(channel) + } + } + + private fun copyPrivateKeyToClipboard() { + val clipboard = getSystemService(Context.CLIPBOARD_SERVICE) as ClipboardManager + val clip = ClipData.newPlainText("key", getPrivateKey()) + + clipboard.setPrimaryClip(clip) + val notification = NotificationCompat.Builder(this, N_CHANNEL_ID) + .setSmallIcon(R.drawable.ic_flash_on_24px) + .setContentTitle("Livestream to Earn") + .setContentText("Private key copied to clipboard") + .setPriority(NotificationCompat.PRIORITY_DEFAULT) + + with(NotificationManagerCompat.from(this)) { + notify(0, notification.build()) + } + } + + private fun toggleBroadcasting() { + try { + if (address != null) { + if (!live) { + val streamUrl = String.format(RTMP_BASE_URL, address) + + showLongToast(String.format("Broadcasting to %s", streamUrl)) + mLiveVideoBroadcaster!!.startBroadcasting(streamUrl) + + live = true + } else { + showLongToast("Broadcast stopped") + mLiveVideoBroadcaster!!.stopBroadcasting() + live = false + } + } else { + showLongToast("Missing wallet") + } + } catch (e: Exception) { + showLongToast("Error starting broadcast") + } + } + + private fun toggleFlash() { + if (!flashOn) { + mLiveVideoBroadcaster?.startFlash() + flashOn = true + } else { + mLiveVideoBroadcaster?.stopFlash() + flashOn = false + } + } + + private fun getPrivateKey(): String? { + return getPreferences(Context.MODE_PRIVATE).getString(PRIVATE_HEX_KEY, null) + } + + private fun getWalletAddress(): String? 
+        return getPreferences(Context.MODE_PRIVATE).getString(PUBLIC_ADDRESS_KEY, null)
+    }
+
+    private fun showLongToast(text: String) {
+        Toast.makeText(applicationContext, text, Toast.LENGTH_LONG).show()
+    }
+}
diff --git a/livestream-to-earn-android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/livestream-to-earn-android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..2b068d1
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
diff --git a/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_off_24px.xml b/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_off_24px.xml
new file mode 100644
index 0000000..db90ef3
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_off_24px.xml
@@ -0,0 +1,9 @@
diff --git a/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_on_24px.xml b/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_on_24px.xml
new file mode 100644
index 0000000..f128954
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/drawable/ic_flash_on_24px.xml
@@ -0,0 +1,9 @@
diff --git a/livestream-to-earn-android/app/src/main/res/drawable/ic_launcher_background.xml b/livestream-to-earn-android/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..07d5da9
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
diff --git a/livestream-to-earn-android/app/src/main/res/layout/activity_main.xml b/livestream-to-earn-android/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..4fc2444
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,18 @@
diff --git a/livestream-to-earn-android/app/src/main/res/layout/activity_video_broadcast.xml b/livestream-to-earn-android/app/src/main/res/layout/activity_video_broadcast.xml
new file mode 100644
index 0000000..d4897da
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/layout/activity_video_broadcast.xml
@@ -0,0 +1,30 @@
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..a571e60
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
new file mode 100644
index 0000000..61da551
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..c41dd28
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
new file mode 100644
index 0000000..db5080a
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..6dba46d
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..da31a87
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..15ac681
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..b216f2d
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..f25a419
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..e96783c
Binary files /dev/null and b/livestream-to-earn-android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ
diff --git a/livestream-to-earn-android/app/src/main/res/values/colors.xml b/livestream-to-earn-android/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..030098f
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#6200EE</color>
+    <color name="colorPrimaryDark">#3700B3</color>
+    <color name="colorAccent">#03DAC5</color>
+</resources>
diff --git a/livestream-to-earn-android/app/src/main/res/values/strings.xml b/livestream-to-earn-android/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..085f2e8
--- /dev/null
+++ b/livestream-to-earn-android/app/src/main/res/values/strings.xml
@@ -0,0 +1,6 @@
+<resources>
+    <string name="app_name">Livestream to Earn</string>
+
+    <!-- TODO: Remove or change this placeholder text -->
+    <string name="hello_blank_fragment">Hello blank fragment</string>
+</resources>
diff --git
a/livestream-to-earn-android/app/src/main/res/values/styles.xml b/livestream-to-earn-android/app/src/main/res/values/styles.xml new file mode 100644 index 0000000..5885930 --- /dev/null +++ b/livestream-to-earn-android/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/livestream-to-earn-android/app/src/test/java/com/psudoanon/livestreamtoearn/ExampleUnitTest.kt b/livestream-to-earn-android/app/src/test/java/com/psudoanon/livestreamtoearn/ExampleUnitTest.kt new file mode 100644 index 0000000..8ee8e4c --- /dev/null +++ b/livestream-to-earn-android/app/src/test/java/com/psudoanon/livestreamtoearn/ExampleUnitTest.kt @@ -0,0 +1,17 @@ +package com.psudoanon.livestreamtoearn + +import org.junit.Test + +import org.junit.Assert.* + +/** + * Example local unit test, which will execute on the development machine (host). + * + * See [testing documentation](http://d.android.com/tools/testing). + */ +class ExampleUnitTest { + @Test + fun addition_isCorrect() { + assertEquals(4, 2 + 2) + } +} diff --git a/livestream-to-earn-android/build.gradle b/livestream-to-earn-android/build.gradle new file mode 100644 index 0000000..f31f8bb --- /dev/null +++ b/livestream-to-earn-android/build.gradle @@ -0,0 +1,29 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. + +buildscript { + ext.kotlin_version = '1.3.72' + repositories { + google() + jcenter() + + } + dependencies { + classpath 'com.android.tools.build:gradle:3.6.2' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/livestream-to-earn-android/gradle.properties b/livestream-to-earn-android/gradle.properties new file mode 100644 index 0000000..23339e0 --- /dev/null +++ b/livestream-to-earn-android/gradle.properties @@ -0,0 +1,21 @@ +# Project-wide Gradle settings. +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app's APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true +# Automatically convert third-party libraries to use AndroidX +android.enableJetifier=true +# Kotlin code style for this project: "official" or "obsolete": +kotlin.code.style=official diff --git a/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.jar b/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..f6b961f Binary files /dev/null and b/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.jar differ diff --git a/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.properties b/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..7aa20fa --- /dev/null +++ b/livestream-to-earn-android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Sun Apr 26 15:51:35 EDT 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/livestream-to-earn-android/gradlew b/livestream-to-earn-android/gradlew new file mode 100755 index 0000000..cccdd3d --- /dev/null +++ b/livestream-to-earn-android/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
+ +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/livestream-to-earn-android/gradlew.bat b/livestream-to-earn-android/gradlew.bat new file mode 100644 index 0000000..e95643d --- /dev/null +++ b/livestream-to-earn-android/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set 
local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/livestream-to-earn-android/local.properties b/livestream-to-earn-android/local.properties new file mode 100644 index 0000000..b86542c --- /dev/null +++ b/livestream-to-earn-android/local.properties @@ -0,0 +1,8 @@ +## This file must *NOT* be checked into Version Control Systems, +# as it contains information specific to your local configuration. +# +# Location of the SDK. This is only used by Gradle. +# For customization when using a Version Control System, please read the +# header note. +#Sun Apr 26 16:21:23 EDT 2020 +sdk.dir=/Users/adam/Library/Android/sdk diff --git a/livestream-to-earn-android/settings.gradle b/livestream-to-earn-android/settings.gradle new file mode 100644 index 0000000..3366479 --- /dev/null +++ b/livestream-to-earn-android/settings.gradle @@ -0,0 +1,3 @@ +rootProject.name='Livestream to Earn' +include ':app' +include ':app:broadcaster'
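The activity above keeps the wallet's raw private-key hex in plain SharedPreferences and copies it to the clipboard on exit (see the NOTE in onCreate). A minimal hardening sketch, assuming the AndroidX androidx.security:security-crypto artifact at 1.1.0 or later, which this project does not currently declare, and a hypothetical preference-file name "wallet_prefs":

import android.content.Context
import android.content.SharedPreferences
import androidx.security.crypto.EncryptedSharedPreferences
import androidx.security.crypto.MasterKey

// Hypothetical drop-in replacement for getPreferences(Context.MODE_PRIVATE):
// keys and values are encrypted at rest with an Android Keystore-backed master key.
fun encryptedWalletPrefs(context: Context): SharedPreferences {
    val masterKey = MasterKey.Builder(context)
        .setKeyScheme(MasterKey.KeyScheme.AES256_GCM)
        .build()

    return EncryptedSharedPreferences.create(
        context,
        "wallet_prefs", // hypothetical file name
        masterKey,
        EncryptedSharedPreferences.PrefKeyEncryptionScheme.AES256_SIV,
        EncryptedSharedPreferences.PrefValueEncryptionScheme.AES256_GCM
    )
}

Swapping this store into getPrivateKey() and getWalletAddress() would protect the key at rest; the clipboard hand-off in copyPrivateKeyToClipboard() would still expose it and deserves a confirmation step.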