diff --git a/CMakeLists.txt b/CMakeLists.txt index e41f87f..726a51f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -14,19 +14,27 @@ find_package(projectM4 4.1.0 REQUIRED Playlist) find_package(GStreamer REQUIRED COMPONENTS gstreamer-audio gstreamer-gl gstreamer-pbutils gstreamer-video) find_package(GLIB2 REQUIRED) -add_library(gstprojectm SHARED - src/caps.h - src/caps.c +add_library(gstprojectm MODULE + src/bufferdisposal.h + src/bufferdisposal.c src/debug.h src/debug.c - src/config.h - src/enums.h - src/plugin.h - src/plugin.c - src/projectm.h - src/projectm.c src/gstglbaseaudiovisualizer.h src/gstglbaseaudiovisualizer.c + src/gstpmaudiovisualizer.h + src/gstpmaudiovisualizer.c + src/gstprojectm.h + src/gstprojectm.c + src/gstprojectmbase.h + src/gstprojectmbase.c + src/gstprojectmcaps.h + src/gstprojectmcaps.c + src/gstprojectmconfig.h + src/pushbuffer.h + src/pushbuffer.c + src/register.c + src/renderbuffer.h + src/renderbuffer.c ) target_include_directories(gstprojectm diff --git a/README.md b/README.md index 1b43b93..8a8abe1 100644 --- a/README.md +++ b/README.md @@ -57,25 +57,50 @@ The documentation has been organized into distinct files, each dedicated to a sp - **[OSX](docs/OSX.md)** - **[Windows](docs/WINDOWS.md)** -Once the plugin has been installed, you can use it something like this: +Once the plugin has been installed, you can use it something like this to render to an OpenGL window: ```shell -gst-launch pipewiresrc ! queue ! audioconvert ! projectm preset=/usr/local/share/projectM/presets preset-duration=5 ! video/x-raw,width=2048,height=1440,framerate=60/1 ! videoconvert ! xvimagesink sync=false +gst-launch pipewiresrc ! queue ! audioconvert ! "audio/x-raw, format=S16LE, rate=44100, channels=2, layout=interleaved" ! projectm preset=/usr/local/share/projectM/presets preset-duration=10 mesh-size=48,32 is-live=true ! 'video/x-raw(memory:GLMemory),width=2048,height=1440,framerate=60/1' ! glimagesink sync=false ``` -Or to convert an audio file to video: +To render from a live source in real-time to a gl window, an identity element can be used to provide a proper timestamp source for the pipeline. This example also includes a texture directory: +```shell +gst-launch souphttpsrc location=http://your-radio-stream is-live=true ! queue ! decodebin ! audioconvert ! "audio/x-raw, format=S16LE, rate=44100, channels=2, layout=interleaved" ! identity single-segment=true sync=true ! projectm preset=/usr/local/share/projectM/presets preset-duration=5 mesh-size=48,32 is-live=true texture-dir=/usr/local/share/projectM/presets-milkdrop-texture-pack ! video/x-raw(memory:GLMemory),width=1920,height=1080,framerate=60/1 ! glimagesink sync=false +``` + +Or to convert an audio file to video using offline rendering: ```shell +gst-launch-1.0 -e \ filesrc location=input.mp3 ! decodebin name=dec \ decodebin ! tee name=t \ t. ! queue ! audioconvert ! audioresample ! \ capsfilter caps="audio/x-raw, format=F32LE, channels=2, rate=44100" ! avenc_aac bitrate=256000 ! queue ! mux. \ - t. ! queue ! audioconvert ! projectm preset=/usr/local/share/projectM/presets preset-duration=3 mesh-size=1024,576 ! \ - identity sync=false ! videoconvert ! videorate ! video/x-raw,framerate=60/1,width=3840,height=2160 ! \ + t. ! queue ! audioconvert ! capsfilter caps="audio/x-raw, format=S16LE, channels=2, rate=44100" ! \ + projectm preset=/usr/local/share/projectM/presets preset-duration=3 mesh-size=1024,576 is-live=false ! \ + identity sync=false ! videoconvert ! videorate ! 
video/x-raw\(memory:GLMemory\),framerate=60/1,width=3840,height=2160 ! \ + gldownload \ x264enc bitrate=35000 key-int-max=300 speed-preset=veryslow ! video/x-h264,stream-format=avc,alignment=au ! queue ! mux. \ mp4mux name=mux ! filesink location=render.mp4; ``` +Or converting an audio file with the nVidia optimized encoder, directly from GL memory: +```shell +gst-launch-1.0 -e \ + filesrc location=input.mp3 ! \ + decodebin ! tee name=t \ + t. ! queue ! audioconvert ! audioresample ! \ + capsfilter caps="audio/x-raw, format=F32LE, channels=2, rate=44100" ! \ + avenc_aac bitrate=320000 ! queue ! mux. \ + t. ! queue ! audioconvert ! capsfilter caps="audio/x-raw, format=S16LE, channels=2, rate=44100" ! projectm \ + preset=/usr/local/share/projectM/presets preset-duration=3 mesh-size=1024,576 is-live=false ! \ + identity sync=false ! videoconvert ! videorate ! \ + video/x-raw\(memory:GLMemory\),framerate=60/1,width=1920,height=1080 ! \ + nvh264enc ! h264parse ! \ + video/x-h264,stream-format=avc,alignment=au ! queue ! mux. \ + mp4mux name=mux ! filesink location=render.mp4; +``` + Available options ```shell @@ -193,21 +218,23 @@ If you have your own ProjectM preset files: Once the plugin has been installed, you can use it something like this: ```shell -gst-launch pipewiresrc ! queue ! audioconvert ! projectm preset=/usr/local/share/projectM/presets preset-duration=5 ! video/x-raw,width=2048,height=1440,framerate=60/1 ! videoconvert ! xvimagesink sync=false +gst-launch pipewiresrc ! queue ! audioconvert ! "audio/x-raw, format=S16LE, rate=44100, channels=2, layout=interleaved" ! projectm preset=/usr/local/share/projectM/presets preset-duration=5 mesh-size=48,32 ! 'video/x-raw(memory:GLMemory),width=2048,height=1440,framerate=60/1' ! glimagesink sync=false ``` Or to convert an audio file to video: ```shell gst-launch-1.0 -e \ - filesrc location=input.mp3 ! \ + filesrc location=input.mp3 ! decodebin name=dec \ decodebin ! tee name=t \ t. ! queue ! audioconvert ! audioresample ! \ - capsfilter caps="audio/x-raw, format=F32LE, channels=2, rate=44100" ! avenc_aac bitrate=320000 ! queue ! mux. \ - t. ! queue ! audioconvert ! projectm preset=/usr/local/share/projectM/presets texture-dir=/usr/local/share/projectM/textures preset-duration=6 mesh-size=1024,576 ! \ - identity sync=false ! videoconvert ! videorate ! video/x-raw,framerate=60/1,width=3840,height=2160 ! \ - x264enc bitrate=50000 key-int-max=200 speed-preset=veryslow ! video/x-h264,stream-format=avc,alignment=au ! queue ! mux. \ - mp4mux name=mux ! filesink location=output.mp4 + capsfilter caps="audio/x-raw, format=F32LE, channels=2, rate=44100" ! avenc_aac bitrate=256000 ! queue ! mux. \ + t. ! queue ! audioconvert ! capsfilter caps="audio/x-raw, format=S16LE, channels=2, rate=44100" ! \ + projectm preset=/usr/local/share/projectM/presets preset-duration=3 mesh-size=1024,576 is-live=false ! \ + identity sync=false ! videoconvert ! videorate ! video/x-raw\(memory:GLMemory\),framerate=60/1,width=3840,height=2160 ! \ + gldownload \ + x264enc bitrate=35000 key-int-max=300 speed-preset=veryslow ! video/x-h264,stream-format=avc,alignment=au ! queue ! mux. \ + mp4mux name=mux ! filesink location=render.mp4; ``` You may need to adjust some elements which may or may not be present in your GStreamer installation, such as x264enc, avenc_aac, etc. @@ -220,6 +247,55 @@ gst-inspect projectm

(back to top)

+## Technical Details
+
+### OpenGL Rendering and Buffer Handling
+
+- projectM output is rendered to OpenGL textures via a **Frame Buffer Object (FBO)**.
+- **Textures are pooled** and reused across frames.
+- Each rendered texture becomes a GStreamer video buffer pushed downstream. **All video buffers stay in GPU memory**.
+
+---
+
+### Timing and Synchronization
+
+The plugin synchronizes rendering to the GStreamer pipeline clock using the **audio presentation timestamp (PTS) as the leading reference**.
+
+Pipeline caps control the desired video framerate for rendering. The render loop is **push-based** to conform with
+GStreamer's pipeline timing concept, and to enable faster-than-real-time rendering.
+A **fixed number of audio samples is consumed per video frame**.
+
+**Example:** `735 samples per frame at 44.1 kHz = ~60 FPS.`
+
+**Note:** Live pipelines are auto-detected by the plugin if GStreamer supports it (not supported on Windows).
+For Windows, or other cases where auto-detection is not appropriate, the `is-live` property can be configured.
+The default mode is offline rendering, `is-live=false`.
+
+**Live pipelines only:** Frames may be dropped or the rendering FPS adjusted if frame rendering can't keep up with
+the framerate requested in the pipeline caps.
+
+The video frame PTS offset is derived from the **first audio buffer PTS** or the **segment event**, plus the accumulated sample count, to align with audio timing.
+
+
+| Timing Source              | Origin             | Applies to clock | Purpose                                                                                                                                                                                                                                                          |
+|----------------------------|--------------------|------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Audio Timestamps           | Audio Input        | Always           | Determines video timing and sync.                                                                                                                                                                                                                                |
+| Sample Rate / Pipeline FPS | Audio Input / Caps | Always           | Defines how many audio samples are used per frame and the target FPS.                                                                                                                                                                                            |
+| Segment Info               | Segment Event      | Always           | Tracks running time and playback position. Used for PTS offsets.                                                                                                                                                                                                 |
+| QoS Feedback               | QoS Event          | Live             | Skips outdated frames to correct sync with the downstream sink/pipeline clock.                                                                                                                                                                                   |
+| Render Frame Drop          | Render Loop        | Live             | Drops frames that cannot be rendered in time to keep sync with the pipeline clock.                                                                                                                                                                               |
+| GL Frame Render Duration   | Render Loop        | Live             | Exponential Moving Average (EMA) of the frame render duration. Lowers the plugin's target FPS if rendering exceeds the real-time budget most of the time.                                                                                                         |
+| Latency Event              | Render Loop        | Live             | Informs upstream of latency changes caused by adaptive FPS changes (via EMA).                                                                                                                                                                                    |
+| Buffer push clock jitter   | Render Loop        | Live             | Exponential Moving Average of the source pad push jitter caused by the scheduler. GStreamer clocks are not guaranteed to be precise with timed waits, since the operating system cannot guarantee this. The jitter EMA is added as a correction to the buffer PTS. |
+
+
+---
+
+
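The 735-samples example and the EMA-driven FPS adjustment in the table follow directly from the negotiated caps. Below is a minimal standalone C sketch of that arithmetic; the smoothing factor, the sample render durations, and the adjustment policy are illustrative assumptions, not the values used by the plugin's render buffer.

```c
/* Illustrative arithmetic only -- not part of the plugin sources.
 * ALPHA and the measured render durations below are made-up values. */
#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

int main(void) {
  uint64_t rate = 44100;          /* audio sample rate from the sink caps (Hz) */
  uint64_t fps_n = 60, fps_d = 1; /* video framerate from the src caps */

  /* Fixed number of audio samples consumed per video frame:
   * 44100 * 1 / 60 = 735, the example quoted above. */
  uint64_t samples_per_frame = rate * fps_d / fps_n;

  /* Real-time budget per frame in nanoseconds: 1e9 / 60 ~= 16.67 ms. */
  uint64_t caps_frame_duration = NSEC_PER_SEC * fps_d / fps_n;

  printf("samples/frame = %llu, frame duration = %llu ns\n",
         (unsigned long long)samples_per_frame,
         (unsigned long long)caps_frame_duration);

  /* Live pipelines only: smooth the measured GL render time with an
   * exponential moving average; once it exceeds the budget, the target
   * FPS is lowered (never below min-fps-n/min-fps-d) and a latency
   * event is sent upstream. */
  const double ALPHA = 0.2;                 /* hypothetical smoothing factor */
  uint64_t measured_ns[] = {12000000, 19000000, 21000000, 20000000};
  double ema = (double)caps_frame_duration; /* start at the nominal budget */

  for (int i = 0; i < 4; i++) {
    ema = ALPHA * (double)measured_ns[i] + (1.0 - ALPHA) * ema;
    if (ema > (double)caps_frame_duration)
      printf("frame %d: EMA %.1f ms exceeds budget -> reduce target FPS\n",
             i, ema / 1e6);
  }
  return 0;
}
```

In the plugin, FPS changes computed by the render buffer are delivered through `adjust_fps_callback`, which calls `gst_pm_audio_visualizer_adjust_fps()` so that the sample-accurate timestamp accounting follows the new frame duration.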

(back to top)

+ +--- + ## Contributing @@ -261,6 +337,8 @@ Blaquewithaq (Discord: SoFloppy#1289) - [@anomievision](https://twitter.com/anom Mischa (Discord: mish) - [@revmischa](https://github.com/revmischa) +Michael [@mbaetgen-wup](https://github.com/mbaetgen-wup) - michael -at- widerup.com +

(back to top)

diff --git a/build.ps1 b/build.ps1 index cf25cae..8aa1db9 100644 --- a/build.ps1 +++ b/build.ps1 @@ -54,7 +54,7 @@ function Start-ConfigureBuild { -DVCPKG_TARGET_TRIPLET=x64-windows ` -DCMAKE_MSVC_RUNTIME_LIBRARY="MultiThreaded$<$:Debug>DLL" ` -DCMAKE_VERBOSE_MAKEFILE=YES ` - -DCMAKE_PREFIX_PATH="${Env:PROJECTM_ROOT}/lib/cmake/projectM4" + -DCMAKE_PREFIX_PATH="${Env:PROJECTM_ROOT}" } # Copy required DLLs to dist directory @@ -137,7 +137,7 @@ function Invoke-PromptInstall { # Print example command Write-Host Write-Host "Done! Here's an example command:" - Write-Host 'gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false' + Write-Host 'gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false' } else { Write-Host diff --git a/build.sh b/build.sh index 74a1148..940d8f0 100755 --- a/build.sh +++ b/build.sh @@ -4,10 +4,8 @@ set -e # Set variables based on OS if [[ "$OSTYPE" == "linux-gnu"* ]]; then LIB_EXT="so" - VIDEO_SINK="xvimagesink" elif [[ "$OSTYPE" == "darwin"* ]]; then LIB_EXT="dylib" - VIDEO_SINK="osxvideosink" else echo "Unsupported OS!" exit 1 @@ -99,7 +97,7 @@ prompt_install() { # Print example command echo echo "Done! Here's an example command:" - echo "gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! $VIDEO_SINK sync=false" + echo "gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! \"video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1\" ! videoconvert ! glimagesink sync=false" else echo echo "Done!" diff --git a/convert.sh b/convert.sh index 79bff18..9059e99 100644 --- a/convert.sh +++ b/convert.sh @@ -154,13 +154,16 @@ gst-launch-1.0 -e \ t. ! queue ! audioconvert ! audioresample ! \ capsfilter caps="audio/x-raw, format=F32LE, channels=2, rate=44100" ! \ avenc_aac bitrate=320000 ! queue ! mux. \ - t. ! queue ! audioconvert ! projectm \ + t. ! queue ! audioconvert ! capsfilter caps="audio/x-raw, format=S16LE, channels=2, rate=44100" ! \ + projectm \ preset=$PRESET_PATH \ texture-dir=$TEXTURE_DIR \ preset-duration=$PRESET_DURATION \ + is-live=false \ mesh-size=${MESH_X},${MESH_Y} ! \ identity sync=false ! videoconvert ! videorate ! \ - video/x-raw,framerate=$FRAMERATE/1,width=$VIDEO_WIDTH,height=$VIDEO_HEIGHT ! \ + video/x-raw\(memory:GLMemory\),framerate=$FRAMERATE/1,width=$VIDEO_WIDTH,height=$VIDEO_HEIGHT ! \ + gldownload ! \ x264enc bitrate=$(($BITRATE * 1000)) key-int-max=200 speed-preset=$SPEED_PRESET ! \ video/x-h264,stream-format=avc,alignment=au ! queue ! mux. \ mp4mux name=mux ! filesink location=$OUTPUT_FILE & diff --git a/docs/LINUX.md b/docs/LINUX.md index 6315df6..2443262 100644 --- a/docs/LINUX.md +++ b/docs/LINUX.md @@ -92,7 +92,7 @@ source ~/.bash_profile To utilize the plugin with the example, please install GStreamer ```bash -gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false +gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! 
glimagesink sync=false ``` ### Testing diff --git a/docs/OSX.md b/docs/OSX.md index bb35f42..429bedc 100644 --- a/docs/OSX.md +++ b/docs/OSX.md @@ -102,7 +102,7 @@ source ~/.bash_profile To utilize the plugin with the example, please install GStreamer ```bash -gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false +gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false ``` ### Testing diff --git a/docs/OVERVIEW.md b/docs/OVERVIEW.md index d6be7ec..24b73ab 100644 --- a/docs/OVERVIEW.md +++ b/docs/OVERVIEW.md @@ -15,7 +15,7 @@ - [x] OSX - [ ] Windows (see issues) - [x] Accepting an audio/x-raw stream (coded to add more formats later, if needed) -- [x] Generating a video/x-raw stream (coded to add more formats later, if needed) +- [x] Generating a video/x-raw(memory:GLMemory) stream (coded to add more formats later, if needed) - [x] Utilizing the new C API in libprojectM 4.0 - [x] Implemented properties with defaults (aka settings) diff --git a/docs/WINDOWS.md b/docs/WINDOWS.md index db4d442..b570757 100644 --- a/docs/WINDOWS.md +++ b/docs/WINDOWS.md @@ -66,7 +66,7 @@ Copy-Item -Path "dist\gstprojectm.dll" -Destination "$Env:USERPROFILE\.gstreamer To utilize the plugin with the example, please install GStreamer ```powershell -gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false +gst-launch-1.0 audiotestsrc ! queue ! audioconvert ! projectm ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false ``` ### Testing diff --git a/src/bufferdisposal.c b/src/bufferdisposal.c new file mode 100644 index 0000000..1b86988 --- /dev/null +++ b/src/bufferdisposal.c @@ -0,0 +1,153 @@ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "bufferdisposal.h" + +#include + +GST_DEBUG_CATEGORY_STATIC(buffercleanup_debug); +#define GST_CAT_DEFAULT buffercleanup_debug + +/** + * Queue shutdown signal token. + */ +static gpointer BD_Q_SHUTDOWN_SIGNAL = &BD_Q_SHUTDOWN_SIGNAL; + +/** + * Callback for scheduling gl buffer release with gl thread. + * Needs to be called from the GL thread. + * + * @param context Current gl context. + * @param buf GL buffer to release. + */ +void bd_gl_buffer_dispose_gl(GstGLContext *context, gpointer buf) { + + GstBuffer *buffer = GST_BUFFER(buf); + if (buffer == NULL) + return; + + if (context != NULL) { + GstGLSyncMeta *sync_meta = gst_buffer_get_gl_sync_meta(buffer); + if (sync_meta) + gst_gl_sync_meta_set_sync_point(sync_meta, context); + } + + gst_buffer_unref(GST_BUFFER(buffer)); +} + +void bd_dispose_gl_buffer(BDBufferDisposal *state, GstBuffer *buf) { + g_assert(state != NULL); + g_assert(buf != NULL); + if (gst_gl_context_get_current() == state->gl_context) { + bd_gl_buffer_dispose_gl(state->gl_context, buf); + } else { + g_async_queue_push(state->disposal_queue, buf); + } +} + +/** + * Disposal loop for dropped gl buffers that are not making it to the src pad. + * Consume buffers to clean-up and dispatch release through gl thread. + * + * @param user_data Queue state to use. 
+ * @return NULL + */ +static gpointer _bd_dispose_thread_func(gpointer user_data) { + + BDBufferDisposal *state = (BDBufferDisposal *)user_data; + g_assert(state != NULL); + + // consume gl buffers to dispatch to gl thread for cleanup + while (g_atomic_int_get(&state->running)) { + + gpointer item = g_async_queue_pop(state->disposal_queue); + + if (item == BD_Q_SHUTDOWN_SIGNAL) + break; + + if (!item) + continue; + + gst_gl_context_thread_add(state->gl_context, bd_gl_buffer_dispose_gl, item); + } + + return NULL; +} + +/** + * Dispose of all buffers currently queued. + * Needs to be called from the GL thread. + * + * @param user_data Queue state to use. + */ +void bd_clear_queue_gl(GstGLContext *context, gpointer user_data) { + BDBufferDisposal *state = (BDBufferDisposal *)user_data; + g_assert(state != NULL); + g_assert(gst_gl_context_get_current() == context); + + // make sure all gl buffers are released + gpointer item; + while ((item = g_async_queue_try_pop(state->disposal_queue)) != NULL) { + bd_gl_buffer_dispose_gl(context, item); + } +} + +void bd_clear(BDBufferDisposal *state) { + g_assert(state != NULL); + + if (gst_gl_context_get_current() == state->gl_context) { + bd_clear_queue_gl(state->gl_context, state); + } else { + gst_gl_context_thread_add(state->gl_context, bd_clear_queue_gl, state); + } +} + +void bd_init_buffer_disposal(BDBufferDisposal *state, + GstGLContext *gl_context) { + g_assert(state != NULL); + + static gsize _debug_initialized = 0; + if (g_once_init_enter(&_debug_initialized)) { + GST_DEBUG_CATEGORY_INIT(buffercleanup_debug, "buffercleanup", 0, + "projectM visualizer plugin buffer cleanup"); + } + + state->disposal_thread = NULL; + state->gl_context = gl_context; + g_atomic_int_set(&state->running, FALSE); + state->disposal_queue = g_async_queue_new(); +} + +void bd_dispose_buffer_disposal(BDBufferDisposal *state) { + g_assert(state != NULL); + g_assert(state->disposal_thread == NULL); + + g_async_queue_unref(state->disposal_queue); + state->disposal_queue = NULL; + state->gl_context = NULL; +} + +void bd_start_buffer_disposal(BDBufferDisposal *state) { + g_assert(state != NULL); + + g_atomic_int_set(&state->running, TRUE); + + if (state->disposal_thread == NULL) { + state->disposal_thread = + g_thread_new("bd-disposal-thread", _bd_dispose_thread_func, state); + } +} + +void bd_stop_buffer_disposal(BDBufferDisposal *state) { + g_assert(state != NULL); + + // signal and wait for cleanup thread to exit + g_atomic_int_set(&state->running, FALSE); + + if (state->disposal_thread != NULL) { + g_async_queue_push(state->disposal_queue, BD_Q_SHUTDOWN_SIGNAL); + g_thread_join(state->disposal_thread); + state->disposal_thread = NULL; + } +} diff --git a/src/bufferdisposal.h b/src/bufferdisposal.h new file mode 100644 index 0000000..9f7db5c --- /dev/null +++ b/src/bufferdisposal.h @@ -0,0 +1,91 @@ +/* + * An async queue to dispose of (dropped) GL buffers. The queue is consumed by + * a dedicated thread (bd-disposal-thread) to dispatch GL buffer unref to the + * GL thread. + */ + +#ifndef __BUFFERDISPOSAL_H__ +#define __BUFFERDISPOSAL_H__ + +#include +#include + +typedef struct { + + // not re-assigned during render thread lifetime + // -------------------------------------------------------------- + + /** + * Current gl context. No ownership. + */ + GstGLContext *gl_context; + + /** + * Thread running the gl buffer clean-up loop, + * used to release dropped buffer from the gl thread. + */ + GThread *disposal_thread; + + /** + * Queue to dispose of dropped gl buffers. 
+ */ + GAsyncQueue *disposal_queue; + + // concurrent access, g_atomic + // -------------------------------------------------------------- + + /** + * TRUE if rendering is currently running. + */ + gboolean running; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +} BDBufferDisposal; + +/** + * Dispose given buffer from the GL thread. + * Disposal will be queued if current thread is not the GL thread. + * + * @param state State to use. + * @param buf Buffer to dispose. + */ +void bd_dispose_gl_buffer(BDBufferDisposal *state, GstBuffer *buf); + +/** + * Dispose of all buffers currently queued for disposal. + * + * @param state Renderbuffer owning cleanup queue to clear. + */ +void bd_clear(BDBufferDisposal *state); + +/** + * Init queue state. + * + * @param state Queue state to init. + * @param gl_context GL context to use. + */ +void bd_init_buffer_disposal(BDBufferDisposal *state, GstGLContext *gl_context); + +/** + * Release all resources used by this queue. + * + * @param state Queue state to dispose. + */ +void bd_dispose_buffer_disposal(BDBufferDisposal *state); + +/** + * Start worker thread. + * + * @param state Queue state to use. + */ +void bd_start_buffer_disposal(BDBufferDisposal *state); + +/** + * Stop worker thread. + * + * @param state Queue state to use. + */ +void bd_stop_buffer_disposal(BDBufferDisposal *state); + +#endif diff --git a/src/caps.c b/src/caps.c deleted file mode 100644 index 09b5add..0000000 --- a/src/caps.c +++ /dev/null @@ -1,51 +0,0 @@ - -#ifdef HAVE_CONFIG_H -#include -#endif - -#include -#include - -#include "caps.h" -#include "plugin.h" - -GST_DEBUG_CATEGORY_STATIC(gst_projectm_caps_debug); -#define GST_CAT_DEFAULT gst_projectm_caps_debug - -const gchar *get_audio_sink_cap(unsigned int type) { - const char *format; - - switch (type) { - case 0: - format = - GST_AUDIO_CAPS_MAKE("audio/x-raw, " - "format = (string) " GST_AUDIO_NE( - S16) ", " - "layout = (string) interleaved, " - "channels = (int) { 2 }, " - "rate = (int) { 44100 }, " - "channel-mask = (bitmask) { 0x0003 }"); - break; - default: - format = NULL; - break; - } - - return format; -} - -const gchar *get_video_src_cap(unsigned int type) { - const char *format; - - switch (type) { - case 0: - format = GST_VIDEO_CAPS_MAKE("video/x-raw, format = (string) { ABGR }, " - "framerate=(fraction)[0/1,MAX]"); - break; - default: - format = NULL; - break; - } - - return format; -} \ No newline at end of file diff --git a/src/config.h b/src/config.h deleted file mode 100644 index bc83e3b..0000000 --- a/src/config.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef __GST_PROJECTM_CONFIG_H__ -#define __GST_PROJECTM_CONFIG_H__ - -#include - -G_BEGIN_DECLS - -/** - * @brief Plugin Details - */ - -#define PACKAGE "GstProjectM" -#define PACKAGE_NAME "GstProjectM" -#define PACKAGE_VERSION "0.0.2" -#define PACKAGE_LICENSE "LGPL" -#define PACKAGE_ORIGIN "https://github.com/projectM-visualizer/gst-projectm" - -/** - * @brief ProjectM Settings (defaults) - */ - -#define DEFAULT_PRESET_PATH NULL -#define DEFAULT_TEXTURE_DIR_PATH NULL -#define DEFAULT_BEAT_SENSITIVITY 1.0 -#define DEFAULT_HARD_CUT_DURATION 3.0 -#define DEFAULT_HARD_CUT_ENABLED FALSE -#define DEFAULT_HARD_CUT_SENSITIVITY 1.0 -#define DEFAULT_SOFT_CUT_DURATION 3.0 -#define DEFAULT_PRESET_DURATION 0.0 -#define DEFAULT_MESH_SIZE "48,32" -#define DEFAULT_ASPECT_CORRECTION TRUE -#define DEFAULT_EASTER_EGG 0.0 -#define DEFAULT_PRESET_LOCKED FALSE -#define DEFAULT_ENABLE_PLAYLIST TRUE -#define DEFAULT_SHUFFLE_PRESETS TRUE // depends on ENABLE_PLAYLIST - 
-G_END_DECLS - -#endif /* __GST_PROJECTM_CONFIG_H__ */ diff --git a/src/debug.c b/src/debug.c index 9a948aa..510b480 100644 --- a/src/debug.c +++ b/src/debug.c @@ -2,12 +2,12 @@ #include "config.h" #endif +#include "debug.h" + #include #include -#include "debug.h" - -void gl_error_handler(GstGLContext *context, gpointer data) { +void gl_error_handler(GstGLContext *context) { GLuint error = context->gl_vtable->GetError(); switch (error) { @@ -47,4 +47,4 @@ void gl_error_handler(GstGLContext *context, gpointer data) { g_error("OpenGL Error: Unknown error code - 0x%x\n", error); break; } -} \ No newline at end of file +} diff --git a/src/debug.h b/src/debug.h index c0b3969..bb67133 100644 --- a/src/debug.h +++ b/src/debug.h @@ -1,7 +1,6 @@ #ifndef __GST_PROJECTM_DEBUG_H__ #define __GST_PROJECTM_DEBUG_H__ -#include #include G_BEGIN_DECLS @@ -26,8 +25,8 @@ G_BEGIN_DECLS * @param context The OpenGL context. * @param data Unused. */ -void gl_error_handler(GstGLContext *context, gpointer data); +void gl_error_handler(GstGLContext *context); G_END_DECLS -#endif /* __GST_PROJECTM_DEBUG_H__ */ \ No newline at end of file +#endif /* __GST_PROJECTM_DEBUG_H__ */ diff --git a/src/enums.h b/src/enums.h deleted file mode 100644 index 863d677..0000000 --- a/src/enums.h +++ /dev/null @@ -1,32 +0,0 @@ -#ifndef __GST_PROJECTM_ENUMS_H__ -#define __GST_PROJECTM_ENUMS_H__ - -#include - -G_BEGIN_DECLS - -/** - * @brief Properties - */ - -enum { - PROP_0, - PROP_PRESET_PATH, - PROP_TEXTURE_DIR_PATH, - PROP_BEAT_SENSITIVITY, - PROP_HARD_CUT_DURATION, - PROP_HARD_CUT_ENABLED, - PROP_HARD_CUT_SENSITIVITY, - PROP_SOFT_CUT_DURATION, - PROP_PRESET_DURATION, - PROP_MESH_SIZE, - PROP_ASPECT_CORRECTION, - PROP_EASTER_EGG, - PROP_PRESET_LOCKED, - PROP_SHUFFLE_PRESETS, - PROP_ENABLE_PLAYLIST -}; - -G_END_DECLS - -#endif /* __GST_PROJECTM_ENUMS_H__ */ diff --git a/src/gstglbaseaudiovisualizer.c b/src/gstglbaseaudiovisualizer.c index e1f786a..fe6bfe8 100644 --- a/src/gstglbaseaudiovisualizer.c +++ b/src/gstglbaseaudiovisualizer.c @@ -35,50 +35,94 @@ #endif #include "gstglbaseaudiovisualizer.h" + +#include "gstpmaudiovisualizer.h" +#include "renderbuffer.h" + #include +#include + +#ifdef _WIN32 +#define strcasecmp _stricmp +#endif /** * SECTION:GstGLBaseAudioVisualizer - * @short_description: #GstAudioVisualizer subclass for injecting OpenGL + * @short_description: #GstPMAudioVisualizer subclass for injecting OpenGL * resources in a pipeline * @title: GstGLBaseAudioVisualizer - * @see_also: #GstAudioVisualizer + * @see_also: #GstPMAudioVisualizer * - * Wrapper for GstAudioVisualizer for handling OpenGL contexts. + * Wrapper for GstPMAudioVisualizer for handling OpenGL contexts. * * #GstGLBaseAudioVisualizer handles the nitty gritty details of retrieving an * OpenGL context. It also provides `gl_start()` and `gl_stop()` virtual methods * that ensure an OpenGL context is available and current in the calling thread - * for initializing and cleaning up OpenGL dependent resources. The `gl_render` - * virtual method is used to perform OpenGL rendering. + * for initializing and cleaning up OpenGL resources. The `render` + * virtual method of the GstPMAudioVisualizer is implemented to perform OpenGL + * rendering. The implementer provides an implementation for fill_gl_memory to + * render directly to gl memory. Rendering is performed blocking for + * offline rendering and asynchronously for real-time rendering. + * The plugin detects if the pipeline clock is a real-time clock. 
+ * + * Typical plug-in call order for implementer-provided functions: + * - gl_start (once) + * - setup (every time caps change, typically once) + * - fill_gl_memory (once for each frame) + * - gl_stop (once) */ #define GST_CAT_DEFAULT gst_gl_base_audio_visualizer_debug -GST_DEBUG_CATEGORY_STATIC(GST_CAT_DEFAULT); +GST_DEBUG_CATEGORY_STATIC(gst_gl_base_audio_visualizer_debug); + +#define DEFAULT_TIMESTAMP_OFFSET 0 + +/** + * Wait for up to 0.625 * fps frame duration for a free slot to queue input + * audio for a frame. If the previous frame does not start rendering within this + * time, it is dropped. + */ +#ifndef MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_N +#define MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_N 5 +#endif + +#ifndef MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_D +#define MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_D 8 +#endif + +/* + * GST element property default values. + */ +#define DEFAULT_MIN_FPS_N 1 +#define DEFAULT_MIN_FPS_D 1 +#define DEFAULT_PIPELINE_LIVE "auto" struct _GstGLBaseAudioVisualizerPrivate { GstGLContext *other_context; gint64 n_frames; /* total frames sent */ - gboolean gl_result; + gboolean gl_started; GRecMutex context_lock; + GstGLFramebuffer *fbo; + RBRenderBuffer render_buffer; + gboolean is_realtime; }; /* Properties */ -enum { PROP_0 }; +enum { PROP_0, PROP_MIN_FPS_N, PROP_MIN_FPS_D, PROP_PIPELINE_LIVE }; #define gst_gl_base_audio_visualizer_parent_class parent_class G_DEFINE_ABSTRACT_TYPE_WITH_CODE( GstGLBaseAudioVisualizer, gst_gl_base_audio_visualizer, - GST_TYPE_AUDIO_VISUALIZER, + GST_TYPE_PM_AUDIO_VISUALIZER, G_ADD_PRIVATE(GstGLBaseAudioVisualizer) GST_DEBUG_CATEGORY_INIT(gst_gl_base_audio_visualizer_debug, "glbaseaudiovisualizer", 0, "glbaseaudiovisualizer element");); -static void gst_gl_base_audio_visualizer_finalize(GObject *object); +static void gst_gl_base_audio_visualizer_dispose(GObject *object); static void gst_gl_base_audio_visualizer_set_property(GObject *object, guint prop_id, const GValue *value, @@ -88,83 +132,194 @@ static void gst_gl_base_audio_visualizer_get_property(GObject *object, GValue *value, GParamSpec *pspec); +/** + * Discover gl context / display from gst. + */ static void gst_gl_base_audio_visualizer_set_context(GstElement *element, GstContext *context); +/** + * Handle pipeline state changes. + */ static GstStateChangeReturn gst_gl_base_audio_visualizer_change_state(GstElement *element, GstStateChange transition); -static gboolean gst_gl_base_audio_visualizer_render(GstAudioVisualizer *bscope, - GstBuffer *audio, - GstVideoFrame *video); +/** + * Renders a video frame using gl, impl for parent class + * GstPMAudioVisualizerClass. + */ +static GstFlowReturn +gst_gl_base_audio_visualizer_parent_render(GstPMAudioVisualizer *bscope, + GstBuffer *audio, GstClockTime pts, + guint64 frame_duration); + +/** + * Internal utility for resetting state on start. + */ static void gst_gl_base_audio_visualizer_start(GstGLBaseAudioVisualizer *glav); + +/** + * Internal utility for cleaning up gl context on stop. + */ static void gst_gl_base_audio_visualizer_stop(GstGLBaseAudioVisualizer *glav); -static gboolean -gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, - GstQuery *query); +/** + * GL memory pool allocation impl for parent class GstPMAudioVisualizerClass. + */ +static gboolean gst_gl_base_audio_visualizer_parent_decide_allocation( + GstPMAudioVisualizer *pmav, GstQuery *query); + +/** + * called when format changes, default empty v-impl for this class. can be + * overwritten by implementer. 
+ */ static gboolean gst_gl_base_audio_visualizer_default_setup(GstGLBaseAudioVisualizer *glav); + +/** + * gl context is started and usable. called from gl thread. default empty v-impl + * for this class, can be overwritten by implementer. + */ static gboolean gst_gl_base_audio_visualizer_default_gl_start(GstGLBaseAudioVisualizer *glav); + +/** + * GL context is shutting down. called from gl thread. default empty v-impl for + * this class. can be overwritten by implementer. + */ static void gst_gl_base_audio_visualizer_default_gl_stop(GstGLBaseAudioVisualizer *glav); -static gboolean gst_gl_base_audio_visualizer_default_gl_render( - GstGLBaseAudioVisualizer *glav, GstBuffer *audio, GstVideoFrame *video); +/** + * Default empty v-impl for rendering a frame. called from gl thread. can be + * overwritten by implementer. + */ +static gboolean gst_gl_base_audio_visualizer_default_fill_gl_memory( + GstAVRenderParams *render_data); + +/** + * Find a valid gl context. lock must have already been acquired. + */ static gboolean gst_gl_base_audio_visualizer_find_gl_context_unlocked( GstGLBaseAudioVisualizer *glav); -static gboolean gst_gl_base_audio_visualizer_setup(GstAudioVisualizer *gstav); +/** + * Called whenever the caps change, src and sink caps are both set. Impl for + * parent class GstPMAudioVisualizerClass. + */ +static gboolean +gst_gl_base_audio_visualizer_parent_setup(GstPMAudioVisualizer *pmav); + +/** + * Called from gl thread: fbo rtt rending function. + */ +static void gst_gl_base_audio_visualizer_fill_gl(GstGLContext *context, + gpointer render_slot_ptr); static void gst_gl_base_audio_visualizer_class_init(GstGLBaseAudioVisualizerClass *klass) { GObjectClass *gobject_class = G_OBJECT_CLASS(klass); - GstAudioVisualizerClass *gstav_class = GST_AUDIO_VISUALIZER_CLASS(klass); + GstPMAudioVisualizerClass *pmav_class = GST_PM_AUDIO_VISUALIZER_CLASS(klass); GstElementClass *element_class = GST_ELEMENT_CLASS(klass); - gobject_class->finalize = gst_gl_base_audio_visualizer_finalize; + gobject_class->dispose = gst_gl_base_audio_visualizer_dispose; gobject_class->set_property = gst_gl_base_audio_visualizer_set_property; gobject_class->get_property = gst_gl_base_audio_visualizer_get_property; element_class->set_context = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_set_context); - element_class->change_state = + pmav_class->change_state = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_change_state); - gstav_class->decide_allocation = - GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_decide_allocation); - gstav_class->setup = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_setup); + pmav_class->decide_allocation = + GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_parent_decide_allocation); + + pmav_class->setup = + GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_parent_setup); - gstav_class->render = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_render); + pmav_class->render = + GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_parent_render); klass->supported_gl_api = GST_GL_API_ANY; + klass->gl_start = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_default_gl_start); + klass->gl_stop = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_default_gl_stop); - klass->gl_render = - GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_default_gl_render); + klass->setup = GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_default_setup); + + klass->fill_gl_memory = + GST_DEBUG_FUNCPTR(gst_gl_base_audio_visualizer_default_fill_gl_memory); + + g_object_class_install_property( + gobject_class, PROP_MIN_FPS_N, + 
g_param_spec_int("min-fps-n", "Min FPS numerator", + "Specifies the numerator for the min fps (EMA)", 1, 1000, + DEFAULT_MIN_FPS_N, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_MIN_FPS_D, + g_param_spec_int("min-fps-d", "Min FPS denominator", + "Specifies the denominator for the min fps (EMA)", 1, + 1000, DEFAULT_MIN_FPS_D, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_PIPELINE_LIVE, + g_param_spec_string("pipeline-live", "Pipeline Live", + "Specifies if this element renders in real-time " + "(true) or as fast as possible for offline rendering " + "(false) or to auto-detect pipeline clock (auto)", + DEFAULT_PIPELINE_LIVE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +} + +/** + * Callback function to receive fps changes from render buffer. + * + * @param user_data Render buffer to use. + * @param frame_duration New fps frame duration. + */ +static void adjust_fps_callback(gpointer user_data, guint64 frame_duration) { + + if (frame_duration == 0) { + return; + } + + RBRenderBuffer *render_state = (RBRenderBuffer *)user_data; + + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(render_state->plugin); + + gst_pm_audio_visualizer_adjust_fps(scope, frame_duration); } static void gst_gl_base_audio_visualizer_init(GstGLBaseAudioVisualizer *glav) { glav->priv = gst_gl_base_audio_visualizer_get_instance_private(glav); glav->priv->gl_started = FALSE; - glav->priv->gl_result = TRUE; + glav->priv->fbo = NULL; + glav->is_live = GST_GL_BASE_AUDIO_VISUALIZER_AUTO; + glav->priv->is_realtime = FALSE; glav->context = NULL; + + glav->min_fps_n = DEFAULT_MIN_FPS_N; + glav->min_fps_d = DEFAULT_MIN_FPS_D; + g_rec_mutex_init(&glav->priv->context_lock); + gst_gl_base_audio_visualizer_start(glav); } -static void gst_gl_base_audio_visualizer_finalize(GObject *object) { +static void gst_gl_base_audio_visualizer_dispose(GObject *object) { GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(object); gst_gl_base_audio_visualizer_stop(glav); g_rec_mutex_clear(&glav->priv->context_lock); - G_OBJECT_CLASS(parent_class)->finalize(object); + G_OBJECT_CLASS(parent_class)->dispose(object); } static void gst_gl_base_audio_visualizer_set_property(GObject *object, @@ -174,6 +329,26 @@ static void gst_gl_base_audio_visualizer_set_property(GObject *object, GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(object); switch (prop_id) { + + case PROP_MIN_FPS_N: + glav->min_fps_n = g_value_get_int(value); + break; + + case PROP_MIN_FPS_D: + glav->min_fps_d = g_value_get_int(value); + break; + + case PROP_PIPELINE_LIVE: + const char *str = g_value_get_string(value); + if (strcasecmp("true", str) == 0) { + glav->is_live = GST_GL_BASE_AUDIO_VISUALIZER_REALTIME; + } else if (strcasecmp("false", str) == 0) { + glav->is_live = GST_GL_BASE_AUDIO_VISUALIZER_OFFLINE; + } else { + glav->is_live = GST_GL_BASE_AUDIO_VISUALIZER_AUTO; + } + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); break; @@ -187,6 +362,25 @@ static void gst_gl_base_audio_visualizer_get_property(GObject *object, GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(object); switch (prop_id) { + + case PROP_MIN_FPS_N: + g_value_set_int(value, glav->min_fps_n); + break; + + case PROP_MIN_FPS_D: + g_value_set_int(value, glav->min_fps_d); + break; + + case PROP_PIPELINE_LIVE: + if (glav->is_live == GST_GL_BASE_AUDIO_VISUALIZER_REALTIME) { + g_value_set_string(value, "true"); + } else if 
(glav->is_live == GST_GL_BASE_AUDIO_VISUALIZER_OFFLINE) { + g_value_set_string(value, "false"); + } else { + g_value_set_string(value, "auto"); + } + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); break; @@ -212,8 +406,7 @@ static void gst_gl_base_audio_visualizer_set_context(GstElement *element, if (old_display != new_display) { gst_clear_object(&glav->context); if (gst_gl_base_audio_visualizer_find_gl_context_unlocked(glav)) { - // TODO does this need to be handled ? - // gst_pad_mark_reconfigure (GST_BASE_SRC_PAD (glav)); + gst_pad_mark_reconfigure(GST_BASE_SRC_PAD(glav)); } } } @@ -224,6 +417,39 @@ static void gst_gl_base_audio_visualizer_set_context(GstElement *element, GST_ELEMENT_CLASS(parent_class)->set_context(element, context); } +/** + * Find the pipeline and determine if it is live. + * Not supported on Windows. + * + * @param element Plugin element. + * + * @return TRUE if the pipeline is live. + */ +#ifndef _WIN32 + +static gboolean is_pipeline_live(GstElement *element) { + GstPipeline *pipeline = NULL; + gboolean is_live = FALSE; + + GstObject *parent = GST_OBJECT(element); + while (parent && !GST_IS_PIPELINE(parent)) { + GstObject *next = gst_object_get_parent(parent); + if (parent != GST_OBJECT(element)) + gst_object_unref(parent); + parent = next; + } + + if (parent && GST_IS_PIPELINE(parent)) { + pipeline = GST_PIPELINE(parent); + is_live = gst_pipeline_is_live(pipeline); + gst_object_unref(parent); + } + + return is_live; +} + +#endif + static gboolean gst_gl_base_audio_visualizer_default_gl_start(GstGLBaseAudioVisualizer *glav) { return TRUE; @@ -237,6 +463,7 @@ gst_gl_base_audio_visualizer_default_setup(GstGLBaseAudioVisualizer *glav) { static void gst_gl_base_audio_visualizer_gl_start(GstGLContext *context, gpointer data) { GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(data); + GstPMAudioVisualizer *pmav = GST_PM_AUDIO_VISUALIZER(data); GstGLBaseAudioVisualizerClass *glav_class = GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(glav); @@ -244,7 +471,45 @@ static void gst_gl_base_audio_visualizer_gl_start(GstGLContext *context, gst_gl_insert_debug_marker(glav->context, "starting element %s", GST_OBJECT_NAME(glav)); + // init fbo for rtt + glav->priv->fbo = gst_gl_framebuffer_new_with_default_depth( + context, GST_VIDEO_INFO_WIDTH(&pmav->vinfo), + GST_VIDEO_INFO_HEIGHT(&pmav->vinfo)); + + // initialize render buffer + GstClockTime max_frame_duration = + gst_util_uint64_scale_int(GST_SECOND, glav->min_fps_d, glav->min_fps_n); + + GstClockTime caps_frame_duration = + gst_util_uint64_scale_int(GST_SECOND, GST_VIDEO_INFO_FPS_D(&pmav->vinfo), + GST_VIDEO_INFO_FPS_N(&pmav->vinfo)); + + // determine if we're using a real-time pipeline + if (glav->is_live == GST_GL_BASE_AUDIO_VISUALIZER_OFFLINE) { + glav->priv->is_realtime = FALSE; + } else if (glav->is_live == GST_GL_BASE_AUDIO_VISUALIZER_REALTIME) { + glav->priv->is_realtime = TRUE; + } else { + // auto-detect, unless we're on windows +#ifdef _WIN32 + glav->priv->is_realtime = FALSE; +#else + glav->priv->is_realtime = is_pipeline_live(GST_ELEMENT(data)); +#endif + } + + // render loop QoS is disabled for offline rendering + rb_init_render_buffer( + &glav->priv->render_buffer, GST_OBJECT(glav), glav->context, pmav->srcpad, + gst_gl_base_audio_visualizer_fill_gl, adjust_fps_callback, + max_frame_duration, caps_frame_duration, glav->priv->is_realtime, + glav->priv->is_realtime); + + // cascade gl start to implementor glav->priv->gl_started = glav_class->gl_start(glav); + + // get gl rendering 
going + rb_start(&glav->priv->render_buffer); } static void @@ -256,87 +521,191 @@ static void gst_gl_base_audio_visualizer_gl_stop(GstGLContext *context, GstGLBaseAudioVisualizerClass *glav_class = GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(glav); - GST_INFO_OBJECT(glav, "stopping"); + GST_INFO_OBJECT(glav, "gl stopping"); gst_gl_insert_debug_marker(glav->context, "stopping element %s", GST_OBJECT_NAME(glav)); - if (glav->priv->gl_started) + // stop gl rendering first + rb_stop(&glav->priv->render_buffer); + rb_clear(&glav->priv->render_buffer); + + // clean up implementor + if (glav->priv->gl_started) { glav_class->gl_stop(glav); + } glav->priv->gl_started = FALSE; -} -static gboolean gst_gl_base_audio_visualizer_setup(GstAudioVisualizer *gstav) { - GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(gstav); - GstGLBaseAudioVisualizerClass *glav_class = - GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(gstav); + // clean up render buffer + rb_dispose_render_buffer(&glav->priv->render_buffer); - // cascade setup to the derived plugin after gl initialization has been - // completed - return glav_class->setup(glav); + // clean up state + if (glav->priv->fbo) { + gst_object_unref(glav->priv->fbo); + } + + gst_pm_audio_visualizer_dispose_buffer_pool(GST_PM_AUDIO_VISUALIZER(data)); } -static gboolean gst_gl_base_audio_visualizer_default_gl_render( - GstGLBaseAudioVisualizer *glav, GstBuffer *audio, GstVideoFrame *video) { +static gboolean gst_gl_base_audio_visualizer_default_fill_gl_memory( + GstAVRenderParams *render_data) { + (void)render_data; return TRUE; } -typedef struct { - GstGLBaseAudioVisualizer *glav; - GstBuffer *in_audio; - GstVideoFrame *out_video; -} GstGLRenderCallbackParams; +static void gst_gl_base_audio_visualizer_fill_gl(GstGLContext *context, + gpointer render_slot_ptr) { + + // we're inside the gl thread! 
+ + RBSlot *render_slot = (RBSlot *)render_slot_ptr; + + GstGLBaseAudioVisualizer *glav = + GST_GL_BASE_AUDIO_VISUALIZER(render_slot->plugin); -static void -gst_gl_base_audio_visualizer_gl_thread_render_callback(gpointer params) { - GstGLRenderCallbackParams *cb_params = (GstGLRenderCallbackParams *)params; GstGLBaseAudioVisualizerClass *klass = - GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(cb_params->glav); + GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(render_slot->plugin); + + GstPMAudioVisualizer *pmav = GST_PM_AUDIO_VISUALIZER(render_slot->plugin); + + GstBuffer *out_buf; + GstVideoFrame out_video; - // inside gl thread: call virtual render function with audio and video - cb_params->glav->priv->gl_result = klass->gl_render( - cb_params->glav, cb_params->in_audio, cb_params->out_video); + // obtain output buffer from the (GL texture backed) pool + gst_pm_audio_visualizer_util_prepare_output_buffer(pmav, &out_buf); + + // Check for GL sync meta + GstGLSyncMeta *sync_meta = gst_buffer_get_gl_sync_meta(out_buf); + + // if (sync_meta) { + // wait until GPU is done using this buffer should not be needed + // gst_gl_sync_meta_wait(sync_meta, glav->context); + // } + + // map output video frame to buffer outbuf with gl flags + gst_video_frame_map(&out_video, &pmav->vinfo, out_buf, + GST_MAP_WRITE | GST_MAP_GL | + GST_VIDEO_FRAME_MAP_FLAG_NO_REF); + + GstAVRenderParams ds_rd; + ds_rd.in_audio = render_slot->in_audio; + ds_rd.mem = GST_GL_MEMORY_CAST(gst_buffer_peek_memory(out_buf, 0)); + ds_rd.fbo = glav->priv->fbo; + ds_rd.pts = render_slot->pts; + ds_rd.plugin = glav; + + GST_TRACE_OBJECT(render_slot->plugin, "filling gl memory %p", ds_rd.mem); + + // call virtual render function with audio and video + render_slot->gl_result = klass->fill_gl_memory(&ds_rd); + + gst_video_frame_unmap(&out_video); + + if (sync_meta) + gst_gl_sync_meta_set_sync_point(sync_meta, glav->context); + + render_slot->out_buf = out_buf; + // ownership transferred + out_buf = NULL; } -static gboolean gst_gl_base_audio_visualizer_render(GstAudioVisualizer *bscope, - GstBuffer *audio, - GstVideoFrame *video) { - GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(bscope); - GstGLRenderCallbackParams cb_params; - GstGLWindow *window; +static GstFlowReturn gst_gl_base_audio_visualizer_fill( + GstPMAudioVisualizer *bscope, GstGLBaseAudioVisualizer *glav, + GstBuffer *audio, GstClockTime pts, guint64 frame_duration) { g_rec_mutex_lock(&glav->priv->context_lock); + if (G_UNLIKELY(!glav->context)) + goto not_negotiated; + + /* 0 framerate and we are at the second frame, eos */ + if (G_UNLIKELY(GST_VIDEO_INFO_FPS_N(&bscope->vinfo) == 0 && + glav->priv->n_frames == 1)) + goto eos; - // wrap params into cb_params struct to pass them to the GL window/thread via - // userdata pointer - cb_params.glav = glav; - cb_params.in_audio = audio; - cb_params.out_video = video; + if (glav->priv->is_realtime == FALSE) { + g_rec_mutex_unlock(&glav->priv->context_lock); - window = gst_gl_context_get_window(glav->context); + // offline rendering can be done synchronously, avoid queuing overhead + rb_render_blocking(&glav->priv->render_buffer, audio, pts, frame_duration); - // dispatch render call through the gl thread - // call is blocking, accessing audio and video params from gl thread *should* - // be safe - gst_gl_window_send_message( - window, - GST_GL_WINDOW_CB(gst_gl_base_audio_visualizer_gl_thread_render_callback), - &cb_params); + g_rec_mutex_lock(&glav->priv->context_lock); + } else { + // prepare args for queuing frame rendering + 
RBQueueArgs args; + args.render_buffer = &glav->priv->render_buffer; + args.in_audio = audio; + args.pts = pts; + args.frame_duration = frame_duration; + + // limit wait based on fps factor, make sure we never wait too long in order + // to keep in sync + args.max_wait = (GstClockTimeDiff)gst_util_uint64_scale_int( + frame_duration, MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_N, + MAX_QUEUE_WAIT_TIME_IN_FRAME_DURATIONS_D); - gst_object_unref(window); + g_rec_mutex_unlock(&glav->priv->context_lock); - g_rec_mutex_unlock(&glav->priv->context_lock); + // dispatch gst_gl_base_audio_visualizer_fill_gl to the gl render buffer, + // rendering is deferred. This may block for a while though. + rb_queue_render_task_log(&args); - if (glav->priv->gl_result) { - glav->priv->n_frames++; - } else { - // gl error - GST_ELEMENT_ERROR(glav, RESOURCE, NOT_FOUND, - (("failed to render audio visualizer")), - (("A GL error occurred"))); + g_rec_mutex_lock(&glav->priv->context_lock); } - return glav->priv->gl_result; + glav->priv->n_frames++; + + g_rec_mutex_unlock(&glav->priv->context_lock); + + return GST_FLOW_OK; + +not_negotiated: { + g_rec_mutex_unlock(&glav->priv->context_lock); + GST_ELEMENT_ERROR(glav, CORE, NEGOTIATION, (NULL), + (("format wasn't negotiated before get function"))); + return GST_FLOW_NOT_NEGOTIATED; +} +eos: { + g_rec_mutex_unlock(&glav->priv->context_lock); + GST_DEBUG_OBJECT(glav, "eos: 0 framerate, frame %d", + (gint)glav->priv->n_frames); + return GST_FLOW_EOS; +} +} + +static gboolean +gst_gl_base_audio_visualizer_parent_setup(GstPMAudioVisualizer *pmav) { + GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(pmav); + GstGLBaseAudioVisualizerClass *glav_class = + GST_GL_BASE_AUDIO_VISUALIZER_GET_CLASS(pmav); + + GstClockTime caps_frame_duration = + gst_util_uint64_scale_int(GST_SECOND, GST_VIDEO_INFO_FPS_D(&pmav->vinfo), + GST_VIDEO_INFO_FPS_N(&pmav->vinfo)); + + rb_set_caps_frame_duration(&glav->priv->render_buffer, caps_frame_duration); + + GST_INFO_OBJECT(glav, + "GL setup - render config: is-live: %s, caps-frame-duration: " + "%" GST_TIME_FORMAT + ", min-fps: %d/%d, min-fps-duration: %" GST_TIME_FORMAT, + glav->priv->is_realtime ? 
"true" : "false", + GST_TIME_ARGS(caps_frame_duration), glav->min_fps_n, + glav->min_fps_d, + GST_TIME_ARGS(glav->priv->render_buffer.max_frame_duration)); + + // cascade setup to the derived plugin after gl initialization has been + // completed + return glav_class->setup(glav); +} + +static GstFlowReturn +gst_gl_base_audio_visualizer_parent_render(GstPMAudioVisualizer *bscope, + GstBuffer *audio, GstClockTime pts, + guint64 frame_duration) { + GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(bscope); + + return gst_gl_base_audio_visualizer_fill(bscope, glav, audio, pts, + frame_duration); } static void gst_gl_base_audio_visualizer_start(GstGLBaseAudioVisualizer *glav) { @@ -493,10 +862,9 @@ error: { } } -static gboolean -gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, - GstQuery *query) { - GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(gstav); +static gboolean gst_gl_base_audio_visualizer_parent_decide_allocation( + GstPMAudioVisualizer *pmav, GstQuery *query) { + GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(pmav); GstGLContext *context; GstBufferPool *pool = NULL; GstStructure *config; @@ -524,7 +892,8 @@ gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, gst_video_info_init(&vinfo); gst_video_info_from_caps(&vinfo, caps); size = vinfo.size; - min = max = 0; + min = 0; + max = 0; update_pool = FALSE; } @@ -536,6 +905,12 @@ gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, } config = gst_buffer_pool_get_config(pool); + // there should be at least 2 textures, so that one is rendered while the + // other one is pushed downstream + // todo: pool size config properties needed ? + if (min < 2) { + min = 2; + } gst_buffer_pool_config_set_params(config, caps, size, min, max); gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META); if (gst_query_find_allocation_meta(query, GST_GL_SYNC_META_API_TYPE, NULL)) @@ -544,6 +919,9 @@ gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, gst_buffer_pool_config_add_option( config, GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META); + gst_buffer_pool_config_add_option( + config, GST_BUFFER_POOL_OPTION_GL_TEXTURE_TARGET_2D); + gst_buffer_pool_set_config(pool, config); if (update_pool) @@ -557,6 +935,25 @@ gst_gl_base_audio_visualizer_decide_allocation(GstAudioVisualizer *gstav, return TRUE; } +static GstPipeline *get_pipeline(GstElement *element) { + GstObject *parent = GST_OBJECT(element); + + while (parent) { + if (GST_IS_PIPELINE(parent)) + return GST_PIPELINE(parent); + + GstObject *next = gst_object_get_parent(parent); + + // we increase ref with get_parent, so unref previous level + if (parent != GST_OBJECT(element)) + gst_object_unref(parent); + + parent = next; + } + + return NULL; // no pipeline found +} + static GstStateChangeReturn gst_gl_base_audio_visualizer_change_state(GstElement *element, GstStateChange transition) { @@ -568,10 +965,6 @@ gst_gl_base_audio_visualizer_change_state(GstElement *element, gst_element_state_get_name(GST_STATE_TRANSITION_CURRENT(transition)), gst_element_state_get_name(GST_STATE_TRANSITION_NEXT(transition))); - ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); - if (ret == GST_STATE_CHANGE_FAILURE) - return ret; - switch (transition) { case GST_STATE_CHANGE_READY_TO_NULL: g_rec_mutex_lock(&glav->priv->context_lock); @@ -579,6 +972,7 @@ gst_gl_base_audio_visualizer_change_state(GstElement *element, gst_clear_object(&glav->display); 
g_rec_mutex_unlock(&glav->priv->context_lock); break; + default: break; } diff --git a/src/gstglbaseaudiovisualizer.h b/src/gstglbaseaudiovisualizer.h index a48781b..52ba6d2 100644 --- a/src/gstglbaseaudiovisualizer.h +++ b/src/gstglbaseaudiovisualizer.h @@ -32,20 +32,19 @@ #ifndef __GST_GL_BASE_AUDIO_VISUALIZER_H__ #define __GST_GL_BASE_AUDIO_VISUALIZER_H__ +#include "gstpmaudiovisualizer.h" + #include -#include -#include -#include typedef struct _GstGLBaseAudioVisualizer GstGLBaseAudioVisualizer; typedef struct _GstGLBaseAudioVisualizerClass GstGLBaseAudioVisualizerClass; typedef struct _GstGLBaseAudioVisualizerPrivate GstGLBaseAudioVisualizerPrivate; +typedef struct _GstAVRenderParams GstAVRenderParams; G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstGLBaseAudioVisualizer, gst_object_unref) G_BEGIN_DECLS -GST_GL_API GType gst_gl_base_audio_visualizer_get_type(void); #define GST_TYPE_GL_BASE_AUDIO_VISUALIZER \ @@ -64,48 +63,137 @@ GType gst_gl_base_audio_visualizer_get_type(void); (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_GL_BASE_AUDIO_VISUALIZER, \ GstGLBaseAudioVisualizerClass)) +/** + * Plugin mode of operation type. + */ +typedef enum { + /** + * Real-time / live rendering. + */ + GST_GL_BASE_AUDIO_VISUALIZER_REALTIME, + + /** + * Faster-than-real-time rendering. + */ + GST_GL_BASE_AUDIO_VISUALIZER_OFFLINE, + + /** + * Auto-detect if pipeline is live. + */ + GST_GL_BASE_AUDIO_VISUALIZER_AUTO +} GstGLBaseAudioVisualizerMode; + /** * GstGLBaseAudioVisualizer: * @display: the currently configured #GstGLDisplay * @context: the currently configured #GstGLContext * - * The parent instance type of a base GL Audio Visualizer. + * The parent instance type of base GL Audio Visualizer. */ struct _GstGLBaseAudioVisualizer { - GstAudioVisualizer parent; + GstPMAudioVisualizer parent; /*< public >*/ GstGLDisplay *display; GstGLContext *context; - /*< private >*/ - gpointer _padding[GST_PADDING]; + /** + * Minimum FPS numerator setting for EMA. + */ + gint min_fps_n; + + /** + * Minimum FPS denominator setting for EMA. + */ + gint min_fps_d; + + /** + * Operation mode property. + */ + GstGLBaseAudioVisualizerMode is_live; GstGLBaseAudioVisualizerPrivate *priv; + + /*< private >*/ + gpointer _padding[GST_PADDING]; }; /** * GstGLBaseAudioVisualizerClass: * @supported_gl_api: the logical-OR of #GstGLAPI's supported by this element - * @gl_start: called in the GL thread to setup the element GL state. + * @gl_start: called in the GL thread to set up the element GL state. * @gl_stop: called in the GL thread to clean up the element GL state. * @gl_render: called in the GL thread to fill the current video texture. * @setup: called when the format changes (delegate from - * GstAudioVisualizer.setup) + * GstPMAudioVisualizer.setup) * * The base class for OpenGL based audio visualizers. - * + * Extends GstPMAudioVisualizer to add GL rendering callbacks. + * Handles GL context and render buffers. */ struct _GstGLBaseAudioVisualizerClass { - GstAudioVisualizerClass parent_class; + GstPMAudioVisualizerClass parent_class; /*< public >*/ + /** + * Supported OpenGL API flags. + */ GstGLAPI supported_gl_api; + + /** + * Virtual function called from gl thread once the gl context can be used for + * initializing gl resources. + */ gboolean (*gl_start)(GstGLBaseAudioVisualizer *glav); + + /** + * Virtual function called from gl thread when gl context is being closed for + * gl resource clean up. 
+ */ void (*gl_stop)(GstGLBaseAudioVisualizer *glav); - gboolean (*gl_render)(GstGLBaseAudioVisualizer *glav, GstBuffer *audio, - GstVideoFrame *video); + + /** + * Virtual function called when caps have been set for the pipeline. + */ gboolean (*setup)(GstGLBaseAudioVisualizer *glav); + + /* Virtual function called to render each frame, in_audio is optional. */ + gboolean (*fill_gl_memory)(GstAVRenderParams *render_data); + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +/** + * Parameter struct for rendering calls. + */ +struct _GstAVRenderParams { + + /** + * Context plugin. + */ + GstGLBaseAudioVisualizer *plugin; + + /** + * Framebuffer to use for rendering. + */ + GstGLFramebuffer *fbo; + + /** + * Rendering target GL memory. + */ + GstGLMemory *mem; + + /** + * Audio data for frame. + */ + GstBuffer *in_audio; + + /** + * Current buffer presentation timestamp. + */ + GstClockTime pts; + /*< private >*/ gpointer _padding[GST_PADDING]; }; diff --git a/src/gstpmaudiovisualizer.c b/src/gstpmaudiovisualizer.c new file mode 100644 index 0000000..56a2ee9 --- /dev/null +++ b/src/gstpmaudiovisualizer.c @@ -0,0 +1,1251 @@ +/* GStreamer + * Copyright (C) <2011> Stefan Kost + * Copyright (C) <2015> Luis de Bethencourt + * + * gstaudiovisualizer.h: base class for audio visualisation elements + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ +/** + * SECTION:gstaudiovisualizer + * @title: GstPMAudioVisualizer + * @short_description: Base class for visualizers. + * + * A baseclass for scopes (visualizers). It takes care of re-fitting the + * audio-rate to video-rate and handles renegotiation (downstream video size + * changes). + * + */ + +/* + * The code in this file is based on + * GStreamer / gst-plugins-base, latest version as of 2025/05/29. + * gst-libs/gst/pbutils/gstaudiovisualizer.c Git Repository: + * https://gitlab.freedesktop.org/gstreamer/gstreamer/-/blob/main/subprojects/gst-plugins-base/gst-libs/gst/pbutils/gstaudiovisualizer.c + * Original copyright notice has been retained at the top of this file. + * + * The code has been modified to improve compatibility with projectM and OpenGL. + * + * - Main memory based video frame buffers have been removed. + * + * - Cpu based transition shaders have been removed. + * + * - Bugfix for the amount of bytes being flushed for a single video frame from + * the audio input buffer. + * + * - Uses a sample count based approach for pts/dts timestamps instead + * GstAdapter derived timestamps. + * + * - Consistent locking, memory (de)allocation + * + * - Allow dynamic fps adjustments while staying sample accurate. + * + * - Segment event propagation. + * + * - Memory management and rendering is implementer-provided. 
+ * + * Typical plug-in call order for implementer-provided functions: + * - decide_allocation (once) + * - setup (when caps change, typically once) + * - render (once for each frame) + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstpmaudiovisualizer.h" + +#include + +#include +#include + +GST_DEBUG_CATEGORY_STATIC(pm_audio_visualizer_debug); +#define GST_CAT_DEFAULT (pm_audio_visualizer_debug) + +/** + * Ignore QoS events during the first couple of frames that can cause a start + * delay. + */ +#ifndef QOS_IGNORE_FIRST_N_FRAMES +#define QOS_IGNORE_FIRST_N_FRAMES 5 +#endif + +/** + * Min latency change required to push a latency event + * upstream. The latency is compared to last published latency. + */ +#ifndef LATENCY_EVENT_MIN_CHANGE +#define LATENCY_EVENT_MIN_CHANGE GST_MSECOND +#endif + +enum { PROP_0 }; + +static GstBaseTransformClass *parent_class = NULL; +static gint private_offset = 0; + +static void +gst_pm_audio_visualizer_class_init(GstPMAudioVisualizerClass *klass); +static void gst_pm_audio_visualizer_init(GstPMAudioVisualizer *scope, + GstPMAudioVisualizerClass *g_class); +static void gst_pm_audio_visualizer_set_property(GObject *object, guint prop_id, + const GValue *value, + GParamSpec *pspec); +static void gst_pm_audio_visualizer_get_property(GObject *object, guint prop_id, + GValue *value, + GParamSpec *pspec); +static void gst_pm_audio_visualizer_dispose(GObject *object); + +static gboolean +gst_pm_audio_visualizer_src_negotiate(GstPMAudioVisualizer *scope); +static gboolean gst_pm_audio_visualizer_src_setcaps(GstPMAudioVisualizer *scope, + GstCaps *caps); +static gboolean +gst_pm_audio_visualizer_sink_setcaps(GstPMAudioVisualizer *scope, + GstCaps *caps); + +static GstFlowReturn gst_pm_audio_visualizer_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer); + +static gboolean gst_pm_audio_visualizer_src_event(GstPad *pad, + GstObject *parent, + GstEvent *event); +static gboolean gst_pm_audio_visualizer_sink_event(GstPad *pad, + GstObject *parent, + GstEvent *event); + +static gboolean gst_pm_audio_visualizer_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query); + +static GstStateChangeReturn +gst_pm_audio_visualizer_parent_change_state(GstElement *element, + GstStateChange transition); + +static GstStateChangeReturn +gst_pm_audio_visualizer_default_change_state(GstElement *element, + GstStateChange transition); + +static gboolean +gst_pm_audio_visualizer_do_bufferpool(GstPMAudioVisualizer *scope, + GstCaps *outcaps); + +static gboolean +gst_pm_audio_visualizer_default_decide_allocation(GstPMAudioVisualizer *scope, + GstQuery *query); + +static void gst_pm_audio_visualizer_send_latency_if_needed_unlocked( + GstPMAudioVisualizer *scope); + +struct _GstPMAudioVisualizerPrivate { + gboolean negotiated; + + GstBufferPool *pool; + gboolean pool_active; + GstAllocator *allocator; + GstAllocationParams params; + GstQuery *query; + + /* pads */ + GstPad *sinkpad; + + GstAdapter *adapter; + + GstBuffer *inbuf; + + guint spf; /* samples per video frame */ + + /* QoS stuff */ /* with LOCK */ + gdouble proportion; + /* qos: earliest time to render the next frame, the render loop will skip + * frames until this time */ + GstClockTime earliest_time; + + guint dropped; /* frames dropped / not dropped */ + guint processed; + + /* samples consumed, relative to the current segment. Basis for timestamps. 
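+   * Each frame's pts is derived as pts_offset + samples_consumed * GST_SECOND
+   * / audio rate. For example, at 44100 Hz and 60 fps one frame consumes 735
+   * samples and advances the pts by ~16.7 ms, so timestamps stay sample
+   * accurate instead of accumulating rounding drift.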
*/ + guint64 samples_consumed; + + /* configuration mutex */ + GMutex config_lock; + + GstSegment segment; + + /* ready flag and condition triggered once the plugin is ready to process + * buffers, triggers every time a caps event is processed */ + GCond ready_cond; + gboolean ready; + + /* have src caps been setup */ + gboolean src_ready; + + /* have sink caps been setup */ + gboolean sink_ready; + + /* clock timestamp pts offset, either from first audio buffer pts or segment + * event */ + gboolean pts_offset_initialized; + GstClockTime pts_offset; + GstClockTime caps_frame_duration; + GstClockTime last_reported_latency; + gboolean fps_changed; +}; + +/* base class */ + +GType gst_pm_audio_visualizer_get_type(void) { + static gsize audio_visualizer_type = 0; + + if (g_once_init_enter(&audio_visualizer_type)) { + static const GTypeInfo audio_visualizer_info = { + sizeof(GstPMAudioVisualizerClass), + NULL, + NULL, + (GClassInitFunc)gst_pm_audio_visualizer_class_init, + NULL, + NULL, + sizeof(GstPMAudioVisualizer), + 0, + (GInstanceInitFunc)gst_pm_audio_visualizer_init, + }; + GType _type; + + /* TODO: rename when exporting it as a library */ + _type = + g_type_register_static(GST_TYPE_ELEMENT, "GstPMAudioVisualizer", + &audio_visualizer_info, G_TYPE_FLAG_ABSTRACT); + + private_offset = + g_type_add_instance_private(_type, sizeof(GstPMAudioVisualizerPrivate)); + + g_once_init_leave(&audio_visualizer_type, _type); + } + return (GType)audio_visualizer_type; +} + +static inline GstPMAudioVisualizerPrivate * +gst_pm_audio_visualizer_get_instance_private(GstPMAudioVisualizer *self) { + return (G_STRUCT_MEMBER_P(self, private_offset)); +} + +static void +gst_pm_audio_visualizer_class_init(GstPMAudioVisualizerClass *klass) { + GObjectClass *gobject_class = (GObjectClass *)klass; + GstElementClass *element_class = (GstElementClass *)klass; + + if (private_offset != 0) + g_type_class_adjust_private_offset(klass, &private_offset); + + parent_class = g_type_class_peek_parent(klass); + + GST_DEBUG_CATEGORY_INIT(pm_audio_visualizer_debug, "pmaudiovisualizer", 0, + "projectm audio visualisation base class"); + + gobject_class->set_property = gst_pm_audio_visualizer_set_property; + gobject_class->get_property = gst_pm_audio_visualizer_get_property; + gobject_class->dispose = gst_pm_audio_visualizer_dispose; + + element_class->change_state = + GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_parent_change_state); + + klass->change_state = + GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_default_change_state); + + klass->decide_allocation = + GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_default_decide_allocation); + + klass->segment_change = NULL; +} + +static void gst_pm_audio_visualizer_init(GstPMAudioVisualizer *scope, + GstPMAudioVisualizerClass *g_class) { + GstPadTemplate *pad_template; + + scope->priv = gst_pm_audio_visualizer_get_instance_private(scope); + + /* create the sink pad */ + pad_template = + gst_element_class_get_pad_template(GST_ELEMENT_CLASS(g_class), "sink"); + g_return_if_fail(pad_template != NULL); + scope->priv->sinkpad = gst_pad_new_from_template(pad_template, "sink"); + gst_pad_set_chain_function(scope->priv->sinkpad, + GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_chain)); + gst_pad_set_event_function( + scope->priv->sinkpad, + GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_sink_event)); + gst_element_add_pad(GST_ELEMENT(scope), scope->priv->sinkpad); + + /* create the src pad */ + pad_template = + gst_element_class_get_pad_template(GST_ELEMENT_CLASS(g_class), "src"); + g_return_if_fail(pad_template != 
NULL); + scope->srcpad = gst_pad_new_from_template(pad_template, "src"); + gst_pad_set_event_function( + scope->srcpad, GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_src_event)); + gst_pad_set_query_function( + scope->srcpad, GST_DEBUG_FUNCPTR(gst_pm_audio_visualizer_src_query)); + gst_element_add_pad(GST_ELEMENT(scope), scope->srcpad); + + scope->priv->adapter = gst_adapter_new(); + scope->priv->inbuf = gst_buffer_new(); + g_cond_init(&scope->priv->ready_cond); + + scope->priv->dropped = 0; + scope->priv->earliest_time = 0; + scope->priv->processed = 0; + scope->priv->samples_consumed = 0; + scope->priv->src_ready = FALSE; + scope->priv->sink_ready = FALSE; + scope->priv->ready = FALSE; + scope->priv->pts_offset_initialized = FALSE; + scope->priv->pts_offset = GST_CLOCK_TIME_NONE; + scope->priv->caps_frame_duration = 0; + scope->priv->last_reported_latency = GST_CLOCK_TIME_NONE; + scope->priv->fps_changed = FALSE; + scope->latency = GST_CLOCK_TIME_NONE; + + /* properties */ + + /* reset the initial video state */ + gst_video_info_init(&scope->vinfo); + scope->req_frame_duration = GST_CLOCK_TIME_NONE; + + /* reset the initial state */ + gst_audio_info_init(&scope->ainfo); + gst_video_info_init(&scope->vinfo); + + g_mutex_init(&scope->priv->config_lock); +} + +static void gst_pm_audio_visualizer_set_property(GObject *object, guint prop_id, + const GValue *value, + GParamSpec *pspec) { + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(object); + + switch (prop_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +static void gst_pm_audio_visualizer_get_property(GObject *object, guint prop_id, + GValue *value, + GParamSpec *pspec) { + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(object); + + switch (prop_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +static void gst_pm_audio_visualizer_dispose(GObject *object) { + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(object); + + // make sure nobody is still waiting to get started + g_mutex_lock(&scope->priv->config_lock); + scope->priv->ready = TRUE; + g_cond_broadcast(&scope->priv->ready_cond); + g_mutex_unlock(&scope->priv->config_lock); + + if (scope->priv->adapter) { + g_object_unref(scope->priv->adapter); + scope->priv->adapter = NULL; + } + if (scope->priv->inbuf) { + gst_buffer_unref(scope->priv->inbuf); + scope->priv->inbuf = NULL; + } + + GST_OBJECT_LOCK(scope); + if (scope->priv->pool) { + if (scope->priv->pool_active) + gst_buffer_pool_set_active(scope->priv->pool, FALSE); + gst_object_unref(scope->priv->pool); + scope->priv->pool = NULL; + scope->priv->pool_active = FALSE; + } + if (scope->priv->allocator) { + gst_object_unref(scope->priv->allocator); + scope->priv->allocator = NULL; + } + if (scope->priv->query) { + gst_query_unref(scope->priv->query); + scope->priv->query = NULL; + } + GST_OBJECT_UNLOCK(scope); + + g_mutex_clear(&scope->priv->config_lock); + g_cond_clear(&scope->priv->ready_cond); + + G_OBJECT_CLASS(parent_class)->dispose(object); +} + +static void +gst_pm_audio_visualizer_reset_unlocked(GstPMAudioVisualizer *scope) { + + gst_adapter_clear(scope->priv->adapter); + gst_segment_init(&scope->priv->segment, GST_FORMAT_UNDEFINED); + + scope->priv->proportion = 1.0; + scope->priv->earliest_time = 0; + scope->priv->dropped = 0; + scope->priv->processed = 0; + scope->priv->samples_consumed = 0; + scope->priv->pts_offset_initialized = FALSE; + scope->priv->pts_offset = GST_CLOCK_TIME_NONE; + scope->latency = 
GST_CLOCK_TIME_NONE; +} + +/* */ +static gboolean gst_pm_audio_visualizer_do_setup(GstPMAudioVisualizer *scope) { + + GstPMAudioVisualizerClass *klass = GST_PM_AUDIO_VISUALIZER_GET_CLASS(scope); + + GST_OBJECT_LOCK(scope); + scope->priv->earliest_time = 0; + GST_OBJECT_UNLOCK(scope); + + g_mutex_lock(&scope->priv->config_lock); + + const guint spf = gst_util_uint64_scale_int( + GST_AUDIO_INFO_RATE(&scope->ainfo), GST_VIDEO_INFO_FPS_D(&scope->vinfo), + GST_VIDEO_INFO_FPS_N(&scope->vinfo)); + + scope->req_spf = spf; + scope->priv->spf = spf; + + g_mutex_unlock(&scope->priv->config_lock); + + if (klass->setup && !klass->setup(scope)) + return FALSE; + + GST_INFO_OBJECT( + scope, "video: dimension %dx%d, framerate %d/%d", + GST_VIDEO_INFO_WIDTH(&scope->vinfo), GST_VIDEO_INFO_HEIGHT(&scope->vinfo), + GST_VIDEO_INFO_FPS_N(&scope->vinfo), GST_VIDEO_INFO_FPS_D(&scope->vinfo)); + + GST_INFO_OBJECT(scope, "audio: rate %d, channels: %d, bpf: %d", + GST_AUDIO_INFO_RATE(&scope->ainfo), + GST_AUDIO_INFO_CHANNELS(&scope->ainfo), + GST_AUDIO_INFO_BPF(&scope->ainfo)); + + GST_INFO_OBJECT(scope, "blocks: spf / req_spf %u", spf); + + g_mutex_lock(&scope->priv->config_lock); + scope->priv->ready = TRUE; + g_cond_broadcast(&scope->priv->ready_cond); + g_mutex_unlock(&scope->priv->config_lock); + + return TRUE; +} + +static void check_ready_unlocked(GstPMAudioVisualizer *scope) { + if (scope->priv->src_ready && scope->priv->sink_ready) { + g_mutex_unlock(&scope->priv->config_lock); + gst_pm_audio_visualizer_do_setup(scope); + g_mutex_lock(&scope->priv->config_lock); + } else { + scope->priv->ready = FALSE; + } +} + +static gboolean +gst_pm_audio_visualizer_sink_setcaps(GstPMAudioVisualizer *scope, + GstCaps *caps) { + GstAudioInfo info; + + if (!gst_audio_info_from_caps(&info, caps)) + goto wrong_caps; + + g_mutex_lock(&scope->priv->config_lock); + scope->ainfo = info; + g_mutex_unlock(&scope->priv->config_lock); + + GST_DEBUG_OBJECT(scope, "audio: channels %d, rate %d", + GST_AUDIO_INFO_CHANNELS(&info), GST_AUDIO_INFO_RATE(&info)); + + if (!gst_pm_audio_visualizer_src_negotiate(scope)) { + goto not_negotiated; + } + + g_mutex_lock(&scope->priv->config_lock); + scope->priv->sink_ready = TRUE; + check_ready_unlocked(scope); + g_mutex_unlock(&scope->priv->config_lock); + + return TRUE; + + /* Errors */ +wrong_caps: { + GST_WARNING_OBJECT(scope, "could not parse caps"); + return FALSE; +} +not_negotiated: { + GST_WARNING_OBJECT(scope, "failed to negotiate"); + return FALSE; +} +} + +static gboolean gst_pm_audio_visualizer_src_setcaps(GstPMAudioVisualizer *scope, + GstCaps *caps) { + GstVideoInfo info; + gboolean res; + + if (!gst_video_info_from_caps(&info, caps)) + goto wrong_caps; + + g_mutex_lock(&scope->priv->config_lock); + + scope->vinfo = info; + + scope->priv->caps_frame_duration = gst_util_uint64_scale_int( + GST_SECOND, GST_VIDEO_INFO_FPS_D(&info), GST_VIDEO_INFO_FPS_N(&info)); + + scope->req_frame_duration = scope->priv->caps_frame_duration; + g_mutex_unlock(&scope->priv->config_lock); + + gst_pad_set_caps(scope->srcpad, caps); + + /* find a pool for the negotiated caps now */ + res = gst_pm_audio_visualizer_do_bufferpool(scope, caps); + gst_caps_unref(caps); + + g_mutex_lock(&scope->priv->config_lock); + scope->priv->src_ready = TRUE; + check_ready_unlocked(scope); + g_mutex_unlock(&scope->priv->config_lock); + + return res; + + /* ERRORS */ +wrong_caps: { + gst_caps_unref(caps); + GST_DEBUG_OBJECT(scope, "error parsing caps"); + return FALSE; +} + +setup_failed: { + GST_WARNING_OBJECT(scope, "failed 
to set up"); + return FALSE; +} +} + +static gboolean +gst_pm_audio_visualizer_src_negotiate(GstPMAudioVisualizer *scope) { + GstCaps *othercaps, *target; + GstStructure *structure; + GstCaps *templ; + gboolean ret; + + templ = gst_pad_get_pad_template_caps(scope->srcpad); + + GST_DEBUG_OBJECT(scope, "performing negotiation"); + + /* see what the peer can do */ + othercaps = gst_pad_peer_query_caps(scope->srcpad, NULL); + if (othercaps) { + target = gst_caps_intersect(othercaps, templ); + gst_caps_unref(othercaps); + gst_caps_unref(templ); + + if (gst_caps_is_empty(target)) + goto no_format; + + target = gst_caps_truncate(target); + } else { + target = templ; + } + + target = gst_caps_make_writable(target); + structure = gst_caps_get_structure(target, 0); + gst_structure_fixate_field_nearest_int(structure, "width", 320); + gst_structure_fixate_field_nearest_int(structure, "height", 200); + gst_structure_fixate_field_nearest_fraction(structure, "framerate", 25, 1); + if (gst_structure_has_field(structure, "pixel-aspect-ratio")) + gst_structure_fixate_field_nearest_fraction(structure, "pixel-aspect-ratio", + 1, 1); + + target = gst_caps_fixate(target); + + GST_DEBUG_OBJECT(scope, "final caps are %" GST_PTR_FORMAT, target); + + ret = gst_pm_audio_visualizer_src_setcaps(scope, target); + + return ret; + +no_format: { + gst_caps_unref(target); + return FALSE; +} +} + +/* takes ownership of the pool, allocator and query */ +static gboolean gst_pm_audio_visualizer_set_allocation( + GstPMAudioVisualizer *scope, GstBufferPool *pool, GstAllocator *allocator, + const GstAllocationParams *params, GstQuery *query) { + GstAllocator *oldalloc; + GstBufferPool *oldpool; + GstQuery *oldquery; + GstPMAudioVisualizerPrivate *priv = scope->priv; + + GST_OBJECT_LOCK(scope); + oldpool = priv->pool; + priv->pool = pool; + priv->pool_active = FALSE; + + oldalloc = priv->allocator; + priv->allocator = allocator; + + oldquery = priv->query; + priv->query = query; + + if (params) + priv->params = *params; + else + gst_allocation_params_init(&priv->params); + GST_OBJECT_UNLOCK(scope); + + if (oldpool) { + GST_DEBUG_OBJECT(scope, "deactivating old pool %p", oldpool); + gst_buffer_pool_set_active(oldpool, FALSE); + gst_object_unref(oldpool); + } + if (oldalloc) { + gst_object_unref(oldalloc); + } + if (oldquery) { + gst_query_unref(oldquery); + } + return TRUE; +} + +static gboolean +gst_pm_audio_visualizer_do_bufferpool(GstPMAudioVisualizer *scope, + GstCaps *outcaps) { + GstQuery *query; + gboolean result = TRUE; + GstBufferPool *pool = NULL; + GstPMAudioVisualizerClass *klass; + GstAllocator *allocator; + GstAllocationParams params; + + /* not passthrough, we need to allocate */ + /* find a pool for the negotiated caps now */ + GST_DEBUG_OBJECT(scope, "doing allocation query"); + query = gst_query_new_allocation(outcaps, TRUE); + + if (!gst_pad_peer_query(scope->srcpad, query)) { + /* not a problem, we use the query defaults */ + GST_DEBUG_OBJECT(scope, "allocation query failed"); + } + + klass = GST_PM_AUDIO_VISUALIZER_GET_CLASS(scope); + + GST_DEBUG_OBJECT(scope, "calling decide_allocation"); + g_assert(klass->decide_allocation != NULL); + result = klass->decide_allocation(scope, query); + + GST_DEBUG_OBJECT(scope, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, result, + query); + + if (!result) + goto no_decide_allocation; + + /* we got configuration from our peer or the decide_allocation method, + * parse them */ + if (gst_query_get_n_allocation_params(query) > 0) { + gst_query_parse_nth_allocation_param(query, 0, 
&allocator, ¶ms); + } else { + allocator = NULL; + gst_allocation_params_init(¶ms); + } + + if (gst_query_get_n_allocation_pools(query) > 0) + gst_query_parse_nth_allocation_pool(query, 0, &pool, NULL, NULL, NULL); + + /* now store */ + result = gst_pm_audio_visualizer_set_allocation(scope, pool, allocator, + ¶ms, query); + + return result; + + /* Errors */ +no_decide_allocation: { + GST_WARNING_OBJECT(scope, "Subclass failed to decide allocation"); + gst_query_unref(query); + + return result; +} +} + +static gboolean +gst_pm_audio_visualizer_default_decide_allocation(GstPMAudioVisualizer *scope, + GstQuery *query) { + /* removed main memory pool implementation. This vmethod is overridden for + * using gl memory by gstglbaseaudiovisualizer. */ + g_error("vmethod gst_pm_audio_visualizer_default_decide_allocation is not " + "implemented"); +} + +GstFlowReturn +gst_pm_audio_visualizer_util_prepare_output_buffer(GstPMAudioVisualizer *scope, + GstBuffer **outbuf) { + GstPMAudioVisualizerPrivate *priv; + + priv = scope->priv; + + g_assert(priv->pool != NULL); + + /* we can't reuse the input buffer */ + if (!priv->pool_active) { + GST_DEBUG_OBJECT(scope, "setting pool %p active", priv->pool); + if (!gst_buffer_pool_set_active(priv->pool, TRUE)) + goto activate_failed; + priv->pool_active = TRUE; + } + GST_TRACE_OBJECT(scope, "using pool alloc"); + + return gst_buffer_pool_acquire_buffer(priv->pool, outbuf, NULL); + + /* ERRORS */ +activate_failed: { + GST_ELEMENT_ERROR(scope, RESOURCE, SETTINGS, + ("failed to activate bufferpool"), + ("failed to activate bufferpool")); + return GST_FLOW_ERROR; +} +} + +static GstFlowReturn gst_pm_audio_visualizer_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer) { + GstFlowReturn ret = GST_FLOW_OK; + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(parent); + GstPMAudioVisualizerClass *klass; + GstClockTime ts; + guint avail, sbpf; + // databuf is a buffer holding one video frame worth of audio data used as + // temp buffer for copying from the adapter only + // inbuf is a plugin-scoped buffer holding a copy of the one video frame worth + // of audio data from the adapter to process + GstBuffer *databuf, *inbuf; + gint bpf; + + klass = GST_PM_AUDIO_VISUALIZER_GET_CLASS(scope); + + // ensure caps have been setup for sink and src pads, and plugin init code is + // done + g_mutex_lock(&scope->priv->config_lock); + while (!scope->priv->ready) { + g_cond_wait(&scope->priv->ready_cond, &scope->priv->config_lock); + } + g_mutex_unlock(&scope->priv->config_lock); + + if (buffer == NULL) { + return GST_FLOW_OK; + } + + /* remember pts timestamp of the first audio buffer as stream clock offset + * timestamp */ + g_mutex_lock(&scope->priv->config_lock); + if (!scope->priv->pts_offset_initialized) { + scope->priv->pts_offset_initialized = TRUE; + scope->priv->pts_offset = GST_BUFFER_PTS(buffer); + + if (gst_debug_category_get_threshold(pm_audio_visualizer_debug) >= + GST_LEVEL_INFO) { + GstClock *clock = gst_element_get_clock(GST_ELEMENT(scope)); + GstClockTime running_time = 0; + if (clock != NULL) { + running_time = gst_clock_get_time(clock) - + gst_element_get_base_time(GST_ELEMENT(scope)); + gst_object_unref(clock); + } + + GST_DEBUG_OBJECT( + scope, + "Buffer ts: %" GST_TIME_FORMAT ", running_time: %" GST_TIME_FORMAT, + GST_TIME_ARGS(scope->priv->pts_offset), GST_TIME_ARGS(running_time)); + } + } + g_mutex_unlock(&scope->priv->config_lock); + + /* resync on DISCONT */ + if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT)) { + 
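+    /* A discontinuity invalidates the partially buffered samples, so drop them
+     * rather than stitching unrelated audio into one video frame. */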
gst_adapter_clear(scope->priv->adapter); + } + + /* Make sure have an output format */ + if (gst_pad_check_reconfigure(scope->srcpad)) { + if (!gst_pm_audio_visualizer_src_negotiate(scope)) { + gst_pad_mark_reconfigure(scope->srcpad); + goto not_negotiated; + } + } + + bpf = GST_AUDIO_INFO_BPF(&scope->ainfo); + + if (bpf == 0) { + ret = GST_FLOW_NOT_NEGOTIATED; + goto beach; + } + + GST_TRACE_OBJECT(scope, "Chain func pushing %lu bytes to adapter", + gst_buffer_get_size(buffer)); + + gst_adapter_push(scope->priv->adapter, buffer); + + g_mutex_lock(&scope->priv->config_lock); + + /* this is what we want */ + /* number of audio bytes to process for one video frame */ + /* samples per video frame * audio bytes per frame for both channels */ + sbpf = scope->req_spf * bpf; + + inbuf = scope->priv->inbuf; + + // prepare buffer + gst_buffer_copy_into(inbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1); + + /* this is what we have */ + avail = gst_adapter_available(scope->priv->adapter); + + while (avail >= sbpf) { + + gboolean fps_changed_since_last_frame = scope->priv->fps_changed; + scope->priv->fps_changed = FALSE; + + // make sure frame duration does not change while processing one frame + const GstClockTime frame_duration = scope->req_frame_duration; + + // derive timestamps from the number of samples consumed, + // calculate timestamp based on audio input samples already processed to + // avoid clock drift + ts = scope->priv->pts_offset + + gst_util_uint64_scale_int(scope->priv->samples_consumed, GST_SECOND, + GST_AUDIO_INFO_RATE(&scope->ainfo)); + + scope->priv->samples_consumed += scope->req_spf; + + /* check for QoS, don't compute buffers that are known to be late */ + if (GST_CLOCK_TIME_IS_VALID(ts)) { + GstClockTime earliest_time; + gdouble proportion; + guint64 qostime; + + qostime = gst_segment_to_running_time(&scope->priv->segment, + GST_FORMAT_TIME, ts) + + frame_duration; + + earliest_time = scope->priv->earliest_time; + proportion = scope->priv->proportion; + + if (scope->priv->segment.format != GST_FORMAT_TIME) { + GST_WARNING_OBJECT(scope, + "Segment format not TIME, skipping QoS checks"); + } else if (GST_CLOCK_TIME_IS_VALID(earliest_time) && + qostime <= earliest_time) { + GstClockTime stream_time, jitter; + GstMessage *qos_msg; + + GST_DEBUG_OBJECT(scope, + "QoS: skip ts: %" GST_TIME_FORMAT + ", earliest: %" GST_TIME_FORMAT, + GST_TIME_ARGS(qostime), GST_TIME_ARGS(earliest_time)); + + ++scope->priv->dropped; + stream_time = gst_segment_to_stream_time(&scope->priv->segment, + GST_FORMAT_TIME, ts); + jitter = GST_CLOCK_DIFF(qostime, earliest_time); + + GstClockTime duration = GST_BUFFER_DURATION_IS_VALID(buffer) + ? 
GST_BUFFER_DURATION(buffer) + : frame_duration; + + qos_msg = gst_message_new_qos(GST_OBJECT(scope), FALSE, qostime, + stream_time, ts, duration); + gst_message_set_qos_values(qos_msg, jitter, proportion, 1000000); + gst_message_set_qos_stats(qos_msg, GST_FORMAT_BUFFERS, + scope->priv->processed, scope->priv->dropped); + gst_element_post_message(GST_ELEMENT(scope), qos_msg); + + goto skip; + } + } + + // map pts ts via segment to running time + ts = + gst_segment_to_running_time(&scope->priv->segment, GST_FORMAT_TIME, ts); + + ++scope->priv->processed; + + /* sync controlled properties */ + if (GST_CLOCK_TIME_IS_VALID(ts)) + gst_object_sync_values(GST_OBJECT(scope), ts); + + /* this can fail as the data size we need could have changed */ + if (!(databuf = gst_adapter_get_buffer(scope->priv->adapter, sbpf))) + break; + + /* place sbpf number of bytes of audio data into inbuf */ + /* this is not a deep copy of the data at this point */ + gst_buffer_remove_all_memory(inbuf); + gst_buffer_copy_into(inbuf, databuf, GST_BUFFER_COPY_MEMORY, 0, sbpf); + gst_buffer_unref(databuf); + + /* call class->render() vmethod */ + g_mutex_unlock(&scope->priv->config_lock); + + ret = klass->render(scope, inbuf, ts, frame_duration); + if (ret != GST_FLOW_OK) { + goto beach; + } + + g_mutex_lock(&scope->priv->config_lock); + + skip: + // inform upstream of updated fps + if (fps_changed_since_last_frame == TRUE) { + gst_pm_audio_visualizer_send_latency_if_needed_unlocked(scope); + } + + /* we want to take less or more, depending on spf : req_spf */ + if (avail - sbpf >= sbpf) { + // enough audio data for more frames is available + gst_adapter_unmap(scope->priv->adapter); + gst_adapter_flush(scope->priv->adapter, sbpf); + } else if (avail >= sbpf) { + // was just enough audio data for one frame + // rendering. 
seems like a bug in the original code + /* just flush a bit and stop */ + // gst_adapter_flush(scope->priv->adapter, (avail - sbpf)); + + // instead just flush one video frame worth of audio data from the buffer + // and stop + gst_adapter_unmap(scope->priv->adapter); + gst_adapter_flush(scope->priv->adapter, sbpf); + break; + } + avail = gst_adapter_available(scope->priv->adapter); + + // recalculate for the next frame + sbpf = scope->req_spf * bpf; + } + + g_mutex_unlock(&scope->priv->config_lock); + +beach: + return ret; + + /* ERRORS */ +not_negotiated: { + GST_DEBUG_OBJECT(scope, "Failed to renegotiate"); + return GST_FLOW_NOT_NEGOTIATED; +} +} + +static gboolean gst_pm_audio_visualizer_src_event(GstPad *pad, + GstObject *parent, + GstEvent *event) { + gboolean res; + GstPMAudioVisualizer *scope; + + scope = GST_PM_AUDIO_VISUALIZER(parent); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_QOS: { + gdouble proportion; + GstClockTimeDiff diff; + GstClockTime timestamp; + + gst_event_parse_qos(event, NULL, &proportion, &diff, ×tamp); + + /* save stuff for the _chain() function */ + g_mutex_lock(&scope->priv->config_lock); + // ignore QoS events for first few frames, sinks seem to send erratic QoS at + // the beginning + if (scope->priv->processed > QOS_IGNORE_FIRST_N_FRAMES) { + scope->priv->proportion = proportion; + if (diff > 0) { + /* we're late, this is a good estimate for next displayable + * frame (see part-qos.txt) (skip all frames until this time) */ + scope->priv->earliest_time = timestamp + MIN(diff * 2, GST_SECOND * 3) + + scope->req_frame_duration; + } else { + scope->priv->earliest_time = timestamp + diff; + } + } else { + GST_DEBUG_OBJECT(scope, "Ignoring early QoS event, processed frames: %d", + scope->priv->processed); + } + g_mutex_unlock(&scope->priv->config_lock); + + res = gst_pad_push_event(scope->priv->sinkpad, event); + break; + } + case GST_EVENT_LATENCY: + g_mutex_lock(&scope->priv->config_lock); + gst_event_parse_latency(event, &scope->latency); + g_mutex_unlock(&scope->priv->config_lock); + res = gst_pad_event_default(pad, parent, event); + GST_DEBUG_OBJECT(scope, "Received latency event: %" GST_TIME_FORMAT, + GST_TIME_ARGS(scope->latency)); + break; + case GST_EVENT_RECONFIGURE: + /* don't forward */ + gst_event_unref(event); + res = TRUE; + break; + default: + res = gst_pad_event_default(pad, parent, event); + break; + } + + return res; +} + +static gboolean gst_pm_audio_visualizer_sink_event(GstPad *pad, + GstObject *parent, + GstEvent *event) { + gboolean res; + + GstPMAudioVisualizer *scope = GST_PM_AUDIO_VISUALIZER(parent); + GstPMAudioVisualizerClass *klass = GST_PM_AUDIO_VISUALIZER_GET_CLASS(scope); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_CAPS: { + GstCaps *caps; + + gst_event_parse_caps(event, &caps); + res = gst_pm_audio_visualizer_sink_setcaps(scope, caps); + gst_event_unref(event); + break; + } + case GST_EVENT_FLUSH_STOP: + g_mutex_lock(&scope->priv->config_lock); + gst_pm_audio_visualizer_reset_unlocked(scope); + g_mutex_unlock(&scope->priv->config_lock); + res = gst_pad_push_event(scope->srcpad, event); + break; + case GST_EVENT_SEGMENT: { + /* the newsegment values are used to clip the input samples + * and to convert the incoming timestamps to running time so + * we can do QoS */ + g_mutex_lock(&scope->priv->config_lock); + gst_event_copy_segment(event, &scope->priv->segment); + if (scope->priv->segment.format != GST_FORMAT_TIME) { + GST_WARNING_OBJECT(scope, "Unexpected segment format: %d", + scope->priv->segment.format); + 
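+      /* Non-TIME segments cannot be mapped to running time; the chain
+       * function skips its QoS checks in that case. */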
} + scope->priv->pts_offset = + scope->priv->segment.start; // or segment.position if it's a live seek + scope->priv->pts_offset_initialized = TRUE; + scope->priv->samples_consumed = 0; + g_mutex_unlock(&scope->priv->config_lock); + if (klass->segment_change) { + klass->segment_change(scope, &scope->priv->segment); + } + res = gst_pad_push_event(scope->srcpad, event); + GST_DEBUG_OBJECT( + scope, "Segment start: %" GST_TIME_FORMAT ", stop: %" GST_TIME_FORMAT, + GST_TIME_ARGS(scope->priv->segment.start), + GST_TIME_ARGS(scope->priv->segment.stop)); + break; + } + default: + res = gst_pad_event_default(pad, parent, event); + break; + } + + return res; +} + +static GstClockTime calc_our_latency_unlocked(GstPMAudioVisualizer *scope, + gint rate) { + /* the max samples we must buffer */ + guint max_samples = MAX(scope->req_spf, scope->priv->spf); + return gst_util_uint64_scale(max_samples, GST_SECOND, rate); +} + +static void gst_pm_audio_visualizer_send_latency_if_needed_unlocked( + GstPMAudioVisualizer *scope) { + + // send latency event if latency changed a lot + GstClockTime latency = + calc_our_latency_unlocked(scope, GST_AUDIO_INFO_RATE(&scope->ainfo)); + + // check if the latency has changed enough to send an event + if (ABS((GstClockTimeDiff)latency - scope->priv->last_reported_latency) > + LATENCY_EVENT_MIN_CHANGE) { + + scope->priv->last_reported_latency = latency; + g_mutex_unlock(&scope->priv->config_lock); + gst_pad_push_event(scope->priv->sinkpad, gst_event_new_latency(latency)); + GST_DEBUG_OBJECT(scope, "Sent latency event to sink pad: %" GST_TIME_FORMAT, + GST_TIME_ARGS(latency)); + g_mutex_lock(&scope->priv->config_lock); + } +} + +static gboolean gst_pm_audio_visualizer_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query) { + gboolean res = FALSE; + GstPMAudioVisualizer *scope; + + scope = GST_PM_AUDIO_VISUALIZER(parent); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_LATENCY: { + /* We need to send the query upstream and add the returned latency to our + * own */ + GstClockTime min_latency, max_latency; + gboolean us_live; + GstClockTime our_latency; + gint rate = GST_AUDIO_INFO_RATE(&scope->ainfo); + + if (rate == 0) + break; + + if ((res = gst_pad_peer_query(scope->priv->sinkpad, query))) { + gst_query_parse_latency(query, &us_live, &min_latency, &max_latency); + + GST_DEBUG_OBJECT( + scope, "Peer latency: min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT, + GST_TIME_ARGS(min_latency), GST_TIME_ARGS(max_latency)); + + g_mutex_lock(&scope->priv->config_lock); + our_latency = calc_our_latency_unlocked(scope, rate); + g_mutex_unlock(&scope->priv->config_lock); + + GST_DEBUG_OBJECT(scope, "Our latency: %" GST_TIME_FORMAT, + GST_TIME_ARGS(our_latency)); + + /* we add some latency but only if we need to buffer more than what + * upstream gives us */ + min_latency += our_latency; + if (max_latency != -1) + max_latency += our_latency; + + GST_DEBUG_OBJECT(scope, + "Calculated total latency : min %" GST_TIME_FORMAT + " max %" GST_TIME_FORMAT, + GST_TIME_ARGS(min_latency), GST_TIME_ARGS(max_latency)); + + gst_query_set_latency(query, TRUE, min_latency, max_latency); + g_mutex_lock(&scope->priv->config_lock); + scope->priv->last_reported_latency = our_latency; + g_mutex_unlock(&scope->priv->config_lock); + } + break; + } + default: + res = gst_pad_query_default(pad, parent, query); + break; + } + + return res; +} + +static GstStateChangeReturn +gst_pm_audio_visualizer_parent_change_state(GstElement *element, + GstStateChange transition) { + + GstPMAudioVisualizer 
*scope = GST_PM_AUDIO_VISUALIZER(element); + + switch (transition) { + case GST_STATE_CHANGE_READY_TO_PAUSED: + g_mutex_lock(&scope->priv->config_lock); + gst_pm_audio_visualizer_reset_unlocked(scope); + g_mutex_unlock(&scope->priv->config_lock); + break; + default: + break; + } + + GstStateChangeReturn ret = + GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); + if (ret == GST_STATE_CHANGE_FAILURE) + return ret; + + GstPMAudioVisualizerClass *klass = GST_PM_AUDIO_VISUALIZER_GET_CLASS(scope); + return klass->change_state(element, transition); +} + +static GstStateChangeReturn +gst_pm_audio_visualizer_default_change_state(GstElement *element, + GstStateChange transition) { + return GST_STATE_CHANGE_SUCCESS; +} + +static gboolean log_fps_change(gpointer message) { + GST_INFO("%s", (gchar *)message); + + g_free(message); + return G_SOURCE_REMOVE; // remove after run +} + +void gst_pm_audio_visualizer_adjust_fps(GstPMAudioVisualizer *scope, + guint64 frame_duration) { + g_mutex_lock(&scope->priv->config_lock); + + guint64 set_duration; + guint set_req_spf; + + // clamp for cap fps + if (frame_duration <= scope->priv->caps_frame_duration) { + set_duration = scope->priv->caps_frame_duration; + set_req_spf = scope->priv->spf; + } else { + set_duration = frame_duration; + // calculate samples per frame for the given frame duration + set_req_spf = + (guint)(((guint64)GST_AUDIO_INFO_RATE(&scope->ainfo) * frame_duration + + GST_SECOND / 2) / + GST_SECOND); + } + + // update for next frame + if (scope->req_frame_duration != set_duration) { + scope->req_frame_duration = set_duration; + scope->req_spf = set_req_spf; + scope->priv->fps_changed = TRUE; + } + + g_mutex_unlock(&scope->priv->config_lock); + + if (gst_debug_category_get_threshold(pm_audio_visualizer_debug) >= + GST_LEVEL_WARNING) { + + gchar *message = + g_strdup_printf("Adjusting framerate, max fps: %f, using " + "frame-duration: %" GST_TIME_FORMAT ", spf: %u", + (gdouble)GST_SECOND / (gdouble)frame_duration, + GST_TIME_ARGS(set_duration), set_req_spf); + + g_idle_add(log_fps_change, message); + } +} + +void gst_pm_audio_visualizer_dispose_buffer_pool(GstPMAudioVisualizer* scope) { + gst_pm_audio_visualizer_set_allocation(scope, NULL, NULL, NULL, NULL); +} diff --git a/src/gstpmaudiovisualizer.h b/src/gstpmaudiovisualizer.h new file mode 100644 index 0000000..2602dd3 --- /dev/null +++ b/src/gstpmaudiovisualizer.h @@ -0,0 +1,178 @@ +/* GStreamer + * Copyright (C) <2011> Stefan Kost + * Copyright (C) <2015> Luis de Bethencourt + * + * gstaudiovisualizer.c: base class for audio visualisation elements + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +/* + * The code in this file is based on + * GStreamer / gst-plugins-base, latest version as of 2025/05/29. 
+ * gst-libs/gst/pbutils/gstaudiovisualizer.h Git Repository: + * https://gitlab.freedesktop.org/gstreamer/gstreamer/-/blob/main/subprojects/gst-plugins-base/gst-libs/gst/pbutils/gstaudiovisualizer.h + * + * Original copyright notice has been retained at the top of this file. + * The code has been modified to improve compatibility with projectM and OpenGL. + * See impl for details. + */ + +#ifndef __GST_PM_AUDIO_VISUALIZER_H__ +#define __GST_PM_AUDIO_VISUALIZER_H__ + +#include + +#include +#include + +G_BEGIN_DECLS +#define GST_TYPE_PM_AUDIO_VISUALIZER (gst_pm_audio_visualizer_get_type()) +#define GST_PM_AUDIO_VISUALIZER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_PM_AUDIO_VISUALIZER, \ + GstPMAudioVisualizer)) +#define GST_PM_AUDIO_VISUALIZER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_PM_AUDIO_VISUALIZER, \ + GstPMAudioVisualizerClass)) +#define GST_PM_AUDIO_VISUALIZER_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_PM_AUDIO_VISUALIZER, \ + GstPMAudioVisualizerClass)) +#define GST_PM_IS_SYNAESTHESIA(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_PM_AUDIO_VISUALIZER)) +#define GST_PM_IS_SYNAESTHESIA_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_PM_AUDIO_VISUALIZER)) +typedef struct _GstPMAudioVisualizer GstPMAudioVisualizer; +typedef struct _GstPMAudioVisualizerClass GstPMAudioVisualizerClass; +typedef struct _GstPMAudioVisualizerPrivate GstPMAudioVisualizerPrivate; + +struct _GstPMAudioVisualizer { + GstElement parent; + + /** + * current min samples per frame wanted by the subclass (one channel), may + * vary depending on actual fps. + */ + guint req_spf; + + /** + * Current fps frame duration, may be different from caps fps. + */ + guint64 req_frame_duration; + + /** + * Caps video state. + */ + GstVideoInfo vinfo; + + /** + * Input audio state. + */ + GstAudioInfo ainfo; + + /*< private >*/ + GstPMAudioVisualizerPrivate *priv; + + /** + * Source pad to push video buffers downstream. + */ + GstPad *srcpad; + + /** + * Current pipeline latency. + */ + GstClockTime latency; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +/** + * GstPMAudioVisualizerClass: + * @decide_allocation: buffer pool allocation + * @render: render a frame from an audio buffer. + * @setup: Called whenever the format changes, and sink and src caps are + * configured. + * @change_state: Cascades gst change state to the implementor. Parent is + * processed first. + * @segment_change: Cascades gst segment events to the implementor. Parent is + * processed first. + * + * Base class for audio visualizers, derived from gstreamer + * GstAudioVisualizerClass. This plugin consumes n audio input samples for each + * output video frame to keep audio and video in-sync. + */ +struct _GstPMAudioVisualizerClass { + /*< private >*/ + GstElementClass parent_class; + + /*< public >*/ + /** + * Virtual function, called whenever the caps change, sink and src pad caps + * are both configured. + */ + gboolean (*setup)(GstPMAudioVisualizer *scope); + + /** + * Virtual function for rendering a frame. + */ + GstFlowReturn (*render)(GstPMAudioVisualizer *scope, GstBuffer *audio, + GstClockTime pts, guint64 frame_duration); + + /** + * Virtual function for buffer pool allocation. + */ + gboolean (*decide_allocation)(GstPMAudioVisualizer *scope, GstQuery *query); + + /** + * Virtual function to allow implementor to receive change_state events. 
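+   * The GstElement parent handling runs first; this hook is then called with
+   * the same transition (see the class description above).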
+ */ + GstStateChangeReturn (*change_state)(GstElement *element, + GstStateChange transition); + + /** + * Virtual function allow implementor to receive segment change events. + */ + void (*segment_change)(GstPMAudioVisualizer *scope, GstSegment *segment); + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +GType gst_pm_audio_visualizer_get_type(void); + +/** + * Obtain buffer from buffer pool for rendering. + * + * @param scope Plugin data. + * @param outbuf Pointer for receiving output buffer. + * + * @return GST_FLOW_ERROR in case of pool errors, or the result of + * gst_buffer_pool_acquire_buffer(...) + */ +GstFlowReturn +gst_pm_audio_visualizer_util_prepare_output_buffer(GstPMAudioVisualizer *scope, + GstBuffer **outbuf); + +void gst_pm_audio_visualizer_adjust_fps(GstPMAudioVisualizer *scope, + guint64 frame_duration); + +void +gst_pm_audio_visualizer_dispose_buffer_pool(GstPMAudioVisualizer *scope); + +G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstPMAudioVisualizer, gst_object_unref) + +G_END_DECLS +#endif /* __GST_PM_AUDIO_VISUALIZER_H__ */ diff --git a/src/gstprojectm.c b/src/gstprojectm.c new file mode 100644 index 0000000..0937539 --- /dev/null +++ b/src/gstprojectm.c @@ -0,0 +1,177 @@ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstprojectm.h" + +#include "debug.h" +#include "gstglbaseaudiovisualizer.h" +#include "gstprojectmcaps.h" + +#include + +GST_DEBUG_CATEGORY_STATIC(gst_projectm_debug); +#define GST_CAT_DEFAULT gst_projectm_debug + +struct _GstProjectMPrivate { + + GstBaseProjectMPrivate base; +}; + +G_DEFINE_TYPE_WITH_CODE(GstProjectM, gst_projectm, + GST_TYPE_GL_BASE_AUDIO_VISUALIZER, + G_ADD_PRIVATE(GstProjectM) + GST_DEBUG_CATEGORY_INIT(gst_projectm_debug, + "gstprojectm", 0, + "Plugin Root")); + +void gst_projectm_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec) { + + GstProjectM *plugin = GST_PROJECTM(object); + + gst_projectm_base_set_property(object, &plugin->settings, property_id, value, + pspec); +} + +void gst_projectm_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec) { + GstProjectM *plugin = GST_PROJECTM(object); + + gst_projectm_base_get_property(object, &plugin->settings, property_id, value, + pspec); +} + +static void gst_projectm_init(GstProjectM *plugin) { + plugin->priv = gst_projectm_get_instance_private(plugin); + + gst_gl_memory_init_once(); + + gst_projectm_base_init(&plugin->settings, &plugin->priv->base); +} + +static void gst_projectm_finalize(GObject *object) { + + GstProjectM *plugin = GST_PROJECTM(object); + + gst_projectm_base_finalize(&plugin->settings, &plugin->priv->base); + G_OBJECT_CLASS(gst_projectm_parent_class)->finalize(object); +} + +static void gst_projectm_gl_stop(GstGLBaseAudioVisualizer *src) { + + GstProjectM *plugin = GST_PROJECTM(src); + + gst_projectm_base_gl_stop(G_OBJECT(src), &plugin->priv->base); +} + +static gboolean gst_projectm_gl_start(GstGLBaseAudioVisualizer *glav) { + // Cast the audio visualizer to the ProjectM plugin + GstProjectM *plugin = GST_PROJECTM(glav); + GstPMAudioVisualizer *pmav = GST_PM_AUDIO_VISUALIZER(glav); + + gst_projectm_base_gl_start(G_OBJECT(glav), &plugin->priv->base, + &plugin->settings, glav->context, &pmav->vinfo); + + GST_INFO_OBJECT(plugin, "GL start complete"); + + return TRUE; +} + +static gboolean gst_projectm_setup(GstGLBaseAudioVisualizer *glav) { + + GstPMAudioVisualizer *pmav = GST_PM_AUDIO_VISUALIZER(glav); + + // Log audio info + GST_DEBUG_OBJECT( + glav, "Audio Information ", + 
pmav->ainfo.channels, pmav->ainfo.rate, pmav->ainfo.finfo->description); + + // Log video info + GST_DEBUG_OBJECT( + glav, + "Video Information ", + GST_VIDEO_INFO_WIDTH(&pmav->vinfo), GST_VIDEO_INFO_HEIGHT(&pmav->vinfo), + pmav->vinfo.fps_n, pmav->vinfo.fps_d, pmav->req_spf); + + return TRUE; +} + +static gboolean gst_projectm_fill_gl_memory_callback(gpointer stuff) { + + GstAVRenderParams *render_data = (GstAVRenderParams *)stuff; + GstProjectM *plugin = GST_PROJECTM(render_data->plugin); + GstGLBaseAudioVisualizer *glav = + GST_GL_BASE_AUDIO_VISUALIZER(render_data->plugin); + gboolean result = TRUE; + + // VIDEO + GST_TRACE_OBJECT(plugin, "rendering projectM to fbo %d", + render_data->fbo->fbo_id); + + gst_projectm_base_fill_gl_memory_callback(&plugin->priv->base, glav->context, + render_data->fbo, render_data->pts, + render_data->in_audio); + + return result; +} + +static gboolean gst_projectm_fill_gl_memory(GstAVRenderParams *render_data) { + + gboolean result = gst_gl_framebuffer_draw_to_texture( + render_data->fbo, render_data->mem, gst_projectm_fill_gl_memory_callback, + render_data); + + return result; +} + +static void gst_projectm_segment_change(GstPMAudioVisualizer *scope, + GstSegment *segment) { + GstProjectM *plugin = GST_PROJECTM(scope); + gint64 pts_offset = segment->time - segment->start; + gst_projectm_base_set_segment_pts_offset(&plugin->priv->base, pts_offset); +} + +static void gst_projectm_class_init(GstProjectMClass *klass) { + GObjectClass *gobject_class = (GObjectClass *)klass; + GstPMAudioVisualizerClass *parent_scope_class = + GST_PM_AUDIO_VISUALIZER_CLASS(klass); + GstGLBaseAudioVisualizerClass *scope_class = + GST_GL_BASE_AUDIO_VISUALIZER_CLASS(klass); + + // Setup audio and video caps + const gchar *audio_sink_caps = get_audio_sink_cap(); + const gchar *video_src_caps = get_video_src_cap(); + + gst_element_class_add_pad_template( + GST_ELEMENT_CLASS(klass), + gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, + gst_caps_from_string(video_src_caps))); + gst_element_class_add_pad_template( + GST_ELEMENT_CLASS(klass), + gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, + gst_caps_from_string(audio_sink_caps))); + + gst_element_class_set_static_metadata( + GST_ELEMENT_CLASS(klass), "ProjectM Visualizer", "Generic", + "A plugin for visualizing music using ProjectM", + "AnomieVision | Tristan Charpentier " + " | Michael Baetgen " + ""); + + // Setup properties + gobject_class->set_property = gst_projectm_set_property; + gobject_class->get_property = gst_projectm_get_property; + + gst_projectm_base_install_properties(gobject_class); + + gobject_class->finalize = gst_projectm_finalize; + + scope_class->supported_gl_api = GST_GL_API_OPENGL3 | GST_GL_API_GLES2; + scope_class->gl_start = GST_DEBUG_FUNCPTR(gst_projectm_gl_start); + scope_class->gl_stop = GST_DEBUG_FUNCPTR(gst_projectm_gl_stop); + scope_class->fill_gl_memory = GST_DEBUG_FUNCPTR(gst_projectm_fill_gl_memory); + scope_class->setup = GST_DEBUG_FUNCPTR(gst_projectm_setup); + parent_scope_class->segment_change = + GST_DEBUG_FUNCPTR(gst_projectm_segment_change); +} diff --git a/src/plugin.h b/src/gstprojectm.h similarity index 61% rename from src/plugin.h rename to src/gstprojectm.h index de1acff..e16d33a 100644 --- a/src/plugin.h +++ b/src/gstprojectm.h @@ -2,7 +2,7 @@ #define __GST_PROJECTM_H__ #include "gstglbaseaudiovisualizer.h" -#include +#include "gstprojectmbase.h" typedef struct _GstProjectMPrivate GstProjectMPrivate; @@ -12,31 +12,30 @@ G_BEGIN_DECLS G_DECLARE_FINAL_TYPE(GstProjectM, 
gst_projectm, GST, PROJECTM, GstGLBaseAudioVisualizer) +/* + * Main GstElement for this plug-in. Handles interactions with projectM. + * Uses GstPMAudioVisualizer for handling audio-visualization (audio input, + * timing, buffer pool, chain function). GstGLBaseAudioVisualizer (video frame + * data, GL memory allocation, GL rendering) extends GstPMAudioVisualizer to add + * gl context handling and is used by this plugin directly. Hierarchy: + * GstProjectM -> GstGLBaseAudioVisualizer -> GstPMAudioVisualizer. + */ struct _GstProjectM { GstGLBaseAudioVisualizer element; - gchar *preset_path; - gchar *texture_dir_path; - - gfloat beat_sensitivity; - gdouble hard_cut_duration; - gboolean hard_cut_enabled; - gfloat hard_cut_sensitivity; - gdouble soft_cut_duration; - gdouble preset_duration; - gulong mesh_width; - gulong mesh_height; - gboolean aspect_correction; - gfloat easter_egg; - gboolean preset_locked; - gboolean enable_playlist; - gboolean shuffle_presets; + GstBaseProjectMSettings settings; GstProjectMPrivate *priv; + + /*< private >*/ + gpointer _padding[GST_PADDING]; }; struct _GstProjectMClass { - GstAudioVisualizerClass parent_class; + GstGLBaseAudioVisualizerClass parent_class; + + /*< private >*/ + gpointer _padding[GST_PADDING]; }; static void gst_projectm_set_property(GObject *object, guint prop_id, @@ -53,13 +52,10 @@ static gboolean gst_projectm_gl_start(GstGLBaseAudioVisualizer *glav); static void gst_projectm_gl_stop(GstGLBaseAudioVisualizer *glav); -static gboolean gst_projectm_render(GstGLBaseAudioVisualizer *glav, - GstBuffer *audio, GstVideoFrame *video); +static gboolean gst_projectm_fill_gl_memory(GstAVRenderParams *render_data); static void gst_projectm_class_init(GstProjectMClass *klass); -static gboolean plugin_init(GstPlugin *plugin); - static gboolean gst_projectm_setup(GstGLBaseAudioVisualizer *glav); G_END_DECLS diff --git a/src/gstprojectmbase.c b/src/gstprojectmbase.c new file mode 100644 index 0000000..59b1e40 --- /dev/null +++ b/src/gstprojectmbase.c @@ -0,0 +1,723 @@ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstprojectmbase.h" + +#include "debug.h" +#include "gstglbaseaudiovisualizer.h" +#include "gstprojectmconfig.h" + +#include + +#ifdef USE_GLEW +#include +#endif + +GST_DEBUG_CATEGORY_STATIC(gst_projectm_base_debug); +#define GST_CAT_DEFAULT gst_projectm_base_debug + +enum { + PROP_0, + PROP_PRESET_PATH, + PROP_TEXTURE_DIR_PATH, + PROP_BEAT_SENSITIVITY, + PROP_HARD_CUT_DURATION, + PROP_HARD_CUT_ENABLED, + PROP_HARD_CUT_SENSITIVITY, + PROP_SOFT_CUT_DURATION, + PROP_PRESET_DURATION, + PROP_MESH_SIZE, + PROP_ASPECT_CORRECTION, + PROP_EASTER_EGG, + PROP_PRESET_LOCKED, + PROP_SHUFFLE_PRESETS, + PROP_ENABLE_PLAYLIST, + PROP_MIN_FPS, + PROP_IS_LIVE +}; + +/** + * @brief ProjectM Settings (defaults) + */ + +#define DEFAULT_PRESET_PATH NULL +#define DEFAULT_TEXTURE_DIR_PATH NULL +#define DEFAULT_BEAT_SENSITIVITY 1.0 +#define DEFAULT_HARD_CUT_DURATION 3.0 +#define DEFAULT_HARD_CUT_ENABLED FALSE +#define DEFAULT_HARD_CUT_SENSITIVITY 1.0 +#define DEFAULT_SOFT_CUT_DURATION 3.0 +#define DEFAULT_PRESET_DURATION 0.0 +#define DEFAULT_MESH_SIZE "48,32" +#define DEFAULT_ASPECT_CORRECTION TRUE +#define DEFAULT_EASTER_EGG 0.0 +#define DEFAULT_PRESET_LOCKED FALSE +#define DEFAULT_ENABLE_PLAYLIST TRUE +#define DEFAULT_SHUFFLE_PRESETS TRUE // depends on ENABLE_PLAYLIST +#define DEFAULT_MIN_FPS "1/1" +#define DEFAULT_MIN_FPS_N 1 +#define DEFAULT_MIN_FPS_D 1 +#define DEFAULT_IS_LIVE "auto" + +static gboolean gst_projectm_base_log_preset_change(gpointer 
preset) { + GST_INFO("Preset: %s", (char *)preset); + + projectm_free_string((char *)preset); + + return G_SOURCE_REMOVE; // remove after run +} + +gboolean gst_projectm_base_parse_fraction(const gchar *str, gint *numerator, + gint *denominator) { + g_return_val_if_fail(str != NULL, FALSE); + g_return_val_if_fail(numerator != NULL, FALSE); + g_return_val_if_fail(denominator != NULL, FALSE); + + gchar **parts = g_strsplit(str, "/", 2); + if (!parts[0] || !parts[1]) { + g_strfreev(parts); + return FALSE; + } + + gchar *endptr = NULL; + gint64 num = g_ascii_strtoll(parts[0], &endptr, 10); + if (*endptr != '\0') { + g_strfreev(parts); + return FALSE; + } + + gint64 denom = g_ascii_strtoll(parts[1], &endptr, 10); + if (*endptr != '\0' || denom == 0) { + g_strfreev(parts); + return FALSE; + } + + *numerator = (gint)num; + *denominator = (gint)denom; + + g_strfreev(parts); + return TRUE; +} + +static void gst_projectm_base_handle_preset_change(bool is_hard_cut, + unsigned int index, + void *user_data) { + + if (gst_debug_category_get_threshold(gst_projectm_base_debug) >= + GST_LEVEL_INFO) { + + char *name = + projectm_playlist_item((projectm_playlist_handle)user_data, index); + + g_idle_add(gst_projectm_base_log_preset_change, name); + } +} + +static GstBaseProjectMInitResult +projectm_init(GObject *plugin, GstBaseProjectMSettings *settings, + GstVideoInfo *vinfo) { + + GstBaseProjectMInitResult result; + result.ret_handle = NULL; + result.ret_playlist = NULL; + result.success = FALSE; + + // Create ProjectM instance + GST_DEBUG_OBJECT(plugin, "Creating projectM instance.."); + result.ret_handle = projectm_create(); + + if (!result.ret_handle) { + GST_DEBUG_OBJECT( + plugin, + "project_create() returned NULL, projectM instance was not created!"); + + return result; + } else { + GST_DEBUG_OBJECT(plugin, "Created projectM instance!"); + } + + if (settings->enable_playlist) { + GST_DEBUG_OBJECT(plugin, "Playlist enabled"); + + // initialize preset playlist + result.ret_playlist = projectm_playlist_create(result.ret_handle); + projectm_playlist_set_shuffle(result.ret_playlist, + settings->shuffle_presets); + + // add handler to print preset change + projectm_playlist_set_preset_switched_event_callback( + result.ret_playlist, gst_projectm_base_handle_preset_change, + result.ret_playlist); + } else { + GST_DEBUG_OBJECT(plugin, "Playlist disabled"); + } + // Log properties + GST_INFO_OBJECT(plugin, + "Using Properties: " + "preset=%s, " + "texture-dir=%s, " + "beat-sensitivity=%f, " + "hard-cut-duration=%f, " + "hard-cut-enabled=%d, " + "hard-cut-sensitivity=%f, " + "soft-cut-duration=%f, " + "preset-duration=%f, " + "mesh-size=(%lu, %lu), " + "aspect-correction=%d, " + "easter-egg=%f, " + "preset-locked=%d, " + "enable-playlist=%d, " + "shuffle-presets=%d, " + "min-fps=%d/%d, " + "is-live=%s", + settings->preset_path, settings->texture_dir_path, + settings->beat_sensitivity, settings->hard_cut_duration, + settings->hard_cut_enabled, settings->hard_cut_sensitivity, + settings->soft_cut_duration, settings->preset_duration, + settings->mesh_width, settings->mesh_height, + settings->aspect_correction, settings->easter_egg, + settings->preset_locked, settings->enable_playlist, + settings->shuffle_presets, settings->min_fps_n, + settings->min_fps_d, settings->is_live); + + // Load preset file if path is provided + if (settings->preset_path != NULL) { + if (result.ret_playlist != NULL) { + unsigned int added_count = projectm_playlist_add_path( + result.ret_playlist, settings->preset_path, true, false); + 
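+      /* The trailing booleans ask the projectM playlist API for a recursive
+       * directory scan without duplicate entries (see the playlist docs). */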
GST_INFO_OBJECT(plugin, "Loaded preset path: %s, presets found: %d", + settings->preset_path, added_count); + } else { + projectm_load_preset_file(result.ret_handle, settings->preset_path, + false); + GST_INFO_OBJECT(plugin, "Loaded preset file: %s", settings->preset_path); + } + } + + // Set texture search path if directory path is provided + if (settings->texture_dir_path != NULL) { + const gchar *texturePaths[1] = {settings->texture_dir_path}; + projectm_set_texture_search_paths(result.ret_handle, texturePaths, 1); + } + + // Set properties + projectm_set_beat_sensitivity(result.ret_handle, settings->beat_sensitivity); + projectm_set_hard_cut_duration(result.ret_handle, + settings->hard_cut_duration); + projectm_set_hard_cut_enabled(result.ret_handle, settings->hard_cut_enabled); + projectm_set_hard_cut_sensitivity(result.ret_handle, + settings->hard_cut_sensitivity); + projectm_set_soft_cut_duration(result.ret_handle, + settings->soft_cut_duration); + + // Set preset duration, or set to in infinite duration if zero + if (settings->preset_duration > 0.0) { + projectm_set_preset_duration(result.ret_handle, settings->preset_duration); + // kick off the first preset + if (projectm_playlist_size(result.ret_playlist) > 1 && + !settings->preset_locked) { + projectm_playlist_play_next(result.ret_playlist, true); + } + } else { + projectm_set_preset_duration(result.ret_handle, 999999.0); + } + + projectm_set_mesh_size(result.ret_handle, settings->mesh_width, + settings->mesh_height); + projectm_set_aspect_correction(result.ret_handle, + settings->aspect_correction); + projectm_set_easter_egg(result.ret_handle, settings->easter_egg); + projectm_set_preset_locked(result.ret_handle, settings->preset_locked); + + gdouble fps; + gst_util_fraction_to_double(GST_VIDEO_INFO_FPS_N(vinfo), + GST_VIDEO_INFO_FPS_D(vinfo), &fps); + + projectm_set_fps(result.ret_handle, gst_util_gdouble_to_guint64(fps)); + projectm_set_window_size(result.ret_handle, GST_VIDEO_INFO_WIDTH(vinfo), + GST_VIDEO_INFO_HEIGHT(vinfo)); + + result.success = TRUE; + return result; +} + +void gst_projectm_base_set_property(GObject *object, + GstBaseProjectMSettings *settings, + guint property_id, const GValue *value, + GParamSpec *pspec) { + + GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(object); + + const gchar *property_name = g_param_spec_get_name(pspec); + GST_DEBUG_OBJECT(object, "set-property <%s>", property_name); + + switch (property_id) { + case PROP_PRESET_PATH: + g_free(settings->preset_path); + settings->preset_path = g_strdup(g_value_get_string(value)); + break; + case PROP_TEXTURE_DIR_PATH: + g_free(settings->texture_dir_path); + settings->texture_dir_path = g_strdup(g_value_get_string(value)); + break; + case PROP_BEAT_SENSITIVITY: + settings->beat_sensitivity = g_value_get_float(value); + break; + case PROP_HARD_CUT_DURATION: + settings->hard_cut_duration = g_value_get_double(value); + break; + case PROP_HARD_CUT_ENABLED: + settings->hard_cut_enabled = g_value_get_boolean(value); + break; + case PROP_HARD_CUT_SENSITIVITY: + settings->hard_cut_sensitivity = g_value_get_float(value); + break; + case PROP_SOFT_CUT_DURATION: + settings->soft_cut_duration = g_value_get_double(value); + break; + case PROP_PRESET_DURATION: + settings->preset_duration = g_value_get_double(value); + break; + case PROP_MESH_SIZE: { + const gchar *meshSizeStr = g_value_get_string(value); + + if (meshSizeStr) { + gchar **parts = g_strsplit(meshSizeStr, ",", 2); + if (parts[0] && parts[1]) { + settings->mesh_width = atoi(parts[0]); + 
settings->mesh_height = atoi(parts[1]); + } + g_strfreev(parts); + } + } break; + case PROP_ASPECT_CORRECTION: + settings->aspect_correction = g_value_get_boolean(value); + break; + case PROP_EASTER_EGG: + settings->easter_egg = g_value_get_float(value); + break; + case PROP_PRESET_LOCKED: + settings->preset_locked = g_value_get_boolean(value); + break; + case PROP_ENABLE_PLAYLIST: + settings->enable_playlist = g_value_get_boolean(value); + break; + case PROP_SHUFFLE_PRESETS: + settings->shuffle_presets = g_value_get_boolean(value); + break; + case PROP_MIN_FPS: { + gint num, denom; + gboolean success; + const gchar *fpsStr = g_value_get_string(value); + success = gst_projectm_base_parse_fraction(fpsStr, &num, &denom); + if (success) { + settings->min_fps_n = num; + settings->min_fps_d = denom; + g_object_set(G_OBJECT(glav), "min-fps-n", num, "min-fps-d", denom, NULL); + } + } break; + case PROP_IS_LIVE: + g_free(settings->is_live); + settings->is_live = g_strdup(g_value_get_string(value)); + g_object_set(G_OBJECT(glav), "pipeline-live", settings->is_live, NULL); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_projectm_base_get_property(GObject *object, + GstBaseProjectMSettings *settings, + guint property_id, GValue *value, + GParamSpec *pspec) { + + const gchar *property_name = g_param_spec_get_name(pspec); + GST_DEBUG_OBJECT(object, "get-property <%s>", property_name); + + GstGLBaseAudioVisualizer *glav = GST_GL_BASE_AUDIO_VISUALIZER(object); + + switch (property_id) { + case PROP_PRESET_PATH: + g_value_set_string(value, settings->preset_path); + break; + case PROP_TEXTURE_DIR_PATH: + g_value_set_string(value, settings->texture_dir_path); + break; + case PROP_BEAT_SENSITIVITY: + g_value_set_float(value, settings->beat_sensitivity); + break; + case PROP_HARD_CUT_DURATION: + g_value_set_double(value, settings->hard_cut_duration); + break; + case PROP_HARD_CUT_ENABLED: + g_value_set_boolean(value, settings->hard_cut_enabled); + break; + case PROP_HARD_CUT_SENSITIVITY: + g_value_set_float(value, settings->hard_cut_sensitivity); + break; + case PROP_SOFT_CUT_DURATION: + g_value_set_double(value, settings->soft_cut_duration); + break; + case PROP_PRESET_DURATION: + g_value_set_double(value, settings->preset_duration); + break; + case PROP_MESH_SIZE: { + gchar *meshSizeStr = + g_strdup_printf("%lu,%lu", settings->mesh_width, settings->mesh_height); + g_value_set_string(value, meshSizeStr); + g_free(meshSizeStr); + break; + } + case PROP_ASPECT_CORRECTION: + g_value_set_boolean(value, settings->aspect_correction); + break; + case PROP_EASTER_EGG: + g_value_set_float(value, settings->easter_egg); + break; + case PROP_PRESET_LOCKED: + g_value_set_boolean(value, settings->preset_locked); + break; + case PROP_ENABLE_PLAYLIST: + g_value_set_boolean(value, settings->enable_playlist); + break; + case PROP_SHUFFLE_PRESETS: + g_value_set_boolean(value, settings->shuffle_presets); + break; + case PROP_MIN_FPS: { + gchar *fpsStr = + g_strdup_printf("%d/%d", settings->min_fps_n, settings->min_fps_d); + g_value_set_string(value, fpsStr); + g_free(fpsStr); + + g_object_set(G_OBJECT(glav), "min-fps-n", settings->min_fps_n, "min-fps-d", + settings->min_fps_d, NULL); + break; + } + case PROP_IS_LIVE: + g_value_set_string(value, settings->is_live); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_projectm_base_init(GstBaseProjectMSettings *settings, + GstBaseProjectMPrivate *priv) { + + static
gsize _debug_initialized = 0; + if (g_once_init_enter(&_debug_initialized)) { + GST_DEBUG_CATEGORY_INIT(gst_projectm_base_debug, "projectm_base", 0, + "projectM visualizer plugin base"); + } + + // Set default values for properties + settings->preset_path = DEFAULT_PRESET_PATH; + settings->texture_dir_path = DEFAULT_TEXTURE_DIR_PATH; + settings->beat_sensitivity = DEFAULT_BEAT_SENSITIVITY; + settings->hard_cut_duration = DEFAULT_HARD_CUT_DURATION; + settings->hard_cut_enabled = DEFAULT_HARD_CUT_ENABLED; + settings->hard_cut_sensitivity = DEFAULT_HARD_CUT_SENSITIVITY; + settings->soft_cut_duration = DEFAULT_SOFT_CUT_DURATION; + settings->preset_duration = DEFAULT_PRESET_DURATION; + settings->enable_playlist = DEFAULT_ENABLE_PLAYLIST; + settings->shuffle_presets = DEFAULT_SHUFFLE_PRESETS; + settings->min_fps_d = DEFAULT_MIN_FPS_D; + settings->min_fps_n = DEFAULT_MIN_FPS_N; + settings->is_live = g_strdup(DEFAULT_IS_LIVE); + + const gchar *meshSizeStr = DEFAULT_MESH_SIZE; + + if (meshSizeStr) { + gchar **parts = g_strsplit(meshSizeStr, ",", 2); + if (parts[0] && parts[1]) { + settings->mesh_width = atoi(parts[0]); + settings->mesh_height = atoi(parts[1]); + } + g_strfreev(parts); + } + + settings->aspect_correction = DEFAULT_ASPECT_CORRECTION; + settings->easter_egg = DEFAULT_EASTER_EGG; + settings->preset_locked = DEFAULT_PRESET_LOCKED; + + gst_projectm_base_parse_fraction(DEFAULT_MIN_FPS, &settings->min_fps_n, + &settings->min_fps_d); + + priv->first_frame_time = 0; + priv->first_frame_received = FALSE; + + g_mutex_init(&priv->projectm_lock); +} + +void gst_projectm_base_finalize(GstBaseProjectMSettings *settings, + GstBaseProjectMPrivate *priv) { + g_free(settings->preset_path); + g_free(settings->texture_dir_path); + g_free(settings->is_live); + g_mutex_clear(&priv->projectm_lock); +} + +gboolean gst_projectm_base_gl_start(GObject *plugin, + GstBaseProjectMPrivate *priv, + GstBaseProjectMSettings *settings, + GstGLContext *context, + GstVideoInfo *vinfo) { + +#ifdef USE_GLEW + GST_DEBUG_OBJECT(plugin, "Initializing GLEW"); + GLenum err = glewInit(); + if (GLEW_OK != err) { + GST_ERROR_OBJECT(plugin, "GLEW initialization failed"); + return FALSE; + } +#endif + + GST_PROJECTM_BASE_LOCK(priv); + + // Check if ProjectM instance exists, and create if not + if (!priv->handle) { + // Create ProjectM instance + priv->first_frame_received = FALSE; + GstBaseProjectMInitResult result = projectm_init(plugin, settings, vinfo); + if (!result.success) { + GST_ERROR_OBJECT(plugin, "projectM could not be initialized"); + return FALSE; + } + gl_error_handler(context); + priv->handle = result.ret_handle; + priv->playlist = result.ret_playlist; + } + GST_PROJECTM_BASE_UNLOCK(priv); + + GST_INFO_OBJECT(plugin, "projectM GL start complete"); + return TRUE; +} + +void gst_projectm_base_gl_stop(GObject *plugin, GstBaseProjectMPrivate *priv) { + + GST_PROJECTM_BASE_LOCK(priv); + if (priv->handle) { + GST_DEBUG_OBJECT(plugin, "Destroying ProjectM instance"); + projectm_destroy(priv->handle); + priv->handle = NULL; + } + GST_PROJECTM_BASE_UNLOCK(priv); +} + +gdouble get_seconds_since_first_frame_unlocked(GstBaseProjectMPrivate *priv, + GstClockTime pts) { + if (!priv->first_frame_received) { + // store the timestamp of the first frame + priv->first_frame_time = pts; + priv->first_frame_received = TRUE; + return 0.0; + } + + // calculate elapsed time + GstClockTime elapsed_time = pts - priv->first_frame_time; + + // convert to fractional seconds + gdouble elapsed_seconds = (gdouble)elapsed_time / GST_SECOND; + + 
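+ // e.g. with GST_SECOND = 1000000000 ns, an elapsed_time of 2500000000 ns yields 2.5 seconds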
return elapsed_seconds; +} + +void gst_projectm_base_fill_audio_buffer_unlocked(GstBaseProjectMPrivate *priv, + GstBuffer *in_audio) { + + if (in_audio != NULL) { + + GstMapInfo audioMap; + + gst_buffer_map(in_audio, &audioMap, GST_MAP_READ); + + projectm_pcm_add_int16(priv->handle, (gint16 *)audioMap.data, + audioMap.size / 4, PROJECTM_STEREO); + + gst_buffer_unmap(in_audio, &audioMap); + } +} + +void gst_projectm_base_fill_gl_memory_callback(GstBaseProjectMPrivate *priv, + GstGLContext *context, + GstGLFramebuffer *fbo, + GstClockTime pts, + GstBuffer *in_audio) { + + GST_PROJECTM_BASE_LOCK(priv); + + // get current gst sync time (pts) and set projectM time + gdouble seconds_since_first_frame = + get_seconds_since_first_frame_unlocked(priv, pts); + + projectm_set_frame_time(priv->handle, seconds_since_first_frame); + + // process audio buffer + gst_projectm_base_fill_audio_buffer_unlocked(priv, in_audio); + + // render the frame + projectm_opengl_render_frame_fbo(priv->handle, fbo->fbo_id); + + // removed for performance reasons: gl_error_handler(context); + + GST_PROJECTM_BASE_UNLOCK(priv); +} + +void gst_projectm_base_set_segment_pts_offset(GstBaseProjectMPrivate *priv, + gint64 pts_offset) { + GST_PROJECTM_BASE_LOCK(priv); + priv->first_frame_time = pts_offset; + GST_PROJECTM_BASE_UNLOCK(priv); +} + +void gst_projectm_base_install_properties(GObjectClass *gobject_class) { + + // setup properties + g_object_class_install_property( + gobject_class, PROP_PRESET_PATH, + g_param_spec_string( + "preset", "Preset", + "Specifies the path to the preset file. The preset file determines " + "the visual style and behavior of the audio visualizer.", + DEFAULT_PRESET_PATH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_TEXTURE_DIR_PATH, + g_param_spec_string("texture-dir", "Texture Directory", + "Sets the path to the directory containing textures " + "used in the visualizer.", + DEFAULT_TEXTURE_DIR_PATH, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_BEAT_SENSITIVITY, + g_param_spec_float( + "beat-sensitivity", "Beat Sensitivity", + "Controls the sensitivity to audio beats. Higher values make the " + "visualizer respond more strongly to beats.", + 0.0, 5.0, DEFAULT_BEAT_SENSITIVITY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_HARD_CUT_DURATION, + g_param_spec_double("hard-cut-duration", "Hard Cut Duration", + "Sets the duration, in seconds, for hard cuts. Hard " + "cuts are abrupt transitions in the visualizer.", + 0.0, 999999.0, DEFAULT_HARD_CUT_DURATION, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_HARD_CUT_ENABLED, + g_param_spec_boolean( + "hard-cut-enabled", "Hard Cut Enabled", + "Enables or disables hard cuts. When enabled, the visualizer may " + "exhibit sudden transitions based on the audio input.", + DEFAULT_HARD_CUT_ENABLED, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_HARD_CUT_SENSITIVITY, + g_param_spec_float( + "hard-cut-sensitivity", "Hard Cut Sensitivity", + "Adjusts the sensitivity of the visualizer to hard cuts. 
Higher " + "values increase the responsiveness to abrupt changes in audio.", + 0.0, 1.0, DEFAULT_HARD_CUT_SENSITIVITY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_SOFT_CUT_DURATION, + g_param_spec_double( + "soft-cut-duration", "Soft Cut Duration", + "Sets the duration, in seconds, for soft cuts. Soft cuts are " + "smoother transitions between visualizer states.", + 0.0, 999999.0, DEFAULT_SOFT_CUT_DURATION, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_PRESET_DURATION, + g_param_spec_double("preset-duration", "Preset Duration", + "Sets the duration, in seconds, for each preset. A " + "zero value causes the preset to play indefinitely.", + 0.0, 999999.0, DEFAULT_PRESET_DURATION, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_MESH_SIZE, + g_param_spec_string("mesh-size", "Mesh Size", + "Sets the size of the mesh used in rendering. The " + "format is 'width,height'.", + DEFAULT_MESH_SIZE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_ASPECT_CORRECTION, + g_param_spec_boolean( + "aspect-correction", "Aspect Correction", + "Enables or disables aspect ratio correction. When enabled, the " + "visualizer adjusts for aspect ratio differences in rendering.", + DEFAULT_ASPECT_CORRECTION, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_EASTER_EGG, + g_param_spec_float( + "easter-egg", "Easter Egg", + "Controls the activation of an Easter Egg feature. The value " + "determines the likelihood of triggering the Easter Egg.", + 0.0, 1.0, DEFAULT_EASTER_EGG, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_PRESET_LOCKED, + g_param_spec_boolean( + "preset-locked", "Preset Locked", + "Locks or unlocks the current preset. When locked, the visualizer " + "remains on the current preset without automatic changes.", + DEFAULT_PRESET_LOCKED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_ENABLE_PLAYLIST, + g_param_spec_boolean( + "enable-playlist", "Enable Playlist", + "Enables or disables the playlist feature. When enabled, the " + "visualizer can switch between presets based on a provided " + "playlist.", + DEFAULT_ENABLE_PLAYLIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_SHUFFLE_PRESETS, + g_param_spec_boolean( + "shuffle-presets", "Shuffle Presets", + "Enables or disables preset shuffling. When enabled, the " + "visualizer " + "randomly selects presets from the playlist if presets are " + "provided " + "and not locked. Playlist must be enabled for this to take effect.", + DEFAULT_SHUFFLE_PRESETS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_MIN_FPS, + g_param_spec_string( + "min-fps", "Minimum FPS", + "Specifies the lower bound for EMA fps adjustments for real-time " + "pipelines. How low the fps is allowed to be in case the rendering " + "can't keep up with pipeline fps. 
Applies to real-time pipelines " + "only.", + DEFAULT_MIN_FPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property( + gobject_class, PROP_IS_LIVE, + g_param_spec_string( + "is-live", "is live", + "Specifies if the plugin renders in real-time or as fast as " + "possible " + "(offline). This setting is auto-detected for live pipelines, " + "but can also be specified if auto-detection is " + "not appropriate. Possible values are \"auto\", \"true\", " + "\"false\". " + "Default is \"auto\".", + DEFAULT_IS_LIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +} diff --git a/src/gstprojectmbase.h b/src/gstprojectmbase.h new file mode 100644 index 0000000..bb83522 --- /dev/null +++ b/src/gstprojectmbase.h @@ -0,0 +1,205 @@ + +/* + * Basic gst/projectM integration structs and functions that can be re-used for + * alternative plugin implementations. + */ + +#ifndef __GST_PROJECTM_BASE_H__ +#define __GST_PROJECTM_BASE_H__ + +#include +#include +#include +#include +#include + +G_BEGIN_DECLS + +/** + * projectM config properties. + */ +struct _GstBaseProjectMSettings { + + gchar *preset_path; + gchar *texture_dir_path; + gchar *is_live; + + gfloat beat_sensitivity; + gdouble hard_cut_duration; + gboolean hard_cut_enabled; + gfloat hard_cut_sensitivity; + gdouble soft_cut_duration; + gdouble preset_duration; + gulong mesh_width; + gulong mesh_height; + gboolean aspect_correction; + gfloat easter_egg; + gboolean preset_locked; + gboolean enable_playlist; + gboolean shuffle_presets; + gint min_fps_n; + gint min_fps_d; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +/** + * Variables needed for managing projectM. + */ +struct _GstBaseProjectMPrivate { + projectm_handle handle; + projectm_playlist_handle playlist; + GMutex projectm_lock; + + GstClockTime first_frame_time; + gboolean first_frame_received; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +/** + * projectM init result return arguments. + */ +struct _GstBaseProjectMInitResult { + projectm_handle ret_handle; + projectm_playlist_handle ret_playlist; + gboolean success; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +}; + +typedef struct _GstBaseProjectMPrivate GstBaseProjectMPrivate; +typedef struct _GstBaseProjectMSettings GstBaseProjectMSettings; +typedef struct _GstBaseProjectMInitResult GstBaseProjectMInitResult; + +/** + * get_property delegate for projectM setting structs. + * + * @param object Plugin gst object. + * @param settings Settings struct to update. + * @param property_id Property id to update. + * @param value Property value. + * @param pspec Gst param type spec. + */ +void gst_projectm_base_set_property(GObject *object, + GstBaseProjectMSettings *settings, + guint property_id, const GValue *value, + GParamSpec *pspec); + +/** + * set_property delegate for projectM setting structs. + * + * @param object Plugin gst object. + * @param settings Settings struct to update. + * @param property_id Property id to update. + * @param value Property value. + * @param pspec Gst param type spec. + */ +void gst_projectm_base_get_property(GObject *object, + GstBaseProjectMSettings *settings, + guint property_id, GValue *value, + GParamSpec *pspec); + +/** + * Plugin init() delegate for projectM settings and priv. + * + * @param settings Settings to init. + * @param priv Private obj to init. + */ +void gst_projectm_base_init(GstBaseProjectMSettings *settings, + GstBaseProjectMPrivate *priv); + +/** + * Plugin finalize() delegate for projectM settings and priv. 
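+ * Releases the owned string properties and clears the projectM mutex.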
+ * + * @param settings Settings to init. + * @param priv Private obj to init. + */ +void gst_projectm_base_finalize(GstBaseProjectMSettings *settings, + GstBaseProjectMPrivate *priv); + +/** + * GL start delegate to setup projectM fbo rendering. + * + * @param plugin Plugin gst object. + * @param priv Plugin priv data. + * @param settings Plugin settings. + * @param context The gl context to use for projectM rendering. + * @param vinfo Video rendering details. + * + * @return TRUE on success. + */ +gboolean gst_projectm_base_gl_start(GObject *plugin, + GstBaseProjectMPrivate *priv, + GstBaseProjectMSettings *settings, + GstGLContext *context, GstVideoInfo *vinfo); + +/** + * GL stop delegate to clean up projectM rendering resources. + * + * @param plugin Plugin gst object. + * @param priv Plugin priv data. + */ +void gst_projectm_base_gl_stop(GObject *plugin, GstBaseProjectMPrivate *priv); + +/** + * Just pushes audio data to projectM without rendering. + * + * @param priv Plugin priv data. + * @param in_audio Audio data buffer to push to projectM. + */ +void gst_projectm_base_fill_audio_buffer_unlocked(GstBaseProjectMPrivate *priv, + GstBuffer *in_audio); + +/** + * Render one frame with projectM. + * + * @param priv Plugin priv data. + * @param context ProjectM GL context. + * @param pts Current pts timestamp. + * @param in_audio Input audio buffer to push to projectM before rendering, may + * be NULL. + */ +void gst_projectm_base_fill_gl_memory_callback(GstBaseProjectMPrivate *priv, + GstGLContext *context, + GstGLFramebuffer *fbo, + GstClockTime pts, + GstBuffer *in_audio); + +/** + * Reset time offset for a new segment. + * + * @param priv Plugin priv data. + * @param pts_offset pts time offset for a new segment. + */ +void gst_projectm_base_set_segment_pts_offset(GstBaseProjectMPrivate *priv, + gint64 pts_offset); + +/** + * Install properties from projectM settings to given plugin class. + * + * @param gobject_class Plugin class to install properties to. + */ +void gst_projectm_base_install_properties(GObjectClass *gobject_class); + +/** + * Utility to parse a fraction from a string. + * + * @param str Fraction as string, ex. 60/1 + * @param numerator Return ref for numerator. + * @param denominator Return ref for denominator. + * + * @return TRUE if the fraction was parsed correctly. 
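+ * Example: parsing "60/1" yields numerator 60 and denominator 1.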
+ */ +gboolean gst_projectm_base_parse_fraction(const gchar *str, gint *numerator, + gint *denominator); + +#define GST_PROJECTM_BASE_LOCK(priv) (g_mutex_lock(&priv->projectm_lock)) +#define GST_PROJECTM_BASE_UNLOCK(priv) (g_mutex_unlock(&priv->projectm_lock)) + +G_END_DECLS + +#endif // __GST_PROJECTM_BASE_H__ diff --git a/src/gstprojectmcaps.c b/src/gstprojectmcaps.c new file mode 100644 index 0000000..e022c8b --- /dev/null +++ b/src/gstprojectmcaps.c @@ -0,0 +1,28 @@ + +#ifdef HAVE_CONFIG_H +#include +#endif + +#include "gstprojectmcaps.h" + +#include "gstprojectm.h" + +#include +#include + +GST_DEBUG_CATEGORY_STATIC(gst_projectm_caps_debug); +#define GST_CAT_DEFAULT gst_projectm_caps_debug + +const gchar *get_audio_sink_cap() { + return GST_AUDIO_CAPS_MAKE("audio/x-raw, " + "format = (string) " GST_AUDIO_NE( + S16) ", " + "layout = (string) interleaved, " + "channels = (int) { 2 }, " + "rate = (int) { 44100 }, " + "channel-mask = (bitmask) { 0x0003 }"); +} + +const gchar *get_video_src_cap() { + return GST_VIDEO_CAPS_MAKE_WITH_FEATURES("memory:GLMemory", "RGBA"); +} diff --git a/src/caps.h b/src/gstprojectmcaps.h similarity index 54% rename from src/caps.h rename to src/gstprojectmcaps.h index 070d281..1b22835 100644 --- a/src/caps.h +++ b/src/gstprojectmcaps.h @@ -3,26 +3,22 @@ #include -#include "plugin.h" - G_BEGIN_DECLS /** * @brief Get audio sink caps based on the given type. * - * @param type - The type of audio caps to retrieve. * @return The audio caps format string. */ -const gchar *get_audio_sink_cap(unsigned int type); +const gchar *get_audio_sink_cap(); /** * Get video source caps based on the given type. * - * @param type - The type of video caps to retrieve. * @return The video caps format string. */ -const gchar *get_video_src_cap(unsigned int type); +const gchar *get_video_src_cap(); G_END_DECLS -#endif /* __GST_PROJECTM_CAPS_H__ */ \ No newline at end of file +#endif /* __GST_PROJECTM_CAPS_H__ */ diff --git a/src/gstprojectmconfig.h b/src/gstprojectmconfig.h new file mode 100644 index 0000000..7e3f5ff --- /dev/null +++ b/src/gstprojectmconfig.h @@ -0,0 +1,20 @@ +#ifndef __GST_PROJECTM_CONFIG_H__ +#define __GST_PROJECTM_CONFIG_H__ + +#include + +G_BEGIN_DECLS + +/** + * @brief Plugin Details + */ + +#define PACKAGE "GstProjectM" +#define PACKAGE_NAME "GstProjectM" +#define PACKAGE_VERSION "0.0.3" +#define PACKAGE_LICENSE "LGPL" +#define PACKAGE_ORIGIN "https://github.com/projectM-visualizer/gst-projectm" + +G_END_DECLS + +#endif /* __GST_PROJECTM_CONFIG_H__ */ diff --git a/src/plugin.c b/src/plugin.c deleted file mode 100644 index 125b2ed..0000000 --- a/src/plugin.c +++ /dev/null @@ -1,539 +0,0 @@ -#include -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#ifdef USE_GLEW -#include -#endif -#include -#include -#include - -#include - -#include "caps.h" -#include "config.h" -#include "debug.h" -#include "enums.h" -#include "gstglbaseaudiovisualizer.h" -#include "plugin.h" -#include "projectm.h" - -GST_DEBUG_CATEGORY_STATIC(gst_projectm_debug); -#define GST_CAT_DEFAULT gst_projectm_debug - -struct _GstProjectMPrivate { - GLenum gl_format; - projectm_handle handle; - - GstClockTime first_frame_time; - gboolean first_frame_received; -}; - -G_DEFINE_TYPE_WITH_CODE(GstProjectM, gst_projectm, - GST_TYPE_GL_BASE_AUDIO_VISUALIZER, - G_ADD_PRIVATE(GstProjectM) - GST_DEBUG_CATEGORY_INIT(gst_projectm_debug, - "gstprojectm", 0, - "Plugin Root")); - -void gst_projectm_set_property(GObject *object, guint property_id, - const GValue *value, GParamSpec *pspec) { - GstProjectM *plugin 
= GST_PROJECTM(object); - - const gchar *property_name = g_param_spec_get_name(pspec); - GST_DEBUG_OBJECT(plugin, "set-property <%s>", property_name); - - switch (property_id) { - case PROP_PRESET_PATH: - plugin->preset_path = g_strdup(g_value_get_string(value)); - break; - case PROP_TEXTURE_DIR_PATH: - plugin->texture_dir_path = g_strdup(g_value_get_string(value)); - break; - case PROP_BEAT_SENSITIVITY: - plugin->beat_sensitivity = g_value_get_float(value); - break; - case PROP_HARD_CUT_DURATION: - plugin->hard_cut_duration = g_value_get_double(value); - break; - case PROP_HARD_CUT_ENABLED: - plugin->hard_cut_enabled = g_value_get_boolean(value); - break; - case PROP_HARD_CUT_SENSITIVITY: - plugin->hard_cut_sensitivity = g_value_get_float(value); - break; - case PROP_SOFT_CUT_DURATION: - plugin->soft_cut_duration = g_value_get_double(value); - break; - case PROP_PRESET_DURATION: - plugin->preset_duration = g_value_get_double(value); - break; - case PROP_MESH_SIZE: { - const gchar *meshSizeStr = g_value_get_string(value); - gint width, height; - - gchar **parts = g_strsplit(meshSizeStr, ",", 2); - - if (parts && g_strv_length(parts) == 2) { - width = atoi(parts[0]); - height = atoi(parts[1]); - - plugin->mesh_width = width; - plugin->mesh_height = height; - - g_strfreev(parts); - } - } break; - case PROP_ASPECT_CORRECTION: - plugin->aspect_correction = g_value_get_boolean(value); - break; - case PROP_EASTER_EGG: - plugin->easter_egg = g_value_get_float(value); - break; - case PROP_PRESET_LOCKED: - plugin->preset_locked = g_value_get_boolean(value); - break; - case PROP_ENABLE_PLAYLIST: - plugin->enable_playlist = g_value_get_boolean(value); - break; - case PROP_SHUFFLE_PRESETS: - plugin->shuffle_presets = g_value_get_boolean(value); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); - break; - } -} - -void gst_projectm_get_property(GObject *object, guint property_id, - GValue *value, GParamSpec *pspec) { - GstProjectM *plugin = GST_PROJECTM(object); - - const gchar *property_name = g_param_spec_get_name(pspec); - GST_DEBUG_OBJECT(plugin, "get-property <%s>", property_name); - - switch (property_id) { - case PROP_PRESET_PATH: - g_value_set_string(value, plugin->preset_path); - break; - case PROP_TEXTURE_DIR_PATH: - g_value_set_string(value, plugin->texture_dir_path); - break; - case PROP_BEAT_SENSITIVITY: - g_value_set_float(value, plugin->beat_sensitivity); - break; - case PROP_HARD_CUT_DURATION: - g_value_set_double(value, plugin->hard_cut_duration); - break; - case PROP_HARD_CUT_ENABLED: - g_value_set_boolean(value, plugin->hard_cut_enabled); - break; - case PROP_HARD_CUT_SENSITIVITY: - g_value_set_float(value, plugin->hard_cut_sensitivity); - break; - case PROP_SOFT_CUT_DURATION: - g_value_set_double(value, plugin->soft_cut_duration); - break; - case PROP_PRESET_DURATION: - g_value_set_double(value, plugin->preset_duration); - break; - case PROP_MESH_SIZE: { - gchar *meshSizeStr = - g_strdup_printf("%lu,%lu", plugin->mesh_width, plugin->mesh_height); - g_value_set_string(value, meshSizeStr); - g_free(meshSizeStr); - break; - } - case PROP_ASPECT_CORRECTION: - g_value_set_boolean(value, plugin->aspect_correction); - break; - case PROP_EASTER_EGG: - g_value_set_float(value, plugin->easter_egg); - break; - case PROP_PRESET_LOCKED: - g_value_set_boolean(value, plugin->preset_locked); - break; - case PROP_ENABLE_PLAYLIST: - g_value_set_boolean(value, plugin->enable_playlist); - break; - case PROP_SHUFFLE_PRESETS: - g_value_set_boolean(value, 
plugin->shuffle_presets); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); - break; - } -} - -static void gst_projectm_init(GstProjectM *plugin) { - plugin->priv = gst_projectm_get_instance_private(plugin); - - // Set default values for properties - plugin->preset_path = DEFAULT_PRESET_PATH; - plugin->texture_dir_path = DEFAULT_TEXTURE_DIR_PATH; - plugin->beat_sensitivity = DEFAULT_BEAT_SENSITIVITY; - plugin->hard_cut_duration = DEFAULT_HARD_CUT_DURATION; - plugin->hard_cut_enabled = DEFAULT_HARD_CUT_ENABLED; - plugin->hard_cut_sensitivity = DEFAULT_HARD_CUT_SENSITIVITY; - plugin->soft_cut_duration = DEFAULT_SOFT_CUT_DURATION; - plugin->preset_duration = DEFAULT_PRESET_DURATION; - plugin->enable_playlist = DEFAULT_ENABLE_PLAYLIST; - plugin->shuffle_presets = DEFAULT_SHUFFLE_PRESETS; - - const gchar *meshSizeStr = DEFAULT_MESH_SIZE; - gint width, height; - - gchar **parts = g_strsplit(meshSizeStr, ",", 2); - - if (parts && g_strv_length(parts) == 2) { - width = atoi(parts[0]); - height = atoi(parts[1]); - - plugin->mesh_width = width; - plugin->mesh_height = height; - - g_strfreev(parts); - } - - plugin->aspect_correction = DEFAULT_ASPECT_CORRECTION; - plugin->easter_egg = DEFAULT_EASTER_EGG; - plugin->preset_locked = DEFAULT_PRESET_LOCKED; - plugin->priv->handle = NULL; -} - -static void gst_projectm_finalize(GObject *object) { - GstProjectM *plugin = GST_PROJECTM(object); - g_free(plugin->preset_path); - g_free(plugin->texture_dir_path); - G_OBJECT_CLASS(gst_projectm_parent_class)->finalize(object); -} - -static void gst_projectm_gl_stop(GstGLBaseAudioVisualizer *src) { - GstProjectM *plugin = GST_PROJECTM(src); - if (plugin->priv->handle) { - GST_DEBUG_OBJECT(plugin, "Destroying ProjectM instance"); - projectm_destroy(plugin->priv->handle); - plugin->priv->handle = NULL; - } -} - -static gboolean gst_projectm_gl_start(GstGLBaseAudioVisualizer *glav) { - // Cast the audio visualizer to the ProjectM plugin - GstProjectM *plugin = GST_PROJECTM(glav); - -#ifdef USE_GLEW - GST_DEBUG_OBJECT(plugin, "Initializing GLEW"); - GLenum err = glewInit(); - if (GLEW_OK != err) { - GST_ERROR_OBJECT(plugin, "GLEW initialization failed"); - return FALSE; - } -#endif - - // Check if ProjectM instance exists, and create if not - if (!plugin->priv->handle) { - // Create ProjectM instance - plugin->priv->handle = projectm_init(plugin); - if (!plugin->priv->handle) { - GST_ERROR_OBJECT(plugin, "ProjectM could not be initialized"); - return FALSE; - } - gl_error_handler(glav->context, plugin); - } - - return TRUE; -} - -static gboolean gst_projectm_setup(GstGLBaseAudioVisualizer *glav) { - GstAudioVisualizer *bscope = GST_AUDIO_VISUALIZER(glav); - GstProjectM *plugin = GST_PROJECTM(glav); - - // Calculate depth based on pixel stride and bits - gint depth = bscope->vinfo.finfo->pixel_stride[0] * - ((bscope->vinfo.finfo->bits >= 8) ? 8 : 1); - - // Calculate required samples per frame - bscope->req_spf = - (bscope->ainfo.channels * bscope->ainfo.rate * 2) / bscope->vinfo.fps_n; - - // get GStreamer video format and map it to the corresponding OpenGL pixel - // format - const GstVideoFormat video_format = GST_VIDEO_INFO_FORMAT(&bscope->vinfo); - - // TODO: why is the reversed byte order needed when copying pixel data from - // OpenGL ? 
- switch (video_format) { - case GST_VIDEO_FORMAT_ABGR: - plugin->priv->gl_format = GL_RGBA; - break; - - case GST_VIDEO_FORMAT_RGBA: - // GL_ABGR_EXT does not seem to be well-supported, does not work on Windows - plugin->priv->gl_format = GL_ABGR_EXT; - break; - - default: - GST_ERROR_OBJECT(plugin, "Unsupported video format: %d", video_format); - return FALSE; - } - - // Log audio info - GST_DEBUG_OBJECT( - glav, "Audio Information ", - bscope->ainfo.channels, bscope->ainfo.rate, - bscope->ainfo.finfo->description); - - // Log video info - GST_DEBUG_OBJECT(glav, - "Video Information ", - GST_VIDEO_INFO_WIDTH(&bscope->vinfo), - GST_VIDEO_INFO_HEIGHT(&bscope->vinfo), bscope->vinfo.fps_n, - bscope->vinfo.fps_d, depth, bscope->req_spf); - - return TRUE; -} - -static double get_seconds_since_first_frame(GstProjectM *plugin, - GstVideoFrame *frame) { - if (!plugin->priv->first_frame_received) { - // Store the timestamp of the first frame - plugin->priv->first_frame_time = GST_BUFFER_PTS(frame->buffer); - plugin->priv->first_frame_received = TRUE; - return 0.0; - } - - // Calculate elapsed time - GstClockTime current_time = GST_BUFFER_PTS(frame->buffer); - GstClockTime elapsed_time = current_time - plugin->priv->first_frame_time; - - // Convert to fractional seconds - gdouble elapsed_seconds = (gdouble)elapsed_time / GST_SECOND; - - return elapsed_seconds; -} - -// TODO: CLEANUP & ADD DEBUGGING -static gboolean gst_projectm_render(GstGLBaseAudioVisualizer *glav, - GstBuffer *audio, GstVideoFrame *video) { - GstProjectM *plugin = GST_PROJECTM(glav); - - GstMapInfo audioMap; - gboolean result = TRUE; - - // get current gst (PTS) time and set projectM time - double seconds_since_first_frame = - get_seconds_since_first_frame(plugin, video); - projectm_set_frame_time(plugin->priv->handle, seconds_since_first_frame); - - // AUDIO - gst_buffer_map(audio, &audioMap, GST_MAP_READ); - - // GST_DEBUG_OBJECT(plugin, "Audio Samples: %u, Offset: %lu, Offset End: %lu, - // Sample Rate: %d, FPS: %d, Required Samples Per Frame: %d", - // audioMap.size / 8, audio->offset, audio->offset_end, - // bscope->ainfo.rate, bscope->vinfo.fps_n, bscope->req_spf); - - projectm_pcm_add_int16(plugin->priv->handle, (gint16 *)audioMap.data, - audioMap.size / 4, PROJECTM_STEREO); - - // GST_DEBUG_OBJECT(plugin, "Audio Data: %d %d %d %d", ((gint16 - // *)audioMap.data)[100], ((gint16 *)audioMap.data)[101], ((gint16 - // *)audioMap.data)[102], ((gint16 *)audioMap.data)[103]); - - // VIDEO - const GstGLFuncs *glFunctions = glav->context->gl_vtable; - - size_t windowWidth, windowHeight; - - projectm_get_window_size(plugin->priv->handle, &windowWidth, &windowHeight); - - projectm_opengl_render_frame(plugin->priv->handle); - gl_error_handler(glav->context, plugin); - - glFunctions->ReadPixels(0, 0, windowWidth, windowHeight, - plugin->priv->gl_format, GL_UNSIGNED_INT_8_8_8_8, - (guint8 *)GST_VIDEO_FRAME_PLANE_DATA(video, 0)); - - gst_buffer_unmap(audio, &audioMap); - - // GST_DEBUG_OBJECT(plugin, "Video Data: %d %d\n", - // GST_VIDEO_FRAME_N_PLANES(video), ((uint8_t - // *)(GST_VIDEO_FRAME_PLANE_DATA(video, 0)))[0]); - - // GST_DEBUG_OBJECT(plugin, "Rendered one frame"); - - return result; -} - -static void gst_projectm_class_init(GstProjectMClass *klass) { - GObjectClass *gobject_class = (GObjectClass *)klass; - GstElementClass *element_class = (GstElementClass *)klass; - GstGLBaseAudioVisualizerClass *scope_class = - GST_GL_BASE_AUDIO_VISUALIZER_CLASS(klass); - - // Setup audio and video caps - const gchar *audio_sink_caps = 
get_audio_sink_cap(0); - const gchar *video_src_caps = get_video_src_cap(0); - - gst_element_class_add_pad_template( - GST_ELEMENT_CLASS(klass), - gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, - gst_caps_from_string(video_src_caps))); - gst_element_class_add_pad_template( - GST_ELEMENT_CLASS(klass), - gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, - gst_caps_from_string(audio_sink_caps))); - - gst_element_class_set_static_metadata( - GST_ELEMENT_CLASS(klass), "ProjectM Visualizer", "Generic", - "A plugin for visualizing music using ProjectM", - "AnomieVision | Tristan Charpentier " - ""); - - // Setup properties - gobject_class->set_property = gst_projectm_set_property; - gobject_class->get_property = gst_projectm_get_property; - - g_object_class_install_property( - gobject_class, PROP_PRESET_PATH, - g_param_spec_string( - "preset", "Preset", - "Specifies the path to the preset file. The preset file determines " - "the visual style and behavior of the audio visualizer.", - DEFAULT_PRESET_PATH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_TEXTURE_DIR_PATH, - g_param_spec_string("texture-dir", "Texture Directory", - "Sets the path to the directory containing textures " - "used in the visualizer.", - DEFAULT_TEXTURE_DIR_PATH, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_BEAT_SENSITIVITY, - g_param_spec_float( - "beat-sensitivity", "Beat Sensitivity", - "Controls the sensitivity to audio beats. Higher values make the " - "visualizer respond more strongly to beats.", - 0.0, 5.0, DEFAULT_BEAT_SENSITIVITY, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_HARD_CUT_DURATION, - g_param_spec_double("hard-cut-duration", "Hard Cut Duration", - "Sets the duration, in seconds, for hard cuts. Hard " - "cuts are abrupt transitions in the visualizer.", - 0.0, 999999.0, DEFAULT_HARD_CUT_DURATION, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_HARD_CUT_ENABLED, - g_param_spec_boolean( - "hard-cut-enabled", "Hard Cut Enabled", - "Enables or disables hard cuts. When enabled, the visualizer may " - "exhibit sudden transitions based on the audio input.", - DEFAULT_HARD_CUT_ENABLED, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_HARD_CUT_SENSITIVITY, - g_param_spec_float( - "hard-cut-sensitivity", "Hard Cut Sensitivity", - "Adjusts the sensitivity of the visualizer to hard cuts. Higher " - "values increase the responsiveness to abrupt changes in audio.", - 0.0, 1.0, DEFAULT_HARD_CUT_SENSITIVITY, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_SOFT_CUT_DURATION, - g_param_spec_double( - "soft-cut-duration", "Soft Cut Duration", - "Sets the duration, in seconds, for soft cuts. Soft cuts are " - "smoother transitions between visualizer states.", - 0.0, 999999.0, DEFAULT_SOFT_CUT_DURATION, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_PRESET_DURATION, - g_param_spec_double("preset-duration", "Preset Duration", - "Sets the duration, in seconds, for each preset. 
A " - "zero value causes the preset to play indefinitely.", - 0.0, 999999.0, DEFAULT_PRESET_DURATION, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_MESH_SIZE, - g_param_spec_string("mesh-size", "Mesh Size", - "Sets the size of the mesh used in rendering. The " - "format is 'width,height'.", - DEFAULT_MESH_SIZE, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_ASPECT_CORRECTION, - g_param_spec_boolean( - "aspect-correction", "Aspect Correction", - "Enables or disables aspect ratio correction. When enabled, the " - "visualizer adjusts for aspect ratio differences in rendering.", - DEFAULT_ASPECT_CORRECTION, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_EASTER_EGG, - g_param_spec_float( - "easter-egg", "Easter Egg", - "Controls the activation of an Easter Egg feature. The value " - "determines the likelihood of triggering the Easter Egg.", - 0.0, 1.0, DEFAULT_EASTER_EGG, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_PRESET_LOCKED, - g_param_spec_boolean( - "preset-locked", "Preset Locked", - "Locks or unlocks the current preset. When locked, the visualizer " - "remains on the current preset without automatic changes.", - DEFAULT_PRESET_LOCKED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_ENABLE_PLAYLIST, - g_param_spec_boolean( - "enable-playlist", "Enable Playlist", - "Enables or disables the playlist feature. When enabled, the " - "visualizer can switch between presets based on a provided playlist.", - DEFAULT_ENABLE_PLAYLIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - g_object_class_install_property( - gobject_class, PROP_SHUFFLE_PRESETS, - g_param_spec_boolean( - "shuffle-presets", "Shuffle Presets", - "Enables or disables preset shuffling. When enabled, the visualizer " - "randomly selects presets from the playlist if presets are provided " - "and not locked. 
Playlist must be enabled for this to take effect.", - DEFAULT_SHUFFLE_PRESETS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); - - gobject_class->finalize = gst_projectm_finalize; - - scope_class->supported_gl_api = GST_GL_API_OPENGL3 | GST_GL_API_GLES2; - scope_class->gl_start = GST_DEBUG_FUNCPTR(gst_projectm_gl_start); - scope_class->gl_stop = GST_DEBUG_FUNCPTR(gst_projectm_gl_stop); - scope_class->gl_render = GST_DEBUG_FUNCPTR(gst_projectm_render); - scope_class->setup = GST_DEBUG_FUNCPTR(gst_projectm_setup); -} - -static gboolean plugin_init(GstPlugin *plugin) { - GST_DEBUG_CATEGORY_INIT(gst_projectm_debug, "projectm", 0, - "projectM visualizer plugin"); - - return gst_element_register(plugin, "projectm", GST_RANK_NONE, - GST_TYPE_PROJECTM); -} - -GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, projectm, - "plugin to visualize audio using the ProjectM library", - plugin_init, PACKAGE_VERSION, PACKAGE_LICENSE, PACKAGE_NAME, - PACKAGE_ORIGIN) diff --git a/src/projectm.c b/src/projectm.c deleted file mode 100644 index 1bac137..0000000 --- a/src/projectm.c +++ /dev/null @@ -1,127 +0,0 @@ -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include - -#include -#include - -#include "plugin.h" -#include "projectm.h" - -GST_DEBUG_CATEGORY_STATIC(projectm_debug); -#define GST_CAT_DEFAULT projectm_debug - -projectm_handle projectm_init(GstProjectM *plugin) { - projectm_handle handle = NULL; - projectm_playlist_handle playlist = NULL; - - GST_DEBUG_CATEGORY_INIT(projectm_debug, "projectm", 0, "ProjectM"); - - GstAudioVisualizer *bscope = GST_AUDIO_VISUALIZER(plugin); - - // Create ProjectM instance - GST_DEBUG_OBJECT(plugin, "Creating projectM instance.."); - handle = projectm_create(); - - if (!handle) { - GST_DEBUG_OBJECT( - plugin, - "project_create() returned NULL, projectM instance was not created!"); - return NULL; - } else { - GST_DEBUG_OBJECT(plugin, "Created projectM instance!"); - } - - if (plugin->enable_playlist) { - GST_DEBUG_OBJECT(plugin, "Playlist enabled"); - - // initialize preset playlist - playlist = projectm_playlist_create(handle); - projectm_playlist_set_shuffle(playlist, plugin->shuffle_presets); - // projectm_playlist_set_preset_switched_event_callback(_playlist, - // &ProjectMWrapper::PresetSwitchedEvent, static_cast(this)); - } else { - GST_DEBUG_OBJECT(plugin, "Playlist disabled"); - } - - // Log properties - GST_INFO_OBJECT( - plugin, - "Using Properties: " - "preset=%s, " - "texture-dir=%s, " - "beat-sensitivity=%f, " - "hard-cut-duration=%f, " - "hard-cut-enabled=%d, " - "hard-cut-sensitivity=%f, " - "soft-cut-duration=%f, " - "preset-duration=%f, " - "mesh-size=(%lu, %lu)" - "aspect-correction=%d, " - "easter-egg=%f, " - "preset-locked=%d, " - "enable-playlist=%d, " - "shuffle-presets=%d", - plugin->preset_path, plugin->texture_dir_path, plugin->beat_sensitivity, - plugin->hard_cut_duration, plugin->hard_cut_enabled, - plugin->hard_cut_sensitivity, plugin->soft_cut_duration, - plugin->preset_duration, plugin->mesh_width, plugin->mesh_height, - plugin->aspect_correction, plugin->easter_egg, plugin->preset_locked, - plugin->enable_playlist, plugin->shuffle_presets); - - // Load preset file if path is provided - if (plugin->preset_path != NULL) { - int added_count = - projectm_playlist_add_path(playlist, plugin->preset_path, true, false); - GST_INFO("Loaded preset path: %s, presets found: %d", plugin->preset_path, - added_count); - } - - // Set texture search path if directory path is provided - if (plugin->texture_dir_path != NULL) { - const gchar 
*texturePaths[1] = {plugin->texture_dir_path}; - projectm_set_texture_search_paths(handle, texturePaths, 1); - } - - // Set properties - projectm_set_beat_sensitivity(handle, plugin->beat_sensitivity); - projectm_set_hard_cut_duration(handle, plugin->hard_cut_duration); - projectm_set_hard_cut_enabled(handle, plugin->hard_cut_enabled); - projectm_set_hard_cut_sensitivity(handle, plugin->hard_cut_sensitivity); - projectm_set_soft_cut_duration(handle, plugin->soft_cut_duration); - - // Set preset duration, or set to in infinite duration if zero - if (plugin->preset_duration > 0.0) { - projectm_set_preset_duration(handle, plugin->preset_duration); - - // kick off the first preset - if (projectm_playlist_size(playlist) > 1 && !plugin->preset_locked) { - projectm_playlist_play_next(playlist, true); - } - } else { - projectm_set_preset_duration(handle, 999999.0); - } - - projectm_set_mesh_size(handle, plugin->mesh_width, plugin->mesh_height); - projectm_set_aspect_correction(handle, plugin->aspect_correction); - projectm_set_easter_egg(handle, plugin->easter_egg); - projectm_set_preset_locked(handle, plugin->preset_locked); - - projectm_set_fps(handle, GST_VIDEO_INFO_FPS_N(&bscope->vinfo)); - projectm_set_window_size(handle, GST_VIDEO_INFO_WIDTH(&bscope->vinfo), - GST_VIDEO_INFO_HEIGHT(&bscope->vinfo)); - - return handle; -} - -// void projectm_render(GstProjectM *plugin, gint16 *samples, gint sample_count) -// { -// GST_DEBUG_OBJECT(plugin, "Rendering %d samples", sample_count); - -// projectm_pcm_add_int16(plugin->handle, samples, sample_count, -// PROJECTM_STEREO); - -// projectm_opengl_render_frame(plugin->handle); -// } diff --git a/src/projectm.h b/src/projectm.h deleted file mode 100644 index 1ba6a37..0000000 --- a/src/projectm.h +++ /dev/null @@ -1,24 +0,0 @@ -#ifndef __PROJECTM_H__ -#define __PROJECTM_H__ - -#include - -#include "plugin.h" -#include - -G_BEGIN_DECLS - -/** - * @brief Initialize ProjectM - */ -projectm_handle projectm_init(GstProjectM *plugin); - -/** - * @brief Render ProjectM - */ -// void projectm_render(GstProjectM *plugin, gint16 *samples, gint -// sample_count); - -G_END_DECLS - -#endif /* __PROJECTM_H__ */ \ No newline at end of file diff --git a/src/pushbuffer.c b/src/pushbuffer.c new file mode 100644 index 0000000..e977ee8 --- /dev/null +++ b/src/pushbuffer.c @@ -0,0 +1,326 @@ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "pushbuffer.h" + +#include "bufferdisposal.h" + +#include + +GST_DEBUG_CATEGORY_STATIC(pushbuffer_debug); +#define GST_CAT_DEFAULT pushbuffer_debug + +/** + * EMA aloha for push schedule clock jitter average. + */ +#ifndef JITTER_EMA_ALPHA +#define JITTER_EMA_ALPHA 0.85 +#endif + +/** + * Push schedule clock jitter outlier threshold, any jitter duration above this + * value will be ignored. + */ +#ifndef JITTER_EMA_OUTLIER_THRESHOLD +#define JITTER_EMA_OUTLIER_THRESHOLD (5 * GST_MSECOND) +#endif + +/** + * Tolerance / minimal wait time for scheduling a timed wait before pushing a + * buffer. If the calculated wait time is less than this value, the wait will be + * skipped. 
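+ * The default below corresponds to 50 microseconds (50 * GST_USECOND).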
+ */ +#ifndef MIN_PUSH_SCHEDULE_WAIT +#define MIN_PUSH_SCHEDULE_WAIT (GST_USECOND * 50) +#endif + +gboolean pb_queue_buffer(PBPushBuffer *state, GstBuffer *buffer) { + g_assert(state != NULL); + g_assert(buffer != NULL); + + g_mutex_lock(&state->push_queue_mutex); + + gint next_idx = (state->push_queue_write_idx + 1) % PUSH_QUEUE_SIZE; + // wait until next position is free + while (state->push_queue[next_idx] != NULL && + g_atomic_int_get(&state->running)) { + g_cond_wait(&state->push_queue_free_cond, &state->push_queue_mutex); + next_idx = (state->push_queue_write_idx + 1) % PUSH_QUEUE_SIZE; + } + + gboolean ret = FALSE; + if (g_atomic_int_get(&state->running)) { + // write to the next position in the ring + state->push_queue_write_idx = next_idx; + g_assert(state->push_queue[state->push_queue_write_idx] == NULL); + // take the spot and signal that queue has changed + state->push_queue[state->push_queue_write_idx] = buffer; + g_cond_signal(&state->push_queue_cond); + ret = TRUE; + } + + g_mutex_unlock(&state->push_queue_mutex); + + return ret; +} + +void pb_calculate_avg_jitter(PBPushBuffer *state, + const GstClockTimeDiff jitter) { + // ignore outliers + if (ABS(jitter) > JITTER_EMA_OUTLIER_THRESHOLD) + return; + + if (!state->avg_jitter_init) { + state->avg_jitter = jitter; + state->avg_jitter_init = TRUE; + } else { + gdouble v = (1.0 - JITTER_EMA_ALPHA) * (gdouble)state->avg_jitter + + JITTER_EMA_ALPHA * (gdouble)jitter; + state->avg_jitter = (GstClockTimeDiff)llround(v); + } +} + +void pb_jitter_correction(PBPushBuffer *state, GstBuffer *outbuf) { + + if (GST_BUFFER_PTS(outbuf) != GST_CLOCK_TIME_NONE) { + GstClockTime correction = llabs(state->avg_jitter); + + if (state->avg_jitter > 0 && GST_BUFFER_PTS(outbuf) > correction) { + GST_BUFFER_PTS(outbuf) -= correction; + } else if (state->avg_jitter < 0) { + GST_BUFFER_PTS(outbuf) += correction; + } + } +} + +GstClockReturn pb_wait_to_push(PBPushBuffer *state, const GstClockTime pts) { + GstClockReturn ret = GST_CLOCK_UNSUPPORTED; + + if (state->clock) { + const GstClockTime base_time = + gst_element_get_base_time(GST_ELEMENT(state->plugin)); + + if (base_time == GST_CLOCK_TIME_NONE) { + return GST_CLOCK_UNSCHEDULED; + } + + const GstClockTime abs_time = pts + base_time; + + const GstClockTimeDiff remaining_wait = + GST_CLOCK_DIFF(abs_time, gst_clock_get_time(state->clock)); + + if (remaining_wait < MIN_PUSH_SCHEDULE_WAIT) { + // we don't need to wait, all good + return GST_CLOCK_OK; + } + + // we need to wait + GstClockTimeDiff jitter = 0; + const GstClockID clock_id = + gst_clock_new_single_shot_id(state->clock, abs_time); + + ret = gst_clock_id_wait(clock_id, &jitter); + + gst_clock_id_unref(clock_id); + + if (ret == GST_CLOCK_OK || ret == GST_CLOCK_EARLY) { + // record jitter + pb_calculate_avg_jitter(state, jitter); + } + GST_TRACE_OBJECT(state->plugin, + "Push jitter avg=%" G_GINT64_FORMAT " ns, ret=%d", + state->avg_jitter, ret); + } + + return ret; +} + +/** + * Consume buffers to push and wait until it's PTS time to push. + * Used for real-time rendering only. + * + * + * @param user_data Render buffer to use. 
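+ * (In practice, user_data is the PBPushBuffer state handed to g_thread_new() by pb_start_push_buffer.)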
+ * @return NULL + */ +static gpointer _rb_push_thread_func(gpointer user_data) { + + PBPushBuffer *state = (PBPushBuffer *)user_data; + g_assert(state != NULL); + + g_mutex_lock(&state->push_queue_mutex); + + while (g_atomic_int_get(&state->running)) { + + // consume gl buffer to push + gboolean stop = FALSE; + gint next_idx = (state->push_queue_read_idx + 1) % PUSH_QUEUE_SIZE; + while (state->push_queue[next_idx] == NULL) { + // no buffer to push, wait for one + g_cond_wait(&state->push_queue_cond, &state->push_queue_mutex); + if (!g_atomic_int_get(&state->running)) { + stop = TRUE; + break; + } + next_idx = (state->push_queue_read_idx + 1) % PUSH_QUEUE_SIZE; + } + + if (stop) { + break; + } + + state->push_queue_read_idx = next_idx; + + // found a buffer to push + GstBuffer *outbuf = state->push_queue[state->push_queue_read_idx]; + + g_assert(outbuf != NULL); + + // determine when it's time to push + const GstClockTime pts = GST_BUFFER_PTS(outbuf); + GstClockReturn clock_return = GST_CLOCK_UNSUPPORTED; + if (pts != GST_CLOCK_TIME_NONE) { + + g_mutex_unlock(&state->push_queue_mutex); + clock_return = pb_wait_to_push(state, pts); + g_mutex_lock(&state->push_queue_mutex); + + if (clock_return == GST_CLOCK_UNSCHEDULED) { + // drop buffer if clock is not running + if (state->push_queue[state->push_queue_read_idx] == outbuf) { + state->push_queue[state->push_queue_read_idx] = NULL; + bd_dispose_gl_buffer(state->buffer_disposal, outbuf); + } + continue; + } + } + + // now we own the buffer to push + state->push_queue[state->push_queue_read_idx] = NULL; + g_cond_signal(&state->push_queue_free_cond); + g_mutex_unlock(&state->push_queue_mutex); + + if (clock_return != GST_CLOCK_UNSUPPORTED) { + // apply wait jitter correction ro buffer + pb_jitter_correction(state, outbuf); + } + + // push buffer downstream + const GstFlowReturn ret = gst_pad_push(state->src_pad, outbuf); + + if (ret == GST_FLOW_FLUSHING) { + GST_INFO_OBJECT(state->plugin, + "Pad is flushing and does not accept buffers anymore"); + } else if (ret != GST_FLOW_OK) { + GST_WARNING_OBJECT(state->plugin, "Failed to push buffer to pad"); + } + + g_mutex_lock(&state->push_queue_mutex); + } + + g_mutex_unlock(&state->push_queue_mutex); + + return NULL; +} + +void pb_init_push_buffer(PBPushBuffer *state, BDBufferDisposal *buffer_cleanup, + GstObject *plugin, GstPad *src_pad) { + + static gsize _debug_initialized = 0; + if (g_once_init_enter(&_debug_initialized)) { + GST_DEBUG_CATEGORY_INIT(pushbuffer_debug, "pushbuffer", 0, + "projectM visualizer plugin push buffer"); + } + + state->buffer_disposal = buffer_cleanup; + state->plugin = plugin; + state->src_pad = src_pad; + + // init push queue + state->push_thread = NULL; + state->push_queue_read_idx = PUSH_QUEUE_SIZE - 1; + state->push_queue_write_idx = PUSH_QUEUE_SIZE - 1; + g_mutex_init(&state->push_queue_mutex); + g_cond_init(&state->push_queue_cond); + g_cond_init(&state->push_queue_free_cond); + for (guint i = 0; i < PUSH_QUEUE_SIZE; i++) { + state->push_queue[i] = NULL; + } + + state->avg_jitter = 0.0; + state->avg_jitter_init = FALSE; + + state->clock = NULL; +} + +void pb_dispose_push_buffer(PBPushBuffer *state) { + + g_cond_clear(&state->push_queue_cond); + g_cond_clear(&state->push_queue_free_cond); + g_mutex_clear(&state->push_queue_mutex); + + state->buffer_disposal = NULL; + state->plugin = NULL; + state->src_pad = NULL; + + if (state->clock != NULL) { + gst_object_unref(state->clock); + state->clock = NULL; + } +} + +void pb_clear_queue(PBPushBuffer *state) { + g_assert(state 
!= NULL); + g_mutex_lock(&state->push_queue_mutex); + + // release buffers that are still queued before cleanup thread shuts down + for (guint i = 0; i < PUSH_QUEUE_SIZE; i++) { + if (state->push_queue[i] != NULL) { + bd_dispose_gl_buffer(state->buffer_disposal, state->push_queue[i]); + state->push_queue[i] = NULL; + } + } + state->push_queue_read_idx = PUSH_QUEUE_SIZE - 1; + state->push_queue_write_idx = PUSH_QUEUE_SIZE - 1; + state->avg_jitter = 0.0; + state->avg_jitter_init = FALSE; + + g_mutex_unlock(&state->push_queue_mutex); +} + +void pb_start_push_buffer(PBPushBuffer *state) { + + if (state->clock != NULL) { + gst_object_unref(state->clock); + } + state->clock = gst_element_get_clock(GST_ELEMENT(state->plugin)); + + g_atomic_int_set(&state->running, TRUE); + + if (state->push_thread == NULL) { + state->push_thread = + g_thread_new("pb-push-thread", _rb_push_thread_func, state); + } +} + +void pb_stop_push_buffer(PBPushBuffer *state) { + g_atomic_int_set(&state->running, FALSE); + + // signal wake up to conditions that may be blocking + g_mutex_lock(&state->push_queue_mutex); + g_cond_broadcast(&state->push_queue_cond); + g_cond_broadcast(&state->push_queue_free_cond); + g_mutex_unlock(&state->push_queue_mutex); + + // wait for push thread to exit + if (state->push_thread) { + g_thread_join(state->push_thread); + state->push_thread = NULL; + } + + if (state->clock != NULL) { + gst_object_unref(state->clock); + state->clock = NULL; + } +} diff --git a/src/pushbuffer.h b/src/pushbuffer.h new file mode 100644 index 0000000..59a9ae8 --- /dev/null +++ b/src/pushbuffer.h @@ -0,0 +1,208 @@ +/* + * A ring buffer based queue to schedule GL buffers to be pushed + * downstream at presentation time (PTS). The push queue decouples the render + * loop from buffer push timing, allowing the render loop to render frames ahead + * up to the queue capacity. The queue is consumed by a dedicated thread + * (pb-push-thread) to wait for the next scheduled push. The queuing call will + * block when capacity is reached until a free slot is available, throttling the + * render loop. Frames are never dropped. + */ + +#ifndef __PUSHBUFFER_H__ +#define __PUSHBUFFER_H__ + +#include + +#include "bufferdisposal.h" + +/** + * Max number of gl frame buffers waiting in a scheduled state to be pushed. + * Capacity should be low (1-2) to allow back-pressure from fps increases to + * propagate quickly. + * + * 0 : Disable push queuing, block render loop directly until PTS of current + * frame is reached. + * + * >0 : Allow n buffers waiting in the queue for pushing while render thread + * continues. + */ +#ifndef PUSH_QUEUE_SIZE +#define PUSH_QUEUE_SIZE 1 +#endif + +/** + * Push buffer state. + */ +typedef struct { + + // not re-assigned during push buffer lifetime + // -------------------------------------------------------------- + + /** + * projectM plugin. No ownership. + */ + BDBufferDisposal *buffer_disposal; + + /** + * projectM plugin. No ownership. + */ + GstObject *plugin; + + /** + * projectM plugin source pad. No ownership. + */ + GstPad *src_pad; + + /** + * Thread for pushing gl buffers downstream. + * Used for real-time, pushing needs to be scheduled to be synchronized with + * the pipeline clock. + */ + GThread *push_thread; + + /** + * Ring buffer to schedule gl buffers for pushing. + */ + GstBuffer *push_queue[PUSH_QUEUE_SIZE]; + + /** + * Mutex for push ring buffer. + */ + GMutex push_queue_mutex; + + /** + * Condition signaled when a buffer has been queued. 
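+ * Signaled by pb_queue_buffer(); the push thread waits on it while the ring is empty.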
+ */ + GCond push_queue_cond; + + /** + * Condition signaled when a buffer has been pushed + * and a slot is free. + */ + GCond push_queue_free_cond; + + /** + * Clock to use for scheduling. + */ + GstClock *clock; + + // concurrent access, g_atomic + // -------------------------------------------------------------- + + /** + * TRUE if rendering is currently running. + */ + gboolean running; + + // concurrent access, protected by push_queue_mutex + // -------------------------------------------------------------- + + /** + * Push ring buffer write position. + */ + gint push_queue_write_idx; + + /** + * Push ring buffer read position. + */ + gint push_queue_read_idx; + + // used only by either render (offline) or push thread (real-time) + // -------------------------------------------------------------- + + /** + * EMA based clock jitter average. + */ + GstClockTimeDiff avg_jitter; + + /** + * Clock jitter initialized. + */ + gboolean avg_jitter_init; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +} PBPushBuffer; + +/** + * Schedule a rendered gl buffer to be pushed downstream. + * The buffer will not be pushed until its PTS time is reached. + * This call will block until the buffer can be scheduled or + * the push buffer is stopped. + * + * @param state The push buffer to use. + * @param buffer The gl buffer to push. Takes ownership of the buffer. + * + * @return TRUE if the buffer was scheduled successfully, FALSE in case the + * buffer was stopped. + */ +gboolean pb_queue_buffer(PBPushBuffer *state, GstBuffer *buffer); + +/** + * Removes and disposes all queued buffers and resets queue state. + * + * @param state State to clear. + */ +void pb_clear_queue(PBPushBuffer *state); + +/** + * Applies jitter correction to the given buffer. + * + * @param state Current push buffer. + * @param outbuf Buffer to apply correction to. + */ +void pb_jitter_correction(PBPushBuffer *state, GstBuffer *outbuf); + +/** + * Calculate current clock jitter average. + * + * @param state Current push buffer. + * @param jitter Latest jitter value. + */ +void pb_calculate_avg_jitter(PBPushBuffer *state, GstClockTimeDiff jitter); + +/** + * Wait until reaching the given PTS. The clock jitter is recorded if it's + * appropriate to do so. + * + * @param state Push buffer to use. + * @param pts Wait until this PTS is reached. + * + * @return The clock wait result. + */ +GstClockReturn pb_wait_to_push(PBPushBuffer *state, GstClockTime pts); + +/** + * Init this push buffer. + * + * @param state Push buffer to use. + * @param buffer_cleanup Buffer disposal to use. + * @param plugin Context gst plugin element. + * @param src_pad Source pad to push buffers to. + */ +void pb_init_push_buffer(PBPushBuffer *state, BDBufferDisposal *buffer_cleanup, + GstObject *plugin, GstPad *src_pad); + +/** + * Release all resources for this push buffer. + * + * @param state Push buffer to use. + */ +void pb_dispose_push_buffer(PBPushBuffer *state); + +/** + * Start push buffer worker thread. + * + * @param state Push buffer to use. + */ +void pb_start_push_buffer(PBPushBuffer *state); + +/** + * Stop push buffer worker thread. + * + * @param state Push buffer to use.
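+ * Blocks until the worker thread has been joined.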
+ */ +void pb_stop_push_buffer(PBPushBuffer *state); + +#endif diff --git a/src/register.c b/src/register.c new file mode 100644 index 0000000..342c46d --- /dev/null +++ b/src/register.c @@ -0,0 +1,35 @@ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#ifdef _WIN32 +#define EXPORT __declspec(dllexport) +#else +#define EXPORT +#endif + +#include "gstprojectm.h" +#include "gstprojectmconfig.h" + +#include + +/* + * This unit registers all gst elements from this plugin library to make them + * available to GStreamer. + */ +EXPORT gboolean plugin_init(GstPlugin *plugin) { + + // register main plugin projectM element + gboolean p1 = gst_element_register(plugin, "projectm", GST_RANK_NONE, + GST_TYPE_PROJECTM); + + // add additional elements here.. + + return p1; +} + +GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, projectm, + "plugin to visualize audio using the ProjectM library", + plugin_init, PACKAGE_VERSION, PACKAGE_LICENSE, PACKAGE_NAME, + PACKAGE_ORIGIN) diff --git a/src/renderbuffer.c b/src/renderbuffer.c new file mode 100644 index 0000000..439a3b1 --- /dev/null +++ b/src/renderbuffer.c @@ -0,0 +1,815 @@ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "renderbuffer.h" + +#include + +#include + +GST_DEBUG_CATEGORY_STATIC(renderbuffer_debug); +#define GST_CAT_DEFAULT renderbuffer_debug + +/** + * Number of frames inspected by EMA. + */ +#ifndef RB_EMA_FPS_ADJUST_INTERVAL +#define RB_EMA_FPS_ADJUST_INTERVAL 10 +#endif + +/** + * EMA alpha = 0.2 + */ +#ifndef RB_EMA_ALPHA_N +#define RB_EMA_ALPHA_N 1 +#endif + +#ifndef RB_EMA_ALPHA_D +#define RB_EMA_ALPHA_D 5 +#endif + +/** + * EMA increase frame duration (slow down fps) in case of detected lag. + * +15% + */ +#ifndef RB_EMA_FRAME_DURATION_INCREASE_N +#define RB_EMA_FRAME_DURATION_INCREASE_N 115 +#endif + +#ifndef RB_EMA_FRAME_DURATION_INCREASE_D +#define RB_EMA_FRAME_DURATION_INCREASE_D 100 +#endif + +/** + * EMA decrease frame duration (speed up fps) in case rendering performance + * recovers. -5% + */ +#ifndef RB_EMA_FRAME_DURATION_DECREASE_N +#define RB_EMA_FRAME_DURATION_DECREASE_N 95 +#endif + +#ifndef RB_EMA_FRAME_DURATION_DECREASE_D +#define RB_EMA_FRAME_DURATION_DECREASE_D 100 +#endif + +/** + * EMA tolerance for being too slow. + * Allow render time up to 1.1x + */ +#ifndef RB_EMA_FRAME_DURATION_TOLERANCE_UP_N +#define RB_EMA_FRAME_DURATION_TOLERANCE_UP_N 110 +#endif + +#ifndef RB_EMA_FRAME_DURATION_TOLERANCE_UP_D +#define RB_EMA_FRAME_DURATION_TOLERANCE_UP_D 100 +#endif + +/** + * EMA tolerance for being too fast. + * allow render time as low as 0.95x + */ +#ifndef RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_N +#define RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_N 95 +#endif + +#ifndef RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_D +#define RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_D 100 +#endif + +/** + * How much time has to be left of the time budget for scheduling before + * entering wait. Tolerance to account for scheduling overhead etc. to guarantee + * a defined max run-time of the scheduling process. + */ +#ifndef MIN_FREE_SLOT_SCHEDULE_WAIT +#define MIN_FREE_SLOT_SCHEDULE_WAIT GST_MSECOND +#endif + +/** + * Controls if pushing of gl buffers is done by the render loop (blocking) + * directly, or deferred to the push buffer. Deferring allows a more responsive + * render timing. + * + * 0 : Push blocks render loop. + * 1 : Push is deferred to push buffer, may or may not block. 
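+ *
+ * The #ifndef guard below allows the default of 1 to be overridden at build
+ * time if desired, e.g. by passing -DPUSH_BUFFER_ENABLED=0 to the compiler.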
+ */ +#ifndef PUSH_BUFFER_ENABLED +#define PUSH_BUFFER_ENABLED 1 +#endif + +/** + * Exponential Moving Average (EMA)-based adaptive frame duration (fps) + * adjustment. Determines desired frame duration change based on the + * frame render duration and min/max fps configs. + * + * @param state Render state data. + * @param render_duration Render duration for the last frame in nanos. + * @param frame_duration Current desired frame duration in nanos (fps * + * GST_SECOND). + */ +static void rb_handle_adaptive_fps_ema(RBRenderBuffer *state, + const GstClockTime render_duration, + const GstClockTime frame_duration) { + g_assert(state != NULL); + + state->ema_frame_counter++; + + // EMA smoothing: smoothed = alpha * x + (1 - alpha) * prev + state->ema_smoothed_render_time = + + gst_util_uint64_scale_int(render_duration, RB_EMA_ALPHA_N, + RB_EMA_ALPHA_D) + + + gst_util_uint64_scale_int(state->ema_smoothed_render_time, + RB_EMA_ALPHA_D - RB_EMA_ALPHA_N, + RB_EMA_ALPHA_D); + + if (state->ema_frame_counter >= RB_EMA_FPS_ADJUST_INTERVAL) { + + GstClockTime new_duration; + state->ema_frame_counter = 0; + + const GstClockTime upper_threshold = gst_util_uint64_scale_int( + frame_duration, RB_EMA_FRAME_DURATION_TOLERANCE_UP_N, + RB_EMA_FRAME_DURATION_TOLERANCE_UP_D); + + const GstClockTime lower_threshold = gst_util_uint64_scale_int( + frame_duration, RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_N, + RB_EMA_FRAME_DURATION_TOLERANCE_DOWN_D); + + if (state->ema_smoothed_render_time > upper_threshold) { + + // rendering too slow, increase frame duration (drop FPS) + new_duration = gst_util_uint64_scale_int( + frame_duration, RB_EMA_FRAME_DURATION_INCREASE_N, + RB_EMA_FRAME_DURATION_INCREASE_D); + } else if (state->ema_smoothed_render_time < lower_threshold) { + + // rendering fast enough, try to decrease frame duration (increase FPS) + new_duration = gst_util_uint64_scale_int( + frame_duration, RB_EMA_FRAME_DURATION_DECREASE_N, + RB_EMA_FRAME_DURATION_DECREASE_D); + } else { + // within tolerance, no change + return; + } + + g_mutex_lock(&state->slot_lock); + + // clamp min/max frame duration (fps) according to config + if (new_duration > state->max_frame_duration) { + new_duration = state->max_frame_duration; + } else if (new_duration < state->caps_frame_duration) { + new_duration = state->caps_frame_duration; + } + + g_mutex_unlock(&state->slot_lock); + + if (new_duration != frame_duration) { + GST_DEBUG_OBJECT( + state->plugin, + "Adaptive FPS: frame duration changed from %" GST_TIME_FORMAT + " to %" GST_TIME_FORMAT, + GST_TIME_ARGS(frame_duration), GST_TIME_ARGS(new_duration)); + + // pass new frame duration to callback + state->adjust_fps_func(state, new_duration); + } + } +} + +void rb_init_render_buffer(RBRenderBuffer *state, GstObject *plugin, + GstGLContext *gl_context, GstPad *src_pad, + const GstGLContextThreadFunc gl_fill_func, + const RBAdjustFpsFunc adjust_fps_func, + const GstClockTime max_frame_duration, + const GstClockTime caps_frame_duration, + const gboolean is_qos_enabled, + const gboolean is_realtime) { + + g_assert(state != NULL); + g_assert(plugin != NULL); + g_assert(gl_context != NULL); + g_assert(src_pad != NULL); + g_assert(gl_fill_func != NULL); + g_assert(adjust_fps_func != NULL); + g_assert(max_frame_duration >= caps_frame_duration); + + static gsize _debug_initialized = 0; + if (g_once_init_enter(&_debug_initialized)) { + GST_DEBUG_CATEGORY_INIT(renderbuffer_debug, "renderbuffer", 0, + "projectM visualizer plugin render buffer"); + } + + // context without ownership + state->plugin 
= plugin; + state->adjust_fps_func = adjust_fps_func; + state->gl_context = gl_context; + state->src_pad = src_pad; + + // never changed after init + state->qos_enabled = is_qos_enabled; + state->is_realtime = is_realtime; + state->caps_frame_duration = caps_frame_duration; + state->max_frame_duration = max_frame_duration; + + // changed all the time + g_atomic_int_set(&state->running, FALSE); + + // init render queue / changed all the time + state->render_thread = NULL; + state->render_write_idx = NUM_RENDER_SLOTS - 1; + state->render_read_idx = NUM_RENDER_SLOTS - 1; + g_mutex_init(&state->slot_lock); + g_cond_init(&state->slot_available_cond); + g_cond_init(&state->render_queued_cond); + + for (guint i = 0; i < NUM_RENDER_SLOTS; i++) { + state->slots[i].state = RB_EMPTY; + state->slots[i].plugin = plugin; + state->slots[i].gl_result = FALSE; + state->slots[i].pts = GST_CLOCK_TIME_NONE; + state->slots[i].frame_duration = 0; + state->slots[i].out_buf = NULL; + state->slots[i].gl_fill_func = gl_fill_func; + state->slots[i].in_audio = NULL; + } + + // init EMA + state->ema_frame_counter = 0; + state->ema_smoothed_render_time = 0; + + bd_init_buffer_disposal(&state->buffer_disposal, gl_context); + pb_init_push_buffer(&state->push_buffer, &state->buffer_disposal, plugin, + src_pad); +} + +void rb_dispose_render_buffer(RBRenderBuffer *state) { + g_assert(state != NULL); + + g_cond_clear(&state->slot_available_cond); + g_cond_clear(&state->render_queued_cond); + + g_mutex_clear(&state->slot_lock); + + pb_dispose_push_buffer(&state->push_buffer); + bd_dispose_buffer_disposal(&state->buffer_disposal); +} + +RBQueueResult rb_queue_render_task(RBQueueArgs *args) { + g_assert(args != NULL); + + RBRenderBuffer *state = args->render_buffer; + g_assert(state != NULL); + + const gboolean wait_is_limited = args->max_wait != GST_CLOCK_TIME_NONE; + const GstClockTime start = gst_util_get_timestamp(); + GstClockTimeDiff used_wait = 0; + + g_mutex_lock(&state->slot_lock); + + RBSlot *slot = NULL; + gint slot_index = 0; + + gboolean found_slot = FALSE; + while (!found_slot && g_atomic_int_get(&state->running)) { + + // next slot to insert to + slot_index = (state->render_write_idx + 1) % NUM_RENDER_SLOTS; + + // jump over busy slot that's currently rendering if needed + if (state->slots[slot_index].state == RB_BUSY) { + slot_index = (state->render_write_idx + 2) % NUM_RENDER_SLOTS; + } + slot = &state->slots[slot_index]; + + // in case there is only one slot, it may still be busy + found_slot = slot->state == RB_EMPTY; + + if (!found_slot) { + if (wait_is_limited) { + const GstClockTimeDiff remaining_wait = + (GstClockTimeDiff)args->max_wait - used_wait; + // not waiting until the very last millisecond + // to avoid exceeding time budget + if (remaining_wait > MIN_FREE_SLOT_SCHEDULE_WAIT) { + + // this is in microseconds for a change + const gint64 now = g_get_monotonic_time(); + const gint64 deadline = now + remaining_wait / 1000; + + g_cond_wait_until(&state->slot_available_cond, &state->slot_lock, + deadline); + + used_wait = GST_CLOCK_DIFF(start, gst_util_get_timestamp()); + } else { + // not enough time left + break; + } + } else { + // no time constraints, frames are never dropped + // we just wait and keep trying + g_cond_wait(&state->slot_available_cond, &state->slot_lock); + } + } + } + + if (slot == NULL || !g_atomic_int_get(&state->running)) { + return RB_STOPPED; + } + + if (slot->state == RB_BUSY) { + // out of time, and we still can't schedule + g_mutex_unlock(&state->slot_lock); + return 
RB_TIMEOUT; + } + + state->render_write_idx = slot_index; + + // evict if already in use and clear buffers + const gboolean is_evicted = slot->state == RB_READY; + // optimization: set slot to empty so render thread does not pick this up yet + // this is safe, since we're being called by the chain function only (single + // producer) + slot->state = RB_EMPTY; + g_mutex_unlock(&state->slot_lock); + + // do heavy stuff + if (slot->in_audio != NULL) { + gst_buffer_unref(slot->in_audio); + } + + if (slot->out_buf != NULL) { + bd_dispose_gl_buffer(&state->buffer_disposal, slot->out_buf); + slot->out_buf = NULL; + } + + // populate slot + slot->gl_result = FALSE; + slot->pts = args->pts; + slot->frame_duration = args->frame_duration; + slot->in_audio = gst_buffer_copy_deep(args->in_audio); + + // lock and mark ready + g_mutex_lock(&state->slot_lock); + slot->state = RB_READY; + + // signal render thread that there is something to do + g_cond_signal(&state->render_queued_cond); + g_mutex_unlock(&state->slot_lock); + + return is_evicted == FALSE ? RB_SUCCESS : RB_EVICTED; +} + +void rb_queue_render_task_log(RBQueueArgs *args) { + + g_assert(args != NULL); + + RBRenderBuffer *state = args->render_buffer; + + g_assert(state != NULL); + + const GstClockTime start_ts = gst_util_get_timestamp(); + + const RBQueueResult result = rb_queue_render_task(args); + + switch (result) { + case RB_EVICTED: { + GST_DEBUG_OBJECT(state->plugin, + "Dropping previous GL frame from render buffer, " + "it was not picked up for rendering in time (evicted). " + "max-wait: %" GST_TIME_FORMAT ", pts: %" GST_TIME_FORMAT, + GST_TIME_ARGS(args->max_wait), GST_TIME_ARGS(args->pts)); + break; + } + + case RB_TIMEOUT: { + const GstClockTime now = gst_util_get_timestamp(); + GST_DEBUG_OBJECT( + state->plugin, + "Dropping GL frame from render buffer, waiting for free slot took too " + "long. elapsed: %" GST_TIME_FORMAT ", max-wait: %" GST_TIME_FORMAT + ", pts: %" GST_TIME_FORMAT, + GST_TIME_ARGS(now - start_ts), GST_TIME_ARGS(args->max_wait), + GST_TIME_ARGS(args->pts)); + break; + } + + default: + break; + } +} + +/** + * Calculate current time based on given element's clock for QoS checks. + * + * @param element The plugin element. + * @return Current time as determined by clock used by element. + */ +static GstClockTime rb_element_render_time(GstElement *element) { + const GstClockTime base_time = gst_element_get_base_time(element); + GstClock *clock = gst_element_get_clock(element); + const GstClockTime now = gst_clock_get_time(clock); + gst_object_unref(clock); + return GST_CLOCK_DIFF(base_time, now); +} + +gboolean rb_is_render_too_late(GstElement *element, const GstClockTime latency, + const GstClockTime running_time, + const GstClockTime tolerance) { + + g_assert(element != NULL); + + if (latency == GST_CLOCK_TIME_NONE) { + return FALSE; + } + + const GstClockTime render_time = rb_element_render_time(element); + + // latest time to push this buffer for it to make it to sink in time + const GstClockTime latest_push_time = running_time + latency; + + if (render_time > latest_push_time + tolerance) { + GST_DEBUG_OBJECT(element, + "Dropping late frame: render_time %" GST_TIME_FORMAT + " > buffer_running_time %" GST_TIME_FORMAT + " + latency %" GST_TIME_FORMAT + " + slack %" GST_TIME_FORMAT, + GST_TIME_ARGS(render_time), GST_TIME_ARGS(running_time), + GST_TIME_ARGS(latency), GST_TIME_ARGS(tolerance)); + return TRUE; + } + return FALSE; +} + +/** + * Render one frame for the given slot. + * + * @param state Render buffer to use. 
+ * @param slot Prepared slot to render. + * + * @return Time it took to render the frame. + */ +GstClockTime rb_render_slot(RBRenderBuffer *state, RBSlot *slot) { + // measure rendering for QoS + const GstClockTime render_start = gst_util_get_timestamp(); + + // Dispatch slot to GL thread + gst_gl_context_thread_add(state->gl_context, slot->gl_fill_func, slot); + + const GstClockTime render_duration = + GST_CLOCK_DIFF(render_start, gst_util_get_timestamp()); + + // render took longer than the frame duration, this is a problem for + // real-time rendering if it happens too often + if (render_duration > slot->frame_duration) { + GST_DEBUG_OBJECT( + state->plugin, + "Render GL frame took too long: %" GST_TIME_FORMAT + ", frame-duration: %" GST_TIME_FORMAT ", pts: %" GST_TIME_FORMAT, + GST_TIME_ARGS(render_duration), GST_TIME_ARGS(slot->frame_duration), + GST_TIME_ARGS(slot->pts)); + } + + return render_duration; +} + +/** + * Send a video buffer to the source pad downstream. + * Buffer is checked and timestamps are populated before sending. + * Push is blocking for offline rendering, and for real-time rendering queued if + * capacity is available, otherwise blocking. + * + * @param state Render buffer to use. + * @param outbuf Video buffer to send downstream (takes ownership). + * @param pts Frame PTS. + * @param frame_duration Frame duration. + * @return TRUE if the buffer was pushed successfully. + */ +static GstFlowReturn rb_handle_push_buffer(RBRenderBuffer *state, + GstBuffer *outbuf, + const GstClockTime pts, + const GstClockTime frame_duration) { + g_assert(state != NULL); + + if (gst_buffer_get_size(outbuf) == 0) { + GST_WARNING_OBJECT(state->plugin, "Empty or invalid buffer, dropping."); + bd_dispose_gl_buffer(&state->buffer_disposal, outbuf); + return GST_FLOW_OK; + } + + // populate timestamps after rendering so they can't be changed by accident + GST_BUFFER_PTS(outbuf) = pts; + GST_BUFFER_DTS(outbuf) = pts; + GST_BUFFER_DURATION(outbuf) = frame_duration; + + GstFlowReturn ret; + if (state->is_realtime) { +#if PUSH_BUFFER_ENABLED == 1 + // for real-time, we need to wait until it's time to push the buffer + // dispatch to queue may block until capacity is available + gboolean result = pb_queue_buffer(&state->push_buffer, outbuf); + if (result) { + ret = GST_FLOW_OK; + } else { + bd_dispose_gl_buffer(&state->buffer_disposal, outbuf); + ret = GST_FLOW_ERROR; + } + } else { +#else + // blocking wait until it's time to push the buffer, + // then push directly + const GstClockReturn clock_return = + pb_wait_to_push(&state->push_buffer, pts); + if (clock_return != GST_CLOCK_UNSUPPORTED) { + // apply wait jitter correction ro buffer + pb_jitter_correction(&state->push_buffer, outbuf); + } + } +#endif + // push buffer downstream directly for offline rendering or if + // queuing is disabled + ret = gst_pad_push(state->src_pad, outbuf); + if (ret == GST_FLOW_FLUSHING) { + GST_INFO_OBJECT(state->plugin, + "Pad is flushing and does not accept buffers anymore"); + } else if (ret != GST_FLOW_OK) { + GST_WARNING_OBJECT(state->plugin, "Failed to push buffer to pad"); + } +#if PUSH_BUFFER_ENABLED == 1 + } // endif(state->is_realtime) +#endif + return ret; +} + +/** + * Reset render buffer references, set slot state to RB_EMPTY and signal. + * + * @param state Render buffer to use. + * @param slot Slot to release. 
+ */ +static void rb_release_slot(RBRenderBuffer *state, RBSlot *slot) { + g_assert(state != NULL); + + // Lock and reset slot data + g_mutex_lock(&state->slot_lock); + + slot->in_audio = NULL; + slot->state = RB_EMPTY; + slot->out_buf = NULL; + slot->gl_result = FALSE; + + // let queuing know that a slot is available + g_cond_signal(&state->slot_available_cond); + g_mutex_unlock(&state->slot_lock); +} + +GstFlowReturn rb_render_blocking(RBRenderBuffer *state, GstBuffer *in_audio, + GstClockTime pts, + GstClockTime frame_duration) { + g_assert(state != NULL); + + // Lock and reset slot data + g_mutex_lock(&state->slot_lock); + + RBSlot *slot = &state->slots[0]; + slot->in_audio = in_audio; + slot->state = RB_BUSY; + slot->out_buf = NULL; + slot->pts = pts; + slot->gl_result = FALSE; + slot->frame_duration = frame_duration; + + // perform rendering + rb_render_slot(state, slot); + + GstFlowReturn ret = + rb_handle_push_buffer(state, slot->out_buf, pts, frame_duration); + + // reset slot + slot->in_audio = NULL; + slot->state = RB_EMPTY; + slot->out_buf = NULL; + slot->gl_result = FALSE; + + g_mutex_unlock(&state->slot_lock); + + return ret; +} + +/** + * Clears all render slots, releases all buffers currently held, resets the + * render queue state and EMA state. + * Needs to be called from the GL thread. + * + * @param state The render buffer to clear. + */ +static void rb_clear_slots(RBRenderBuffer *state) { + g_assert(state != NULL); + + g_mutex_lock(&state->slot_lock); + + // clean up queue and state + for (guint i = 0; i < NUM_RENDER_SLOTS; i++) { + if (state->slots[i].state == RB_READY) { + if (state->slots[i].in_audio) { + gst_buffer_unref(state->slots[i].in_audio); + state->slots[i].in_audio = NULL; + } + if (state->slots[i].out_buf) { + bd_dispose_gl_buffer(&state->buffer_disposal, state->slots[i].out_buf); + state->slots[i].out_buf = NULL; + } + state->slots[i].state = RB_EMPTY; + } + } + + state->render_write_idx = NUM_RENDER_SLOTS - 1; + state->render_read_idx = NUM_RENDER_SLOTS - 1; + state->ema_frame_counter = 0; + state->ema_smoothed_render_time = 0; + + g_mutex_unlock(&state->slot_lock); +} + +/** + * Render thread main worker function. + * + * @param user_data Render buffer to work on. + * @return NULL + */ +static gpointer _rb_render_thread_func(gpointer user_data) { + + RBRenderBuffer *state = (RBRenderBuffer *)user_data; + g_assert(state != NULL); + +#if NUM_RENDER_SLOTS > 2 + GstClockTime last_pts = GST_CLOCK_TIME_NONE; +#endif + // slot modifications are locked + + // start working on rendering frames until we shut down + while (g_atomic_int_get(&state->running)) { + + // first find a slot with data that's ready to render + gboolean found_slot = FALSE; + RBSlot *slot = NULL; + gint render_index = 0; + + g_mutex_lock(&state->slot_lock); + + while (!found_slot) { + render_index = (state->render_read_idx + 1) % NUM_RENDER_SLOTS; + + slot = &state->slots[render_index]; + + // find a slot with audio input data + // also check if it's already older than the last frame or if it's the + // first frame (shouldn't happen unless the ring buffer capacity > 2) + if (slot->state == RB_READY +#if NUM_RENDER_SLOTS > 2 + // wontfix: segment events would need to be handled for this check to + // work right otherwise last_pts is not reset when the pts offset + // changes. If this is ever desired, each queued frame should have an + // incrementing id field to use for this check + + // check if next frame is already outdated, may happen if write + // pointer jumps over the read pointer. 
+ && (last_pts == GST_CLOCK_TIME_NONE || slot->pts > last_pts) +#endif + ) { + found_slot = TRUE; + } else { + // no data is ready, wait for a new audio buffer being pushed + g_cond_wait(&state->render_queued_cond, &state->slot_lock); + if (g_atomic_int_get(&state->running) == FALSE) { + break; + } + } + } + + // no slot means we're not running anymore + if (found_slot == FALSE) { + g_mutex_unlock(&state->slot_lock); + break; + } + + // update read maker + state->render_read_idx = render_index; +#if NUM_RENDER_SLOTS > 2 + last_pts = slot->pts; +#endif + + // nobody else is allowed to touch the slot anymore, it's owned by the + // render thread now + slot->state = RB_BUSY; + + g_mutex_unlock(&state->slot_lock); + + // perform gl rendering + const GstClockTime render_duration = rb_render_slot(state, slot); + + // copy params to locals vars before releasing the slot + GstBuffer *audio_buffer = slot->in_audio; + const GstClockTime frame_duration = slot->frame_duration; + const GstClockTime pts = slot->pts; + + // copy results to locals vars before releasing the slot + GstBuffer *outbuf = slot->out_buf; + const gboolean gl_result = slot->gl_result; + + // release slot and signal + rb_release_slot(state, slot); + + // send out buffer downstream + // call will block if rendering is running ahead + // and throttle render loop + if (rb_handle_push_buffer(state, outbuf, pts, frame_duration) == + GST_FLOW_OK) { + + // process rendering fps QoS in case frame was pushed + if (state->qos_enabled) { + rb_handle_adaptive_fps_ema(state, render_duration, frame_duration); + } + } + outbuf = NULL; + + gst_buffer_unref(audio_buffer); + + if (!gl_result) { + GST_WARNING_OBJECT( + state->plugin, + "Failed to render buffer, gl rendering returned error"); + } + } + + return NULL; +} + +/** + * Clears all queues. + * Needs to be called from GL thread. + * + * @param state Render buffer to clear. 
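+ *
+ * Releases queued render slots, the deferred push queue (when the push
+ * buffer is compiled in) and the buffer disposal queue.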
+ */ +void rb_clear(RBRenderBuffer *state) { + g_assert(state != NULL); + + rb_clear_slots(state); +#if PUSH_BUFFER_ENABLED == 1 + pb_clear_queue(&state->push_buffer); +#endif + bd_clear(&state->buffer_disposal); +} + +void rb_start(RBRenderBuffer *state) { + g_assert(state != NULL); + + g_atomic_int_set(&state->running, TRUE); + + // threads are not needed for offline rendering + if (state->is_realtime) { + bd_start_buffer_disposal(&state->buffer_disposal); + pb_start_push_buffer(&state->push_buffer); + state->render_thread = + g_thread_new("rb-render-thread", _rb_render_thread_func, state); + } + + GST_INFO_OBJECT(state->plugin, "Started render buffer"); +} + +void rb_stop(RBRenderBuffer *state) { + g_assert(state != NULL); + + g_atomic_int_set(&state->running, FALSE); + + // threads are not needed for offline rendering + if (state->is_realtime) { + // wake up render thread to signal loop exit + g_mutex_lock(&state->slot_lock); + g_cond_broadcast(&state->render_queued_cond); + g_cond_broadcast(&state->slot_available_cond); + g_mutex_unlock(&state->slot_lock); + + pb_stop_push_buffer(&state->push_buffer); + + // wait for render thread to exit + g_thread_join(state->render_thread); + state->render_thread = NULL; + + bd_stop_buffer_disposal(&state->buffer_disposal); + } + + GST_INFO_OBJECT(state->plugin, "Stopped render buffer"); +} + +void rb_set_caps_frame_duration(RBRenderBuffer *state, + const GstClockTime caps_frame_duration) { + g_assert(state != NULL); + + g_mutex_lock(&state->slot_lock); + state->caps_frame_duration = caps_frame_duration; + g_mutex_unlock(&state->slot_lock); +} diff --git a/src/renderbuffer.h b/src/renderbuffer.h new file mode 100644 index 0000000..194c688 --- /dev/null +++ b/src/renderbuffer.h @@ -0,0 +1,468 @@ +/* + * Utility to allow offloading of rendering tasks from the plugin chain + * function. + * + * Uses a ring buffer based rendering task queue with a fixed number of + * rendering slots. The queue is consumed by a dedicated thread + * (rb-render-thread) to dispatch rendering to the GL thread. + * + * --- + * + * For offline pipelines only: + * + * - A blocking call is used for rendering, bypassing queuing. GL buffers are + * never dropped. + * + * --- + * + * For real-time pipelines only: + * + * - The buffer provides queueing for audio buffers to be rendered to video + * frames. It uses a bound-wait-on-full approach to avoid dropping frames when + * rendering duration exceeds the frame duration of the current fps: + * + * - In case a free slot is available queue + * immediately and return (async rendering). + * + * - In case the next available (not rendering) slot is scheduled (end of the + * ring + 1): + * + * - Wait for defined time for a slot to become + * available, this wait may not exceed the current fps frame duration, + * otherwise the plugin loses audio sync and fails. + * + * - In case the max wait deadline is met, + * and the next buffer still hasn't been picked up, it is overridden + * with the current frame (evicted), meaning the previous frame is being + * dropped as it is too late. + * + * + * - If the render duration exceeds the fps *sometimes*, subsequent + * faster-than-real-time rendered frames (if any) compensate for the small + * lag, or frames are dropped. + * + * - If the render duration exceeds the fps *most of the time*, an Exponential + * Moving Average (EMA) based algorithm instructs the plugin to reduce fps. + * EMA will also recover fps when render performance increases again. 
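+ *
+ *   Illustrative numbers, using the default tuning in renderbuffer.c: render
+ *   times are smoothed with alpha = 0.2 and checked every 10 frames. If the
+ *   smoothed render time exceeds 110% of the current frame duration, the
+ *   frame duration is increased by 15% (fps drops); if it falls below 95%,
+ *   it is decreased by 5% (fps recovers). The result is clamped between the
+ *   caps frame duration and the configured maximum frame duration.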
+ * + * - GL buffers that completed rendering are scheduled to be pushed to the + * source pad at presentation time (PTS) using a PBPushBuffer. + */ + +#ifndef __RENDERBUFFER_H__ +#define __RENDERBUFFER_H__ + +#include "bufferdisposal.h" +#include "pushbuffer.h" + +#include +#include +#include + +G_BEGIN_DECLS + +/** + * Number of render slots that are used by the ring buffer. + * 2 is the ideal size and there should be no reason to change it: + * One slot for the gl thread to render the current frame while another slot is + * available for queuing the next audio buffer to render. + * + * Note: Increasing the number of slots >2 is not fully supported since + * it would require handling of PTS offset changes. See comments in code. + * + * Valid values: + * 1 : Wait for previous render to complete before scheduling. + * 2 : Render one item and schedule another at the same time. + */ +#ifndef NUM_RENDER_SLOTS +#define NUM_RENDER_SLOTS 2 +#endif + +/** + * Callback function pointer type for triggering a dynamic fps change. + */ +typedef void (*RBAdjustFpsFunc)(gpointer user_data, guint64 frame_duration); + +/** + * Current usage state of a render slot. + */ +typedef enum { + /** + * Slot is not in use at all. + */ + RB_EMPTY, + + /** + * Ready to render, in_audio buffer is filled. + */ + RB_READY, + + /** + * Slot is currently being rendered. + */ + RB_BUSY +} RBSlotState; + +/** + * Result status of queuing a buffer for rendering. + */ +typedef enum { + /** + * Buffer has been queued. + */ + RB_SUCCESS, + + /** + * Queuing buffer evicted (overwrote) a previously queued buffer (frame drop). + */ + RB_EVICTED, + + /** + * Buffer could not be queued because the allowed wait could not be met. + */ + RB_TIMEOUT, + /** + * Buffer could not be queued because the buffer was stopped. + */ + RB_STOPPED +} RBQueueResult; + +/** + * A render slot represents an item in the render buffer. It holds an audio + * input buffer used for a single frame, render context information like frame + * pts and duration, and an output buffer for the rendered video frame. + */ +typedef struct { + + // not re-assigned, needed as context to dispatch rendering + // -------------------------------------------------------------- + + /** + * projectM plugin. + */ + GstObject *plugin; + + /** + * Callback to render to gl texture buffer. + */ + GstGLContextThreadFunc gl_fill_func; + + // input for rendering, updated by queuing for each frame + // -------------------------------------------------------------- + + /** + * Presentation timestamp for this video frame. + */ + GstClockTime pts; + + /** + * Duration for this video frame (current fps). + */ + GstClockTime frame_duration; + + /** + * Audio data to feed to projectM for this frame. + */ + GstBuffer *in_audio; + + // output from rendering, updated by gl thread for each frame + // -------------------------------------------------------------- + + /** + * GL memory texture buffer for current frame. + */ + GstBuffer *out_buf; + + /** + * GL render result for current frame. + */ + gboolean gl_result; + + // frequently updated, more than once for each frame + // -------------------------------------------------------------- + + /** + * Usage state of this slot. + */ + RBSlotState state; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +} RBSlot; + +/** + * All render buffer data. + */ +typedef struct { + + // not re-assigned during render thread lifetime + // -------------------------------------------------------------- + + /** + * projectM plugin. No ownership. 
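+   * Used as the object for GST_*_OBJECT log messages and stored in each
+   * render slot as context.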
+ */ + GstObject *plugin; + + /** + * Current gl context. No ownership. + */ + GstGLContext *gl_context; + + /** + * projectM plugin source pad. No ownership. + */ + GstPad *src_pad; + + /** + * Utility to properly release GL buffers. + */ + BDBufferDisposal buffer_disposal; + + /** + * Utility for scheduling downstream push of rendered buffers. + */ + PBPushBuffer push_buffer; + + /** + * Thread running the render loop. + */ + GThread *render_thread; + + /** + * Callback function pointer to let the plugin know to change fps. + */ + RBAdjustFpsFunc adjust_fps_func; + + /** + * Lock for shared state between chain function and render thread. + */ + GMutex slot_lock; + + // concurrent access, g_atomic + // -------------------------------------------------------------- + + /** + * TRUE if rendering is currently running. + */ + gboolean running; + + // concurrent access, protected by slot_lock + // -------------------------------------------------------------- + /** + * Condition to wait for a buffer to queued for rendering. + */ + GCond render_queued_cond; + + /** + * Condition for slots becoming available after rendering completed. + */ + GCond slot_available_cond; + + /** + * Switch for real-time (render loop) QoS. + */ + gboolean qos_enabled; + + /** + * Is current pipeline using a real-time clock. + */ + gboolean is_realtime; + + /** + * Pipeline negotiated caps fps as frame duration. + */ + GstClockTime caps_frame_duration; + + /** + * Limit for max EMA fps changes as frame duration. Higher value = lower fps. + */ + GstClockTime max_frame_duration; + + /** + * Render ring buffer slots. + */ + RBSlot slots[NUM_RENDER_SLOTS]; + + // only used by the calling thread (chain function) / clean up + // -------------------------------------------------------------- + + /** + * Last index that data was inserted at (insertion pointer). + */ + gint render_write_idx; + + // only used by the render thread / clean up + // -------------------------------------------------------------- + + /** + * Last index that data was rendered from (read pointer). + */ + gint render_read_idx; + + /** + * EMA frame counter. + */ + guint ema_frame_counter; + + /** + * EMA running average. + */ + guint64 ema_smoothed_render_time; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +} RBRenderBuffer; + +/** + * Call argument struct, input for queuing a frame for rendering. + */ +typedef struct { + + /** + * Render buffer to use. + */ + RBRenderBuffer *render_buffer; + + /** + * Max time to wait for queuing. + */ + GstClockTime max_wait; + + /** + * Presentation timestamp for this video frame. + */ + GstClockTime pts; + + /** + * Duration for this video frame (current fps). + */ + GstClockTime frame_duration; + + /** + * Audio data to feed to projectM for this frame. + */ + GstBuffer *in_audio; + + /*< private >*/ + gpointer _padding[GST_PADDING]; +} RBQueueArgs; + +/** + * One time initialization for the given render buffer. + * + * @param state Render buffer to use. + * @param plugin Plugin using the render buffer. + * @param gl_fill_func GL rendering function callback. + * @param adjust_fps_func FPS adjustment function callback. + * @param max_frame_duration FPS adjustment lower limit. + * @param caps_frame_duration FPS requested by pipeline caps. + * @param is_qos_enabled Controls if render-time QoS is enabled (EMA). + * @param is_realtime If TRUE async rendering is used. 
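+ *
+ * Typical usage from the plugin (sketch; everything other than the rb_* API
+ * and RBQueueArgs is illustrative):
+ *
+ *   rb_init_render_buffer(&rb, GST_OBJECT(plugin), gl_context, src_pad,
+ *                         gl_fill_cb, adjust_fps_cb, max_frame_duration,
+ *                         caps_frame_duration, TRUE, is_realtime);
+ *   rb_start(&rb);                            // from the GL thread
+ *
+ *   // real-time path, from the chain function:
+ *   RBQueueArgs args = { .render_buffer = &rb, .max_wait = frame_duration,
+ *                        .pts = pts, .frame_duration = frame_duration,
+ *                        .in_audio = audio_buffer };
+ *   rb_queue_render_task_log(&args);
+ *
+ *   // offline path:
+ *   rb_render_blocking(&rb, audio_buffer, pts, frame_duration);
+ *
+ *   rb_stop(&rb);                             // from the GL thread
+ *   rb_dispose_render_buffer(&rb);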
+ */ +void rb_init_render_buffer(RBRenderBuffer *state, GstObject *plugin, + GstGLContext *gl_context, GstPad *src_pad, + GstGLContextThreadFunc gl_fill_func, + RBAdjustFpsFunc adjust_fps_func, + GstClockTime max_frame_duration, + GstClockTime caps_frame_duration, + gboolean is_qos_enabled, gboolean is_realtime); + +/** + * Release resources for the given render buffer. + * + * @param state Render buffer to clean up. + */ +void rb_dispose_render_buffer(RBRenderBuffer *state); + +/** + * Queue an audio buffer for rendering. The queuing is guaranteed to return + * within the given max time budget. The buffer will be dropped if queuing is + * not possible within the given time budget. + * Note: Single producer function. Not thread safe, should be called from chain + * function only! + * + * @param args Audio buffer and frame details for rendering. The render buffer + * does not take ownership of the given pointer. The given audio buffer is + * copied. + */ +RBQueueResult rb_queue_render_task(RBQueueArgs *args); + +/** + * Queue an audio buffer for rendering. The queuing is guaranteed to return + * within the given max time budget. The buffer will be dropped if queuing is + * not possible within the given time budget. + * Note: Single producer function. Not thread safe, should be called from chain + * function only! + * + * Convenience function that also handles queuing result by logging if frames + * are dropped (DEBUG level). + * + * @param args Audio buffer and frame details for rendering. The render buffer + * does not take ownership of the given pointer. The given audio buffer is + * copied. + */ +void rb_queue_render_task_log(RBQueueArgs *args); + +/** + * Render one frame synchronously. Using synchronous rendering is exclusive, + * queuing may not be used with the same render buffer at the same time. + * + * @param state Render buffer to use. + * @param in_audio Audio buffer to pass to projectM. No ownership. + * @param pts Frame PTS. + * @param frame_duration Frame duration. + * @return The downstream push result. + */ +GstFlowReturn rb_render_blocking(RBRenderBuffer *state, GstBuffer *in_audio, + GstClockTime pts, GstClockTime frame_duration); + +/** + * Determine if it's likely too late push a buffer, as it would likely be + * dropped by a pipeline synchronized sink. + * + * @param element The plugin element. + * @param latency Pipeline latency. + * @param running_time Current buffer running time. + * @param tolerance Tolerance to account for scheduling overhead. + * @return TRUE in case the buffer is too late. + */ +static gboolean rb_is_render_too_late(GstElement *element, GstClockTime latency, + GstClockTime running_time, + GstClockTime tolerance); + +/** + * Clears all queues. + * Needs to be called from GL thread. + * + * @param state Render buffer to clear. + */ +void rb_clear(RBRenderBuffer *state); + +/** + * Start render loop. + * Needs to be called from GL thread. + * + * @param state Render buffer to use. + */ +void rb_start(RBRenderBuffer *state); + +/** + * Stop render loop. Active threads will be joined before returning. + * Needs to be called from GL thread. + * + * @param state Render buffer to use. + */ +void rb_stop(RBRenderBuffer *state); + +/** + * Update caps as they get negotiated by the pipeline. Thread safe. + * + * @param state Render buffer to update. + * @param caps_frame_duration Frame duration from pipeline caps. 
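+ *
+ * The caps frame duration also serves as the lower clamp (highest allowed
+ * fps) for the adaptive EMA frame duration adjustment.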
+ */ +void rb_set_caps_frame_duration(RBRenderBuffer *state, + GstClockTime caps_frame_duration); + +G_END_DECLS + +#endif // __RENDERBUFFER_H__ diff --git a/test.ps1 b/test.ps1 index 577f820..74c0187 100644 --- a/test.ps1 +++ b/test.ps1 @@ -34,7 +34,8 @@ switch ($args) { & gst-launch-1.0 -v ` audiotestsrc ! queue ! audioconvert ! ` projectm ` - ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false + is-live=true ` + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false break } @@ -43,7 +44,8 @@ switch ($args) { & gst-launch-1.0 -v ` audiotestsrc ! queue ! audioconvert ! ` projectm preset="test/presets/215-wave.milk" ` - ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! xvimagesink sync=false + is-live=true ` + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false break } @@ -63,7 +65,8 @@ switch ($args) { mesh-size="512,512" ` easter-egg=0.75 ` preset-locked=false ` - ! "video/x-raw,width=512,height=512,framerate=30/1" ! videoconvert ! xvimagesink sync=false + is-live=true ` + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=30/1" ! glimagesink sync=false break } @@ -71,7 +74,7 @@ switch ($args) { $env:GST_DEBUG = "3" & gst-launch-1.0 -v ` filesrc location="test/audio/upbeat-future-bass.mp3" ! decodebin ! audioconvert ! ` - projectm ! videoscale ! videoconvert ! video/x-raw,width=1280,height=720 ! ` + projectm ! video/x-raw(memory:GLMemory),width=1280,height=720 ! gldownload ! videoscale ! videoconvert` x264enc ! mp4mux ! filesink location="test/output/test_video.mp4" break } @@ -82,7 +85,7 @@ switch ($args) { filesrc location="test/audio/upbeat-future-bass.mp3" ! decodebin name=dec ! ` audioconvert ! avenc_aac ! avmux_mp4 ! filesink location="test/output/video2.mp4" ` dec. ! ` - projectm ! videoconvert ! x264enc ! avenc_mp4 ! avmux_mp4.video_0 + projectm ! gldownload ! videoconvert ! x264enc ! avenc_mp4 ! avmux_mp4.video_0 break } diff --git a/test.sh b/test.sh index c3a3750..3585ff6 100755 --- a/test.sh +++ b/test.sh @@ -4,10 +4,8 @@ set -e # Set variables based on OS if [[ "$OSTYPE" == "linux-gnu"* ]]; then LIB_EXT="so" - VIDEO_SINK="xvimagesink" elif [[ "$OSTYPE" == "darwin"* ]]; then LIB_EXT="dylib" - VIDEO_SINK="osxvideosink" else echo "Unsupported OS!" exit 1 @@ -52,14 +50,16 @@ case "$1" in GST_DEBUG=projectm:5 gst-launch-1.0 -v \ audiotestsrc ! queue ! audioconvert ! \ projectm \ - ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! $VIDEO_SINK sync=false + is-live=true \ + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false ;; "--preset") GST_DEBUG=4 gst-launch-1.0 -v \ audiotestsrc ! queue ! audioconvert ! \ projectm preset="test/presets/250-wavecode.milk.milk" \ - ! "video/x-raw,width=512,height=512,framerate=60/1" ! videoconvert ! $VIDEO_SINK sync=false + is-live=true \ + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=60/1" ! glimagesink sync=false ;; "--properties") @@ -77,13 +77,14 @@ case "$1" in mesh-size="512,512" \ easter-egg=0.75 \ preset-locked=false \ - ! "video/x-raw,width=512,height=512,framerate=30/1" ! videoconvert ! $VIDEO_SINK sync=false + is-live=true \ + ! "video/x-raw(memory:GLMemory),width=512,height=512,framerate=30/1" ! glimagesink sync=false ;; "--output-video") GST_DEBUG=3 gst-launch-1.0 -v \ filesrc location="test/audio/upbeat-future-bass.mp3" ! decodebin ! audioconvert ! \ - projectm preset="test/presets/250-wavecode.milk.milk" ! 
videoscale ! videoconvert ! video/x-raw,width=1280,height=720 ! \ + projectm preset="test/presets/250-wavecode.milk.milk" ! "video/x-raw(memory:GLMemory),width=1280,height=720" ! gldownload ! videoscale ! videoconvert \ x264enc ! mp4mux ! filesink location="test/output/test_video.mp4" ;; @@ -92,7 +93,7 @@ case "$1" in filesrc location="test/audio/upbeat-future-bass.mp3" ! decodebin name=dec ! \ audioconvert ! avenc_aac ! avmux_mp4 ! filesink location="test/output/video2.mp4" \ dec. ! \ - projectm preset="test/presets/250-wavecode.milk.milk" ! videoconvert ! x264enc ! avenc_mp4 ! avmux_mp4.video_0 + projectm preset="test/presets/250-wavecode.milk.milk" ! gldownload ! videoconvert ! x264enc ! avenc_mp4 ! avmux_mp4.video_0 ;; *)