diff --git a/engine/bind/lua b/engine/bind/lua
index e09d90b..446b6d6 100644
Binary files a/engine/bind/lua and b/engine/bind/lua differ
diff --git a/engine/bind/v4k.lua b/engine/bind/v4k.lua
index 37b2f6d..f3b6a85 100644
--- a/engine/bind/v4k.lua
+++ b/engine/bind/v4k.lua
@@ -1876,7 +1876,7 @@ struct bt_t* children;
 void bt_addfun(const char *name, int(*func)());
 bt_func bt_findfun(const char *name);
 char *bt_funcname(bt_func fn);
-void ui_bt(bt_t *b);
+int ui_bt(bt_t *b);
 void midi_send(unsigned midi_msg);
 typedef struct audio_handle* audio_t;
 audio_t audio_clip( const char *pathfile );
@@ -1889,6 +1889,7 @@ typedef struct audio_handle* audio_t;
 float audio_volume_clip(float gain);
 float audio_volume_stream(float gain);
 float audio_volume_master(float gain);
+int ui_audio();
 enum AUDIO_FLAGS {
     AUDIO_1CH = 0,
     AUDIO_2CH = 1,
diff --git a/engine/joint/v4k.h b/engine/joint/v4k.h
index b43e05f..7ce8597 100644
--- a/engine/joint/v4k.h
+++ b/engine/joint/v4k.h
@@ -15368,7 +15368,7 @@
 API void bt_addfun(const char *name, int(*func)());
 API bt_func bt_findfun(const char *name);
 API char *bt_funcname(bt_func fn);
-API void ui_bt(bt_t *b);
+API int ui_bt(bt_t *b);
 #line 0
 #line 1 "v4k_audio.h"
 
@@ -15402,6 +15402,8 @@ API float audio_volume_clip(float gain); // set fx volume if gain is in
 API float audio_volume_stream(float gain); // set bgm volume if gain is in [0..1] range. return current bgm volume in any case
 API float audio_volume_master(float gain); // set master volume if gain is in [0..1] range. return current master volume in any case
 
+API int ui_audio();
+
 enum AUDIO_FLAGS {
     AUDIO_1CH = 0, // default
     AUDIO_2CH = 1,
@@ -581506,7 +581508,7 @@ int audio_init( int flags ) {
         ma_backend_oss,
         ma_backend_jack,
         ma_backend_opensl,
-        //ma_backend_webaudio,
+        ma_backend_webaudio,
         //ma_backend_openal,
         //ma_backend_sdl,
         ma_backend_null // Lowest priority.
@@ -581771,6 +581773,40 @@ int audio_queue( const void *samples, int num_samples, int flags ) {
     return audio_queue_voice;
 }
 
+
+int ui_audio() {
+    int changed = 0;
+
+    float sfx = sqrt(volume_clip), bgm = sqrt(volume_stream), master = sqrt(volume_master);
+    if( ui_slider2("BGM volume", &bgm, va("%.2f", bgm))) changed = 1, audio_volume_stream(bgm);
+    if( ui_slider2("SFX volume", &sfx, va("%.2f", sfx))) changed = 1, audio_volume_clip(sfx);
+    if( ui_slider2("Master volume", &master, va("%.2f", master))) changed = 1, audio_volume_master(master);
+
+    ui_separator();
+
+    int num_voices = sts_mixer_get_active_voices(&mixer);
+    ui_label2("Format", mixer.audio_format == 0 ? "None" : mixer.audio_format == 1 ? "8-bit" : mixer.audio_format == 2 ? "16-bit" : mixer.audio_format == 3 ? "32-bit integer" : "32-bit float");
+    ui_label2("Frequency", va("%4.1f KHz", mixer.frequency / 1000.0));
+    ui_label2("Voices", va("%d/%d", num_voices, STS_MIXER_VOICES));
+    ui_separator();
+
+    for( int i = 0; i < STS_MIXER_VOICES; ++i ) {
+        if( mixer.voices[i].state != STS_MIXER_VOICE_STOPPED ) { // PLAYING || STREAMING
+            ui_label(va("Voice %d", i+1));
+
+            // float mul = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? 2 : 1;
+            // float div = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? mixer.voices[i].stream->sample.length : mixer.voices[i].sample->length;
+            // float pct = mixer.voices[i].position * mul / div;
+            // if(ui_slider2("Position", &pct, va("%5.2f", pct))) changed = 1;
+            if(ui_slider2("Gain", &mixer.voices[i].gain, va("%5.2f", mixer.voices[i].gain))) changed = 1;
+            if(ui_slider2("Pitch", &mixer.voices[i].pitch, va("%5.2f", mixer.voices[i].pitch))) changed = 1;
+            if(ui_slider2("Pan", &mixer.voices[i].pan, va("%5.2f", mixer.voices[i].pan))) changed = 1;
+            ui_separator();
+        }
+    }
+
+    return changed;
+}
 #line 0
 #line 1 "v4k_collide.c"
 
@@ -590692,7 +590728,7 @@ GLuint shader_compile( GLenum type, const char *source ) {
 
         // dump log with line numbers
         shader_print( source );
-        PANIC("ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
+        PRINTF("!ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
         return 0;
     }
 
@@ -601356,7 +601392,7 @@ int window_frame_begin() {
             // @todo
         }
         for( int p = (open = ui_collapse_filtered(ICON_MD_VOLUME_UP " Audio", "Debug.Audio")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
-            // @todo
+            ui_audio();
         }
         for( int p = (open = ui_collapse_filtered(ICON_MD_VIDEOCAM " Camera", "Debug.Camera")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
             ui_camera( camera_get_active() );
@@ -603071,7 +603107,7 @@ int bt_run(bt_t *b) {
     return 0;
 }
 
-void ui_bt(bt_t *b) {
+int ui_bt(bt_t *b) {
     if( b ) {
         char *info = bt_funcname(b->action);
         if(!info) info = va("%d", array_count(b->children));
@@ -603083,6 +603119,7 @@ void ui_bt(bt_t *b) {
             ui_collapse_end();
         }
     }
+    return 0;
 }
 #line 0
 
diff --git a/engine/split/v4k_audio.c b/engine/split/v4k_audio.c
index c64f76e..2f5872a 100644
--- a/engine/split/v4k_audio.c
+++ b/engine/split/v4k_audio.c
@@ -281,7 +281,7 @@ int audio_init( int flags ) {
         ma_backend_oss,
         ma_backend_jack,
         ma_backend_opensl,
-        //ma_backend_webaudio,
+        ma_backend_webaudio,
         //ma_backend_openal,
         //ma_backend_sdl,
         ma_backend_null // Lowest priority.
@@ -546,3 +546,37 @@ int audio_queue( const void *samples, int num_samples, int flags ) {
     return audio_queue_voice;
 }
 
+
+int ui_audio() {
+    int changed = 0;
+
+    float sfx = sqrt(volume_clip), bgm = sqrt(volume_stream), master = sqrt(volume_master);
+    if( ui_slider2("BGM volume", &bgm, va("%.2f", bgm))) changed = 1, audio_volume_stream(bgm);
+    if( ui_slider2("SFX volume", &sfx, va("%.2f", sfx))) changed = 1, audio_volume_clip(sfx);
+    if( ui_slider2("Master volume", &master, va("%.2f", master))) changed = 1, audio_volume_master(master);
+
+    ui_separator();
+
+    int num_voices = sts_mixer_get_active_voices(&mixer);
+    ui_label2("Format", mixer.audio_format == 0 ? "None" : mixer.audio_format == 1 ? "8-bit" : mixer.audio_format == 2 ? "16-bit" : mixer.audio_format == 3 ? "32-bit integer" : "32-bit float");
+    ui_label2("Frequency", va("%4.1f KHz", mixer.frequency / 1000.0));
+    ui_label2("Voices", va("%d/%d", num_voices, STS_MIXER_VOICES));
+    ui_separator();
+
+    for( int i = 0; i < STS_MIXER_VOICES; ++i ) {
+        if( mixer.voices[i].state != STS_MIXER_VOICE_STOPPED ) { // PLAYING || STREAMING
+            ui_label(va("Voice %d", i+1));
+
+            // float mul = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? 2 : 1;
+            // float div = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? mixer.voices[i].stream->sample.length : mixer.voices[i].sample->length;
+            // float pct = mixer.voices[i].position * mul / div;
+            // if(ui_slider2("Position", &pct, va("%5.2f", pct))) changed = 1;
+            if(ui_slider2("Gain", &mixer.voices[i].gain, va("%5.2f", mixer.voices[i].gain))) changed = 1;
+            if(ui_slider2("Pitch", &mixer.voices[i].pitch, va("%5.2f", mixer.voices[i].pitch))) changed = 1;
+            if(ui_slider2("Pan", &mixer.voices[i].pan, va("%5.2f", mixer.voices[i].pan))) changed = 1;
+            ui_separator();
+        }
+    }
+
+    return changed;
+}
diff --git a/engine/split/v4k_audio.h b/engine/split/v4k_audio.h
index 3f8c9d0..beb0d3f 100644
--- a/engine/split/v4k_audio.h
+++ b/engine/split/v4k_audio.h
@@ -28,6 +28,8 @@ API float audio_volume_clip(float gain); // set fx volume if gain is in
 API float audio_volume_stream(float gain); // set bgm volume if gain is in [0..1] range. return current bgm volume in any case
 API float audio_volume_master(float gain); // set master volume if gain is in [0..1] range. return current master volume in any case
 
+API int ui_audio();
+
 enum AUDIO_FLAGS {
     AUDIO_1CH = 0, // default
     AUDIO_2CH = 1,
diff --git a/engine/split/v4k_bt.c b/engine/split/v4k_bt.c
index f88e950..8930632 100644
--- a/engine/split/v4k_bt.c
+++ b/engine/split/v4k_bt.c
@@ -74,7 +74,7 @@ int bt_run(bt_t *b) {
     return 0;
 }
 
-void ui_bt(bt_t *b) {
+int ui_bt(bt_t *b) {
     if( b ) {
         char *info = bt_funcname(b->action);
         if(!info) info = va("%d", array_count(b->children));
@@ -86,4 +86,5 @@ void ui_bt(bt_t *b) {
             ui_collapse_end();
         }
     }
+    return 0;
 }
diff --git a/engine/split/v4k_bt.h b/engine/split/v4k_bt.h
index ba41a82..2a1a75f 100644
--- a/engine/split/v4k_bt.h
+++ b/engine/split/v4k_bt.h
@@ -187,4 +187,4 @@
 API void bt_addfun(const char *name, int(*func)());
 API bt_func bt_findfun(const char *name);
 API char *bt_funcname(bt_func fn);
-API void ui_bt(bt_t *b);
+API int ui_bt(bt_t *b);
diff --git a/engine/split/v4k_render.c b/engine/split/v4k_render.c
index ecea305..dd5b7ca 100644
--- a/engine/split/v4k_render.c
+++ b/engine/split/v4k_render.c
@@ -81,7 +81,7 @@ GLuint shader_compile( GLenum type, const char *source ) {
 
        // dump log with line numbers
        shader_print( source );
-        PANIC("ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
+        PRINTF("!ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
         return 0;
     }
 
diff --git a/engine/split/v4k_window.c b/engine/split/v4k_window.c
index 8353093..5ec0f58 100644
--- a/engine/split/v4k_window.c
+++ b/engine/split/v4k_window.c
@@ -564,7 +564,7 @@ int window_frame_begin() {
            // @todo
        }
        for( int p = (open = ui_collapse_filtered(ICON_MD_VOLUME_UP " Audio", "Debug.Audio")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
-            // @todo
+            ui_audio();
         }
         for( int p = (open = ui_collapse_filtered(ICON_MD_VIDEOCAM " Camera", "Debug.Camera")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
             ui_camera( camera_get_active() );
diff --git a/engine/v4k.c b/engine/v4k.c
index 5d2f205..137b430 100644
--- a/engine/v4k.c
+++ b/engine/v4k.c
@@ -1247,7 +1247,7 @@ int audio_init( int flags ) {
         ma_backend_oss,
         ma_backend_jack,
         ma_backend_opensl,
-        //ma_backend_webaudio,
+        ma_backend_webaudio,
         //ma_backend_openal,
         //ma_backend_sdl,
         ma_backend_null // Lowest priority.
@@ -1512,6 +1512,40 @@ int audio_queue( const void *samples, int num_samples, int flags ) {
     return audio_queue_voice;
 }
 
+
+int ui_audio() {
+    int changed = 0;
+
+    float sfx = sqrt(volume_clip), bgm = sqrt(volume_stream), master = sqrt(volume_master);
+    if( ui_slider2("BGM volume", &bgm, va("%.2f", bgm))) changed = 1, audio_volume_stream(bgm);
+    if( ui_slider2("SFX volume", &sfx, va("%.2f", sfx))) changed = 1, audio_volume_clip(sfx);
+    if( ui_slider2("Master volume", &master, va("%.2f", master))) changed = 1, audio_volume_master(master);
+
+    ui_separator();
+
+    int num_voices = sts_mixer_get_active_voices(&mixer);
+    ui_label2("Format", mixer.audio_format == 0 ? "None" : mixer.audio_format == 1 ? "8-bit" : mixer.audio_format == 2 ? "16-bit" : mixer.audio_format == 3 ? "32-bit integer" : "32-bit float");
+    ui_label2("Frequency", va("%4.1f KHz", mixer.frequency / 1000.0));
+    ui_label2("Voices", va("%d/%d", num_voices, STS_MIXER_VOICES));
+    ui_separator();
+
+    for( int i = 0; i < STS_MIXER_VOICES; ++i ) {
+        if( mixer.voices[i].state != STS_MIXER_VOICE_STOPPED ) { // PLAYING || STREAMING
+            ui_label(va("Voice %d", i+1));
+
+            // float mul = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? 2 : 1;
+            // float div = mixer.voices[i].state == STS_MIXER_VOICE_STREAMING ? mixer.voices[i].stream->sample.length : mixer.voices[i].sample->length;
+            // float pct = mixer.voices[i].position * mul / div;
+            // if(ui_slider2("Position", &pct, va("%5.2f", pct))) changed = 1;
+            if(ui_slider2("Gain", &mixer.voices[i].gain, va("%5.2f", mixer.voices[i].gain))) changed = 1;
+            if(ui_slider2("Pitch", &mixer.voices[i].pitch, va("%5.2f", mixer.voices[i].pitch))) changed = 1;
+            if(ui_slider2("Pan", &mixer.voices[i].pan, va("%5.2f", mixer.voices[i].pan))) changed = 1;
+            ui_separator();
+        }
+    }
+
+    return changed;
+}
 #line 0
 #line 1 "v4k_collide.c"
 
@@ -10433,7 +10467,7 @@ GLuint shader_compile( GLenum type, const char *source ) {
 
         // dump log with line numbers
         shader_print( source );
-        PANIC("ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
+        PRINTF("!ERROR: shader_compile(): %s\n%s\n", type == GL_VERTEX_SHADER ? "Vertex" : "Fragment", buf);
         return 0;
     }
 
@@ -21097,7 +21131,7 @@ int window_frame_begin() {
             // @todo
         }
         for( int p = (open = ui_collapse_filtered(ICON_MD_VOLUME_UP " Audio", "Debug.Audio")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
-            // @todo
+            ui_audio();
         }
         for( int p = (open = ui_collapse_filtered(ICON_MD_VIDEOCAM " Camera", "Debug.Camera")), dummy = (clicked_or_toggled = ui_collapse_clicked()); p; ui_collapse_end(), p = 0) {
             ui_camera( camera_get_active() );
@@ -22812,7 +22846,7 @@ int bt_run(bt_t *b) {
     return 0;
 }
 
-void ui_bt(bt_t *b) {
+int ui_bt(bt_t *b) {
     if( b ) {
         char *info = bt_funcname(b->action);
         if(!info) info = va("%d", array_count(b->children));
@@ -22824,6 +22858,7 @@ void ui_bt(bt_t *b) {
             ui_collapse_end();
         }
     }
+    return 0;
 }
 #line 0
 
diff --git a/engine/v4k.h b/engine/v4k.h
index feefa2b..a2c696c 100644
--- a/engine/v4k.h
+++ b/engine/v4k.h
@@ -1435,7 +1435,7 @@
 API void bt_addfun(const char *name, int(*func)());
 API bt_func bt_findfun(const char *name);
 API char *bt_funcname(bt_func fn);
-API void ui_bt(bt_t *b);
+API int ui_bt(bt_t *b);
 #line 0
 #line 1 "v4k_audio.h"
 
@@ -1469,6 +1469,8 @@ API float audio_volume_clip(float gain); // set fx volume if gain is in
 API float audio_volume_stream(float gain); // set bgm volume if gain is in [0..1] range. return current bgm volume in any case
 API float audio_volume_master(float gain); // set master volume if gain is in [0..1] range. return current master volume in any case
 
+API int ui_audio();
+
 enum AUDIO_FLAGS {
     AUDIO_1CH = 0, // default
     AUDIO_2CH = 1,
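
Usage note (not part of the patch): the hunks above expose ui_audio() and change ui_bt() to return an int, so callers can tell when a debug widget was edited. Besides the built-in Debug.Audio collapse panel wired up in window_frame_begin(), the new panel can also be driven from application code. The sketch below is a minimal example under assumptions: it uses the usual v4k entry points (window_create, window_swap, ui_panel/ui_panel_end, input_down, audio_clip/audio_play), and "coin.wav" is a placeholder asset path, not something shipped by this patch.

// Minimal usage sketch (assumes standard v4k API; "coin.wav" is a placeholder asset).
#include "v4k.h"

int main() {
    window_create(75.0, 0);                 // 75%-sized window
    audio_t coin = audio_clip("coin.wav");  // hypothetical test clip

    while( window_swap() ) {
        if( input_down(KEY_SPACE) ) audio_play(coin, 0);

        if( ui_panel("Audio (debug)", 0) ) {
            // ui_audio() now returns nonzero when any mixer slider changed,
            // mirroring the int return added to ui_bt() in this patch.
            if( ui_audio() ) {
                // e.g. persist the new BGM/SFX/master volumes here
            }
            ui_panel_end();
        }
    }
    return 0;
}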