From 00ed29d6c28cd2a35e0fa033a4eec1dd3245a0b1 Mon Sep 17 00:00:00 2001 From: Jonathan Moore Liles Date: Sun, 8 Sep 2013 13:51:05 -0700 Subject: [PATCH] Tweak DSP optimization. --- mixer/src/AUX_Module.C | 21 ++- mixer/src/Gain_Module.C | 44 +++--- mixer/src/Meter_Module.C | 11 +- mixer/src/Mono_Pan_Module.C | 69 ++++---- mixer/src/Plugin_Module.C | 18 ++- mixer/src/Spatializer_Module.C | 277 ++++++++++++++++----------------- nonlib/dsp.C | 58 ++++--- nonlib/dsp.h | 7 + 8 files changed, 259 insertions(+), 246 deletions(-) diff --git a/mixer/src/AUX_Module.C b/mixer/src/AUX_Module.C index 04fcf37..79ce0fb 100644 --- a/mixer/src/AUX_Module.C +++ b/mixer/src/AUX_Module.C @@ -108,7 +108,15 @@ AUX_Module::handle_sample_rate_change ( nframes_t n ) void AUX_Module::process ( nframes_t nframes ) { - if ( !bypass() ) + if ( unlikely( bypass() ) ) + { + for ( unsigned int i = 0; i < audio_input.size(); ++i ) + { + if ( audio_input[i].connected() ) + buffer_fill_with_silence( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), nframes ); + } + } + else { float gt = DB_CO( control_input[0].control_value() ); @@ -116,9 +124,8 @@ AUX_Module::process ( nframes_t nframes ) bool use_gainbuf = smoothing.apply( gainbuf, nframes, gt ); - if ( use_gainbuf ) + if ( unlikely( use_gainbuf ) ) { - for ( unsigned int i = 0; i < audio_input.size(); ++i ) { if ( audio_input[i].connected() ) @@ -135,14 +142,6 @@ AUX_Module::process ( nframes_t nframes ) } } } - else - { - for ( unsigned int i = 0; i < audio_input.size(); ++i ) - { - if ( audio_input[i].connected() ) - buffer_fill_with_silence( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), nframes ); - } - } } void diff --git a/mixer/src/Gain_Module.C b/mixer/src/Gain_Module.C index def7712..cb8a123 100644 --- a/mixer/src/Gain_Module.C +++ b/mixer/src/Gain_Module.C @@ -105,30 +105,38 @@ Gain_Module::handle_sample_rate_change ( nframes_t n ) void Gain_Module::process ( nframes_t nframes ) { - const float gt = DB_CO( control_input[1].control_value() ? -90.f : control_input[0].control_value() ); - sample_t gainbuf[nframes]; - - bool use_gainbuf = smoothing.apply( gainbuf, nframes, gt ); - - if ( use_gainbuf ) + if ( unlikely( bypass() ) ) { - for ( int i = audio_input.size(); i--; ) - { - if ( audio_input[i].connected() && audio_output[i].connected() ) - { - sample_t *out = (sample_t*)audio_input[i].buffer(); - - buffer_apply_gain_buffer( out, gainbuf, nframes ); - } - } + /* nothing to do */ } else - for ( int i = audio_input.size(); i--; ) + { + const float gt = DB_CO( control_input[1].control_value() ? 
-90.f : control_input[0].control_value() ); + + sample_t gainbuf[nframes]; + + bool use_gainbuf = smoothing.apply( gainbuf, nframes, gt ); + + if ( unlikely( use_gainbuf ) ) { - if ( audio_input[i].connected() && audio_output[i].connected() ) + for ( int i = audio_input.size(); i--; ) { - buffer_apply_gain( (sample_t*)audio_input[i].buffer(), nframes, gt ); + if ( audio_input[i].connected() && audio_output[i].connected() ) + { + sample_t *out = (sample_t*)audio_input[i].buffer(); + + buffer_apply_gain_buffer( out, gainbuf, nframes ); + } } } + else + for ( int i = audio_input.size(); i--; ) + { + if ( audio_input[i].connected() && audio_output[i].connected() ) + { + buffer_apply_gain( (sample_t*)audio_input[i].buffer(), nframes, gt ); + } + } + } } diff --git a/mixer/src/Meter_Module.C b/mixer/src/Meter_Module.C index e787d96..4f975f6 100644 --- a/mixer/src/Meter_Module.C +++ b/mixer/src/Meter_Module.C @@ -174,14 +174,11 @@ Meter_Module::process ( nframes_t nframes ) { for ( unsigned int i = 0; i < audio_input.size(); ++i ) { - if ( audio_input[i].connected() ) - { // float dB = 20 * log10( get_peak_sample( (float*)audio_input[i].buffer(), nframes ) / 2.0f ); - float dB = 20 * log10( buffer_get_peak( (sample_t*) audio_input[i].buffer(), nframes ) ); + float dB = 20 * log10( buffer_get_peak( (sample_t*) audio_input[i].buffer(), nframes ) ); - ((float*)control_output[0].buffer())[i] = dB; - if (dB > control_value[i]) - control_value[i] = dB; - } + ((float*)control_output[0].buffer())[i] = dB; + if (dB > control_value[i]) + control_value[i] = dB; } } diff --git a/mixer/src/Mono_Pan_Module.C b/mixer/src/Mono_Pan_Module.C index 86688d1..9eac660 100644 --- a/mixer/src/Mono_Pan_Module.C +++ b/mixer/src/Mono_Pan_Module.C @@ -80,49 +80,42 @@ Mono_Pan_Module::configure_inputs ( int ) void Mono_Pan_Module::process ( nframes_t nframes ) { - - if ( audio_input[0].connected() && - audio_output[0].connected() && - audio_output[1].connected() ) + if ( unlikely( bypass() ) ) { - if ( bypass() ) - { - buffer_copy( (sample_t*)audio_output[1].buffer(), (sample_t*)audio_input[0].buffer(), nframes ); + buffer_copy( (sample_t*)audio_output[1].buffer(), (sample_t*)audio_input[0].buffer(), nframes ); + } + else + { + const float gt = (control_input[0].control_value() + 1.0f) * 0.5f; + + sample_t gainbuf[nframes]; + bool use_gainbuf = smoothing.apply( gainbuf, nframes, gt ); + + if ( unlikely( use_gainbuf ) ) + { + /* right channel */ + + buffer_copy_and_apply_gain_buffer( (sample_t*)audio_output[1].buffer(), + (sample_t*)audio_input[0].buffer(), + gainbuf, + nframes ); + + /* left channel */ + for ( nframes_t i = 0; i < nframes; i++ ) + gainbuf[i] = 1.0f - gainbuf[i]; + + buffer_apply_gain_buffer( (sample_t*)audio_output[0].buffer(), gainbuf, nframes ); } else { - const float gt = (control_input[0].control_value() + 1.0f) * 0.5f; - - if ( ! 
smoothing.target_reached( gt ) ) - { - sample_t gainbuf[nframes]; - - smoothing.apply( gainbuf, nframes, gt ); + /* right channel */ + buffer_copy_and_apply_gain( (sample_t*)audio_output[1].buffer(), + (sample_t*)audio_input[0].buffer(), + nframes, + gt ); - /* right channel */ - - buffer_copy_and_apply_gain_buffer( (sample_t*)audio_output[1].buffer(), - (sample_t*)audio_input[0].buffer(), - gainbuf, - nframes ); - - /* left channel */ - for ( nframes_t i = 0; i < nframes; i++ ) - gainbuf[i] = 1.0f - gainbuf[i]; - - buffer_apply_gain_buffer( (sample_t*)audio_output[0].buffer(), gainbuf, nframes ); - } - else - { - /* right channel */ - buffer_copy_and_apply_gain( (sample_t*)audio_output[1].buffer(), - (sample_t*)audio_input[0].buffer(), - nframes, - gt ); - - /* left channel */ - buffer_apply_gain( (sample_t*)audio_output[0].buffer(), nframes, 1.0f - gt); - } + /* left channel */ + buffer_apply_gain( (sample_t*)audio_output[0].buffer(), nframes, 1.0f - gt); } } } diff --git a/mixer/src/Plugin_Module.C b/mixer/src/Plugin_Module.C index 4104912..4d41a25 100644 --- a/mixer/src/Plugin_Module.C +++ b/mixer/src/Plugin_Module.C @@ -795,12 +795,7 @@ Plugin_Module::process ( nframes_t nframes ) { handle_port_connection_change(); - if ( !bypass() ) - { - for ( unsigned int i = 0; i < _idata->handle.size(); ++i ) - _idata->descriptor->run( _idata->handle[i], nframes ); - } - else + if ( unlikely( bypass() ) ) { /* If this is a mono to stereo plugin, then duplicate the input channel... */ /* There's not much we can do to automatically support other configurations. */ @@ -808,9 +803,16 @@ Plugin_Module::process ( nframes_t nframes ) { buffer_copy( (sample_t*)audio_output[1].buffer(), (sample_t*)audio_input[0].buffer(), nframes ); } - } - _latency = get_plugin_latency(); + _latency = 0; + } + else + { + for ( unsigned int i = 0; i < _idata->handle.size(); ++i ) + _idata->descriptor->run( _idata->handle[i], nframes ); + + _latency = get_plugin_latency(); + } } diff --git a/mixer/src/Spatializer_Module.C b/mixer/src/Spatializer_Module.C index 1545ec9..5649053 100644 --- a/mixer/src/Spatializer_Module.C +++ b/mixer/src/Spatializer_Module.C @@ -595,169 +595,166 @@ Spatializer_Module::draw ( void ) void Spatializer_Module::process ( nframes_t nframes ) { - if ( !bypass() ) - { - float azimuth = control_input[0].control_value(); - float elevation = control_input[1].control_value(); - float radius = control_input[2].control_value(); - float highpass_freq = control_input[3].control_value(); - float width = control_input[4].control_value(); - float angle = control_input[5].control_value(); + float azimuth = control_input[0].control_value(); + float elevation = control_input[1].control_value(); + float radius = control_input[2].control_value(); + float highpass_freq = control_input[3].control_value(); + float width = control_input[4].control_value(); + float angle = control_input[5].control_value(); // bool more_options = control_input[6].control_value(); - bool speed_of_sound = control_input[7].control_value() > 0.5f; - float late_gain = DB_CO( control_input[8].control_value() ); - float early_gain = DB_CO( control_input[9].control_value() ); + bool speed_of_sound = control_input[7].control_value() > 0.5f; + float late_gain = DB_CO( control_input[8].control_value() ); + float early_gain = DB_CO( control_input[9].control_value() ); - control_input[3].hints.visible = highpass_freq != 0.0f; + control_input[3].hints.visible = highpass_freq != 0.0f; - float delay_seconds = 0.0f; + float delay_seconds = 0.0f; - if ( 
speed_of_sound && radius > 1.0f ) - delay_seconds = ( radius - 1.0f ) / 340.29f; + if ( speed_of_sound && radius > 1.0f ) + delay_seconds = ( radius - 1.0f ) / 340.29f; - /* direct sound follows inverse square law */ - /* but it's just the inverse as far as SPL goes */ + /* direct sound follows inverse square law */ + /* but it's just the inverse as far as SPL goes */ - /* let's not go nuts... */ - if ( radius < 0.01f ) - radius = 0.01f; + /* let's not go nuts... */ + if ( radius < 0.01f ) + radius = 0.01f; - float gain = 1.0f / radius; + float gain = 1.0f / radius; - /* float cutoff_frequency = gain * LOWPASS_FREQ; */ + /* float cutoff_frequency = gain * LOWPASS_FREQ; */ - sample_t gainbuf[nframes]; - sample_t delaybuf[nframes]; + sample_t gainbuf[nframes]; + sample_t delaybuf[nframes]; - bool use_gainbuf = false; - bool use_delaybuf = delay_smoothing.apply( delaybuf, nframes, delay_seconds ); + bool use_gainbuf = false; + bool use_delaybuf = delay_smoothing.apply( delaybuf, nframes, delay_seconds ); - for ( unsigned int i = 0; i < audio_input.size(); i++ ) - { - sample_t *buf = (sample_t*) audio_input[i].buffer(); + for ( unsigned int i = 0; i < audio_input.size(); i++ ) + { + sample_t *buf = (sample_t*) audio_input[i].buffer(); - /* frequency effects */ - _highpass[i]->run_highpass( buf, highpass_freq, nframes ); + /* frequency effects */ + _highpass[i]->run_highpass( buf, highpass_freq, nframes ); - /* send to late reverb */ - if ( i == 0 ) - buffer_copy( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), buf, nframes ); - else - buffer_mix( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), buf, nframes ); + /* send to late reverb */ + if ( i == 0 ) + buffer_copy( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), buf, nframes ); + else + buffer_mix( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), buf, nframes ); - } + } - { - use_gainbuf = late_gain_smoothing.apply( gainbuf, nframes, late_gain ); + { + use_gainbuf = late_gain_smoothing.apply( gainbuf, nframes, late_gain ); - /* gain effects */ - if ( use_gainbuf ) - buffer_apply_gain_buffer( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), gainbuf, nframes ); - else - buffer_apply_gain( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), nframes, late_gain ); - } - - float early_angle = azimuth - angle; - if ( early_angle > 180.0f ) - early_angle = -180 - ( early_angle - 180 ); - else if ( early_angle < -180.0f ) - early_angle = 180 - ( early_angle + 180 ); - - /* send to early reverb */ - if ( audio_input.size() == 1 ) - { - _early_panner->run_mono( (sample_t*)audio_input[0].buffer(), - (sample_t*)aux_audio_output[1].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[2].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[3].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[4].jack_port()->buffer(nframes), - azimuth + angle, - elevation, - nframes ); - } + /* gain effects */ + if ( unlikely( use_gainbuf ) ) + buffer_apply_gain_buffer( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), gainbuf, nframes ); else - { - _early_panner->run_stereo( (sample_t*)audio_input[0].buffer(), - (sample_t*)audio_input[1].buffer(), - (sample_t*)aux_audio_output[1].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[2].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[3].jack_port()->buffer(nframes), - (sample_t*)aux_audio_output[4].jack_port()->buffer(nframes), - azimuth + angle, - elevation, - width, - nframes ); - } + 
buffer_apply_gain( (sample_t*)aux_audio_output[0].jack_port()->buffer(nframes), nframes, late_gain ); + } - { - use_gainbuf = early_gain_smoothing.apply( gainbuf, nframes, early_gain ); - - for ( int i = 1; i < 5; i++ ) - { - /* gain effects */ - if ( use_gainbuf ) - buffer_apply_gain_buffer( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), gainbuf, nframes ); - else - buffer_apply_gain( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), nframes, early_gain ); - } - } + float early_angle = azimuth - angle; + if ( early_angle > 180.0f ) + early_angle = -180 - ( early_angle - 180 ); + else if ( early_angle < -180.0f ) + early_angle = 180 - ( early_angle + 180 ); - float corrected_angle = fabs( angle ) - (fabs( width ) * 0.5f); - - if ( corrected_angle < 0.0f ) - corrected_angle = 0.0f; - - float cutoff_frequency = ( 1.0f / ( 1.0f + corrected_angle ) ) * 300000.0f; - - use_gainbuf = gain_smoothing.apply( gainbuf, nframes, gain ); - - for ( unsigned int i = 0; i < audio_input.size(); i++ ) - { - /* gain effects */ - if ( use_gainbuf ) - buffer_apply_gain_buffer( (sample_t*)audio_input[i].buffer(), gainbuf, nframes ); - else - buffer_apply_gain( (sample_t*)audio_input[i].buffer(), nframes, gain ); - - /* frequency effects */ - _lowpass[i]->run_lowpass( (sample_t*)audio_input[i].buffer(), cutoff_frequency, nframes ); - - /* delay effects */ - if ( speed_of_sound ) - { - if ( use_delaybuf ) - _delay[i]->run( (sample_t*)audio_input[i].buffer(), delaybuf, 0, nframes ); - else - _delay[i]->run( (sample_t*)audio_input[i].buffer(), 0, delay_seconds, nframes ); - } - } - - /* now do direct outputs */ - if ( audio_input.size() == 1 ) - { - _panner->run_mono( (sample_t*)audio_input[0].buffer(), - (sample_t*)audio_output[0].buffer(), - (sample_t*)audio_output[1].buffer(), - (sample_t*)audio_output[2].buffer(), - (sample_t*)audio_output[3].buffer(), - azimuth, - elevation, - nframes ); - } - else - { - _panner->run_stereo( (sample_t*)audio_input[0].buffer(), - (sample_t*)audio_input[1].buffer(), - (sample_t*)audio_output[0].buffer(), - (sample_t*)audio_output[1].buffer(), - (sample_t*)audio_output[2].buffer(), - (sample_t*)audio_output[3].buffer(), - azimuth, + /* send to early reverb */ + if ( audio_input.size() == 1 ) + { + _early_panner->run_mono( (sample_t*)audio_input[0].buffer(), + (sample_t*)aux_audio_output[1].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[2].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[3].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[4].jack_port()->buffer(nframes), + azimuth + angle, elevation, - width, nframes ); + } + else + { + _early_panner->run_stereo( (sample_t*)audio_input[0].buffer(), + (sample_t*)audio_input[1].buffer(), + (sample_t*)aux_audio_output[1].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[2].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[3].jack_port()->buffer(nframes), + (sample_t*)aux_audio_output[4].jack_port()->buffer(nframes), + azimuth + angle, + elevation, + width, + nframes ); + } + + { + use_gainbuf = early_gain_smoothing.apply( gainbuf, nframes, early_gain ); + + for ( int i = 1; i < 5; i++ ) + { + /* gain effects */ + if ( unlikely( use_gainbuf ) ) + buffer_apply_gain_buffer( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), gainbuf, nframes ); + else + buffer_apply_gain( (sample_t*)aux_audio_output[i].jack_port()->buffer(nframes), nframes, early_gain ); } } + + float corrected_angle = fabs( angle ) - (fabs( width ) * 0.5f); + + if ( corrected_angle 
< 0.0f ) + corrected_angle = 0.0f; + + float cutoff_frequency = ( 1.0f / ( 1.0f + corrected_angle ) ) * 300000.0f; + + use_gainbuf = gain_smoothing.apply( gainbuf, nframes, gain ); + + for ( unsigned int i = 0; i < audio_input.size(); i++ ) + { + /* gain effects */ + if ( unlikely( use_gainbuf ) ) + buffer_apply_gain_buffer( (sample_t*)audio_input[i].buffer(), gainbuf, nframes ); + else + buffer_apply_gain( (sample_t*)audio_input[i].buffer(), nframes, gain ); + + /* frequency effects */ + _lowpass[i]->run_lowpass( (sample_t*)audio_input[i].buffer(), cutoff_frequency, nframes ); + + /* delay effects */ + if ( likely( speed_of_sound ) ) + { + if ( unlikely( use_delaybuf ) ) + _delay[i]->run( (sample_t*)audio_input[i].buffer(), delaybuf, 0, nframes ); + else + _delay[i]->run( (sample_t*)audio_input[i].buffer(), 0, delay_seconds, nframes ); + } + } + + /* now do direct outputs */ + if ( audio_input.size() == 1 ) + { + _panner->run_mono( (sample_t*)audio_input[0].buffer(), + (sample_t*)audio_output[0].buffer(), + (sample_t*)audio_output[1].buffer(), + (sample_t*)audio_output[2].buffer(), + (sample_t*)audio_output[3].buffer(), + azimuth, + elevation, + nframes ); + } + else + { + _panner->run_stereo( (sample_t*)audio_input[0].buffer(), + (sample_t*)audio_input[1].buffer(), + (sample_t*)audio_output[0].buffer(), + (sample_t*)audio_output[1].buffer(), + (sample_t*)audio_output[2].buffer(), + (sample_t*)audio_output[3].buffer(), + azimuth, + elevation, + width, + nframes ); + } } void diff --git a/nonlib/dsp.C b/nonlib/dsp.C index 4677daf..2f5a434 100644 --- a/nonlib/dsp.C +++ b/nonlib/dsp.C @@ -46,17 +46,21 @@ buffer_apply_gain ( sample_t * __restrict__ buf, nframes_t nframes, float g ) { sample_t * buf_ = (sample_t*) assume_aligned(buf); - if ( g != 1.0f ) - while ( nframes-- ) - *(buf_++) *= g; + if ( g == 1.0f ) + return; + + for ( nframes_t i = 0; i < nframes; i++ ) + buf_[i] *= g; } void buffer_apply_gain_unaligned ( sample_t * __restrict__ buf, nframes_t nframes, float g ) { - if ( g != 1.0f ) - while ( nframes-- ) - *(buf++) *= g; + if ( g == 1.0f ) + return; + + for ( nframes_t i = 0; i < nframes; i++ ) + buf[i] *= g; } void @@ -65,8 +69,8 @@ buffer_apply_gain_buffer ( sample_t * __restrict__ buf, const sample_t * __restr sample_t * buf_ = (sample_t*) assume_aligned(buf); const sample_t * gainbuf_ = (const sample_t*) assume_aligned(gainbuf); - while ( nframes-- ) - *(buf_++) *= *(gainbuf_++); + for ( nframes_t i = 0; i < nframes; i++ ) + buf_[i] *= gainbuf_[i]; } void @@ -76,8 +80,8 @@ buffer_copy_and_apply_gain_buffer ( sample_t * __restrict__ dst, const sample_t const sample_t * src_ = (const sample_t*) assume_aligned(src); const sample_t * gainbuf_ = (const sample_t*) assume_aligned(gainbuf); - while ( nframes-- ) - *(dst_++) = *(src_++) * *(gainbuf_++); + for ( nframes_t i = 0; i < nframes; i++ ) + dst_[i] = src_[i] * gainbuf_[i]; } void @@ -86,8 +90,8 @@ buffer_mix ( sample_t * __restrict__ dst, const sample_t * __restrict__ src, nfr sample_t * dst_ = (sample_t*) assume_aligned(dst); const sample_t * src_ = (const sample_t*) assume_aligned(src); - while ( nframes-- ) - *(dst_++) += *(src_++); + for ( nframes_t i = 0; i < nframes; i++ ) + dst_[i] += src_[i]; } void @@ -95,9 +99,9 @@ buffer_mix_with_gain ( sample_t * __restrict__ dst, const sample_t * __restrict_ { sample_t * dst_ = (sample_t*) assume_aligned(dst); const sample_t * src_ = (const sample_t*) assume_aligned(src); - - while ( nframes-- ) - *(dst_++) += *(src_++) * g; + + for ( nframes_t i = 0; i < nframes; i++ ) + dst_[i] 
+= src_[i] * g; } void @@ -181,8 +185,10 @@ buffer_is_digital_black ( sample_t *buf, nframes_t nframes ) { while ( nframes-- ) { - if ( 0 != buf[nframes] ) - return false; + if (! *(buf++) ) + continue; + + return false; } return true; @@ -193,15 +199,19 @@ buffer_get_peak ( const sample_t * __restrict__ buf, nframes_t nframes ) { const sample_t * buf_ = (const sample_t*) assume_aligned(buf); - float p = 0.0f; - - while ( nframes-- ) + float pmax = 0.0f; + float pmin = 0.0f; + + for ( nframes_t i = 0; i < nframes; i++ ) { - const float s = fabs(*(buf_++)); - p = s > p ? s : p; + pmax = buf_[i] > pmax ? buf_[i] : pmax; + pmin = buf_[i] < pmin ? buf_[i] : pmin; } - return p; + pmax = fabsf(pmax); + pmin = fabsf(pmin); + + return pmax > pmin ? pmax : pmin; } void @@ -224,7 +234,7 @@ Value_Smoothing_Filter::sample_rate ( nframes_t n ) const float FS = n; const float T = 0.05f; - w = _cutoff / (FS * T); + w = _cutoff / (FS * T); } bool diff --git a/nonlib/dsp.h b/nonlib/dsp.h index 76e8e10..20c684e 100644 --- a/nonlib/dsp.h +++ b/nonlib/dsp.h @@ -79,3 +79,10 @@ static inline float interpolate_cubic ( const float fr, const float inm1, const #define DEG2RAD 0.01745329251f #define ONEOVERSQRT2 0.70710678118f + +#ifndef likely +#define likely(x) __builtin_expect(x,1) +#endif +#ifndef unlikely +#define unlikely(x) __builtin_expect(x,0) +#endif
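
The sketches below are not part of the patch; they restate the patterns the commit relies on in compilable form, with any names not taken from the diff marked as hypothetical.

The first pattern is explicit branch prediction: nonlib/dsp.h now defines likely()/unlikely() around GCC's __builtin_expect(), and each process() callback wraps its bypass() test (and the "gain still ramping" test) in unlikely(), so the hot, non-bypassed path becomes the straight-line fall-through case. A minimal, self-contained sketch of that pattern follows; the macros match the patch, while module_bypassed, current_gain and example_process() are hypothetical stand-ins for Module state.

    /* Sketch only, not part of the patch.  The macros match nonlib/dsp.h;
       the rest is a hypothetical stand-in for a Module::process() body. */

    #ifndef likely
    #define likely(x) __builtin_expect(x,1)
    #endif
    #ifndef unlikely
    #define unlikely(x) __builtin_expect(x,0)
    #endif

    typedef float sample_t;
    typedef unsigned int nframes_t;

    static bool  module_bypassed = false;   /* hypothetical bypass() result */
    static float current_gain    = 0.5f;    /* hypothetical smoothed gain   */

    static void
    example_process ( sample_t *buf, nframes_t nframes )
    {
        /* bypass is rare in normal operation, so hint the compiler to lay
           the processing path out as the fall-through case */
        if ( unlikely( module_bypassed ) )
        {
            /* nothing to do */
            return;
        }

        for ( nframes_t i = 0; i < nframes; i++ )
            buf[i] *= current_gain;
    }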
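
The second pattern is loop shape. nonlib/dsp.C replaces its decrementing pointer-walk while() loops with unit-stride indexed for() loops over __restrict__ pointers passed through assume_aligned(), and hoists the g == 1.0f test into an early return, which is the shape GCC's auto-vectorizer handles best. The before/after sketch below assumes assume_aligned() wraps __builtin_assume_aligned(); the real macro lives in nonlib/dsp.h and is not shown in this patch.

    /* Sketch only.  The assume_aligned() definition here is an assumption
       (the real macro is in nonlib/dsp.h); the loop bodies mirror the patch. */

    typedef float sample_t;
    typedef unsigned int nframes_t;

    #define assume_aligned(p) __builtin_assume_aligned((p),16)

    /* before: decrementing pointer walk, with the gain test wrapped around it */
    static void
    apply_gain_before ( sample_t * __restrict__ buf, nframes_t nframes, float g )
    {
        if ( g != 1.0f )
            while ( nframes-- )
                *(buf++) *= g;
    }

    /* after: early return, then a unit-stride indexed loop over an aligned,
       non-aliased buffer that the compiler can vectorize */
    static void
    apply_gain_after ( sample_t * __restrict__ buf, nframes_t nframes, float g )
    {
        sample_t * buf_ = (sample_t*) assume_aligned(buf);

        if ( g == 1.0f )
            return;

        for ( nframes_t i = 0; i < nframes; i++ )
            buf_[i] *= g;
    }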
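
Several modules repeat one calling convention around that DSP layer: Value_Smoothing_Filter::apply() fills a per-sample ramp buffer and returns true while the control value is still gliding toward its target, so the caller uses buffer_apply_gain_buffer() during the ramp and the cheaper scalar buffer_apply_gain() once it has settled, with the ramping branch now marked unlikely() since a steady-state control has already reached its target. The condensed sketch below keeps that branch structure; Stub_Smoother and gain_process_sketch() are hypothetical stand-ins, not the real filter from nonlib/dsp.h.

    /* Sketch only.  Stub_Smoother is a hypothetical stand-in for
       Value_Smoothing_Filter; the branch structure mirrors Gain_Module. */

    #include <cmath>

    typedef float sample_t;
    typedef unsigned int nframes_t;

    #ifndef unlikely
    #define unlikely(x) __builtin_expect(x,0)
    #endif

    struct Stub_Smoother
    {
        float v;                                /* current smoothed value */

        Stub_Smoother ( ) : v( 0.0f ) { }

        /* fill gainbuf with a glide toward target; return true while ramping */
        bool apply ( sample_t *gainbuf, nframes_t nframes, float target )
        {
            if ( fabsf( target - v ) < 0.0001f )
            {
                v = target;
                return false;                   /* settled: scalar gain is enough */
            }

            for ( nframes_t i = 0; i < nframes; i++ )
            {
                v += 0.01f * ( target - v );    /* simple one-pole glide */
                gainbuf[i] = v;
            }

            return true;
        }
    };

    static void
    gain_process_sketch ( Stub_Smoother &smoothing, sample_t *buf,
                          nframes_t nframes, float gt )
    {
        sample_t gainbuf[nframes];

        if ( unlikely( smoothing.apply( gainbuf, nframes, gt ) ) )
        {
            /* still ramping: per-sample gain buffer */
            for ( nframes_t i = 0; i < nframes; i++ )
                buf[i] *= gainbuf[i];
        }
        else
        {
            /* settled: one scalar multiply per sample */
            for ( nframes_t i = 0; i < nframes; i++ )
                buf[i] *= gt;
        }
    }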
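
Finally, buffer_get_peak() drops the per-sample fabs(): the loop tracks a running maximum and minimum, and the sign is folded in once at the end, leaving two branch-free min/max updates per sample. The sketch below mirrors that rewrite and adds a tiny harness; the 20 * log10() conversion is the same one Meter_Module applies to turn the peak into dB.

    /* Sketch only.  Mirrors the buffer_get_peak() rewrite in nonlib/dsp.C,
       plus a small harness; the dB conversion matches Meter_Module. */

    #include <cmath>
    #include <cstdio>

    typedef float sample_t;
    typedef unsigned int nframes_t;

    static float
    get_peak_sketch ( const sample_t * __restrict__ buf, nframes_t nframes )
    {
        float pmax = 0.0f;
        float pmin = 0.0f;

        /* no fabs() in the loop: just running max and min */
        for ( nframes_t i = 0; i < nframes; i++ )
        {
            pmax = buf[i] > pmax ? buf[i] : pmax;
            pmin = buf[i] < pmin ? buf[i] : pmin;
        }

        pmax = fabsf( pmax );
        pmin = fabsf( pmin );

        return pmax > pmin ? pmax : pmin;
    }

    int
    main ( void )
    {
        sample_t buf[8] = { 0.1f, -0.7f, 0.3f, -0.2f, 0.5f, 0.0f, -0.1f, 0.2f };

        float peak = get_peak_sketch( buf, 8 );

        printf( "peak = %f (%f dB)\n", peak, 20.0f * log10f( peak ) );

        return 0;
    }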