Hi all,
Long time no see! After leaving things settled for a while, I'm working on my audio broadcasting tool where I'm battling an issue with Windows Volume on a Microphone Input, which is receiving non-voice content from a mixer, and tweaking the streamed result. Up until now, I've just been tweaking the volume of the Mixer output but it doesn't seem granular enough.
Before changing anything my code setup is:
- WASAPI Input Channel (a BASSWASAPIHandler taking the recording input)
- DSP Stream Buffer (a DSP_BufferStream monitoring the recording input)
- Various output channels receiving the BASSWASAPIHandler Input Channel
...and this is working prefectly.
Now I want to introduce a volume control to increase / reduce the input volume before it hits any outputs, and I'm struggling to get a consistent result.
So I'm aware that I need to use BASS_ChannelSetFX, like so:
_bassFXChannel = Bass.BASS_ChannelSetFX(_currentRecordingChannel, BASSFXType.BASS_FX_VOLUME, 0);
(_currentRecordingChannel being the BASSWASAPIHandler .InputChannel)
...but quite often this seems to result in a random error being thrown: 3221226356 (0xc0000374) which can't be handled inside a managed environment (Visual Studio).
IF it does happen to work, then most of the time when I try to use BASS_FXSetParameters setting a volume (between 0.1 and 1.0) on the FX channel, I get 'BASS_ERROR_ILLPARAM'.
(For context I'm using a slider with values 0 - 100, which is used in the SetInputVolume function below)
My first question is:
Am I, from a technical stance, doing it right by trying to apply the FX on the Input channel?
I've tried using the BufferStream (which is just the input channel anyway) and the InputHandler.OutputChannel all with the same results. The minute I remove the BASS_ChannelSetFX calls, the application behaves as it did.
Which suggests that I am doing something wrong....
The ultimate aim (for me right now) is to:
- Set the Windows Microphone Volume to 100% (not code related)
- Reduce the Microphone Input volume within the application, which is reflected in any other channels using that input
- Monitor the result of reducing the Microphone Volume
Any and all help is gratefully received!
internal async void CreateWasapiDevice(List<OutputPlugin> outputTargets)
{
ConfigurationsHelper.DeviceType sourceType = ConfigurationsHelper.DeviceType.Unknown;
int deviceIndex = ConfigurationsHelper.GetWasapiDeviceIndex(_sourceDevice, out sourceType);
bool streamInit = false;
streamInit = Bass.BASS_Init(0, 48000, BASSInit.BASS_DEVICE_DEFAULT, IntPtr.Zero);
var loopBackInit = Bass.BASS_Init(-1, 48000, BASSInit.BASS_DEVICE_DEFAULT, IntPtr.Zero);
var wasapiInit = BassWasapi.BASS_WASAPI_Init(0, 44100, 2, BASSWASAPIInit.BASS_WASAPI_AUTOFORMAT, 0f, 0f, null, IntPtr.Zero);
if (_sourceDevice.IsLoopback)
{
_wasapiInputHandler = new BassWasapiHandler(deviceIndex, false, _sourceDevice.mixfreq, _sourceDevice.mixchans, 0f, 0f);
}
else
{
_wasapiInputHandler = new BassWasapiHandler(deviceIndex, false, true, _sourceDevice.mixfreq, _sourceDevice.mixchans, 0f, 0f);
}
// Define a WASAPI out channel handler
_wasapiOutputHandler = new BassWasapiHandler(deviceIndex, false, 48000, 2, 0f, 0f);
// Create a buffer for analyzing when needed
_wasapiStreamBuffer = new DSP_BufferStream();
// setup a full-duplex stream
_wasapiInputHandler.SetFullDuplex(0, BASSFlag.BASS_STREAM_DECODE, false);
if (outputTargets != null)
{
foreach (IOutputPlugin outputTarget in outputTargets)
{
if (outputTarget is IExtendedPlugin)
{
((IExtendedPlugin) outputTarget).WasapiStreamHandle = _wasapiInputHandler.InputChannel;
}
outputTarget.StreamChannel = _wasapiInputHandler.OutputChannel;
}
}
}
internal async void StartInputStream(List<OutputPlugin> outputTargets)
{
if (_sourceDevice != null)
{
await StopInputStream();
}
lock (_wasapiInputLock)
{
_isReadingCancelled = false;
_onChannelReceiveDataProc = new DSPPROC(OnChannelDataReceived);
CreateWasapiDevice(outputTargets);
if (_wasapiInputHandler.Init())
{
_wasapiStreamBuffer.ChannelHandle = _wasapiInputHandler.InputChannel;
_wasapiStreamBuffer.Start();
_currentRecordingChannel = _wasapiInputHandler.InputChannel;
//Start the input / output streams
_wasapiOutputHandler.Init();
_wasapiOutputHandler.Start();
_wasapiInputHandler.Start();
_channelDSP = Bass.BASS_ChannelSetDSP(_currentRecordingChannel, _onChannelReceiveDataProc, IntPtr.Zero, 0);
_bassFXChannel = Bass.BASS_ChannelSetFX(_currentRecordingChannel, BASSFXType.BASS_FX_VOLUME, 0);
if (_bassFXChannel == 0)
{
var fxErrorCode = Bass.BASS_ErrorGetCode();
}
SetInputVolume(_inputVolume);
}
else
{
var initErrorCode = Bass.BASS_ErrorGetCode();
}
if (_currentRecordingChannel != 0)
{
Bass.BASS_ChannelPlay(_currentRecordingChannel, true);
}
}
}
public void SetInputVolume(int volume)
{
// Find the Global volume
Un4seen.Bass.AddOn.Fx.BASS_BFX_VOLUME volumeParam = new Un4seen.Bass.AddOn.Fx.BASS_BFX_VOLUME()
{
lChannel = 0
};
if (_bassFXChannel != 0)
{
if (Bass.BASS_FXGetParameters(_bassFXChannel, volumeParam))
{
float newVolume = (float)volume / 100;
volumeParam.fVolume = newVolume;
var wasSet = Bass.BASS_FXSetParameters(_bassFXChannel, volumeParam);
if (!wasSet)
{
var setErrorCode = Bass.BASS_ErrorGetCode();
}
}
}
}
private void OnChannelDataReceived(int handle, int channel, IntPtr buffer, int length, IntPtr user)
{
if (length > 0)
{
ProcessInput(channel, buffer, length);
}
else
{
_onVULevelsReceived?.Invoke(new VULevel());
}
}
private void ProcessInput(int channel, IntPtr buffer, int length)
{
int levelData = Bass.BASS_ChannelGetLevel((_wasapiInputHandler != null) ? _wasapiStreamBuffer.BufferStream : _currentRecordingChannel);
int leftLevel = Utils.LowWord32(levelData);
int rightLevel = Utils.HighWord32(levelData);
_onVULevelsReceived?.BeginInvoke(new VULevel() {LeftChannel = leftLevel, RightChannel = rightLevel}, null, null);
//foreach (DSP_BufferStream splitStream in _splitStreams)
//{
// splitStream.DSPProc(splitStream.OutputHandle, channel, buffer, length, IntPtr.Zero);
//}
}
internal async Task StopInputStream()
{
lock (_wasapiInputLock)
{
bool wasBufferStopped = false;
bool wasInputStopped = false;
bool wasOutputStopped = false;
bool wasRecordingDSPRemoved = false;
bool wasBassFxRemoved = false;
if (_currentRecordingChannel != 0)
{
if (_bassFXChannel != 0)
{
wasBassFxRemoved = Bass.BASS_ChannelRemoveFX(_currentRecordingChannel, _bassFXChannel);
}
if (_channelDSP != 0)
{
wasRecordingDSPRemoved = Bass.BASS_ChannelRemoveDSP(_currentRecordingChannel, _channelDSP);
}
}
if (_wasapiInputHandler != null)
{
wasInputStopped = _wasapiInputHandler.Stop();
if (wasInputStopped)
{
_wasapiInputHandler.Dispose();
}
}
if (_wasapiStreamBuffer != null)
{
wasBufferStopped = _wasapiStreamBuffer.Stop();
if (wasBufferStopped)
{
_wasapiStreamBuffer.Dispose();
}
}
if (_wasapiOutputHandler != null)
{
wasOutputStopped = _wasapiOutputHandler.Stop();
if (wasOutputStopped)
{
_wasapiOutputHandler.Dispose();
}
}
_onChannelReceiveDataProc = null;
_isReadingCancelled = true;
}
Bass.BASS_RecordFree();
_currentRecordingChannel = 0;
_onVULevelsReceived?.Invoke(new VULevel() { LeftChannel=0, RightChannel=0});
}