Merge pull request #6 from ChristopheI/main
Add support of G722 (tested) and G729 (not tested but should work - no WebRTC client found)
ChristopheI committed Apr 27, 2022
2 parents c6e2694 + c3a36df commit 673ef52
Showing 4 changed files with 36 additions and 35 deletions.
13 changes: 9 additions & 4 deletions README.md
@@ -6,20 +6,25 @@ The classes in this project provide **Audio End Point** and **Audio Source** features

So, used in combination with **SIPSorceryMedia.FFMpeg**, you have both **Audio and Video End Point** and **Audio and Video Source** using multi-platform components.

Using both you have these deatures:

- **Video codecs**: VP8, H264
- **Audio codecs**: PCMU, PCMA
Using both you have these features:

- **Video Input**:
    - using a local file or a remote one via URI [**`With SIPSorceryMedia.FFMpeg`**]
    - using a camera [**`With SIPSorceryMedia.FFMpeg`**]
    - using the screen [**`With SIPSorceryMedia.FFMpeg`**]

- **Audio Input**:
    - using a local file or a remote one via URI [**`With SIPSorceryMedia.FFMpeg`**]
    - using a microphone [**`With SIPSorceryMedia.FFMpeg or SIPSorceryMedia.SDL2`**]

- **Audio Output**:
    - using a speaker [**`With SIPSorceryMedia.SDL2`**]

**Audio Codecs** supported by this library (using **AudioEncoder** from **SIPSorcery**):
- PCMU
- PCMA
- G722
- G729 (not tested but should work)

# Installing SDL2

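Since the supported-codec list is now driven by the `IAudioEncoder` passed into the SDL2 classes, an application opts into G722 (and, untested, G729) simply by handing in SIPSorcery's `AudioEncoder`. A minimal sketch, not part of this commit, assuming SDL2 itself has already been initialised and that the hypothetical device-name prefixes below match devices on the machine:

```csharp
// Sketch only: the SDL2 source and sink take their audio formats from the encoder,
// so PCMU, PCMA, G722 (and G729 where the encoder supports it) are all offered.
using SIPSorcery.Media;          // AudioEncoder : IAudioEncoder
using SIPSorceryMedia.SDL2;

var audioEncoder = new AudioEncoder();

// Hypothetical device-name prefixes; the lookups are assumed to return null if nothing matches.
string? micName = SDL2Helper.GetAudioRecordingDevice("Microphone");
string? speakerName = SDL2Helper.GetAudioPlaybackDevice("Speakers");

if (micName != null && speakerName != null)
{
    var audioSource = new SDL2AudioSource(micName, audioEncoder);
    var audioSink = new SDL2AudioEndPoint(speakerName, audioEncoder);

    // Formats offered to the remote party now come from audioEncoder.SupportedFormats
    // rather than the previous hard-coded PCMU/PCMA list.
}
```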
19 changes: 9 additions & 10 deletions src/SDL2AudioEndPoint.cs
@@ -54,16 +54,17 @@ public SDL2AudioEndPoint(string audioOutDeviceName, IAudioEncoder audioEncoder)
_audioEncoder = audioEncoder;

_audioOutDeviceName = audioOutDeviceName;


InitPlaybackDevice();
}

public void RestrictFormats(Func<AudioFormat, bool> filter) => _audioFormatManager.RestrictFormats(filter);

public void SetAudioSinkFormat(AudioFormat audioFormat)
{
_audioFormatManager.SetSelectedFormat(audioFormat);
if (_audioFormatManager != null)
{
_audioFormatManager.SetSelectedFormat(audioFormat);
InitPlaybackDevice();
}
}

public List<AudioFormat> GetAudioSinkFormats() => _audioFormatManager.GetSourceFormats();
@@ -87,9 +88,11 @@ private void InitPlaybackDevice()
SDL2Helper.CloseAudioPlaybackDevice(_audioOutDeviceId);
_audioOutDeviceId = 0;
}

// Init Playback device.
var audioSpec = SDL2Helper.GetDefaultAudioSpec();
AudioFormat audioFormat = _audioFormatManager.SelectedFormat;
var audioSpec = SDL2Helper.GetAudioSpec(audioFormat.ClockRate);

_audioOutDeviceId = SDL2Helper.OpenAudioPlaybackDevice(_audioOutDeviceName, ref audioSpec);
if(_audioOutDeviceId < 0)
{
@@ -105,7 +108,6 @@ private void InitPlaybackDevice()
}
}


/// <summary>
/// Event handler for playing audio samples received from the remote call party.
/// </summary>
@@ -154,9 +156,6 @@ public Task StartAudioSink()
{
if(!_isStarted)
{
var audioSpec = SDL2Helper.GetDefaultAudioSpec();
_audioOutDeviceId = SDL2Helper.OpenAudioPlaybackDevice(_audioOutDeviceName, ref audioSpec);

if (_audioOutDeviceId > 0)
{
_isStarted = true;
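The sink now reopens the SDL playback device from `SetAudioSinkFormat` using the negotiated format's clock rate, which is what lets G722 (16 kHz sampling, 8 kHz RTP clock) play back at the right speed. A hedged sketch of forcing that path from application code; the device-name prefix is a placeholder and the `AudioCodecsEnum` filter is an assumption built on the `RestrictFormats` signature shown above:

```csharp
// Sketch only: restrict the sink to G722 so that, once the format is negotiated,
// SetAudioSinkFormat() is called with a 16 kHz format and InitPlaybackDevice()
// reopens the SDL device at that clock rate.
using SIPSorcery.Media;
using SIPSorceryMedia.Abstractions;  // AudioFormat, AudioCodecsEnum
using SIPSorceryMedia.SDL2;

string? speakerName = SDL2Helper.GetAudioPlaybackDevice("Speakers"); // placeholder prefix
var audioSink = new SDL2AudioEndPoint(speakerName!, new AudioEncoder());

audioSink.RestrictFormats(format => format.Codec == AudioCodecsEnum.G722);
```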
21 changes: 15 additions & 6 deletions src/SDL2AudioSource.cs
@@ -13,7 +13,6 @@ namespace SIPSorceryMedia.SDL2
public class SDL2AudioSource: IAudioSource
{
static private ILogger logger = SIPSorcery.LogFactory.CreateLogger<SDL2AudioSource>();
static private List<AudioFormat> _supportedAudioFormats = SIPSorceryMedia.SDL2.SDL2Helper.GetSupportedAudioFormats();

private String _audioInDeviceName;
private uint _audioInDeviceId = 0;
@@ -29,7 +28,9 @@ public class SDL2AudioSource: IAudioSource

private BackgroundWorker backgroundWorker;

#region EVENT
private AudioSamplingRatesEnum audioSamplingRates;

#region EVENT
public event EncodedSampleDelegate ? OnAudioSourceEncodedSample = null;
public event RawAudioSampleDelegate ? OnAudioSourceRawSample = null;

@@ -43,10 +44,10 @@ public SDL2AudioSource(String audioInDeviceName, IAudioEncoder audioEncoder)

_audioInDeviceName = audioInDeviceName;

_audioFormatManager = new MediaFormatManager<AudioFormat>(_supportedAudioFormats);
_audioFormatManager = new MediaFormatManager<AudioFormat>(audioEncoder.SupportedFormats);
_audioEncoder = audioEncoder;

InitRecordingDevice();
//InitRecordingDevice();

backgroundWorker = new BackgroundWorker();
backgroundWorker.DoWork += BackgroundWorker_DoWork;
@@ -67,7 +68,7 @@ private unsafe void BackgroundWorker_DoWork(object sender, DoWorkEventArgs e)
SDL_DequeueAudio(_audioInDeviceId, (IntPtr)ptr, size);

short[] pcm = buf.Take((int)size * 2).Where((x, i) => i % 2 == 0).Select((y, i) => BitConverter.ToInt16(buf, i * 2)).ToArray();
OnAudioSourceRawSample?.Invoke(AudioSamplingRatesEnum.Rate8KHz, (uint)pcm.Length, pcm);
OnAudioSourceRawSample?.Invoke(audioSamplingRates, (uint)pcm.Length, pcm);

if (OnAudioSourceEncodedSample != null)
{
@@ -93,7 +94,13 @@ private void InitRecordingDevice()
}

// Init recording device.
audioSpec = SDL2Helper.GetDefaultAudioSpec();
AudioFormat audioFormat = _audioFormatManager.SelectedFormat;
if (audioFormat.ClockRate == AudioFormat.DEFAULT_CLOCK_RATE * 2)
audioSamplingRates = AudioSamplingRatesEnum.Rate16KHz;
else
audioSamplingRates = AudioSamplingRatesEnum.Rate8KHz;

audioSpec = SDL2Helper.GetAudioSpec(audioFormat.ClockRate);

_audioInDeviceId = SDL2Helper.OpenAudioRecordingDevice(_audioInDeviceName, ref audioSpec);
if (_audioInDeviceId < 0)
@@ -185,6 +192,8 @@ public void SetAudioSourceFormat(AudioFormat audioFormat)
{
logger.LogDebug($"Setting audio source format to {audioFormat.FormatID}:{audioFormat.Codec} {audioFormat.ClockRate}.");
_audioFormatManager.SetSelectedFormat(audioFormat);

InitRecordingDevice();
}
}

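On the capture side the source now derives the reported sampling rate from the selected format (16 kHz when the clock rate is twice `AudioFormat.DEFAULT_CLOCK_RATE`, i.e. G722) and defers opening the recording device until `SetAudioSourceFormat` is called. A hedged sketch of consuming its events; the `StartAudio()` call and the lambda parameter names are assumptions, only the constructor and event names come from the code above:

```csharp
// Sketch only: subscribe to the source's events after this change. The raw-sample
// event now reports Rate16KHz when a 16 kHz format such as G722 is selected, instead
// of the previously hard-coded Rate8KHz.
using SIPSorcery.Media;
using SIPSorceryMedia.Abstractions;  // AudioSamplingRatesEnum
using SIPSorceryMedia.SDL2;

string? micName = SDL2Helper.GetAudioRecordingDevice("Microphone"); // placeholder prefix
var audioSource = new SDL2AudioSource(micName!, new AudioEncoder());

audioSource.OnAudioSourceEncodedSample += (durationRtpUnits, sample) =>
{
    // Hand the encoded frame to the RTP or WebRTC session, e.g. an rtpSession.SendAudio(...) call.
};

audioSource.OnAudioSourceRawSample += (samplingRate, sampleCount, pcm) =>
{
    // samplingRate is AudioSamplingRatesEnum.Rate16KHz for G722, Rate8KHz for PCMU/PCMA.
};

await audioSource.StartAudio();  // assumed IAudioSource start call
```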
18 changes: 3 additions & 15 deletions src/SDL2Helper.cs
@@ -11,12 +11,6 @@ public class SDL2Helper
{
private static Boolean _sdl2Initialised = false;

static public List<AudioFormat> GetSupportedAudioFormats() => new List<AudioFormat>
{
new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMU),
new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMA)
};

static public String ? GetAudioRecordingDevice(String startWithName) => GetAudioDevice(startWithName, true);

static public String? GetAudioPlaybackDevice(String startWithName) => GetAudioDevice(startWithName, false);
@@ -25,18 +19,12 @@ public class SDL2Helper

static public List<String> GetAudioRecordingDevices() => GetAudioDevices(true);

static public SDL_AudioSpec GetDefaultAudioSpec()
static public SDL_AudioSpec GetAudioSpec(int clockRate = AudioFormat.DEFAULT_CLOCK_RATE, byte channels = 1)
{
//SDL_AudioSpec desiredPlaybackSpec = new SDL_AudioSpec();
//desiredPlaybackSpec.freq = AudioFormat.DEFAULT_CLOCK_RATE;
//desiredPlaybackSpec.format = AUDIO_S16;
//desiredPlaybackSpec.channels = 1; // Value rturned by (byte)ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_MONO);


SDL_AudioSpec desiredPlaybackSpec = new SDL_AudioSpec();
desiredPlaybackSpec.freq = AudioFormat.DEFAULT_CLOCK_RATE;
desiredPlaybackSpec.freq = clockRate;
desiredPlaybackSpec.format = AUDIO_S16;
desiredPlaybackSpec.channels = 1; // Value returned by (byte)ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_MONO);
desiredPlaybackSpec.channels = channels; // Value returned by (byte)ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_MONO);
desiredPlaybackSpec.silence = 0;
//desiredPlaybackSpec.samples = 512;
//desiredPlaybackSpec.userdata = null;
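`GetDefaultAudioSpec()` is replaced by `GetAudioSpec(clockRate, channels)`, so the `SDL_AudioSpec` frequency follows the negotiated codec instead of always being `AudioFormat.DEFAULT_CLOCK_RATE`. A hedged sketch of calling the new helper directly; the device-name prefix is a placeholder and the `<= 0` failure check mirrors the checks used elsewhere in this library:

```csharp
// Sketch only: open a playback device at G722's 16 kHz sampling rate.
using SIPSorceryMedia.SDL2;

var spec = SDL2Helper.GetAudioSpec(16000);  // AUDIO_S16, mono, 16 kHz
// SDL2Helper.GetAudioSpec() with no arguments keeps the 8 kHz default.

string? deviceName = SDL2Helper.GetAudioPlaybackDevice("Speakers"); // placeholder prefix
var deviceId = SDL2Helper.OpenAudioPlaybackDevice(deviceName!, ref spec);
if (deviceId <= 0)
{
    // SDL could not open the device with the requested spec.
}
```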
