diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/MainScene.unity b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/MainScene.unity index 565081b0..d1399989 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/MainScene.unity +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/MainScene.unity @@ -104,7 +104,7 @@ NavMeshSettings: serializedVersion: 2 m_ObjectHideFlags: 0 m_BuildSettings: - serializedVersion: 2 + serializedVersion: 3 agentTypeID: 0 agentRadius: 0.5 agentHeight: 2 @@ -117,7 +117,7 @@ NavMeshSettings: cellSize: 0.16666667 manualTileSize: 0 tileSize: 256 - accuratePlacement: 0 + buildHeightMesh: 0 maxJobWorkers: 0 preserveTilesOutsideBounds: 0 debug: @@ -128,6 +128,7 @@ PrefabInstance: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: + serializedVersion: 3 m_TransformParent: {fileID: 996985032} m_Modifications: - target: {fileID: 149840264038047480, guid: 117eea22f833ae64f95b5be7435be32d, type: 3} @@ -587,6 +588,9 @@ PrefabInstance: value: -40 objectReference: {fileID: 0} m_RemovedComponents: [] + m_RemovedGameObjects: [] + m_AddedGameObjects: [] + m_AddedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 117eea22f833ae64f95b5be7435be32d, type: 3} --- !u!224 &123332879 stripped RectTransform: @@ -635,7 +639,6 @@ RectTransform: m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 996985032} - m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} m_AnchorMax: {x: 1, y: 1} @@ -740,13 +743,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 575041316} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &705507993 GameObject: @@ -834,13 +837,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 705507993} + serializedVersion: 2 m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261} m_LocalPosition: {x: 0, y: 3, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0} --- !u!1 &963194225 GameObject: @@ -882,9 +885,17 @@ Camera: m_projectionMatrixMode: 1 m_GateFitMode: 2 m_FOVAxisMode: 0 + m_Iso: 200 + m_ShutterSpeed: 0.005 + m_Aperture: 16 + m_FocusDistance: 10 + m_FocalLength: 50 + m_BladeCount: 5 + m_Curvature: {x: 2, y: 11} + m_BarrelClipping: 0.25 + m_Anamorphism: 0 m_SensorSize: {x: 36, y: 24} m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 m_NormalizedViewPortRect: serializedVersion: 2 x: 0 @@ -918,13 +929,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 963194225} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 1, z: -10} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &996985028 GameObject: @@ -1003,7 +1014,9 @@ Canvas: m_OverrideSorting: 0 m_OverridePixelPerfect: 0 m_SortingBucketNormalizedSize: 0 + m_VertexColorAlwaysGammaSpace: 0 m_AdditionalShaderChannelsFlag: 25 + m_UpdateRectTransformForStandalone: 0 
m_SortingLayerID: 0 m_SortingOrder: 0 m_TargetDisplay: 0 @@ -1023,7 +1036,6 @@ RectTransform: - {fileID: 1214904789} - {fileID: 123332879} m_Father: {fileID: 0} - m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} m_AnchorMax: {x: 0, y: 0} @@ -1046,7 +1058,6 @@ MonoBehaviour: _senderVideoWidth: 1280 _senderVideoHeight: 720 _senderVideoFps: 30 - _inputAudioSource: {fileID: 1825738054} _inputSceneCamera: {fileID: 963194227} _callScreen: {fileID: 123332880} _mainScreen: {fileID: 355221056687574256} @@ -1096,147 +1107,20 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1354881823} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1825738053 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 1825738055} - - component: {fileID: 1825738054} - m_Layer: 0 - m_Name: MicrophoneInput - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!82 &1825738054 -AudioSource: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1825738053} - m_Enabled: 1 - serializedVersion: 4 - OutputAudioMixerGroup: {fileID: 0} - m_audioClip: {fileID: 0} - m_PlayOnAwake: 1 - m_Volume: 1 - m_Pitch: 1 - Loop: 0 - Mute: 0 - Spatialize: 0 - SpatializePostEffects: 0 - Priority: 128 - DopplerLevel: 1 - MinDistance: 1 - MaxDistance: 500 - Pan2D: 0 - rolloffMode: 0 - BypassEffects: 0 - BypassListenerEffects: 0 - BypassReverbZones: 0 - rolloffCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 1 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - - serializedVersion: 3 - time: 1 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - panLevelCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - spreadCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - reverbZoneMixCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 1 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 ---- !u!4 &1825738055 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1825738053} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_ConstrainProportionsScale: 0 - m_Children: [] - m_Father: {fileID: 
0} - m_RootOrder: 5 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1001 &355221056687574255 PrefabInstance: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: + serializedVersion: 3 m_TransformParent: {fileID: 996985032} m_Modifications: - target: {fileID: 355221055551435707, guid: 0718803385b9d8840b6d4f562c3d956a, type: 3} @@ -1812,6 +1696,9 @@ PrefabInstance: value: 0 objectReference: {fileID: 0} m_RemovedComponents: [] + m_RemovedGameObjects: [] + m_AddedGameObjects: [] + m_AddedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 0718803385b9d8840b6d4f562c3d956a, type: 3} --- !u!114 &355221056687574256 stripped MonoBehaviour: @@ -1824,3 +1711,12 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: ecb25fba984048238b2cf9ae786dab82, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!1660057539 &9223372036854775807 +SceneRoots: + m_ObjectHideFlags: 0 + m_Roots: + - {fileID: 1354881825} + - {fileID: 996985032} + - {fileID: 705507995} + - {fileID: 575041319} + - {fileID: 963194228} diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/StreamVideoManager.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/StreamVideoManager.cs index 3e29a317..1d5f8f6d 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/StreamVideoManager.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/StreamVideoManager.cs @@ -17,6 +17,18 @@ public class StreamVideoManager : MonoBehaviour public event Action CallEnded; public IStreamVideoClient Client { get; private set; } + + public void Init() + { + _clientConfig = new StreamClientConfig + { + LogLevel = StreamLogLevel.Debug, + }; + + Client = StreamVideoClient.CreateDefaultClient(_clientConfig); + Client.CallStarted += OnCallStarted; + Client.CallEnded += OnCallEnded; + } /// /// Join the Call with a given ID. We can either create it or try to join only. 
@@ -65,18 +77,6 @@ public void LeaveActiveCall() /// </summary> public void SetAudioREDundancyEncoding(bool value) => _clientConfig.Audio.EnableRed = value; - protected void Awake() - { - _clientConfig = new StreamClientConfig - { - LogLevel = StreamLogLevel.Debug, - }; - - Client = StreamVideoClient.CreateDefaultClient(_clientConfig); - Client.CallStarted += OnCallStarted; - Client.CallEnded += OnCallEnded; - } - protected async void Start() { var credentials = new AuthCredentials(_apiKey, _userId, _userToken); diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/CameraMediaDevicePanel.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/CameraMediaDevicePanel.cs index 98c0d4f2..d40b0b3f 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/CameraMediaDevicePanel.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/CameraMediaDevicePanel.cs @@ -1,11 +1,41 @@ using System.Collections.Generic; -using System.Linq; -using UnityEngine; +using StreamVideo.Core.DeviceManagers; namespace StreamVideo.ExampleProject.UI.Devices { - public class CameraMediaDevicePanel : MediaDevicePanelBase + public class CameraMediaDevicePanel : MediaDevicePanelBase<CameraDeviceInfo> { - protected override IEnumerable<string> GetDevicesNames() => WebCamTexture.devices.Select(d => d.name); + protected override CameraDeviceInfo SelectedDevice => Client.VideoDeviceManager.SelectedDevice; + + protected override bool IsDeviceEnabled + { + get => Client.VideoDeviceManager.IsEnabled; + set => Client.VideoDeviceManager.SetEnabled(value); + } + + protected override IEnumerable<CameraDeviceInfo> GetDevices() => Client.VideoDeviceManager.EnumerateDevices(); + + protected override string GetDeviceName(CameraDeviceInfo device) => device.Name; + + protected override void ChangeDevice(CameraDeviceInfo device) + => Client.VideoDeviceManager.SelectDevice(device, UIManager.SenderVideoResolution, IsDeviceEnabled, + UIManager.SenderVideoFps); + + protected override void OnInit() + { + base.OnInit(); + + Client.VideoDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + } + + protected override void OnDestroying() + { + Client.VideoDeviceManager.SelectedDeviceChanged -= OnSelectedDeviceChanged; + + base.OnDestroying(); + } + + private void OnSelectedDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) + => SelectDeviceWithoutNotify(currentDevice); } } \ No newline at end of file diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MediaDevicePanelBase.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MediaDevicePanelBase.cs index 534884eb..3d92b5cd 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MediaDevicePanelBase.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MediaDevicePanelBase.cs @@ -1,45 +1,42 @@ -using System.Collections; +using System; +using System.Collections; using System.Collections.Generic; using System.Linq; +using StreamVideo.Core; using TMPro; using UnityEngine; namespace StreamVideo.ExampleProject.UI.Devices { - /// <summary> - /// Event handler for device changed event - /// </summary> - public delegate void DeviceChangeHandler(string deviceName, bool
isActive); - - /// <summary> - /// Event handler for device toggled event - /// </summary> - public delegate void DeviceToggleHandler(bool isActive); - /// /// Panel that displays media device (microphone or camera) dropdown to pick the active device and a button to toggle on/off state - public abstract class MediaDevicePanelBase : MonoBehaviour + public abstract class MediaDevicePanelBase<TDevice> : MonoBehaviour + where TDevice : struct { - public event DeviceChangeHandler DeviceChanged; - public event DeviceToggleHandler DeviceToggled; + public void Init(IStreamVideoClient client, UIManager uiManager) + { + Client = client ?? throw new ArgumentNullException(nameof(client)); + UIManager = uiManager ? uiManager : throw new ArgumentNullException(nameof(uiManager)); - public string SelectedDeviceName { get; private set; } - - //StreamTodo: android has DeviceStatus: Enabled, Disabled, NotSelected - public bool IsDeviceActive { get; private set; } = true; + UpdateDevicesDropdown(GetDevices()); + + OnInit(); + } - public void SelectDeviceWithoutNotify(string deviceName) + public void SelectDeviceWithoutNotify(TDevice device) { - var index = _deviceNames.IndexOf(deviceName); + var index = _devices.IndexOf(device); if (index == -1) { - Debug.LogError($"Failed to find index for device: {deviceName}"); + Debug.LogError($"Failed to find index for device: {device}"); return; } _dropdown.SetValueWithoutNotify(index); } + + protected IStreamVideoClient Client { get; private set; } // Called by Unity protected void Awake() @@ -48,8 +45,6 @@ protected void Awake() _deviceButton.Init(_buttonOnSprite, _buttonOffSprite); _deviceButton.Clicked += OnDeviceButtonClicked; - - UpdateDevicesDropdown(GetDevicesNames().ToList()); _refreshDeviceInterval = new WaitForSeconds(0.5f); _refreshCoroutine = StartCoroutine(RefreshDevicesList()); @@ -58,20 +53,41 @@ protected void Awake() // Called by Unity protected void Start() { - _deviceButton.UpdateSprite(IsDeviceActive); + _deviceButton.UpdateSprite(IsDeviceEnabled); } // Called by Unity protected void OnDestroy() { + OnDestroying(); + if (_refreshCoroutine != null) { StopCoroutine(_refreshCoroutine); } } + + protected virtual void OnInit() + { + + } + + protected virtual void OnDestroying() + { + + } - protected abstract IEnumerable<string> GetDevicesNames(); + protected abstract IEnumerable<TDevice> GetDevices(); + protected abstract TDevice SelectedDevice { get; } + protected abstract bool IsDeviceEnabled { get; set; } + protected UIManager UIManager { get; private set; } + protected abstract string GetDeviceName(TDevice device); + + protected abstract void ChangeDevice(TDevice device); + + private readonly List<TDevice> _devices = new List<TDevice>(); + [SerializeField] private Sprite _buttonOnSprite; @@ -86,28 +102,24 @@ protected void OnDestroy() private Coroutine _refreshCoroutine; private YieldInstruction _refreshDeviceInterval; - private readonly List<string> _deviceNames = new List<string>(); private void OnDropdownValueChanged(int optionIndex) { - var deviceName = _deviceNames.ElementAt(optionIndex); - if (deviceName == null) + var device = _devices.ElementAt(optionIndex); + if (device.Equals(default)) { - Debug.LogError($"Failed to select device with index: {optionIndex}. 
Available devices: " + - string.Join(", ", _deviceNames)); + string.Join(", ", _devices)); return; } - SelectedDeviceName = deviceName; - - DeviceChanged?.Invoke(SelectedDeviceName, IsDeviceActive); + ChangeDevice(device); } private void OnDeviceButtonClicked() { - IsDeviceActive = !IsDeviceActive; - _deviceButton.UpdateSprite(IsDeviceActive); - DeviceToggled?.Invoke(IsDeviceActive); + IsDeviceEnabled = !IsDeviceEnabled; + _deviceButton.UpdateSprite(IsDeviceEnabled); } // User can add/remove devices any time so we must constantly monitor the devices list @@ -115,11 +127,16 @@ private IEnumerator RefreshDevicesList() { while (true) { - var availableDevices = GetDevicesNames().ToList(); - var devicesChanged = !_deviceNames.SequenceEqual(availableDevices); + while (Client == null) + { + yield return _refreshDeviceInterval; + } + + var availableDevices = GetDevices().ToList(); + var devicesChanged = !_devices.SequenceEqual(availableDevices); if (devicesChanged) { - var prevDevicesLog = string.Join(", ", _deviceNames); + var prevDevicesLog = string.Join(", ", _devices); var newDevicesLog = string.Join(", ", availableDevices); Debug.Log($"Device list changed. Previous: {prevDevicesLog}, Current: {newDevicesLog}"); @@ -130,17 +147,17 @@ private IEnumerator RefreshDevicesList() } } - private void UpdateDevicesDropdown(List<string> devices) + private void UpdateDevicesDropdown(IEnumerable<TDevice> devices) { - _deviceNames.Clear(); - _deviceNames.AddRange(devices); + _devices.Clear(); + _devices.AddRange(devices); _dropdown.ClearOptions(); - _dropdown.AddOptions(devices); + _dropdown.AddOptions(devices.Select(GetDeviceName).ToList()); - if (!string.IsNullOrEmpty(SelectedDeviceName) && !devices.Contains(SelectedDeviceName)) + if (!EqualityComparer<TDevice>.Default.Equals(SelectedDevice, default) && !devices.Contains(SelectedDevice)) { - Debug.LogError($"Previously active device was unplugged: {SelectedDeviceName}"); + Debug.LogError($"Previously active device was unplugged: {SelectedDevice}"); //StreamTodo: handle case when user unplugged active device } } diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs index 876f9e62..704bbe8a 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs @@ -1,10 +1,39 @@ using System.Collections.Generic; -using UnityEngine; +using StreamVideo.Core.DeviceManagers; namespace StreamVideo.ExampleProject.UI.Devices { - public class MicrophoneMediaDevicePanel : MediaDevicePanelBase + public class MicrophoneMediaDevicePanel : MediaDevicePanelBase<MicrophoneDeviceInfo> { - protected override IEnumerable<string> GetDevicesNames() => Microphone.devices; + protected override MicrophoneDeviceInfo SelectedDevice => Client.AudioDeviceManager.SelectedDevice; + + protected override bool IsDeviceEnabled + { + get => Client.AudioDeviceManager.IsEnabled; + set => Client.AudioDeviceManager.SetEnabled(value); + } + + protected override IEnumerable<MicrophoneDeviceInfo> GetDevices() => Client.AudioDeviceManager.EnumerateDevices(); + + protected override string GetDeviceName(MicrophoneDeviceInfo device) => device.Name; + + protected override void ChangeDevice(MicrophoneDeviceInfo device) => 
Client.AudioDeviceManager.SelectDevice(device, IsDeviceEnabled); + + protected override void OnInit() + { + base.OnInit(); + + Client.AudioDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + } + + protected override void OnDestroying() + { + Client.AudioDeviceManager.SelectedDeviceChanged -= OnSelectedDeviceChanged; + + base.OnDestroying(); + } + + private void OnSelectedDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) + => SelectDeviceWithoutNotify(currentDevice); } } \ No newline at end of file diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/ParticipantView.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/ParticipantView.cs index 8b9d2a12..ac22ea12 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/ParticipantView.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/ParticipantView.cs @@ -43,26 +43,13 @@ public void UpdateIsDominantSpeaker(bool isDominantSpeaker) /// public void SetLocalCameraSource(WebCamTexture localWebCamTexture) { - _localWebCamTexture = localWebCamTexture; - - if (_localParticipantRenderTexture != null) - { - // Dispose previous texture - _localParticipantRenderTexture.Release(); - _localParticipantRenderTexture = null; - } - if (localWebCamTexture == null) { _video.texture = null; return; } - _localParticipantRenderTexture = new RenderTexture(localWebCamTexture.width, localWebCamTexture.height, 0, RenderTextureFormat.Default); - _localParticipantRenderTexture.Create(); - - // we set RenderTexture a a RawImage.texture because the RenderTexture will receive video stream from the local camera - _video.texture = _localParticipantRenderTexture; + _video.texture = localWebCamTexture; } // Called by Unity Engine @@ -74,17 +61,14 @@ protected void Awake() // Called by Unity Engine protected void Update() { - if (_localWebCamTexture != null) - { - Graphics.Blit(_localWebCamTexture, _localParticipantRenderTexture); - } - var rect = _videoRectTransform.rect; var videoRenderedSize = new Vector2(rect.width, rect.height); if (videoRenderedSize != _lastVideoRenderedSize) { _lastVideoRenderedSize = videoRenderedSize; var videoResolution = new VideoResolution((int)videoRenderedSize.x, (int)videoRenderedSize.y); + + // To optimize bandwidth we always request the video resolution that matches what we're actually rendering Participant.UpdateRequestedVideoResolution(videoResolution); Debug.Log($"Rendered resolution changed for participant `{Participant.UserId}`. 
Requested video resolution update to: {videoResolution}"); } @@ -115,8 +99,6 @@ protected void OnDestroy() private Color32 _defaultSpeakerFrameColor; private AudioSource _audioSource; - private RenderTexture _localParticipantRenderTexture; - private WebCamTexture _localWebCamTexture; private RectTransform _videoRectTransform; private Vector2 _lastVideoRenderedSize; diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/BaseScreenView.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/BaseScreenView.cs index 83d2ac0e..28a317bc 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/BaseScreenView.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/BaseScreenView.cs @@ -51,8 +51,6 @@ public void Hide() protected abstract void OnHide(); - protected void Log(string message, LogType type) => UIManager.Log(message, type); - private GameObject _gameObject; } } \ No newline at end of file diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/CallScreenView.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/CallScreenView.cs index f0b5de93..ae136c51 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/CallScreenView.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/CallScreenView.cs @@ -61,12 +61,9 @@ protected override void OnInit() { _leaveBtn.onClick.AddListener(VideoManager.LeaveActiveCall); _endBtn.onClick.AddListener(VideoManager.EndActiveCall); - - _cameraPanel.DeviceChanged += UIManager.ChangeCamera; - _cameraPanel.DeviceToggled += UIManager.SetCameraActive; - _microphonePanel.DeviceChanged += UIManager.ChangeMicrophone; - _microphonePanel.DeviceToggled += UIManager.SetMicrophoneActive; + _cameraPanel.Init(VideoManager.Client, UIManager); + _microphonePanel.Init(VideoManager.Client, UIManager); } protected override void OnShow(ShowArgs showArgs) @@ -93,7 +90,7 @@ protected override void OnShow(ShowArgs showArgs) _activeCall.SortedParticipantsUpdated += SortParticipantViews; - UIManager.ActiveCameraChanged += OnActiveCameraChanged; + UIManager.LocalCameraChanged += OnLocalCameraChanged; // Show active call ID so user can copy it and send others to join _joinCallIdInput.text = _activeCall.Id; @@ -112,7 +109,7 @@ protected override void OnHide() RemoveAllParticipants(); - UIManager.ActiveCameraChanged -= OnActiveCameraChanged; + UIManager.LocalCameraChanged -= OnLocalCameraChanged; } private void OnDominantSpeakerChanged(IStreamVideoCallParticipant currentDominantSpeaker, @@ -156,7 +153,9 @@ private void AddParticipant(IStreamVideoCallParticipant participant, bool sortPa if (participant.IsLocalParticipant) { // Set input camera as a video source for local participant - we won't receive OnTrack event for local participant - view.SetLocalCameraSource(UIManager.ActiveCamera); + var webCamTexture = VideoManager.Client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + view.SetLocalCameraSource(webCamTexture); + //StreamTodo: this will invalidate each time WebCamTexture is internally replaced so we need a better way to expose this } if (sortParticipantViews) @@ -224,7 +223,7 @@ private void 
RemoveAllParticipants() _participantSessionIdToView.Clear(); } - private void OnActiveCameraChanged(WebCamTexture activeCamera) + private void OnLocalCameraChanged(WebCamTexture activeCamera) { // Input Camera changed so let's update the preview for local participant var localParticipant diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/MainScreenView.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/MainScreenView.cs index 5d398789..a365270a 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/MainScreenView.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/Screens/MainScreenView.cs @@ -1,5 +1,4 @@ using System; -using System.Linq; using StreamVideo.ExampleProject.UI.Devices; using TMPro; using UnityEngine; @@ -29,24 +28,18 @@ protected override void OnInit() _audioRedToggle.onValueChanged.AddListener(VideoManager.SetAudioREDundancyEncoding); _audioDtxToggle.onValueChanged.AddListener(VideoManager.SetAudioDtx); - _cameraPanel.DeviceChanged += UIManager.ChangeCamera; - _cameraPanel.DeviceToggled += UIManager.SetCameraActive; - - _microphonePanel.DeviceChanged += UIManager.ChangeMicrophone; - _microphonePanel.DeviceToggled += UIManager.SetMicrophoneActive; - - SmartPickDefaultCamera(); - SmartPickDefaultMicrophone(); + _cameraPanel.Init(VideoManager.Client, UIManager); + _microphonePanel.Init(VideoManager.Client, UIManager); } protected override void OnShow(CallScreenView.ShowArgs showArgs) { - UIManager.ActiveCameraChanged += OnActiveCameraChanged; + UIManager.LocalCameraChanged += OnLocalCameraChanged; } protected override void OnHide() { - UIManager.ActiveCameraChanged -= OnActiveCameraChanged; + UIManager.LocalCameraChanged -= OnLocalCameraChanged; } [SerializeField] @@ -82,7 +75,7 @@ private async void OnJoinCallButtonClicked() { if (string.IsNullOrEmpty(_joinCallIdInput.text)) { - Log("`Call ID` is required when trying to join a call", LogType.Error); + Debug.LogError("`Call ID` is required when trying to join a call"); return; } @@ -107,60 +100,11 @@ private async void OnCreateAndJoinCallButtonClicked() } } - private void OnActiveCameraChanged(WebCamTexture activeCamera) + private void OnLocalCameraChanged(WebCamTexture activeCamera) { _localCameraImage.texture = activeCamera; } - private void SmartPickDefaultCamera() - { - var devices = WebCamTexture.devices; - -#if UNITY_STANDALONE_WIN - //StreamTodo: remove this, "Capture" is our debug camera - _defaultCamera = devices.FirstOrDefault(d => d.name.Contains("Capture")); - -#elif UNITY_ANDROID || UNITY_IOS - _defaultCamera = devices.FirstOrDefault(d => d.isFrontFacing); -#endif - - if (string.IsNullOrEmpty(_defaultCamera.name)) - { - _defaultCamera = devices.FirstOrDefault(); - } - - if (string.IsNullOrEmpty(_defaultCamera.name)) - { - Debug.LogError("Failed to pick default camera device"); - return; - } - - _cameraPanel.SelectDeviceWithoutNotify(_defaultCamera.name); - UIManager.ChangeCamera(_defaultCamera.name, _cameraPanel.IsDeviceActive); - } - - //StreamTodo: remove - private void SmartPickDefaultMicrophone() - { - var preferredMicDevices = new[] { "bose", "airpods" }; - _defaultMicrophoneDeviceName = Microphone.devices.FirstOrDefault(d - => preferredMicDevices.Any(m => d.IndexOf(m, StringComparison.OrdinalIgnoreCase) != -1)); - - if (string.IsNullOrEmpty(_defaultMicrophoneDeviceName)) 
- { - _defaultMicrophoneDeviceName = Microphone.devices.FirstOrDefault(); - } - - if (string.IsNullOrEmpty(_defaultMicrophoneDeviceName)) - { - Debug.LogError("Failed to pick default microphone device"); - return; - } - - _microphonePanel.SelectDeviceWithoutNotify(_defaultMicrophoneDeviceName); - UIManager.ChangeMicrophone(_defaultMicrophoneDeviceName, _microphonePanel.IsDeviceActive); - } - private static string CreateRandomCallId() => Guid.NewGuid().ToString().Replace("-", ""); } } \ No newline at end of file diff --git a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/UIManager.cs b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/UIManager.cs index 1c19469a..21fb2721 100644 --- a/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/UIManager.cs +++ b/Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Scripts/UI/UIManager.cs @@ -1,128 +1,51 @@ using System; +using System.Linq; +using System.Threading.Tasks; +using StreamVideo.Core; +using StreamVideo.Core.DeviceManagers; using StreamVideo.Core.StatefulModels; using StreamVideo.ExampleProject.UI.Screens; +using StreamVideo.Libs.Utils; using UnityEngine; namespace StreamVideo.ExampleProject.UI { public class UIManager : MonoBehaviour { - public event Action ActiveCameraChanged; + public event Action LocalCameraChanged; - public WebCamTexture ActiveCamera { get; private set; } - public AudioSource InputAudioSource => _inputAudioSource; - public Camera InputSceneSource => _inputSceneCamera; - - public void ChangeMicrophone(string deviceName, bool isActive) - { - if (!string.IsNullOrEmpty(_selectedMicrophoneDeviceName)) - { - StopAudioRecording(); - } - - var prevDevice = _selectedMicrophoneDeviceName ?? "None"; - _selectedMicrophoneDeviceName = deviceName; - - if (isActive) - { - StartAudioRecording(); - } - - Debug.Log( - $"Changed selected MICROPHONE from `{prevDevice}` to `{_selectedMicrophoneDeviceName}`. Recording: {isActive}"); - } - - public void ChangeCamera(string deviceName, bool isActive) - { - var prevDevice = ActiveCamera != null ? 
ActiveCamera.deviceName : "None"; - - if (ActiveCamera == null) - { - ActiveCamera = new WebCamTexture(deviceName, _senderVideoWidth, _senderVideoHeight, _senderVideoFps); - } - - // Camera needs to be stopped before changing the deviceName - ActiveCamera.Stop(); - ActiveCamera.deviceName = deviceName; - - if (isActive) - { - ActiveCamera.Play(); - //StreamTodo: handle in coroutine and check if the camera started - } - - Debug.Log($"Changed active CAMERA from `{prevDevice}` to `{deviceName}`"); - - _videoManager.Client?.SetCameraInputSource(ActiveCamera); - - ActiveCameraChanged?.Invoke(ActiveCamera); - } - - /// - /// Start/stop microphone recording - /// - public void SetMicrophoneActive(bool isActive) - { - _videoManager.Client.AudioDeviceManager.SetEnabled(isActive); - - if (isActive) - { - StartAudioRecording(); - return; - } - - StopAudioRecording(); - } - - /// - /// Start/stop camera recording - /// - public void SetCameraActive(bool isActive) - { - _videoManager.Client.VideoDeviceManager.SetEnabled(isActive); - - if (isActive) - { - ActiveCamera.Play(); - Debug.Log($"Camera recording started for `{ActiveCamera.deviceName}`"); - return; - } - - ActiveCamera.Stop(); - Debug.Log($"Camera recording stopped for `{ActiveCamera.deviceName}`"); - } - - public void Log(string message, LogType type) - { - if (type == LogType.Exception) - { - throw new NotSupportedException("To log exceptions use " + nameof(Debug.LogException)); - } - - Debug.LogFormat(type, LogOption.None, context: null, format: message); - } + public VideoResolution SenderVideoResolution => new VideoResolution(_senderVideoWidth, _senderVideoHeight); + public int SenderVideoFps => _senderVideoFps; protected void Awake() { + _videoManager.Init(); + _videoManager.CallStarted += OnCallStarted; _videoManager.CallEnded += OnCallEnded; + _videoManager.Client.VideoDeviceManager.SelectedDeviceChanged += OnCameraDeviceChanged; + _videoManager.Client.AudioDeviceManager.SelectedDeviceChanged += OnMicrophoneDeviceChanged; + _mainScreen.Init(_videoManager, uiManager: this); _callScreen.Init(_videoManager, uiManager: this); - } - - protected void Start() - { - _videoManager.Client.SetAudioInputSource(_inputAudioSource); - _videoManager.Client.SetCameraInputSource(ActiveCamera); - ShowMainScreen(); + SelectFirstWorkingCameraOrDefaultAsync().LogIfFailed(); + SelectFirstMicrophone(); } + protected void Start() => ShowMainScreen(); + protected void OnDestroy() { _videoManager.CallStarted -= OnCallStarted; _videoManager.CallEnded -= OnCallEnded; + + if (_videoManager.Client != null) + { + _videoManager.Client.VideoDeviceManager.SelectedDeviceChanged -= OnCameraDeviceChanged; + _videoManager.Client.AudioDeviceManager.SelectedDeviceChanged -= OnMicrophoneDeviceChanged; + } } [SerializeField] @@ -137,70 +60,79 @@ protected void OnDestroy() [SerializeField] private int _senderVideoFps = 30; - [SerializeField] - private AudioSource _inputAudioSource; - - [SerializeField] - private Camera _inputSceneCamera; - [SerializeField] private CallScreenView _callScreen; [SerializeField] private MainScreenView _mainScreen; - private string _selectedMicrophoneDeviceName; - private void OnCallStarted(IStreamCall call) => ShowCallScreen(call); private void OnCallEnded() => ShowMainScreen(); - private void StartAudioRecording() + private void ShowMainScreen() + { + _callScreen.Hide(); + _mainScreen.Show(); + } + + private void ShowCallScreen(IStreamCall call) + { + _mainScreen.Hide(); + _callScreen.Show(new CallScreenView.ShowArgs(call)); + } + + private void 
OnMicrophoneDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) + { + Debug.Log($"Changed selected MICROPHONE from `{previousDevice}` to `{currentDevice}`"); + } + + private void OnCameraDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) + { + Debug.Log($"Changed active CAMERA from `{previousDevice}` to `{currentDevice}`"); + + var webCamTexture = _videoManager.Client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + LocalCameraChanged?.Invoke(webCamTexture); + } + + private async Task SelectFirstWorkingCameraOrDefaultAsync() { - if (_inputAudioSource == null) + if (!_videoManager.Client.VideoDeviceManager.EnumerateDevices().Any()) { - Debug.LogError("Audio recording failed. Input Audio Source is null"); + Debug.LogError("No camera devices found! Video streaming will not work. Please ensure that a camera device is plugged in."); return; } - - if (string.IsNullOrEmpty(_selectedMicrophoneDeviceName)) + + var workingDevice = await _videoManager.Client.VideoDeviceManager.TryFindFirstWorkingDeviceAsync(); + if (workingDevice.HasValue) { - Debug.LogError("Audio recording failed. No microphone device selected."); + _videoManager.Client.VideoDeviceManager.SelectDevice(workingDevice.Value, enable: false); return; } - - //StreamTodo: should the volume be 0 so we never hear input from our own microphone? - _inputAudioSource.clip - = Microphone.Start(_selectedMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); - _inputAudioSource.loop = true; - _inputAudioSource.Play(); - - Debug.Log($"Audio recording started for `{_selectedMicrophoneDeviceName}`"); - } - - private void StopAudioRecording() - { - var isRecording = !string.IsNullOrEmpty(_selectedMicrophoneDeviceName) && - Microphone.IsRecording(_selectedMicrophoneDeviceName); - if (!isRecording) + + Debug.LogWarning("No working camera found. Falling back to first device."); + + var firstDevice = _videoManager.Client.VideoDeviceManager.EnumerateDevices().FirstOrDefault(); + if (firstDevice == default) { + Debug.LogError("No camera devices found! Video streaming will not work. Please ensure that a camera device is plugged in."); return; } - - Microphone.End(_selectedMicrophoneDeviceName); - Debug.Log($"Audio recording stopped for `{_selectedMicrophoneDeviceName}`"); - } - - private void ShowMainScreen() - { - _callScreen.Hide(); - _mainScreen.Show(); + + _videoManager.Client.VideoDeviceManager.SelectDevice(firstDevice, enable: false); } - private void ShowCallScreen(IStreamCall call) + private void SelectFirstMicrophone() { - _mainScreen.Hide(); - _callScreen.Show(new CallScreenView.ShowArgs(call)); + // Select first microphone by default + var microphoneDevice = _videoManager.Client.AudioDeviceManager.EnumerateDevices().FirstOrDefault(); + if (microphoneDevice == default) + { + Debug.LogError("No microphone devices found! Audio streaming will not work. 
Please ensure that a microphone device is plugged in."); + return; + } + + _videoManager.Client.AudioDeviceManager.SelectDevice(microphoneDevice, enable: false); } } } \ No newline at end of file diff --git a/Packages/StreamVideo/DocsCodeSamples/01-basics/QuickStartCodeSamples.cs b/Packages/StreamVideo/DocsCodeSamples/01-basics/QuickStartCodeSamples.cs index 74b9413a..a526399a 100644 --- a/Packages/StreamVideo/DocsCodeSamples/01-basics/QuickStartCodeSamples.cs +++ b/Packages/StreamVideo/DocsCodeSamples/01-basics/QuickStartCodeSamples.cs @@ -4,6 +4,7 @@ using StreamVideo.Core.StatefulModels; using StreamVideo.Core.StatefulModels.Tracks; using UnityEngine; +using UnityEngine.Android; using UnityEngine.UI; namespace StreamVideoDocsCodeSamples._01_basics @@ -33,7 +34,7 @@ public async void CreateCallAndJoin() var callType = StreamCallType.Default; // Call type affects default permissions var callId = "my-call-id"; -// Notice that we pass create argument as true - this will create the call if it doesn't already exist + // Notice that we pass create argument as true - this will create the call if it doesn't already exist var streamCall = await _client.JoinCallAsync(callType, callId, create: true, ring: true, notify: false); } @@ -42,7 +43,7 @@ public async void JoinOtherCall() var callType = StreamCallType.Default; // Call type affects default permissions var callId = "my-call-id"; -// Notice that we pass create argument as false - if the call doesn't exist the join attempt will fail + // Notice that we pass create argument as false - if the call doesn't exist the join attempt will fail var streamCall = await _client.JoinCallAsync(callType, callId, create: false, ring: true, notify: false); } @@ -50,13 +51,13 @@ public async void GetCallParticipants() { var callType = StreamCallType.Default; // Call type affects default permissions var callId = "my-call-id"; - + var streamCall = await _client.JoinCallAsync(callType, callId, create: false, ring: true, notify: false); - + // Subscribe to events to get notified that streamCall.Participants collection changed streamCall.ParticipantJoined += OnParticipantJoined; streamCall.ParticipantLeft += OnParticipantLeft; - + // Iterate through current participants foreach (var participant in streamCall.Participants) { @@ -106,73 +107,133 @@ private void OnParticipantTrackAdded(IStreamVideoCallParticipant participant, IS // This assumes that this gameObject contains the AudioSource component but it's not a requirement. You can obtain the AudioSource reference in your preferred way var audioSource = GetComponent(); - + // This AudioSource will receive audio from the participant streamAudioTrack.SetAudioSourceTarget(audioSource); break; case StreamVideoTrack streamVideoTrack: - + // This assumes that this gameObject contains the RawImage component but it's not a requirement. 
You can obtain the RawImage reference in your preferred way var rawImage = GetComponent<RawImage>(); - + // This RawImage will receive video from the participant streamVideoTrack.SetRenderTarget(rawImage); break; } } - public void SetAudioInput() + public void ListAvailableMicrophoneDevices() { - // Obtain reference to an AudioSource that will be used a source of audio - var audioSource = GetComponent<AudioSource>(); - _client.SetAudioInputSource(audioSource); + var microphones = _client.AudioDeviceManager.EnumerateDevices(); + + foreach (var mic in microphones) + { + Debug.Log(mic.Name); + } + } + + public void SelectAudioCapturingDevice() + { + // Enumerate available microphone devices + var microphones = _client.AudioDeviceManager.EnumerateDevices(); + + foreach (var mic in microphones) + { + Debug.Log(mic.Name); + } + + var firstMicrophone = microphones.First(); + + // Select microphone device to capture audio input. `enable` argument determines whether audio capturing should start + _client.AudioDeviceManager.SelectDevice(firstMicrophone, enable: true); + } + + public void StartStopAudioRecording() + { + // Start audio capturing + _client.AudioDeviceManager.Enable(); + + // Stop audio capturing + _client.AudioDeviceManager.Disable(); } - public void BindMicrophoneToAudioSource() + public void RequestMicrophonePermissionsIOSandWebGL() { - // Obtain reference to an AudioSource that will be used a source of audio - var inputAudioSource = GetComponent<AudioSource>(); + // Request microphone permissions + Application.RequestUserAuthorization(UserAuthorization.Microphone); - // Get a valid microphone device name. - // You usually want to populate a dropdown list with Microphone.devices so that the user can pick which device should be used - _activeMicrophoneDeviceName = Microphone.devices.First(); + // Check if user granted microphone permission + if (!Application.HasUserAuthorization(UserAuthorization.Microphone)) + { + // Notify user that microphone permission was not granted and the microphone capturing will not work. + } + } - inputAudioSource.clip - = Microphone.Start(_activeMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); - inputAudioSource.loop = true; - inputAudioSource.Play(); + public void RequestMicrophonePermissionsAndroid() + { + // Request microphone permissions + Permission.RequestUserPermission(Permission.Microphone); + + // Check if user granted microphone permission + if (!Permission.HasUserAuthorizedPermission(Permission.Microphone)) + { + // Notify user that microphone permission was not granted and the microphone capturing will not work. + } } - public void StopAudioRecording() + public void ListAvailableCameraDevices() { - Microphone.End(_activeMicrophoneDeviceName); + var cameras = _client.VideoDeviceManager.EnumerateDevices(); + + foreach (var cam in cameras) + { + Debug.Log(cam.Name); + } } - public void SetVideoInput() + public void SelectVideoCapturingDevice() { -// Obtain reference to a WebCamTexture that will be used a source of video - var webCamTexture = GetComponent<WebCamTexture>(); - _client.SetCameraInputSource(webCamTexture); + // Enumerate available camera devices + var cameras = _client.VideoDeviceManager.EnumerateDevices(); + + var firstCamera = cameras.First(); + + // Select camera device to capture video input. 
`enable` argument determines whether video capturing should start + _client.VideoDeviceManager.SelectDevice(firstCamera, enable: true); } - public void BindCameraToWebCamTexture() + public void StartStopVideoCapturing() { -// Obtain a camera device - var cameraDevice = WebCamTexture.devices.First(); + // Start video capturing + _client.VideoDeviceManager.Enable(); - var width = 1920; - var height = 1080; - var fps = 30; + // Stop video capturing + _client.VideoDeviceManager.Disable(); + } -// Use device name to create a new WebCamTexture instance - var activeCamera = new WebCamTexture(cameraDevice.name, width, height, fps); + public void RequestCameraPermissionsIOSandWebGL() + { + // Request camera permissions + Application.RequestUserAuthorization(UserAuthorization.WebCam); + + // Check if user granted camera permission + if (!Application.HasUserAuthorization(UserAuthorization.WebCam)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } + } -// Call Play() in order to start capturing the video - activeCamera.Play(); + public void RequestCameraPermissionsAndroid() + { + // Request camera permissions + Permission.RequestUserPermission(Permission.Camera); -// Set WebCamTexture in Stream's Client - this WebCamTexture will be the video source in video calls - _client.SetCameraInputSource(activeCamera); + // Check if user granted camera permission + if (!Permission.HasUserAuthorizedPermission(Permission.Camera)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } } private IStreamVideoClient _client; diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs deleted file mode 100644 index d1665a64..00000000 --- a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs +++ /dev/null @@ -1,83 +0,0 @@ -using System.Linq; -using StreamVideo.Core; -using UnityEngine; - -namespace StreamVideoDocsCodeSamples._03_guides -{ - internal class CameraAndMicrophone : MonoBehaviour - { - public void SetupMicrophoneInput() - { - // Obtain reference to an AudioSource that will be used a source of audio - var inputAudioSource = GetComponent(); - -// Get a valid microphone device name. 
-// You usually want to populate a dropdown list with Microphone.devices so that the user can pick which device should be used - _activeMicrophoneDeviceName = Microphone.devices.First(); - - inputAudioSource.clip - = Microphone.Start(_activeMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); - inputAudioSource.loop = true; - inputAudioSource.Play(); - - _client.SetAudioInputSource(inputAudioSource); - } - - public void ChangeMicrophoneDevice() - { - var newMicrophoneDeviceName = "test"; - - // Stop previously active microphone - Microphone.End(_activeMicrophoneDeviceName); - - // Obtain reference to an AudioSource that was setup as an input source - var inputAudioSource = GetComponent(); - - inputAudioSource.clip = Microphone.Start(newMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); - } - - public void SetupCameraInput() - { - // Obtain a camera device - var cameraDevice = WebCamTexture.devices.First(); - - // Use device name to create a new WebCamTexture instance - var activeCamera = new WebCamTexture(cameraDevice.name, 1920, 1080, 24); - - // Call Play() in order to start capturing the video - activeCamera.Play(); - - // Set WebCamTexture in Stream's Client - this WebCamTexture will be the video source in video calls - _client.SetCameraInputSource(activeCamera); - } - - public void ChangeVideoDevice() - { - // Item from WebCamTexture.devices - var newDeviceName = "deviceName"; - - _activeCamera.Stop(); - _activeCamera.deviceName = newDeviceName; - _activeCamera.Play(); - } - - public void UpdateCameraInputSource() - { - // Obtain a camera device - var cameraDevice = WebCamTexture.devices.First(); - - // Use device name to create a new WebCamTexture instance - var activeCamera = new WebCamTexture(cameraDevice.name); - - // Call Play() in order to start capturing the video - activeCamera.Play(); - - _client.SetCameraInputSource(activeCamera); - } - - private IStreamVideoClient _client; - private string _activeMicrophoneDeviceName; - - private WebCamTexture _activeCamera; - } -} \ No newline at end of file diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs new file mode 100644 index 00000000..7ad6cf47 --- /dev/null +++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs @@ -0,0 +1,158 @@ +using System.Linq; +using StreamVideo.Core; +using StreamVideo.Core.DeviceManagers; +using UnityEngine; +using UnityEngine.Android; +using UnityEngine.UI; + +namespace StreamVideoDocsCodeSamples._03_guides +{ + internal class CameraAndMicrophone_Camera : MonoBehaviour + { + public void ListAvailableCameraDevices() + { +var cameras = _client.VideoDeviceManager.EnumerateDevices(); + +foreach (var camera in cameras) +{ + Debug.Log(camera.Name); // Get camera name +} + } + + public void SelectCamera() + { + // Get available camera devices. 
Returns IEnumerable + var cameras = _client.VideoDeviceManager.EnumerateDevices(); + + foreach (var cam in cameras) + { + Debug.Log(cam.Name); // Get the name of the camera + } + + var camera = cameras.First(); + +_client.VideoDeviceManager.SelectDevice(camera, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, enable: true, requestedFPS: 24); + + +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true, requestedFPS: 30); + } + + public void VideoResolutionValues() + { + // Get available camera devices. Returns IEnumerable + var cameras = _client.VideoDeviceManager.EnumerateDevices(); + + + var camera = cameras.First(); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_144p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_240p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_360p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_480p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_1080p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, new VideoResolution(500, 500), enable: true); + } + + public void GetSelectedCamera() + { + var selectedCamera = _client.VideoDeviceManager.SelectedDevice; + } + + public void StartStopCamera() + { + // Enable device to start capturing camera input + _client.VideoDeviceManager.Enable(); + + // Disable device to stop capturing camera input + _client.VideoDeviceManager.Disable(); + + // Set the enabled state by passing a boolean argument + _client.VideoDeviceManager.SetEnabled(true); + } + + public void GetLocalParticipantVideoPreview() + { +var webCamTexture = _client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + +// You can attach this texture to RawImage UI Component +GetComponent().texture = webCamTexture; + } + +public void GetLocalParticipantVideoPreviewFull() +{ + // Triggered when the selected devices changes + _client.VideoDeviceManager.SelectedDeviceChanged += UpdateLocalParticipantPreview; +} + +private void UpdateLocalParticipantPreview(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) +{ + var webCamTexture = _client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + + // You can attach this texture to RawImage UI Component + GetComponent().texture = webCamTexture; +} + + public void CheckCameraStatus() + { + // Check if currently selected device is enabled + var isDeviceEnabled = _client.VideoDeviceManager.IsEnabled; + } + + public void VideoDeviceManagerEvents() + { + // Triggered when the selected devices changes + _client.VideoDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + + // Triggered when the IsEnabled property changes + _client.VideoDeviceManager.IsEnabledChanged += OnIsEnabledChanged; + } + + private void OnIsEnabledChanged(bool isEnabled) + { + } + + private void OnSelectedDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) + { + } + + public void CameraTesting() + { + var cameras = _client.VideoDeviceManager.EnumerateDevices(); + var camera = cameras.First(); + + // Testing devices + + _client.VideoDeviceManager.TestDeviceAsync(camera); + + _client.VideoDeviceManager.TryFindFirstWorkingDeviceAsync(); + } + + public void CameraIOSPermissions() + { + // Request 
permission to use the Camera + Application.RequestUserAuthorization(UserAuthorization.WebCam); + + // Check if user granted camera permission + if (!Application.HasUserAuthorization(UserAuthorization.WebCam)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } + } + + public void CameraAndroidPermissions() + { + // Request camera permissions + Permission.RequestUserPermission(Permission.Camera); + + // Check if user granted camera permission + if (!Permission.HasUserAuthorizedPermission(Permission.Camera)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } + } + + private IStreamVideoClient _client; + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs.meta b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs.meta new file mode 100644 index 00000000..e7b9b8ec --- /dev/null +++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Camera.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 601ffac7ebe146cca1a7f818398f0174 +timeCreated: 1714136625 \ No newline at end of file diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Microphone.cs b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Microphone.cs new file mode 100644 index 00000000..b946e4a5 --- /dev/null +++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Microphone.cs @@ -0,0 +1,115 @@ +using System.Linq; +using StreamVideo.Core; +using StreamVideo.Core.DeviceManagers; +using UnityEngine; +using UnityEngine.Android; + +namespace StreamVideoDocsCodeSamples._03_guides +{ + internal class CameraAndMicrophone_Microphone + { + public void ListAvailableMicrophoneDevices() + { + var microphones = _client.AudioDeviceManager.EnumerateDevices(); + + foreach (var mic in microphones) + { + Debug.Log(mic.Name); // Get microphone name + } + } + + public void SelectMicrophone() + { + // Get available microphone devices. Returns IEnumerable + var microphones = _client.AudioDeviceManager.EnumerateDevices(); + + foreach (var mic in microphones) + { + Debug.Log(mic.Name); // Get the name of the microphone + } + + var microphone = microphones.First(); + + // Select device for audio capturing. 
Pass the `enable` argument to control if capturing should be enabled + _client.AudioDeviceManager.SelectDevice(microphone, enable: true); + } + + public void GetSelectedMicrophone() + { + var selectedMicrophone = _client.AudioDeviceManager.SelectedDevice; + } + + public void StartStopMicrophone() + { + // Enable device to start capturing microphone input + _client.AudioDeviceManager.Enable(); + + // Disable device to stop capturing microphone input + _client.AudioDeviceManager.Disable(); + + // Set the enabled state by passing a boolean argument + _client.AudioDeviceManager.SetEnabled(true); + } + + public void CheckMicrophoneStatus() + { + // Check if currently selected device is enabled + var isDeviceEnabled = _client.AudioDeviceManager.IsEnabled; + } + + public void AudioDeviceManagerEvents() + { + // Triggered when the selected devices changes + _client.AudioDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + + // Triggered when the IsEnabled property changes + _client.AudioDeviceManager.IsEnabledChanged += OnIsEnabledChanged; + } + + private void OnIsEnabledChanged(bool isEnabled) + { + } + + private void OnSelectedDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) + { + } + + public void MicrophoneTesting() + { + var microphones = _client.AudioDeviceManager.EnumerateDevices(); + var microphone = microphones.First(); + + // Testing devices + + _client.AudioDeviceManager.TestDeviceAsync(microphone); + + _client.AudioDeviceManager.TryFindFirstWorkingDeviceAsync(); + } + + public void MicrophoneIOSPermissions() + { + // Request permission to use the Microphone + Application.RequestUserAuthorization(UserAuthorization.Microphone); + + // Check if user granted microphone permission + if (!Application.HasUserAuthorization(UserAuthorization.Microphone)) + { + // Notify user that microphone permission was not granted and the microphone capturing will not work. + } + } + + public void MicrophoneAndroidPermissions() + { + // Request microphone permissions + Permission.RequestUserPermission(Permission.Microphone); + + // Check if user granted microphone permission + if (!Permission.HasUserAuthorizedPermission(Permission.Microphone)) + { + // Notify user that microphone permission was not granted and the microphone capturing will not work. 
+ } + } + + private IStreamVideoClient _client; + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs.meta b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Microphone.cs.meta similarity index 100% rename from Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs.meta rename to Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone_Microphone.cs.meta diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/AudioDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/AudioDeviceManager.cs deleted file mode 100644 index 2fe52333..00000000 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/AudioDeviceManager.cs +++ /dev/null @@ -1,17 +0,0 @@ -using StreamVideo.Core.LowLevelClient; - -namespace StreamVideo.Core.DeviceManagers -{ - internal class AudioDeviceManager : DeviceManagerBase, IAudioDeviceManager - { - internal AudioDeviceManager(RtcSession rtcSession) - : base(rtcSession) - { - } - - protected override void OnSetEnabled(bool isEnabled) => RtcSession.TrySetAudioTrackEnabled(isEnabled); - - //StreamTodo: wrap all operations on the Microphone devices + monitor for devices list changes - //We could also allow to smart pick device -> sample each device and check which of them are actually gathering any input - } -} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs new file mode 100644 index 00000000..72f77933 --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs @@ -0,0 +1,39 @@ +using System; +using System.Threading.Tasks; + +namespace StreamVideo.Core.DeviceManagers +{ + /// + /// Represents a Physical Camera Device that can potentially be activated to capture a video stream + /// + public readonly struct CameraDeviceInfo : IEquatable + { + public string Name { get; } + public bool IsFrontFacing { get; } + + public CameraDeviceInfo(string name, bool isFrontFacing, IStreamVideoDeviceManager videoDeviceManager) + { + _videoDeviceManager = videoDeviceManager; + Name = name; + IsFrontFacing = isFrontFacing; + } + + public bool Equals(CameraDeviceInfo other) => Name == other.Name; + + public override bool Equals(object obj) => obj is CameraDeviceInfo other && Equals(other); + + public override int GetHashCode() => (Name != null ? Name.GetHashCode() : 0); + + public static bool operator ==(CameraDeviceInfo left, CameraDeviceInfo right) => left.Equals(right); + + public static bool operator !=(CameraDeviceInfo left, CameraDeviceInfo right) => !left.Equals(right); + + public Task TestDeviceAsync() => _videoDeviceManager.TestDeviceAsync(this); + + public override string ToString() => string.IsNullOrEmpty(Name) ? 
"None" : Name; + + internal bool IsValid => !string.IsNullOrEmpty(Name); + + private readonly IStreamVideoDeviceManager _videoDeviceManager; + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs.meta new file mode 100644 index 00000000..c637c0f2 --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/CameraDeviceInfo.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 685a39514ebf4db9983b43f86b44f6c2 +timeCreated: 1710241888 \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/DeviceManagerBase.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/DeviceManagerBase.cs index a99240be..3b38789a 100644 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/DeviceManagerBase.cs +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/DeviceManagerBase.cs @@ -1,17 +1,51 @@ using System; +using System.Collections.Generic; +using System.Threading.Tasks; using StreamVideo.Core.LowLevelClient; +using StreamVideo.Libs.Logs; namespace StreamVideo.Core.DeviceManagers { - internal abstract class DeviceManagerBase : IDeviceManager + public delegate void DeviceEnabledChangeHandler(bool isEnabled); + + public delegate void + SelectedDeviceChangeHandler(TDeviceInfo previousDevice, TDeviceInfo currentDevice); + + internal abstract class DeviceManagerBase : IDeviceManager where TDeviceInfo : struct { - public bool IsEnabled { get; private set; } = true; + public event DeviceEnabledChangeHandler IsEnabledChanged; + + public event SelectedDeviceChangeHandler SelectedDeviceChanged; - internal DeviceManagerBase(RtcSession rtcSession) + public bool IsEnabled { - RtcSession = rtcSession ?? 
throw new ArgumentNullException(nameof(rtcSession)); - - //StreamTodo: react to when video & audio streams become available and disable them if IsEnabled was set to false before the call + get => _isEnabled; + private set + { + if (value == _isEnabled) + { + return; + } + + _isEnabled = value; + IsEnabledChanged?.Invoke(IsEnabled); + } + } + + public TDeviceInfo SelectedDevice + { + get => _selectedDevice; + protected set + { + if (EqualityComparer.Default.Equals(value, _selectedDevice)) + { + return; + } + + var prev = _selectedDevice; + _selectedDevice = value; + SelectedDeviceChanged?.Invoke(prev, value); + } } public void Enable() => SetEnabled(true); @@ -20,12 +54,70 @@ internal DeviceManagerBase(RtcSession rtcSession) public void SetEnabled(bool isEnabled) { + if (IsEnabled == isEnabled) + { + return; + } + IsEnabled = isEnabled; OnSetEnabled(isEnabled); } + public abstract IEnumerable EnumerateDevices(); + + public Task TestDeviceAsync(TDeviceInfo device, float timeout = 1f) + { + const float MinTimeout = 0f; + const float MaxTimeout = 20f; + + if (timeout <= MinTimeout || timeout > MaxTimeout) + { + throw new ArgumentOutOfRangeException( + $"'{nameof(timeout)}' argument must be between {MinTimeout} and {MaxTimeout} seconds, given: {timeout}"); + } + + return OnTestDeviceAsync(device, (int)(timeout * 1000)); + } + + public async Task TryFindFirstWorkingDeviceAsync(float testTimeoutPerDevice = 1f) + { + foreach (var device in EnumerateDevices()) + { + var isWorking = await TestDeviceAsync(device, testTimeoutPerDevice); + if (isWorking) + { + return device; + } + } + + return null; + } + + public void Dispose() => OnDisposing(); + + internal DeviceManagerBase(RtcSession rtcSession, IInternalStreamVideoClient client, ILogs logs) + { + RtcSession = rtcSession ?? throw new ArgumentNullException(nameof(rtcSession)); + Client = client ?? throw new ArgumentNullException(nameof(client)); + Logs = logs ?? throw new ArgumentNullException(nameof(logs)); + + //StreamTodo: react to when video & audio streams become available and disable them if IsEnabled was set to false before the call + } + protected RtcSession RtcSession { get; } + protected IInternalStreamVideoClient Client { get; } + protected ILogs Logs { get; } + protected abstract void OnSetEnabled(bool isEnabled); + + protected abstract Task OnTestDeviceAsync(TDeviceInfo device, int msTimeout); + + protected virtual void OnDisposing() + { + } + + private TDeviceInfo _selectedDevice; + private bool _isEnabled; } } \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IAudioDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IAudioDeviceManager.cs deleted file mode 100644 index 0d3e8d89..00000000 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IAudioDeviceManager.cs +++ /dev/null @@ -1,10 +0,0 @@ -namespace StreamVideo.Core.DeviceManagers -{ - /// - /// Manages interactions with audio recording devices (Microphones). 
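The base manager above raises IsEnabledChanged and SelectedDeviceChanged only when the value actually changes. A small consumer-side sketch of wiring these events (not part of this diff; `client` is an IStreamVideoClient and `_muteToggle` is a hypothetical UnityEngine.UI.Toggle):

// Keep a mute button in sync with the microphone state
client.AudioDeviceManager.IsEnabledChanged += isEnabled => _muteToggle.SetIsOnWithoutNotify(!isEnabled);

// The selected-device handler receives both the previous and the new device
client.AudioDeviceManager.SelectedDeviceChanged += (previousDevice, currentDevice) =>
    Debug.Log($"Microphone changed from {previousDevice} to {currentDevice}");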
- /// - public interface IAudioDeviceManager : IDeviceManager - { - - } -} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IDeviceManager.cs index ec5083c3..649bec20 100644 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IDeviceManager.cs +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IDeviceManager.cs @@ -1,19 +1,37 @@ -namespace StreamVideo.Core.DeviceManagers +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace StreamVideo.Core.DeviceManagers { - public interface IDeviceManager + public interface IDeviceManager where TDeviceInfo : struct { + /// + /// Event triggered when the changes. + /// + event DeviceEnabledChangeHandler IsEnabledChanged; + + /// + /// Event triggered when the changes. + /// + event SelectedDeviceChangeHandler SelectedDeviceChanged; + /// /// Is device enabled. Enabled device will stream output during the call. /// bool IsEnabled { get; } /// - /// Enable this device. Enabled device will stream output during the call. + /// Currently selected device. This device will be used to capture data. + /// + TDeviceInfo SelectedDevice { get; } + + /// + /// START capturing data from the . /// void Enable(); /// - /// Disable this device. This works like "mute" and stops streaming output during the call until you enable this again. + /// STOP capturing data from the /// void Disable(); @@ -21,5 +39,28 @@ public interface IDeviceManager /// Set enabled state for this device. /// void SetEnabled(bool isEnabled); + + /// + /// Enumerate all available devices. This list contains all devices exposed by the underlying OS. + /// + IEnumerable EnumerateDevices(); + + /// + /// Check if the device is capturing any data. + /// This can be useful when there are multiple devices available and you want to filter out the ones that actually work. + /// For example, on Windows/Mac/Linux there can be many virtual cameras/microphones available that are not capturing any data. + /// You typically want to present all available devices to users but you may want to show working devices first or enable the first working device by default. + /// + /// Device to test. You can obtain them from + /// How long the test will wait for camera input. Please not that depending on OS and the device there can be delay in starting the device. Timeout below 0.5 seconds can not be enough for some device. + /// True if device is providing captured data + Task TestDeviceAsync(TDeviceInfo device, float timeout = 1f); + + /// + /// Iterates over all available devices and performs on each until the first working device is found + /// + /// + /// First found working device of NULL if none of the devices worked + Task TryFindFirstWorkingDeviceAsync(float testTimeoutPerDevice = 1f); } } \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamAudioDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamAudioDeviceManager.cs new file mode 100644 index 00000000..42b8f5d7 --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamAudioDeviceManager.cs @@ -0,0 +1,15 @@ +namespace StreamVideo.Core.DeviceManagers +{ + /// + /// Manages interactions with audio recording devices (Microphones). + /// + public interface IStreamAudioDeviceManager : IDeviceManager + { + /// + /// Select a microphone device for audio capturing. 
+ /// + /// Device to select + /// Enable this device (Start Capturing Audio) + void SelectDevice(MicrophoneDeviceInfo device, bool enable); + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IAudioDeviceManager.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamAudioDeviceManager.cs.meta similarity index 100% rename from Packages/StreamVideo/Runtime/Core/DeviceManagers/IAudioDeviceManager.cs.meta rename to Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamAudioDeviceManager.cs.meta diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamVideoDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamVideoDeviceManager.cs new file mode 100644 index 00000000..21901c9b --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamVideoDeviceManager.cs @@ -0,0 +1,37 @@ +using UnityEngine; + +namespace StreamVideo.Core.DeviceManagers +{ + //StreamTodo: revise setting 3D Scene Camera as a video source + /// + /// Manages interactions with video recording devices - Cameras. + /// + public interface IStreamVideoDeviceManager : IDeviceManager + { + //StreamTodo: probably move all members from IDeviceManager here so we can have all comments specifically about video or audio + + /// + /// Select a camera device for video capturing. + /// + /// Camera device + /// Enable this device (Start Capturing Video) + /// Requested frame rate for the captured video. If the requested FPS is not supported by the camera, the closets available value will be selected + void SelectDevice(CameraDeviceInfo device, bool enable, int requestedFPS = 30); + + /// + /// Select a camera device for video capturing. + /// + /// Camera device + /// Requested video resolution for the captured video. If the requested resolution is not supported by the camera, the closest available one will be selected. + /// Enable this device (Start Capturing Video) + /// Requested frame rate for the captured video. If the requested FPS is not supported by the camera, the closets available value will be selected + void SelectDevice(CameraDeviceInfo device, VideoResolution requestedResolution, bool enable, int requestedFPS = 30); + + /// + /// Get the instance of for the selected device. This is useful if you want to + /// + /// This can change whenever a selected device is changed. Subscribe to to get notified when the selected device changes. + /// + WebCamTexture GetSelectedDeviceWebCamTexture(); + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IVideoDeviceManager.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamVideoDeviceManager.cs.meta similarity index 100% rename from Packages/StreamVideo/Runtime/Core/DeviceManagers/IVideoDeviceManager.cs.meta rename to Packages/StreamVideo/Runtime/Core/DeviceManagers/IStreamVideoDeviceManager.cs.meta diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IVideoDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/IVideoDeviceManager.cs deleted file mode 100644 index 61fa70c3..00000000 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/IVideoDeviceManager.cs +++ /dev/null @@ -1,10 +0,0 @@ -namespace StreamVideo.Core.DeviceManagers -{ - /// - /// Manages interactions with video recording devices (Cameras). 
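The device-testing API declared above (TestDeviceAsync and TryFindFirstWorkingDeviceAsync) is easiest to see in use. An illustrative sketch, not part of this diff, where `client` is assumed to be a connected IStreamVideoClient:

// Virtual cameras often enumerate fine but never produce data, so probe each device
var workingCamera = await client.VideoDeviceManager.TryFindFirstWorkingDeviceAsync(testTimeoutPerDevice: 2f);

if (workingCamera.HasValue)
{
    client.VideoDeviceManager.SelectDevice(workingCamera.Value, enable: true);
}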
- /// - public interface IVideoDeviceManager : IDeviceManager - { - - } -} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs new file mode 100644 index 00000000..993c0dd5 --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs @@ -0,0 +1,31 @@ +using System; + +namespace StreamVideo.Core.DeviceManagers +{ + /// + /// Represents a Microphone Device that can potentially be activated to capture an audio stream + /// + public readonly struct MicrophoneDeviceInfo : IEquatable + { + public string Name { get; } + + public MicrophoneDeviceInfo(string name) + { + Name = name; + } + + public bool Equals(MicrophoneDeviceInfo other) => Name == other.Name; + + public override bool Equals(object obj) => obj is MicrophoneDeviceInfo other && Equals(other); + + public override int GetHashCode() => (Name != null ? Name.GetHashCode() : 0); + + public static bool operator ==(MicrophoneDeviceInfo left, MicrophoneDeviceInfo right) => left.Equals(right); + + public static bool operator !=(MicrophoneDeviceInfo left, MicrophoneDeviceInfo right) => !left.Equals(right); + + public override string ToString() => string.IsNullOrEmpty(Name) ? "None" : Name; + + internal bool IsValid => !string.IsNullOrEmpty(Name); + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs.meta new file mode 100644 index 00000000..2a0fab2d --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/MicrophoneDeviceInfo.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 5e87adf655884bc9884d3a9f7047dd47 +timeCreated: 1710241888 \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamAudioDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamAudioDeviceManager.cs new file mode 100644 index 00000000..c579efbb --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamAudioDeviceManager.cs @@ -0,0 +1,163 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using StreamVideo.Core.LowLevelClient; +using StreamVideo.Libs.Logs; +using UnityEngine; +using Object = UnityEngine.Object; + +namespace StreamVideo.Core.DeviceManagers +{ + internal class StreamAudioDeviceManager : DeviceManagerBase, IStreamAudioDeviceManager + { + //StreamTodo: user can add/remove devices, we might want to expose DeviceAdded, DeviceRemoved events + public override IEnumerable EnumerateDevices() + { + foreach (var device in Microphone.devices) + { + yield return new MicrophoneDeviceInfo(device); + } + } + + protected override async Task OnTestDeviceAsync(MicrophoneDeviceInfo device, int msTimeout) + { + const int sampleRate = 44100; + var maxRecordingTime = (int)Math.Ceiling(msTimeout / 1000f); + + var clip = Microphone.Start(device.Name, true, maxRecordingTime, sampleRate); + if (clip == null) + { + return false; + } + + //StreamTodo: check in loop and exit early if device is working already + await Task.Delay(msTimeout); + + //StreamTodo: should we check Microphone.IsRecording? 
Also some sources add this after Mic.Start() while (!(Microphone.GetPosition(null) > 0)) { } + + var data = new float[clip.samples * clip.channels]; + clip.GetData(data, 0); + var hasData = false; + foreach (var sample in data) + { + if (sample != 0f) + { + hasData = true; + break; + } + } + + return hasData; + } + + /// + /// Select microphone device to capture audio input. Available microphone devices are listed in . + /// You can check the currently selected audio device with , and + /// get notified when the selected device changes by subscribing to . + /// + /// + /// Thrown when the provided device has an invalid name + public void SelectDevice(MicrophoneDeviceInfo device, bool enable) + { + if (!device.IsValid) + { + throw new ArgumentException($"{nameof(device)} argument is not valid. The device name is empty."); + } + + TryStopRecording(); + + SelectedDevice = device; + + var targetAudioSource = GetOrCreateTargetAudioSource(); + + targetAudioSource.clip + = Microphone.Start(SelectedDevice.Name, true, 3, AudioSettings.outputSampleRate); + targetAudioSource.loop = true; + +#if STREAM_DEBUG_ENABLED + Logs.Info($"Changed microphone device to: {SelectedDevice}"); +#endif + + //StreamTodo: in some cases starting the mic recording before the call was causing the recorded audio being played in speakers + //I think the reason was that AudioSource was being captured by an AudioListener but once I've joined the call, this disappeared + //Check if we can have this AudioSource to be ignored by AudioListener's or otherwise mute it when there is not active call session + + SetEnabled(enable); + } + + //StreamTodo: https://docs.unity3d.com/ScriptReference/AudioSource-ignoreListenerPause.html perhaps this should be enabled so that AudioListener doesn't affect recorded audio + + internal StreamAudioDeviceManager(RtcSession rtcSession, IInternalStreamVideoClient client, ILogs logs) + : base(rtcSession, client, logs) + { + } + + protected override void OnSetEnabled(bool isEnabled) + { + if (isEnabled && SelectedDevice.IsValid && !GetOrCreateTargetAudioSource().isPlaying) + { + GetOrCreateTargetAudioSource().Play(); + } + + if (!isEnabled) + { + TryStopRecording(); + } + + RtcSession.TrySetAudioTrackEnabled(isEnabled); + } + + protected override void OnDisposing() + { + TryStopRecording(); + + if (_targetAudioSourceContainer != null) + { + Object.Destroy(_targetAudioSourceContainer); + } + + base.OnDisposing(); + } + + //StreamTodo: wrap all operations on the Microphone devices + monitor for devices list changes + //We could also allow to smart pick device -> sample each device and check which of them are actually gathering any input + + private AudioSource _targetAudioSource; + private GameObject _targetAudioSourceContainer; + + private AudioSource GetOrCreateTargetAudioSource() + { + if (_targetAudioSource != null) + { + return _targetAudioSource; + } + + _targetAudioSourceContainer = new GameObject + { + name = $"[Stream][{nameof(StreamAudioDeviceManager)}] Microphone Buffer", +#if STREAM_DEBUG_ENABLED + hideFlags = HideFlags.DontSave +#else + hideFlags = HideFlags.HideInHierarchy | HideFlags.DontSave +#endif + }; + + _targetAudioSource = _targetAudioSourceContainer.AddComponent(); + Client.SetAudioInputSource(_targetAudioSource); + return _targetAudioSource; + } + + private void TryStopRecording() + { + if (!SelectedDevice.IsValid) + { + return; + } + + if (Microphone.IsRecording(SelectedDevice.Name)) + { + Microphone.End(SelectedDevice.Name); + } + } + } +} \ No newline at end of file diff --git 
a/Packages/StreamVideo/Runtime/Core/DeviceManagers/AudioDeviceManager.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamAudioDeviceManager.cs.meta similarity index 100% rename from Packages/StreamVideo/Runtime/Core/DeviceManagers/AudioDeviceManager.cs.meta rename to Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamAudioDeviceManager.cs.meta diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamVideoDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamVideoDeviceManager.cs new file mode 100644 index 00000000..5982e585 --- /dev/null +++ b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamVideoDeviceManager.cs @@ -0,0 +1,235 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading.Tasks; +using StreamVideo.Core.LowLevelClient; +using StreamVideo.Libs.Logs; +using UnityEngine; +using Object = UnityEngine.Object; + +namespace StreamVideo.Core.DeviceManagers +{ + // StreamTodo: write tests: + /* - change in video res & FPS needs to be reflected in sent video +- If you disable track before call it should stay disabled during the call +- disabling camera should disable the video track (same with mic) +- enabling the camera should enable the video track again (same with mic) +- changing a disabled camera should not enable it +- test that monitoring for video devices works and deviceAdded, deviceRemoved events are fired accordingly +- test that enabling device triggers capturing and disabling stops capturing +*/ + internal class StreamVideoDeviceManager : DeviceManagerBase, IStreamVideoDeviceManager + { + //StreamTodo: user can add/remove devices, we might want to expose DeviceAdded, DeviceRemoved events + public override IEnumerable EnumerateDevices() + { + foreach (var device in WebCamTexture.devices) + { + yield return new CameraDeviceInfo(device.name, device.isFrontFacing, this); + } + } + + public void SelectDevice(CameraDeviceInfo device, bool enable, int requestedFPS = 30) + => SelectDevice(device, VideoResolution.Res_720p, enable, requestedFPS); + + public void SelectDevice(CameraDeviceInfo device, VideoResolution requestedResolution, bool enable, int requestedFPS = 30) + { + if (!device.IsValid) + { + throw new ArgumentException($"{nameof(device)} argument is not valid. The device name is empty."); + } + + var deviceChanged = SelectedDevice != device; + var newInstanceNeeded = IsNewInstanceNeeded(device, requestedResolution); + + if (_activeCamera != null && _activeCamera.isPlaying) + { + _activeCamera.Stop(); + } + + if (newInstanceNeeded) + { + _activeCamera = new WebCamTexture(device.Name, (int)requestedResolution.Width, (int)requestedResolution.Height, requestedFPS); + SelectedDevice = device; + } + else + { + if (deviceChanged) + { + _activeCamera.deviceName = device.Name; + SelectedDevice = device; + } + } + + if (IsEnabled && enable && _activeCamera != null && !_activeCamera.isPlaying) + { + //OnSetEnabled will not trigger because IsEnabled value didn't change + _activeCamera.Play(); + Client.SetCameraInputSource(_activeCamera); + } + + SetEnabled(enable); + } + + //StreamTodo: better to not expose this and make fake tracks for local user. This way every participant is processed exactly the same + /// + /// Get the instance of for the selected device. This is useful if you want to + /// + /// This can change whenever a selected device is changed. Subscribe to to get notified when the selected device changes. 
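For context, a minimal application-side sketch of this video device manager (illustrative only, not part of this diff; `client` is an IStreamVideoClient and `_previewImage` is an assumed UnityEngine.UI.RawImage):

// Pick the first available camera and start capturing at 720p / 30 FPS
var camera = client.VideoDeviceManager.EnumerateDevices().First();
client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true, requestedFPS: 30);

// Show the local camera feed by rendering the selected device's WebCamTexture
_previewImage.texture = client.VideoDeviceManager.GetSelectedDeviceWebCamTexture();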
+ /// + public WebCamTexture GetSelectedDeviceWebCamTexture() => _activeCamera; + + internal StreamVideoDeviceManager(RtcSession rtcSession, IInternalStreamVideoClient client, ILogs logs) + : base(rtcSession, client, logs) + { + } + + protected override void OnSetEnabled(bool isEnabled) + { + if (isEnabled && _activeCamera != null && !_activeCamera.isPlaying) + { + _activeCamera.Play(); + Client.SetCameraInputSource(_activeCamera); + } + + if (!isEnabled && _activeCamera != null) + { + _activeCamera.Stop(); + } + + RtcSession.TrySetVideoTrackEnabled(isEnabled); + } + + protected override async Task OnTestDeviceAsync(CameraDeviceInfo device, int msTimeout) + { + WebCamTexture camTexture = null; + try + { + camTexture = new WebCamTexture(device.Name); + + // This can fail and the only result will be Unity logging "Could not start graph" and "Could not pause pControl" - these are logs and not exceptions. + camTexture.Play(); + + if (_stopwatch == null) + { + _stopwatch = new Stopwatch(); + } + + _stopwatch.Stop(); + _stopwatch.Reset(); + _stopwatch.Start(); + + var isCapturing = false; + + //StreamTodo: Investigate https://forum.unity.com/threads/get-webcamtexture-pixel-data-without-using-getpixels32.1315821/ + + Color[] frame1 = null, frame2 = null; + + while (_stopwatch.ElapsedMilliseconds < msTimeout) + { + //WebCamTexture.didUpdateThisFrame does not guarantee that camera is capturing data. We need to compare frames + if (camTexture.didUpdateThisFrame) + { + var frame = camTexture.GetPixels(); + + if (frame1 == null) + { + if (!IsFrameBlack(frame)) + { + frame1 = frame; + continue; + } + } + else + { + if (!IsFrameBlack(frame)) + { + frame2 = frame; + } + } + } + + if (frame1 != null && frame2 != null && !AreFramesEqual(frame1, frame2)) + { + isCapturing = true; + break; + } + + await Task.Delay(1); + } + + return isCapturing; + } + catch (Exception e) + { + Logs.Error(e.Message); + return false; + } + finally + { + if (camTexture != null) + { + camTexture.Stop(); + Object.Destroy(camTexture); + } + } + } + + protected override void OnDisposing() + { + if (_activeCamera != null) + { + if (_activeCamera.isPlaying) + { + _activeCamera.Stop(); + } + + Object.Destroy(_activeCamera); + } + + base.OnDisposing(); + } + + private WebCamTexture _activeCamera; + private Stopwatch _stopwatch; + + private bool IsNewInstanceNeeded(CameraDeviceInfo device, VideoResolution resolution, int fps = 30) + { + return _activeCamera == null || _activeCamera.requestedWidth != resolution.Width || + _activeCamera.requestedHeight != resolution.Height || + Mathf.Abs(_activeCamera.requestedFPS - fps) < 0.01f; + } + + private static bool AreFramesEqual(IReadOnlyList frame1, IReadOnlyList frame2) + { + if (frame1.Count != frame2.Count) + { + return false; + } + + for (var i = 0; i < frame1.Count; i++) + { + if (frame1[i] != frame2[i]) + { + return false; + } + } + + return true; + } + + private static bool IsFrameBlack(IReadOnlyList frame1) + { + for (var i = 0; i < frame1.Count; i++) + { + //StreamTodo: perhaps check if the whole frame is same color. 
In one case a virtual camera was solid orange + if (frame1[i] != Color.black) + { + return false; + } + } + + return true; + } + } +} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/VideoDeviceManager.cs.meta b/Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamVideoDeviceManager.cs.meta similarity index 100% rename from Packages/StreamVideo/Runtime/Core/DeviceManagers/VideoDeviceManager.cs.meta rename to Packages/StreamVideo/Runtime/Core/DeviceManagers/StreamVideoDeviceManager.cs.meta diff --git a/Packages/StreamVideo/Runtime/Core/DeviceManagers/VideoDeviceManager.cs b/Packages/StreamVideo/Runtime/Core/DeviceManagers/VideoDeviceManager.cs deleted file mode 100644 index 1b48a250..00000000 --- a/Packages/StreamVideo/Runtime/Core/DeviceManagers/VideoDeviceManager.cs +++ /dev/null @@ -1,20 +0,0 @@ -using StreamVideo.Core.LowLevelClient; - -namespace StreamVideo.Core.DeviceManagers -{ - internal class VideoDeviceManager : DeviceManagerBase, IVideoDeviceManager - { - internal VideoDeviceManager(RtcSession rtcSession) - : base(rtcSession) - { - } - - protected override void OnSetEnabled(bool isEnabled) => RtcSession.TrySetVideoTrackEnabled(isEnabled); - - //StreamTodo: wrap all Unity webcam texture operations here. Enabling/Disabling tracks should manage the WebCamTexture so that users only - //Also take into account that user may want to provide his instance of WebCamTexture + monitor for devices list changes - - //StreamTodo: add AutoDetectActiveDevice() method -> will sample each device and pick the first that delivers data - //We could also favor front camera on mobile devices - } -} \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/IInternalStreamVideoClient.cs b/Packages/StreamVideo/Runtime/Core/IInternalStreamVideoClient.cs index b77e4f03..0d6a94ca 100644 --- a/Packages/StreamVideo/Runtime/Core/IInternalStreamVideoClient.cs +++ b/Packages/StreamVideo/Runtime/Core/IInternalStreamVideoClient.cs @@ -2,6 +2,7 @@ using System.Threading.Tasks; using StreamVideo.Core.LowLevelClient; using StreamVideo.Core.StatefulModels; +using UnityEngine; namespace StreamVideo.Core { @@ -40,5 +41,9 @@ Task UpdateUserPermissions(IStreamCall call, string userId, List grantPe Task SetParticipantCustomDataAsync(IStreamVideoCallParticipant participant, Dictionary internalDictionary); + + void SetAudioInputSource(AudioSource audioSource); + + void SetCameraInputSource(WebCamTexture webCamTexture); } } \ No newline at end of file diff --git a/Packages/StreamVideo/Runtime/Core/IStreamVideoClient.cs b/Packages/StreamVideo/Runtime/Core/IStreamVideoClient.cs index 67bb78d0..753fd9e2 100644 --- a/Packages/StreamVideo/Runtime/Core/IStreamVideoClient.cs +++ b/Packages/StreamVideo/Runtime/Core/IStreamVideoClient.cs @@ -1,5 +1,4 @@ - -using System; +using System; using System.Collections.Generic; using System.Threading.Tasks; using StreamVideo.Core.QueryBuilders.Sort.Calls; @@ -8,7 +7,6 @@ using StreamVideo.Core.StatefulModels; using StreamVideo.Libs.Auth; using StreamVideo.Libs.VideoClientInstanceRunner; -using UnityEngine; namespace StreamVideo.Core { @@ -51,12 +49,12 @@ public interface IStreamVideoClient : IStreamVideoClientEventsListener, IDisposa /// /// Manager for video recording devices. Use it to interact with camera devices. /// - IVideoDeviceManager VideoDeviceManager { get; } + IStreamVideoDeviceManager VideoDeviceManager { get; } /// /// Manager for audio recording devices. Use it to interact with microphone devices. 
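Because SetAudioInputSource and SetCameraInputSource are removed from the public client interface in the hunks below, application code now selects devices through the managers instead of handing in its own AudioSource or WebCamTexture. A rough before/after sketch with assumed variable names, not part of this diff:

// Before: the app owned the capture objects and passed them to the client
// client.SetAudioInputSource(microphoneAudioSource);
// client.SetCameraInputSource(webCamTexture);

// After: the device managers own capturing; the app only picks the devices
var microphone = client.AudioDeviceManager.EnumerateDevices().First();
client.AudioDeviceManager.SelectDevice(microphone, enable: true);

var camera = client.VideoDeviceManager.EnumerateDevices().First();
client.VideoDeviceManager.SelectDevice(camera, enable: true);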
/// - IAudioDeviceManager AudioDeviceManager { get; } + IStreamAudioDeviceManager AudioDeviceManager { get; } /// /// Connect user to Stream server. Returns local user object of type @@ -72,30 +70,17 @@ public interface IStreamVideoClient : IStreamVideoClientEventsListener, IDisposa Task JoinCallAsync(StreamCallType callType, string callId, bool create, bool ring, bool notify); - /// - /// Set the source for sending AUDIO. Check out the docs to learn on how to capture audio from a Microphone device https://getstream.io/video/docs/unity/guides/camera-and-microphone/ - /// - /// - void SetAudioInputSource(AudioSource audioSource); - - /// - /// Set the source for sending VIDEO from a Camera device. - /// Video resolution and FPS (frames per second) defined in the passed will be used to define the max resolution and FPS of the published video - /// Check out the docs to learn how to setup capturing video from a camera device https://getstream.io/video/docs/unity/guides/camera-and-microphone/ - /// - /// - void SetCameraInputSource(WebCamTexture webCamTexture); - - /// - /// Set the source for sending VIDEO or rendered Scene Camera. You can pass any scene camera and the video will be sent to other participants. - /// - void SetCameraInputSource(Camera sceneCamera); - /// /// Will return null if the call doesn't exist /// Task GetCallAsync(StreamCallType callType, string callId); + /// + /// Get a call with a specified Type and ID. If such a call doesn't exist, it will be created. + /// + /// Call type - this defines the permissions and other settings for the call. Read more in the Call Types Docs + /// Call ID + /// Call object of type: Task GetOrCreateCallAsync(StreamCallType callType, string callId); /// diff --git a/Packages/StreamVideo/Runtime/Core/LowLevelClient/RtcSession.cs b/Packages/StreamVideo/Runtime/Core/LowLevelClient/RtcSession.cs index 72536ab2..1a212e19 100644 --- a/Packages/StreamVideo/Runtime/Core/LowLevelClient/RtcSession.cs +++ b/Packages/StreamVideo/Runtime/Core/LowLevelClient/RtcSession.cs @@ -87,6 +87,16 @@ public AudioSource AudioInput get => _audioInput; set { + if (value == null) + { + throw new ArgumentNullException(); + } + + if (value == _audioInput) + { + return; + } + var prev = _audioInput; _audioInput = value; @@ -102,6 +112,16 @@ public WebCamTexture VideoInput get => _videoInput; set { + if (value == null) + { + throw new ArgumentNullException(); + } + + if (value == _videoInput) + { + return; + } + var prev = _videoInput; _videoInput = value; @@ -280,6 +300,7 @@ public void TrySetAudioTrackEnabled(bool isEnabled) { if (Publisher?.PublisherAudioTrack == null) { + //StreamTodo: we probably want to cache this here and use once the track is available return; } @@ -290,6 +311,7 @@ public void TrySetVideoTrackEnabled(bool isEnabled) { if (Publisher?.PublisherVideoTrack == null) { + //StreamTodo: we probably want to cache this here and use once the track is available return; } diff --git a/Packages/StreamVideo/Runtime/Core/LowLevelClient/StreamPeerConnection.cs b/Packages/StreamVideo/Runtime/Core/LowLevelClient/StreamPeerConnection.cs index b1fae25b..fb292718 100644 --- a/Packages/StreamVideo/Runtime/Core/LowLevelClient/StreamPeerConnection.cs +++ b/Packages/StreamVideo/Runtime/Core/LowLevelClient/StreamPeerConnection.cs @@ -533,7 +533,7 @@ private VideoStreamTrack CreatePublisherVideoTrack() $"CreatePublisherVideoTrack, isPlaying: {_mediaInputProvider.VideoInput.isPlaying}, readable: {_mediaInputProvider.VideoInput.isReadable}"); #endif - return new 
VideoStreamTrack(_mediaInputProvider.VideoInput); + return new VideoStreamTrack(_publisherVideoTrackTexture); } //StreamTodo: CreatePublisherVideoTrackFromSceneCamera() is not used in any path diff --git a/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamCall.cs b/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamCall.cs index ccab98b9..d959ed7f 100644 --- a/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamCall.cs +++ b/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamCall.cs @@ -9,6 +9,9 @@ namespace StreamVideo.Core.StatefulModels { + /// + /// Represents a call session where participants can share audio and video streams. + /// public interface IStreamCall : IStreamStatefulModel, IHasCustomData { /// @@ -284,14 +287,14 @@ Task QueryMembersAsync(IEnumerable filters Task SendCustomEventAsync(Dictionary eventData); /// - /// Pin this participant locally. This will take effect on this client only. + /// Pin this participant locally. This will take effect on this device only. /// You can get all pinned participants with /// /// Participant to pin void PinLocally(IStreamVideoCallParticipant participant); /// - /// Unpin this participant locally. This will take effect on this client only. + /// Unpin this participant locally. This will take effect on this device only. /// You can get all pinned participants with /// /// Participant to unpin diff --git a/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamVideoCallParticipant.cs b/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamVideoCallParticipant.cs index 2e2b954d..688c7fda 100644 --- a/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamVideoCallParticipant.cs +++ b/Packages/StreamVideo/Runtime/Core/StatefulModels/IStreamVideoCallParticipant.cs @@ -41,6 +41,7 @@ public interface IStreamVideoCallParticipant : IStreamStatefulModel, IHasCustomD /// Is this participant currently the most actively speaking participant. /// bool IsDominantSpeaker { get; } + string UserId { get; } /// diff --git a/Packages/StreamVideo/Runtime/Core/StreamCallType.cs b/Packages/StreamVideo/Runtime/Core/StreamCallType.cs index b9552453..f448aa8e 100644 --- a/Packages/StreamVideo/Runtime/Core/StreamCallType.cs +++ b/Packages/StreamVideo/Runtime/Core/StreamCallType.cs @@ -3,7 +3,8 @@ namespace StreamVideo.Core { /// - /// Call type defines permission settings. You can set permissions for each type in + /// Call type defines permission settings. You can set permissions for each type in Stream Dashboard. + /// Read more about the call types in the Call Types Docs /// public readonly struct StreamCallType { @@ -25,7 +26,7 @@ public readonly struct StreamCallType public static StreamCallType Livestream => new StreamCallType("livestream"); /// - /// ** Use for development only! ** everything enabled, permissions open + /// ** Use for development only! 
** should only be used for testing, permissions are open and everything is enabled (use carefully) /// public static StreamCallType Development => new StreamCallType("development"); diff --git a/Packages/StreamVideo/Runtime/Core/StreamVideoClient.cs b/Packages/StreamVideo/Runtime/Core/StreamVideoClient.cs index 6199f1f6..7f3de231 100644 --- a/Packages/StreamVideo/Runtime/Core/StreamVideoClient.cs +++ b/Packages/StreamVideo/Runtime/Core/StreamVideoClient.cs @@ -47,8 +47,8 @@ public class StreamVideoClient : IStreamVideoClient, IInternalStreamVideoClient public bool IsConnected => InternalLowLevelClient.ConnectionState == ConnectionState.Connected; - public IVideoDeviceManager VideoDeviceManager { get; } - public IAudioDeviceManager AudioDeviceManager { get; } + public IStreamVideoDeviceManager VideoDeviceManager { get; } + public IStreamAudioDeviceManager AudioDeviceManager { get; } /// /// Use this method to create the Video Client. You should have only one instance of this class @@ -183,7 +183,7 @@ public async Task ConnectUserAsync(AuthCredentials credentials public Task DisconnectAsync() => InternalLowLevelClient.DisconnectAsync(); - public void SetAudioInputSource(AudioSource audioSource) + void IInternalStreamVideoClient.SetAudioInputSource(AudioSource audioSource) { if (audioSource == null) { @@ -196,7 +196,7 @@ public void SetAudioInputSource(AudioSource audioSource) //StreamTodo: add IsActive flag to SetCameraInputSource SetAudioInputSource SetCameraInputSource //StreamTodo: later we should accept just Texture or RenderTexture or TextureProvider - public void SetCameraInputSource(WebCamTexture webCamTexture) + void IInternalStreamVideoClient.SetCameraInputSource(WebCamTexture webCamTexture) { if (webCamTexture == null) { @@ -206,10 +206,10 @@ public void SetCameraInputSource(WebCamTexture webCamTexture) InternalLowLevelClient.RtcSession.VideoInput = webCamTexture; } - public void SetCameraInputSource(Camera sceneCamera) - { - InternalLowLevelClient.RtcSession.VideoSceneInput = sceneCamera; - } + // public void SetCameraInputSource(Camera sceneCamera) + // { + // InternalLowLevelClient.RtcSession.VideoSceneInput = sceneCamera; + // } public async Task QueryCallsAsync(IEnumerable filters = null, CallSort sort = null, int limit = 25, string prev = null, string next = null, bool watch = false) @@ -385,8 +385,8 @@ private StreamVideoClient(IWebsocketClient coordinatorWebSocket, IWebsocketClien _cache = new Cache(this, serializer, _logs); InternalLowLevelClient.RtcSession.SetCache(_cache); - VideoDeviceManager = new VideoDeviceManager(InternalLowLevelClient.RtcSession); - AudioDeviceManager = new AudioDeviceManager(InternalLowLevelClient.RtcSession); + VideoDeviceManager = new StreamVideoDeviceManager(InternalLowLevelClient.RtcSession, this, _logs); + AudioDeviceManager = new StreamAudioDeviceManager(InternalLowLevelClient.RtcSession, this, _logs); SubscribeTo(InternalLowLevelClient); } diff --git a/Packages/StreamVideo/Runtime/Core/VideoResolution.cs b/Packages/StreamVideo/Runtime/Core/VideoResolution.cs index 06788547..0a0c9c06 100644 --- a/Packages/StreamVideo/Runtime/Core/VideoResolution.cs +++ b/Packages/StreamVideo/Runtime/Core/VideoResolution.cs @@ -6,7 +6,7 @@ namespace StreamVideo.Core /// /// Video resolution /// - /// Create a custom resolution or use one of the predefined: + /// You can create a custom resolution or use one of the predefined: /// - FullHD -> 1920x1080 /// - HD -> 1280x720 /// - SD -> 640x480 diff --git a/Packages/StreamVideo/Samples~/VideoChat/MainScene.unity 
b/Packages/StreamVideo/Samples~/VideoChat/MainScene.unity index 565081b0..d1399989 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/MainScene.unity +++ b/Packages/StreamVideo/Samples~/VideoChat/MainScene.unity @@ -104,7 +104,7 @@ NavMeshSettings: serializedVersion: 2 m_ObjectHideFlags: 0 m_BuildSettings: - serializedVersion: 2 + serializedVersion: 3 agentTypeID: 0 agentRadius: 0.5 agentHeight: 2 @@ -117,7 +117,7 @@ NavMeshSettings: cellSize: 0.16666667 manualTileSize: 0 tileSize: 256 - accuratePlacement: 0 + buildHeightMesh: 0 maxJobWorkers: 0 preserveTilesOutsideBounds: 0 debug: @@ -128,6 +128,7 @@ PrefabInstance: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: + serializedVersion: 3 m_TransformParent: {fileID: 996985032} m_Modifications: - target: {fileID: 149840264038047480, guid: 117eea22f833ae64f95b5be7435be32d, type: 3} @@ -587,6 +588,9 @@ PrefabInstance: value: -40 objectReference: {fileID: 0} m_RemovedComponents: [] + m_RemovedGameObjects: [] + m_AddedGameObjects: [] + m_AddedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 117eea22f833ae64f95b5be7435be32d, type: 3} --- !u!224 &123332879 stripped RectTransform: @@ -635,7 +639,6 @@ RectTransform: m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 996985032} - m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} m_AnchorMax: {x: 1, y: 1} @@ -740,13 +743,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 575041316} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &705507993 GameObject: @@ -834,13 +837,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 705507993} + serializedVersion: 2 m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261} m_LocalPosition: {x: 0, y: 3, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0} --- !u!1 &963194225 GameObject: @@ -882,9 +885,17 @@ Camera: m_projectionMatrixMode: 1 m_GateFitMode: 2 m_FOVAxisMode: 0 + m_Iso: 200 + m_ShutterSpeed: 0.005 + m_Aperture: 16 + m_FocusDistance: 10 + m_FocalLength: 50 + m_BladeCount: 5 + m_Curvature: {x: 2, y: 11} + m_BarrelClipping: 0.25 + m_Anamorphism: 0 m_SensorSize: {x: 36, y: 24} m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 m_NormalizedViewPortRect: serializedVersion: 2 x: 0 @@ -918,13 +929,13 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 963194225} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 1, z: -10} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &996985028 GameObject: @@ -1003,7 +1014,9 @@ Canvas: m_OverrideSorting: 0 m_OverridePixelPerfect: 0 m_SortingBucketNormalizedSize: 0 + m_VertexColorAlwaysGammaSpace: 0 m_AdditionalShaderChannelsFlag: 25 + m_UpdateRectTransformForStandalone: 0 m_SortingLayerID: 0 m_SortingOrder: 0 m_TargetDisplay: 0 @@ -1023,7 +1036,6 @@ RectTransform: - {fileID: 1214904789} - {fileID: 123332879} m_Father: {fileID: 0} - m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} 
m_AnchorMax: {x: 0, y: 0} @@ -1046,7 +1058,6 @@ MonoBehaviour: _senderVideoWidth: 1280 _senderVideoHeight: 720 _senderVideoFps: 30 - _inputAudioSource: {fileID: 1825738054} _inputSceneCamera: {fileID: 963194227} _callScreen: {fileID: 123332880} _mainScreen: {fileID: 355221056687574256} @@ -1096,147 +1107,20 @@ Transform: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1354881823} + serializedVersion: 2 m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_ConstrainProportionsScale: 0 m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1825738053 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 1825738055} - - component: {fileID: 1825738054} - m_Layer: 0 - m_Name: MicrophoneInput - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!82 &1825738054 -AudioSource: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1825738053} - m_Enabled: 1 - serializedVersion: 4 - OutputAudioMixerGroup: {fileID: 0} - m_audioClip: {fileID: 0} - m_PlayOnAwake: 1 - m_Volume: 1 - m_Pitch: 1 - Loop: 0 - Mute: 0 - Spatialize: 0 - SpatializePostEffects: 0 - Priority: 128 - DopplerLevel: 1 - MinDistance: 1 - MaxDistance: 500 - Pan2D: 0 - rolloffMode: 0 - BypassEffects: 0 - BypassListenerEffects: 0 - BypassReverbZones: 0 - rolloffCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 1 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - - serializedVersion: 3 - time: 1 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - panLevelCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - spreadCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 0 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 - reverbZoneMixCustomCurve: - serializedVersion: 2 - m_Curve: - - serializedVersion: 3 - time: 0 - value: 1 - inSlope: 0 - outSlope: 0 - tangentMode: 0 - weightedMode: 0 - inWeight: 0.33333334 - outWeight: 0.33333334 - m_PreInfinity: 2 - m_PostInfinity: 2 - m_RotationOrder: 4 ---- !u!4 &1825738055 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1825738053} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_ConstrainProportionsScale: 0 - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 5 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1001 &355221056687574255 PrefabInstance: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: + serializedVersion: 3 m_TransformParent: {fileID: 996985032} m_Modifications: 
- target: {fileID: 355221055551435707, guid: 0718803385b9d8840b6d4f562c3d956a, type: 3} @@ -1812,6 +1696,9 @@ PrefabInstance: value: 0 objectReference: {fileID: 0} m_RemovedComponents: [] + m_RemovedGameObjects: [] + m_AddedGameObjects: [] + m_AddedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 0718803385b9d8840b6d4f562c3d956a, type: 3} --- !u!114 &355221056687574256 stripped MonoBehaviour: @@ -1824,3 +1711,12 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: ecb25fba984048238b2cf9ae786dab82, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!1660057539 &9223372036854775807 +SceneRoots: + m_ObjectHideFlags: 0 + m_Roots: + - {fileID: 1354881825} + - {fileID: 996985032} + - {fileID: 705507995} + - {fileID: 575041319} + - {fileID: 963194228} diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/StreamVideoManager.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/StreamVideoManager.cs index 3e29a317..1d5f8f6d 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/StreamVideoManager.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/StreamVideoManager.cs @@ -17,6 +17,18 @@ public class StreamVideoManager : MonoBehaviour public event Action CallEnded; public IStreamVideoClient Client { get; private set; } + + public void Init() + { + _clientConfig = new StreamClientConfig + { + LogLevel = StreamLogLevel.Debug, + }; + + Client = StreamVideoClient.CreateDefaultClient(_clientConfig); + Client.CallStarted += OnCallStarted; + Client.CallEnded += OnCallEnded; + } /// /// Join the Call with a given ID. We can either create it or try to join only. @@ -65,18 +77,6 @@ public void LeaveActiveCall() /// public void SetAudioREDundancyEncoding(bool value) => _clientConfig.Audio.EnableRed = value; - protected void Awake() - { - _clientConfig = new StreamClientConfig - { - LogLevel = StreamLogLevel.Debug, - }; - - Client = StreamVideoClient.CreateDefaultClient(_clientConfig); - Client.CallStarted += OnCallStarted; - Client.CallEnded += OnCallEnded; - } - protected async void Start() { var credentials = new AuthCredentials(_apiKey, _userId, _userToken); diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/CameraMediaDevicePanel.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/CameraMediaDevicePanel.cs index 98c0d4f2..d40b0b3f 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/CameraMediaDevicePanel.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/CameraMediaDevicePanel.cs @@ -1,11 +1,41 @@ using System.Collections.Generic; -using System.Linq; -using UnityEngine; +using StreamVideo.Core.DeviceManagers; namespace StreamVideo.ExampleProject.UI.Devices { - public class CameraMediaDevicePanel : MediaDevicePanelBase + public class CameraMediaDevicePanel : MediaDevicePanelBase { - protected override IEnumerable GetDevicesNames() => WebCamTexture.devices.Select(d => d.name); + protected override CameraDeviceInfo SelectedDevice => Client.VideoDeviceManager.SelectedDevice; + + protected override bool IsDeviceEnabled + { + get => Client.VideoDeviceManager.IsEnabled; + set => Client.VideoDeviceManager.SetEnabled(value); + } + + protected override IEnumerable GetDevices() => Client.VideoDeviceManager.EnumerateDevices(); + + protected override string GetDeviceName(CameraDeviceInfo device) => device.Name; + + protected override void ChangeDevice(CameraDeviceInfo device) + => Client.VideoDeviceManager.SelectDevice(device, UIManager.SenderVideoResolution, IsDeviceEnabled, + UIManager.SenderVideoFps); + + protected override 
void OnInit() + { + base.OnInit(); + + Client.VideoDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + } + + protected override void OnDestroying() + { + Client.VideoDeviceManager.SelectedDeviceChanged -= OnSelectedDeviceChanged; + + base.OnDestroying(); + } + + private void OnSelectedDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) + => SelectDeviceWithoutNotify(currentDevice); } } \ No newline at end of file diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MediaDevicePanelBase.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MediaDevicePanelBase.cs index 534884eb..3d92b5cd 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MediaDevicePanelBase.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MediaDevicePanelBase.cs @@ -1,45 +1,42 @@ -using System.Collections; +using System; +using System.Collections; using System.Collections.Generic; using System.Linq; +using StreamVideo.Core; using TMPro; using UnityEngine; namespace StreamVideo.ExampleProject.UI.Devices { - /// - /// Event handler for device changed event - /// - public delegate void DeviceChangeHandler(string deviceName, bool isActive); - - /// - /// Event handler for device toggled event - /// - public delegate void DeviceToggleHandler(bool isActive); - /// /// Panel that displays media device (microphone or camera) dropdown to pick the active device and a button to toggle on/off state /// - public abstract class MediaDevicePanelBase : MonoBehaviour + public abstract class MediaDevicePanelBase : MonoBehaviour + where TDevice : struct { - public event DeviceChangeHandler DeviceChanged; - public event DeviceToggleHandler DeviceToggled; + public void Init(IStreamVideoClient client, UIManager uiManager) + { + Client = client ?? throw new ArgumentNullException(nameof(client)); + UIManager = uiManager ? 
uiManager : throw new ArgumentNullException(nameof(uiManager)); - public string SelectedDeviceName { get; private set; } - - //StreamTodo: android has DeviceStatus: Enabled, Disabled, NotSelected - public bool IsDeviceActive { get; private set; } = true; + UpdateDevicesDropdown(GetDevices()); + + OnInit(); + } - public void SelectDeviceWithoutNotify(string deviceName) + public void SelectDeviceWithoutNotify(TDevice device) { - var index = _deviceNames.IndexOf(deviceName); + var index = _devices.IndexOf(device); if (index == -1) { - Debug.LogError($"Failed to find index for device: {deviceName}"); + Debug.LogError($"Failed to find index for device: {device}"); return; } _dropdown.SetValueWithoutNotify(index); } + + protected IStreamVideoClient Client { get; private set; } // Called by Unity protected void Awake() @@ -48,8 +45,6 @@ protected void Awake() _deviceButton.Init(_buttonOnSprite, _buttonOffSprite); _deviceButton.Clicked += OnDeviceButtonClicked; - - UpdateDevicesDropdown(GetDevicesNames().ToList()); _refreshDeviceInterval = new WaitForSeconds(0.5f); _refreshCoroutine = StartCoroutine(RefreshDevicesList()); @@ -58,20 +53,41 @@ protected void Awake() // Called by Unity protected void Start() { - _deviceButton.UpdateSprite(IsDeviceActive); + _deviceButton.UpdateSprite(IsDeviceEnabled); } // Called by Unity protected void OnDestroy() { + OnDestroying(); + if (_refreshCoroutine != null) { StopCoroutine(_refreshCoroutine); } } + + protected virtual void OnInit() + { + + } + + protected virtual void OnDestroying() + { + + } - protected abstract IEnumerable GetDevicesNames(); + protected abstract IEnumerable GetDevices(); + protected abstract TDevice SelectedDevice { get; } + protected abstract bool IsDeviceEnabled { get; set; } + protected UIManager UIManager { get; private set; } + protected abstract string GetDeviceName(TDevice device); + + protected abstract void ChangeDevice(TDevice device); + + private readonly List _devices = new List(); + [SerializeField] private Sprite _buttonOnSprite; @@ -86,28 +102,24 @@ protected void OnDestroy() private Coroutine _refreshCoroutine; private YieldInstruction _refreshDeviceInterval; - private readonly List _deviceNames = new List(); private void OnDropdownValueChanged(int optionIndex) { - var deviceName = _deviceNames.ElementAt(optionIndex); - if (deviceName == null) + var device = _devices.ElementAt(optionIndex); + if (device.Equals(default)) { Debug.LogError($"Failed to select device with index: {optionIndex}. 
Available devices: " + - string.Join(", ", _deviceNames)); + string.Join(", ", _devices)); return; } - SelectedDeviceName = deviceName; - - DeviceChanged?.Invoke(SelectedDeviceName, IsDeviceActive); + ChangeDevice(device); } private void OnDeviceButtonClicked() { - IsDeviceActive = !IsDeviceActive; - _deviceButton.UpdateSprite(IsDeviceActive); - DeviceToggled?.Invoke(IsDeviceActive); + IsDeviceEnabled = !IsDeviceEnabled; + _deviceButton.UpdateSprite(IsDeviceEnabled); } // User can add/remove devices any time so we must constantly monitor the devices list @@ -115,11 +127,16 @@ private IEnumerator RefreshDevicesList() { while (true) { - var availableDevices = GetDevicesNames().ToList(); - var devicesChanged = !_deviceNames.SequenceEqual(availableDevices); + while (Client == null) + { + yield return _refreshDeviceInterval; + } + + var availableDevices = GetDevices().ToList(); + var devicesChanged = !_devices.SequenceEqual(availableDevices); if (devicesChanged) { - var prevDevicesLog = string.Join(", ", _deviceNames); + var prevDevicesLog = string.Join(", ", _devices); var newDevicesLog = string.Join(", ", availableDevices); Debug.Log($"Device list changed. Previous: {prevDevicesLog}, Current: {newDevicesLog}"); @@ -130,17 +147,17 @@ private IEnumerator RefreshDevicesList() } } - private void UpdateDevicesDropdown(List devices) + private void UpdateDevicesDropdown(IEnumerable devices) { - _deviceNames.Clear(); - _deviceNames.AddRange(devices); + _devices.Clear(); + _devices.AddRange(devices); _dropdown.ClearOptions(); - _dropdown.AddOptions(devices); + _dropdown.AddOptions(devices.Select(GetDeviceName).ToList()); - if (!string.IsNullOrEmpty(SelectedDeviceName) && !devices.Contains(SelectedDeviceName)) + if (!EqualityComparer.Default.Equals(SelectedDevice, default) && !devices.Contains(SelectedDevice)) { - Debug.LogError($"Previously active device was unplugged: {SelectedDeviceName}"); + Debug.LogError($"Previously active device was unplugged: {SelectedDevice}"); //StreamTodo: handle case when user unplugged active device } } diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs index 876f9e62..704bbe8a 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Devices/MicrophoneMediaDevicePanel.cs @@ -1,10 +1,39 @@ using System.Collections.Generic; -using UnityEngine; +using StreamVideo.Core.DeviceManagers; namespace StreamVideo.ExampleProject.UI.Devices { - public class MicrophoneMediaDevicePanel : MediaDevicePanelBase + public class MicrophoneMediaDevicePanel : MediaDevicePanelBase { - protected override IEnumerable GetDevicesNames() => Microphone.devices; + protected override MicrophoneDeviceInfo SelectedDevice => Client.AudioDeviceManager.SelectedDevice; + + protected override bool IsDeviceEnabled + { + get => Client.AudioDeviceManager.IsEnabled; + set => Client.AudioDeviceManager.SetEnabled(value); + } + + protected override IEnumerable GetDevices() => Client.AudioDeviceManager.EnumerateDevices(); + + protected override string GetDeviceName(MicrophoneDeviceInfo device) => device.Name; + + protected override void ChangeDevice(MicrophoneDeviceInfo device) => Client.AudioDeviceManager.SelectDevice(device, IsDeviceEnabled); + + protected override void OnInit() + { + base.OnInit(); + + Client.AudioDeviceManager.SelectedDeviceChanged += 
OnSelectedDeviceChanged; + } + + protected override void OnDestroying() + { + Client.AudioDeviceManager.SelectedDeviceChanged -= OnSelectedDeviceChanged; + + base.OnDestroying(); + } + + private void OnSelectedDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) + => SelectDeviceWithoutNotify(currentDevice); } } \ No newline at end of file diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/ParticipantView.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/ParticipantView.cs index 8b9d2a12..ac22ea12 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/ParticipantView.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/ParticipantView.cs @@ -43,26 +43,13 @@ public void UpdateIsDominantSpeaker(bool isDominantSpeaker) /// public void SetLocalCameraSource(WebCamTexture localWebCamTexture) { - _localWebCamTexture = localWebCamTexture; - - if (_localParticipantRenderTexture != null) - { - // Dispose previous texture - _localParticipantRenderTexture.Release(); - _localParticipantRenderTexture = null; - } - if (localWebCamTexture == null) { _video.texture = null; return; } - _localParticipantRenderTexture = new RenderTexture(localWebCamTexture.width, localWebCamTexture.height, 0, RenderTextureFormat.Default); - _localParticipantRenderTexture.Create(); - - // we set RenderTexture a a RawImage.texture because the RenderTexture will receive video stream from the local camera - _video.texture = _localParticipantRenderTexture; + _video.texture = localWebCamTexture; } // Called by Unity Engine @@ -74,17 +61,14 @@ protected void Awake() // Called by Unity Engine protected void Update() { - if (_localWebCamTexture != null) - { - Graphics.Blit(_localWebCamTexture, _localParticipantRenderTexture); - } - var rect = _videoRectTransform.rect; var videoRenderedSize = new Vector2(rect.width, rect.height); if (videoRenderedSize != _lastVideoRenderedSize) { _lastVideoRenderedSize = videoRenderedSize; var videoResolution = new VideoResolution((int)videoRenderedSize.x, (int)videoRenderedSize.y); + + // To optimize bandwidth we always request the video resolution that matches what we're actually rendering Participant.UpdateRequestedVideoResolution(videoResolution); Debug.Log($"Rendered resolution changed for participant `{Participant.UserId}`. 
Requested video resolution update to: {videoResolution}"); } @@ -115,8 +99,6 @@ protected void OnDestroy() private Color32 _defaultSpeakerFrameColor; private AudioSource _audioSource; - private RenderTexture _localParticipantRenderTexture; - private WebCamTexture _localWebCamTexture; private RectTransform _videoRectTransform; private Vector2 _lastVideoRenderedSize; diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/BaseScreenView.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/BaseScreenView.cs index 83d2ac0e..28a317bc 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/BaseScreenView.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/BaseScreenView.cs @@ -51,8 +51,6 @@ public void Hide() protected abstract void OnHide(); - protected void Log(string message, LogType type) => UIManager.Log(message, type); - private GameObject _gameObject; } } \ No newline at end of file diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/CallScreenView.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/CallScreenView.cs index f0b5de93..ae136c51 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/CallScreenView.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/CallScreenView.cs @@ -61,12 +61,9 @@ protected override void OnInit() { _leaveBtn.onClick.AddListener(VideoManager.LeaveActiveCall); _endBtn.onClick.AddListener(VideoManager.EndActiveCall); - - _cameraPanel.DeviceChanged += UIManager.ChangeCamera; - _cameraPanel.DeviceToggled += UIManager.SetCameraActive; - _microphonePanel.DeviceChanged += UIManager.ChangeMicrophone; - _microphonePanel.DeviceToggled += UIManager.SetMicrophoneActive; + _cameraPanel.Init(VideoManager.Client, UIManager); + _microphonePanel.Init(VideoManager.Client, UIManager); } protected override void OnShow(ShowArgs showArgs) @@ -93,7 +90,7 @@ protected override void OnShow(ShowArgs showArgs) _activeCall.SortedParticipantsUpdated += SortParticipantViews; - UIManager.ActiveCameraChanged += OnActiveCameraChanged; + UIManager.LocalCameraChanged += OnLocalCameraChanged; // Show active call ID so user can copy it and send others to join _joinCallIdInput.text = _activeCall.Id; @@ -112,7 +109,7 @@ protected override void OnHide() RemoveAllParticipants(); - UIManager.ActiveCameraChanged -= OnActiveCameraChanged; + UIManager.LocalCameraChanged -= OnLocalCameraChanged; } private void OnDominantSpeakerChanged(IStreamVideoCallParticipant currentDominantSpeaker, @@ -156,7 +153,9 @@ private void AddParticipant(IStreamVideoCallParticipant participant, bool sortPa if (participant.IsLocalParticipant) { // Set input camera as a video source for local participant - we won't receive OnTrack event for local participant - view.SetLocalCameraSource(UIManager.ActiveCamera); + var webCamTexture = VideoManager.Client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + view.SetLocalCameraSource(webCamTexture); + //StreamTodo: this will invalidate each time WebCamTexture is internally replaced so we need a better way to expose this } if (sortParticipantViews) @@ -224,7 +223,7 @@ private void RemoveAllParticipants() _participantSessionIdToView.Clear(); } - private void OnActiveCameraChanged(WebCamTexture activeCamera) + private void OnLocalCameraChanged(WebCamTexture activeCamera) { // Input Camera changed so let's update the preview for local participant var localParticipant diff --git a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/MainScreenView.cs 
b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/MainScreenView.cs index 5d398789..a365270a 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/MainScreenView.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/Screens/MainScreenView.cs @@ -1,5 +1,4 @@ using System; -using System.Linq; using StreamVideo.ExampleProject.UI.Devices; using TMPro; using UnityEngine; @@ -29,24 +28,18 @@ protected override void OnInit() _audioRedToggle.onValueChanged.AddListener(VideoManager.SetAudioREDundancyEncoding); _audioDtxToggle.onValueChanged.AddListener(VideoManager.SetAudioDtx); - _cameraPanel.DeviceChanged += UIManager.ChangeCamera; - _cameraPanel.DeviceToggled += UIManager.SetCameraActive; - - _microphonePanel.DeviceChanged += UIManager.ChangeMicrophone; - _microphonePanel.DeviceToggled += UIManager.SetMicrophoneActive; - - SmartPickDefaultCamera(); - SmartPickDefaultMicrophone(); + _cameraPanel.Init(VideoManager.Client, UIManager); + _microphonePanel.Init(VideoManager.Client, UIManager); } protected override void OnShow(CallScreenView.ShowArgs showArgs) { - UIManager.ActiveCameraChanged += OnActiveCameraChanged; + UIManager.LocalCameraChanged += OnLocalCameraChanged; } protected override void OnHide() { - UIManager.ActiveCameraChanged -= OnActiveCameraChanged; + UIManager.LocalCameraChanged -= OnLocalCameraChanged; } [SerializeField] @@ -82,7 +75,7 @@ private async void OnJoinCallButtonClicked() { if (string.IsNullOrEmpty(_joinCallIdInput.text)) { - Log("`Call ID` is required when trying to join a call", LogType.Error); + Debug.LogError("`Call ID` is required when trying to join a call"); return; } @@ -107,60 +100,11 @@ private async void OnCreateAndJoinCallButtonClicked() } } - private void OnActiveCameraChanged(WebCamTexture activeCamera) + private void OnLocalCameraChanged(WebCamTexture activeCamera) { _localCameraImage.texture = activeCamera; } - private void SmartPickDefaultCamera() - { - var devices = WebCamTexture.devices; - -#if UNITY_STANDALONE_WIN - //StreamTodo: remove this, "Capture" is our debug camera - _defaultCamera = devices.FirstOrDefault(d => d.name.Contains("Capture")); - -#elif UNITY_ANDROID || UNITY_IOS - _defaultCamera = devices.FirstOrDefault(d => d.isFrontFacing); -#endif - - if (string.IsNullOrEmpty(_defaultCamera.name)) - { - _defaultCamera = devices.FirstOrDefault(); - } - - if (string.IsNullOrEmpty(_defaultCamera.name)) - { - Debug.LogError("Failed to pick default camera device"); - return; - } - - _cameraPanel.SelectDeviceWithoutNotify(_defaultCamera.name); - UIManager.ChangeCamera(_defaultCamera.name, _cameraPanel.IsDeviceActive); - } - - //StreamTodo: remove - private void SmartPickDefaultMicrophone() - { - var preferredMicDevices = new[] { "bose", "airpods" }; - _defaultMicrophoneDeviceName = Microphone.devices.FirstOrDefault(d - => preferredMicDevices.Any(m => d.IndexOf(m, StringComparison.OrdinalIgnoreCase) != -1)); - - if (string.IsNullOrEmpty(_defaultMicrophoneDeviceName)) - { - _defaultMicrophoneDeviceName = Microphone.devices.FirstOrDefault(); - } - - if (string.IsNullOrEmpty(_defaultMicrophoneDeviceName)) - { - Debug.LogError("Failed to pick default microphone device"); - return; - } - - _microphonePanel.SelectDeviceWithoutNotify(_defaultMicrophoneDeviceName); - UIManager.ChangeMicrophone(_defaultMicrophoneDeviceName, _microphonePanel.IsDeviceActive); - } - private static string CreateRandomCallId() => Guid.NewGuid().ToString().Replace("-", ""); } } \ No newline at end of file diff --git 
a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/UIManager.cs b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/UIManager.cs index 1c19469a..21fb2721 100644 --- a/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/UIManager.cs +++ b/Packages/StreamVideo/Samples~/VideoChat/Scripts/UI/UIManager.cs @@ -1,128 +1,51 @@ using System; +using System.Linq; +using System.Threading.Tasks; +using StreamVideo.Core; +using StreamVideo.Core.DeviceManagers; using StreamVideo.Core.StatefulModels; using StreamVideo.ExampleProject.UI.Screens; +using StreamVideo.Libs.Utils; using UnityEngine; namespace StreamVideo.ExampleProject.UI { public class UIManager : MonoBehaviour { - public event Action ActiveCameraChanged; + public event Action LocalCameraChanged; - public WebCamTexture ActiveCamera { get; private set; } - public AudioSource InputAudioSource => _inputAudioSource; - public Camera InputSceneSource => _inputSceneCamera; - - public void ChangeMicrophone(string deviceName, bool isActive) - { - if (!string.IsNullOrEmpty(_selectedMicrophoneDeviceName)) - { - StopAudioRecording(); - } - - var prevDevice = _selectedMicrophoneDeviceName ?? "None"; - _selectedMicrophoneDeviceName = deviceName; - - if (isActive) - { - StartAudioRecording(); - } - - Debug.Log( - $"Changed selected MICROPHONE from `{prevDevice}` to `{_selectedMicrophoneDeviceName}`. Recording: {isActive}"); - } - - public void ChangeCamera(string deviceName, bool isActive) - { - var prevDevice = ActiveCamera != null ? ActiveCamera.deviceName : "None"; - - if (ActiveCamera == null) - { - ActiveCamera = new WebCamTexture(deviceName, _senderVideoWidth, _senderVideoHeight, _senderVideoFps); - } - - // Camera needs to be stopped before changing the deviceName - ActiveCamera.Stop(); - ActiveCamera.deviceName = deviceName; - - if (isActive) - { - ActiveCamera.Play(); - //StreamTodo: handle in coroutine and check if the camera started - } - - Debug.Log($"Changed active CAMERA from `{prevDevice}` to `{deviceName}`"); - - _videoManager.Client?.SetCameraInputSource(ActiveCamera); - - ActiveCameraChanged?.Invoke(ActiveCamera); - } - - /// - /// Start/stop microphone recording - /// - public void SetMicrophoneActive(bool isActive) - { - _videoManager.Client.AudioDeviceManager.SetEnabled(isActive); - - if (isActive) - { - StartAudioRecording(); - return; - } - - StopAudioRecording(); - } - - /// - /// Start/stop camera recording - /// - public void SetCameraActive(bool isActive) - { - _videoManager.Client.VideoDeviceManager.SetEnabled(isActive); - - if (isActive) - { - ActiveCamera.Play(); - Debug.Log($"Camera recording started for `{ActiveCamera.deviceName}`"); - return; - } - - ActiveCamera.Stop(); - Debug.Log($"Camera recording stopped for `{ActiveCamera.deviceName}`"); - } - - public void Log(string message, LogType type) - { - if (type == LogType.Exception) - { - throw new NotSupportedException("To log exceptions use " + nameof(Debug.LogException)); - } - - Debug.LogFormat(type, LogOption.None, context: null, format: message); - } + public VideoResolution SenderVideoResolution => new VideoResolution(_senderVideoWidth, _senderVideoHeight); + public int SenderVideoFps => _senderVideoFps; protected void Awake() { + _videoManager.Init(); + _videoManager.CallStarted += OnCallStarted; _videoManager.CallEnded += OnCallEnded; + _videoManager.Client.VideoDeviceManager.SelectedDeviceChanged += OnCameraDeviceChanged; + _videoManager.Client.AudioDeviceManager.SelectedDeviceChanged += OnMicrophoneDeviceChanged; + _mainScreen.Init(_videoManager, uiManager: 
this); _callScreen.Init(_videoManager, uiManager: this); - } - - protected void Start() - { - _videoManager.Client.SetAudioInputSource(_inputAudioSource); - _videoManager.Client.SetCameraInputSource(ActiveCamera); - ShowMainScreen(); + SelectFirstWorkingCameraOrDefaultAsync().LogIfFailed(); + SelectFirstMicrophone(); } + protected void Start() => ShowMainScreen(); + protected void OnDestroy() { _videoManager.CallStarted -= OnCallStarted; _videoManager.CallEnded -= OnCallEnded; + + if (_videoManager.Client != null) + { + _videoManager.Client.VideoDeviceManager.SelectedDeviceChanged -= OnCameraDeviceChanged; + _videoManager.Client.AudioDeviceManager.SelectedDeviceChanged -= OnMicrophoneDeviceChanged; + } } [SerializeField] @@ -137,70 +60,79 @@ protected void OnDestroy() [SerializeField] private int _senderVideoFps = 30; - [SerializeField] - private AudioSource _inputAudioSource; - - [SerializeField] - private Camera _inputSceneCamera; - [SerializeField] private CallScreenView _callScreen; [SerializeField] private MainScreenView _mainScreen; - private string _selectedMicrophoneDeviceName; - private void OnCallStarted(IStreamCall call) => ShowCallScreen(call); private void OnCallEnded() => ShowMainScreen(); - private void StartAudioRecording() + private void ShowMainScreen() + { + _callScreen.Hide(); + _mainScreen.Show(); + } + + private void ShowCallScreen(IStreamCall call) + { + _mainScreen.Hide(); + _callScreen.Show(new CallScreenView.ShowArgs(call)); + } + + private void OnMicrophoneDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) + { + Debug.Log($"Changed selected MICROPHONE from `{previousDevice}` to `{currentDevice}`"); + } + + private void OnCameraDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) + { + Debug.Log($"Changed active CAMERA from `{previousDevice}` to `{currentDevice}`"); + + var webCamTexture = _videoManager.Client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + LocalCameraChanged?.Invoke(webCamTexture); + } + + private async Task SelectFirstWorkingCameraOrDefaultAsync() { - if (_inputAudioSource == null) + if (!_videoManager.Client.VideoDeviceManager.EnumerateDevices().Any()) { - Debug.LogError("Audio recording failed. Input Audio Source is null"); + Debug.LogError("No camera devices found! Video streaming will not work. Please ensure that a camera device is plugged in."); return; } - - if (string.IsNullOrEmpty(_selectedMicrophoneDeviceName)) + + var workingDevice = await _videoManager.Client.VideoDeviceManager.TryFindFirstWorkingDeviceAsync(); + if (workingDevice.HasValue) { - Debug.LogError("Audio recording failed. No microphone device selected."); + _videoManager.Client.VideoDeviceManager.SelectDevice(workingDevice.Value, enable: false); return; } - - //StreamTodo: should the volume be 0 so we never hear input from our own microphone? - _inputAudioSource.clip - = Microphone.Start(_selectedMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); - _inputAudioSource.loop = true; - _inputAudioSource.Play(); - - Debug.Log($"Audio recording started for `{_selectedMicrophoneDeviceName}`"); - } - - private void StopAudioRecording() - { - var isRecording = !string.IsNullOrEmpty(_selectedMicrophoneDeviceName) && - Microphone.IsRecording(_selectedMicrophoneDeviceName); - if (!isRecording) + + Debug.LogWarning("No working camera found. 
Falling back to first device."); + + var firstDevice = _videoManager.Client.VideoDeviceManager.EnumerateDevices().FirstOrDefault(); + if (firstDevice == default) { + Debug.LogError("No camera devices found! Video streaming will not work. Please ensure that a camera device is plugged in."); return; } - - Microphone.End(_selectedMicrophoneDeviceName); - Debug.Log($"Audio recording stopped for `{_selectedMicrophoneDeviceName}`"); - } - - private void ShowMainScreen() - { - _callScreen.Hide(); - _mainScreen.Show(); + + _videoManager.Client.VideoDeviceManager.SelectDevice(firstDevice, enable: false); } - private void ShowCallScreen(IStreamCall call) + private void SelectFirstMicrophone() { - _mainScreen.Hide(); - _callScreen.Show(new CallScreenView.ShowArgs(call)); + // Select first microphone by default + var microphoneDevice = _videoManager.Client.AudioDeviceManager.EnumerateDevices().FirstOrDefault(); + if (microphoneDevice == default) + { + Debug.LogError("No microphone devices found! Audio streaming will not work. Please ensure that a microphone device is plugged in."); + return; + } + + _videoManager.Client.AudioDeviceManager.SelectDevice(microphoneDevice, enable: false); } } } \ No newline at end of file diff --git a/Packages/StreamVideo/Tests/Runtime/CallsTests.cs b/Packages/StreamVideo/Tests/Runtime/CallsTests.cs index d25eb2a9..46a28b40 100644 --- a/Packages/StreamVideo/Tests/Runtime/CallsTests.cs +++ b/Packages/StreamVideo/Tests/Runtime/CallsTests.cs @@ -29,19 +29,20 @@ private async Task When_two_clients_join_same_call_expect_no_errors_Async(ITestC [UnityTest] public IEnumerator When_client_joins_call_with_video_expect_receiving_video_track() - => ConnectAndExecute(When_client_joins_call_with_video_expect_receiving_video_track_Async); + => ConnectAndExecute(When_client_joins_call_with_video_expect_receiving_video_track_Async, + ignoreFailingMessages: true); private async Task When_client_joins_call_with_video_expect_receiving_video_track_Async( ITestClient clientA, ITestClient clientB) { var streamCall = await clientA.JoinRandomCallAsync(); - var webCamTexture = DisposableAssetsProvider.WebCamTextureFactory.Create(WebCamTexture.devices.First().name, 1920, 1080, 20); - webCamTexture.Play(); + var cameraDevice = await TestUtils.TryGetFirstWorkingCameraDeviceAsync(clientA.Client); + Debug.Log("Selected camera device: " + cameraDevice); + clientA.Client.VideoDeviceManager.SelectDevice(cameraDevice, enable: true); - clientA.Client.SetCameraInputSource(webCamTexture); - - var call = await clientB.Client.JoinCallAsync(streamCall.Type, streamCall.Id, create: false, ring: false, + var call = await clientB.Client.JoinCallAsync(streamCall.Type, streamCall.Id, create: false, + ring: false, notify: false); var otherParticipant = call.Participants.First(p => !p.IsLocalParticipant); @@ -54,21 +55,19 @@ private async Task When_client_joins_call_with_video_expect_receiving_video_trac } else { - otherParticipant.TrackAdded += (_, track) => - { - streamTrack = (StreamVideoTrack)track; - }; + otherParticipant.TrackAdded += (_, track) => { streamTrack = (StreamVideoTrack)track; }; await WaitForConditionAsync(() => streamTrack != null); } Assert.IsNotNull(streamTrack); } - + [UnityTest] public IEnumerator When_client_enables_video_during_call_expect_other_client_receiving_video_track() - => ConnectAndExecute(When_client_enables_video_during_call_expect_other_client_receiving_video_track_Async); - + => ConnectAndExecute(When_client_enables_video_during_call_expect_other_client_receiving_video_track_Async, 
+ ignoreFailingMessages: true); + private async Task When_client_enables_video_during_call_expect_other_client_receiving_video_track_Async( ITestClient clientA, ITestClient clientB) { @@ -84,23 +83,18 @@ private async Task When_client_enables_video_during_call_expect_other_client_rec // Watch other participant video track StreamVideoTrack streamTrack = null; - otherParticipant.TrackAdded += (_, track) => - { - streamTrack = (StreamVideoTrack)track; - }; - + otherParticipant.TrackAdded += (_, track) => { streamTrack = (StreamVideoTrack)track; }; + // First participant - enable video track - var webCamTexture = DisposableAssetsProvider.WebCamTextureFactory.Create(WebCamTexture.devices.First().name, 1920, 1080, 20); - webCamTexture.Play(); + var cameraDevice = await TestUtils.TryGetFirstWorkingCameraDeviceAsync(clientA.Client); + clientA.Client.VideoDeviceManager.SelectDevice(cameraDevice, enable: true); - clientA.Client.SetCameraInputSource(webCamTexture); - // Wait for event await WaitForConditionAsync(() => streamTrack != null); Assert.IsNotNull(streamTrack); } - + //StreamTodo: test EndedAt field. (1) is it set when /video/call/{type}/{id}/mark_ended is called, (2) what happens if participants just leave the call // (3) if we re-join a previously ended call, is the endedAt null again? } diff --git a/Packages/StreamVideo/Tests/Shared/TestClient.cs b/Packages/StreamVideo/Tests/Shared/TestClient.cs index 01437166..2bb37c5e 100644 --- a/Packages/StreamVideo/Tests/Shared/TestClient.cs +++ b/Packages/StreamVideo/Tests/Shared/TestClient.cs @@ -83,7 +83,7 @@ var credentials Debug.Log($"Client connected in {timer.Elapsed.TotalSeconds:F2} seconds"); } - + private class DemoCredentialsApiResponse { public string UserId; diff --git a/Packages/StreamVideo/Tests/Shared/TestUtils.cs b/Packages/StreamVideo/Tests/Shared/TestUtils.cs index 02d26845..7797dc60 100644 --- a/Packages/StreamVideo/Tests/Shared/TestUtils.cs +++ b/Packages/StreamVideo/Tests/Shared/TestUtils.cs @@ -3,6 +3,9 @@ using System.Collections; using System.Linq; using System.Threading.Tasks; +using StreamVideo.Core; +using StreamVideo.Core.DeviceManagers; +using UnityEngine.TestTools; using UnityEditor.PackageManager; namespace StreamVideo.Tests.Shared @@ -12,13 +15,23 @@ public static class TestUtils public const string StreamVideoPackageName = "io.getstream.video"; public static IEnumerator RunAsIEnumerator(this Task task, - Action onSuccess = null) + Action onSuccess = null, bool ignoreFailingMessages = false) { + if (ignoreFailingMessages) + { + LogAssert.ignoreFailingMessages = true; + } + while (!task.IsCompleted) { yield return null; } + if (ignoreFailingMessages) + { + LogAssert.ignoreFailingMessages = false; + } + if (task.IsFaulted) { throw task.Exception; @@ -26,6 +39,22 @@ public static IEnumerator RunAsIEnumerator(this Task task, onSuccess?.Invoke(); } + + //StreamTodo: put this in VideoDeviceManager? 
+ public static async Task TryGetFirstWorkingCameraDeviceAsync(IStreamVideoClient client) + { + var cameraManager = client.VideoDeviceManager; + foreach (var cameraDevice in cameraManager.EnumerateDevices()) + { + var isWorking = await cameraManager.TestDeviceAsync(cameraDevice, 0.5f); + if (isWorking) + { + return cameraDevice; + } + } + + return cameraManager.EnumerateDevices().First(); + } public static async Task GetStreamVideoPackageInfo() { diff --git a/Packages/StreamVideo/Tests/Shared/TestsBase.cs b/Packages/StreamVideo/Tests/Shared/TestsBase.cs index f97141e6..0c0ed17b 100644 --- a/Packages/StreamVideo/Tests/Shared/TestsBase.cs +++ b/Packages/StreamVideo/Tests/Shared/TestsBase.cs @@ -13,8 +13,9 @@ namespace StreamVideo.Tests.Shared { public delegate Task SingleClientTestHandler(ITestClient client); + public delegate Task TwoClientsTestHandler(ITestClient client1, ITestClient client2); - + public class TestsBase { [OneTimeSetUp] @@ -47,7 +48,7 @@ public async void TearDown() { return (true, TimeSpan.Zero); } - + var stopwatch = new Stopwatch(); stopwatch.Start(); while (stopwatch.ElapsedMilliseconds < timeoutMs) @@ -59,25 +60,26 @@ public async void TearDown() return (true, stopwatch.Elapsed); } } - + return (false, stopwatch.Elapsed); } - + protected static IEnumerator ConnectAndExecute(Func test) { yield return ConnectAndExecuteAsync(_ => test()).RunAsIEnumerator(); } - + protected static IEnumerator ConnectAndExecute(SingleClientTestHandler test) { yield return ConnectAndExecuteAsync(clients => test(clients[0]), clientsToSpawn: 1).RunAsIEnumerator(); } - - protected static IEnumerator ConnectAndExecute(TwoClientsTestHandler test) + + protected static IEnumerator ConnectAndExecute(TwoClientsTestHandler test, bool ignoreFailingMessages = false) { - yield return ConnectAndExecuteAsync(clients => test(clients[0], clients[1]), clientsToSpawn: 2).RunAsIEnumerator(); + yield return ConnectAndExecuteAsync(clients => test(clients[0], clients[1]), clientsToSpawn: 2) + .RunAsIEnumerator(ignoreFailingMessages: ignoreFailingMessages); } - + private static async Task ConnectAndExecuteAsync(Func test, int clientsToSpawn = 1) { var clients = await StreamTestClientProvider.Instance.GetConnectedTestClientsAsync(clientsToSpawn); @@ -110,7 +112,8 @@ private static async Task ConnectAndExecuteAsync(Func test, if (!completed) { - throw new AggregateException($"Failed all attempts. Last Exception: {exceptions.Last().Message} ", exceptions); + throw new AggregateException($"Failed all attempts. 
Last Exception: {exceptions.Last().Message} ", + exceptions); } } } diff --git a/Packages/manifest.json b/Packages/manifest.json index f6abcf19..1b209937 100644 --- a/Packages/manifest.json +++ b/Packages/manifest.json @@ -6,6 +6,7 @@ "com.unity.ide.rider": "3.0.27", "com.unity.ide.visualstudio": "2.0.18", "com.unity.ide.vscode": "1.2.5", + "com.unity.mobile.android-logcat": "1.4.1", "com.unity.nuget.newtonsoft-json": "3.2.1", "com.unity.test-framework": "1.1.33", "com.unity.textmeshpro": "3.0.6", diff --git a/Packages/packages-lock.json b/Packages/packages-lock.json index f103ed23..4fb65873 100644 --- a/Packages/packages-lock.json +++ b/Packages/packages-lock.json @@ -69,6 +69,13 @@ "dependencies": {}, "url": "https://packages.unity.com" }, + "com.unity.mobile.android-logcat": { + "version": "1.4.1", + "depth": 0, + "source": "registry", + "dependencies": {}, + "url": "https://packages.unity.com" + }, "com.unity.nuget.newtonsoft-json": { "version": "3.2.1", "depth": 0, diff --git a/ProjectSettings/ProjectSettings.asset b/ProjectSettings/ProjectSettings.asset index 10f66c3d..36e7a6ff 100644 --- a/ProjectSettings/ProjectSettings.asset +++ b/ProjectSettings/ProjectSettings.asset @@ -162,14 +162,14 @@ PlayerSettings: tvOS: 0 overrideDefaultApplicationIdentifier: 0 AndroidBundleVersionCode: 1 - AndroidMinSdkVersion: 22 + AndroidMinSdkVersion: 23 AndroidTargetSdkVersion: 0 AndroidPreferredInstallLocation: 1 aotOptions: stripEngineCode: 1 iPhoneStrippingLevel: 0 iPhoneScriptCallOptimization: 0 - ForceInternetPermission: 0 + ForceInternetPermission: 1 ForceSDCardPermission: 0 CreateWallpaper: 0 APKExpansionFiles: 0 @@ -245,7 +245,7 @@ PlayerSettings: useCustomBaseGradleTemplate: 0 useCustomGradlePropertiesTemplate: 0 useCustomProguardFile: 0 - AndroidTargetArchitectures: 1 + AndroidTargetArchitectures: 2 AndroidTargetDevices: 0 AndroidSplashScreenScale: 0 androidSplashScreen: {fileID: 0} diff --git a/docusaurus/docs/Unity/01-basics/03-quickstart.mdx b/docusaurus/docs/Unity/01-basics/03-quickstart.mdx index 3a52db54..8743c159 100644 --- a/docusaurus/docs/Unity/01-basics/03-quickstart.mdx +++ b/docusaurus/docs/Unity/01-basics/03-quickstart.mdx @@ -70,86 +70,77 @@ var callId = "my-call-id"; var streamCall = await _client.JoinCallAsync(callType, callId, create: false, ring: true, notify: false); ``` +## Capture Audio from a Microphone -## Setting Audio Input - -#### Bind microphone device to a `AudioSource` component - -This code will get the first microphone device from Unity's `Microphone.devices` list and stream it's input into a `AudioSource` component. +The `AudioDeviceManager` manages all interactions with microphone devices. Below are several fundamental operations; for a comprehensive list, please visit our [Camera & Microphone](https://getstream.io/video/docs/unity/guides/camera-and-microphone/) documentation section. +List available microphone devices: ```csharp -// Obtain reference to an AudioSource that will be used a source of audio -var inputAudioSource = GetComponent(); - -// Get a valid microphone device name. 
-// You usually want to populate a dropdown list with Microphone.devices so that the user can pick which device should be used -_activeMicrophoneDeviceName = Microphone.devices.First(); +var microphones = _client.AudioDeviceManager.EnumerateDevices(); -inputAudioSource.clip - = Microphone.Start(_activeMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); -inputAudioSource.loop = true; -inputAudioSource.Play(); +foreach (var mic in microphones) +{ + Debug.Log(mic.Name); +} ``` -- For standalone platforms like Windows, macOS, and Linux, you usually want to provide the user with a dropdown menu populated from the `Microphone.devices` so that the user can select the active microphone. -- For mobile platforms like Android or IOS the underlying OS is controlling the devices and the `Microphone.devices` - -Please also note that on mobile platforms you need to request appropriate [User Permissions](https://docs.unity3d.com/ScriptReference/Android.Permission.RequestUserPermission.html) in order to make use of the Microphone and the Camera. +Select active device: +```csharp +var firstMicrophone = microphones.First(); -#### Set `AudioSource` component as an Input Source for Audio. +// Select microphone device to capture audio input. `enable` argument determines whether audio capturing should start +_client.AudioDeviceManager.SelectDevice(firstMicrophone, enable: true); +``` +The `enable` argument determines whether audio capture should start for this device. -You can provide any `AudioSource` component as an input for audio. All you need to do is call the `_client.SetAudioInputSource(audioSource)` method as in the following example: +You can start/stop audio capturing with: ```csharp -// Obtain reference to an AudioSource that will be used a source of audio -var audioSource = GetComponent(); -_client.SetAudioInputSource(audioSource); +// Start audio capturing +_client.AudioDeviceManager.Enable(); + +// Stop audio capturing +_client.AudioDeviceManager.Disable(); ``` -Now the provided `AudioSource` will be used as an audio input for audio communication in calls. -Please note that the `AudioSrouce` does not necessarily need to be associated with a microphone device. This is indeed the most common use case but the `AudioSource` in fact serves as an audio buffer, so you can implement many other use cases with how the audio input is gathered. +#### Android & iOS platforms -#### Read more +For platforms like Android and iOS, the user needs to grant permission to access the microphone devices. You can read more about requesting permissions in the [Camera & Microphone](https://getstream.io/video/docs/unity/guides/camera-and-microphone/) docs section. -Please refer to Unity's documentation for more information on how to use Microphone devices: -* [Microphone.devices](https://docs.unity3d.com/ScriptReference/Microphone-devices.html) -* [Microphone.Start](https://docs.unity3d.com/ScriptReference/Microphone.Start.html) -* [Microphone.End](https://docs.unity3d.com/ScriptReference/Microphone.End.html) +## Capture Video from a Web Camera -## Setting Video Input +The `VideoDeviceManager` manages all interactions with camera devices. Below are several fundamental operations; for a comprehensive list, please visit our [Camera & Microphone](https://getstream.io/video/docs/unity/guides/camera-and-microphone/) documentation section. -You can use Unity's [WebCamTexture](https://docs.unity3d.com/ScriptReference/WebCamTexture.html) to interact with the camera device. 
+List available camera devices: +```csharp +var cameras = _client.VideoDeviceManager.EnumerateDevices(); -Once you select the camera device you should create a new instance of `WebCamTexture` and call `Play()` on it. +foreach (var cam in cameras) +{ + Debug.Log(cam.Name); +} +``` -Now you can set the `WebCamTexture` as a video input with `_client.SetCameraInputSource(activeCamera);` as shown in the following example: +Select active device: ```csharp -// Obtain a camera device -var cameraDevice = WebCamTexture.devices.First(); +var firstCamera = cameras.First(); -var width = 1920; -var height = 1080; -var fps = 30; - -// Use device name to create a new WebCamTexture instance -var activeCamera = new WebCamTexture(cameraDevice.name, width, height, fps); +// Select camera device to capture video input. `enable` argument determines whether video capturing should start +_client.VideoDeviceManager.SelectDevice(firstCamera, enable: true); +``` +The `enable` argument determines whether video capture should start immediately for this device. -// Call Play() in order to start capturing the video -activeCamera.Play(); +You can start/stop video capturing with: +```csharp +// Start video capturing +_client.VideoDeviceManager.Enable(); -// Set WebCamTexture in Stream's Client - this WebCamTexture will be the video source in video calls -_client.SetCameraInputSource(activeCamera); +// Stop video capturing +_client.VideoDeviceManager.Disable(); ``` -- For standalone platforms like Windows, macOS, and Linux, you usually want to provide the user with a dropdown menu populated from the `WebCamTexture.devices` so that the user can select the active camera. -- On mobile platforms like Android or IOS there will usually be two camera devices present in the `WebCamTexture.devices`: The front camera and the back camera. So you may either select the front camera automatically or give user an option to toggle between front and back cameras depending on your use case. - -#### Read more +#### Android & iOS platforms -Please refer to Unity's documentation for more information on how to use Camera devices: -* [WebCamTexture](https://docs.unity3d.com/ScriptReference/WebCamTexture.html) -* [WebCamTexture.devices](https://docs.unity3d.com/ScriptReference/WebCamTexture-devices.html) -* [WebCamTexture.Play](https://docs.unity3d.com/ScriptReference/WebCamTexture.Play.html) -* [WebCamTexture.Stop](https://docs.unity3d.com/ScriptReference/WebCamTexture.Stop.html) +For platforms like Android and iOS, a user needs to grant permission to access the camera devices. You can read more about requesting permissions in the [Camera & Microphone](https://getstream.io/video/docs/unity/guides/camera-and-microphone/) docs section. ## Handling participants diff --git a/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx b/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx index f70911fb..65c4b42d 100644 --- a/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx +++ b/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx @@ -3,134 +3,337 @@ title: Camera & Microphone description: Docs on sending & receiving --- -This page shortly describes how to send **Audio** and **Video** data to other participants. +This page details how to interact with **Microphone** and **Camera** devices to transmit audio and video streams to other call participants. 
Handling camera and microphone inputs in Unity can be challenging; however, Stream's Video SDK for Unity simplifies the process by managing the complexities internally, facilitating easy interaction with these devices. -Before you can set audio and video input sources you need to set up an instance of `StreamVideoClient`. Follow the [Client & Auth](../03-guides/01-client-auth.mdx) guide to learn how to do it. +## Interacting with Microphone -## Setup sending Audio +All interactions with microphone devices are handled by the `AudioDeviceManager` that can be accessed via `_client.AudioDeviceManager`. The `_client` field is an instance of `IStreamVideoClient`; if you haven't set up the video chat client yet, you can follow the [Client & Auth](../03-guides/01-client-auth.mdx) guide to learn how to do it. -In order to send audio data you need to set an instance of [AudioSource](https://docs.unity3d.com/ScriptReference/AudioSource.html) as an **input source** by calling the `SetAudioInputSource` method on the instance of `StreamVideoClient`. +#### List available microphone devices + +All available microphone devices can be accessed with `AudioDeviceManager.EnumerateDevices()`. This method returns an `IEnumerable<MicrophoneDeviceInfo>`. +The `MicrophoneDeviceInfo` is a struct representing a single device. You can access the name of the device by the `Name` property. + +```csharp +var microphones = _client.AudioDeviceManager.EnumerateDevices(); + +foreach (var mic in microphones) +{ + Debug.Log(mic.Name); // Get microphone name +} +``` + +#### Select microphone + +**Declaration** + +`public void SelectDevice(MicrophoneDeviceInfo device, bool enable)` + +| Argument | Description | +| --- | --- | +| `device` | Microphone to select | +| `enable` | Enabled device is capturing audio input | + +```csharp +_client.AudioDeviceManager.SelectDevice(microphone, enable: true); +``` + +#### Get selected microphone + +You get the currently selected microphone device via `AudioDeviceManager.SelectedDevice`. Please note that the returned `MicrophoneDeviceInfo` is a struct, meaning it is never `null`; if no device is selected, it holds the default value. + +```csharp +var selectedMicrophone = _client.AudioDeviceManager.SelectedDevice; +``` + +#### Start/Stop audio capturing + +Once a device is selected, you can start/stop the audio capturing with the `Enable()`, `Disable()`, or `SetEnabled(bool isEnabled)` methods. + +```csharp +// Enable device to start capturing microphone input +_client.AudioDeviceManager.Enable(); + +// Disable device to stop capturing microphone input +_client.AudioDeviceManager.Disable(); + +// Set the enabled state by passing a boolean argument +_client.AudioDeviceManager.SetEnabled(true); +``` + +#### Check if microphone is enabled + +An enabled device is actively capturing audio input from the selected microphone. + +```csharp +var isDeviceEnabled = _client.AudioDeviceManager.IsEnabled; +``` + +#### Events + +The `SelectedDeviceChanged` and `IsEnabledChanged` events are triggered when a new device is selected or the device's enabled state changes, respectively. 
```csharp -_client.SetAudioInputSource(audioSource); // audioSource is of type AudioSource +public void AudioDeviceManagerEvents() +{ + // Triggered when the selected device changes + _client.AudioDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + + // Triggered when the IsEnabled property changes + _client.AudioDeviceManager.IsEnabledChanged += OnIsEnabledChanged; +} + +private void OnIsEnabledChanged(bool isEnabled) { } + +private void OnSelectedDeviceChanged(MicrophoneDeviceInfo previousDevice, MicrophoneDeviceInfo currentDevice) { } ``` -### Handle microphone input in Unity +### Android & iOS + +Users must grant permission to use the Microphone device for platforms like Android and iOS. Otherwise, capturing audio will not work. Typical patterns are requesting permissions when the application starts or when a user attempts to enable audio capturing. + +#### iOS and WebGL -The way you start streaming audio from a microphone device is by calling Unity's `Microphone.Start` method and providing the microphone device name. -You obtain the microphone device name from Unity's `Microphone.devices` array. +You can request permission to use a microphone device on iOS and WebGL platforms by using Unity's [RequestUserAuthorization](https://docs.unity3d.com/ScriptReference/Application.RequestUserAuthorization.html): ```csharp -// Obtain reference to an AudioSource that will be used a source of audio -var inputAudioSource = GetComponent(); +// Request permission to use the Microphone +Application.RequestUserAuthorization(UserAuthorization.Microphone); + +// Check if user granted microphone permission +if (!Application.HasUserAuthorization(UserAuthorization.Microphone)) +{ + // Notify user that microphone permission was not granted and the microphone capturing will not work. +} +``` -// Get a valid microphone device name. -// You usually want to populate a dropdown list with Microphone.devices so that the user can pick which device should be used -_activeMicrophoneDeviceName = Microphone.devices.First(); +#### Android -inputAudioSource.clip - = Microphone.Start(_activeMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); -inputAudioSource.loop = true; -inputAudioSource.Play(); +For the Android platform, Unity recommends using the [Permission.RequestUserPermission](https://docs.unity3d.com/ScriptReference/Android.Permission.RequestUserPermission.html): -_client.SetAudioInputSource(inputAudioSource); +```csharp +// Request microphone permissions +Permission.RequestUserPermission(Permission.Microphone); + +// Check if user granted microphone permission +if (!Permission.HasUserAuthorizedPermission(Permission.Microphone)) +{ + // Notify user that microphone permission was not granted and the microphone capturing will not work. +} ``` -#### Change microphone device during the call +## Interacting with Web Camera + +All interactions with camera devices are handled by the `VideoDeviceManager` that can be accessed via `_client.VideoDeviceManager`. The `_client` field is an instance of `IStreamVideoClient`; if you haven't set up the video chat client yet, you can follow the [Client & Auth](../03-guides/01-client-auth.mdx) guide to learn how to do it. + + +#### List available camera devices -Here's an example of how to change the active microphone device: +All available camera devices can be accessed with `VideoDeviceManager.EnumerateDevices()`. This method returns an `IEnumerable<CameraDeviceInfo>`. +The `CameraDeviceInfo` is a struct representing a single device. 
You can access the name of the device by the `Name` property. ```csharp -// Stop previously active microphone -Microphone.End(_activeMicrophoneDeviceName); +var cameras = _client.VideoDeviceManager.EnumerateDevices(); + +foreach (var camera in cameras) +{ +Debug.Log(camera.Name); // Get camera name +} +``` + +#### Select camera + +**Declaration** -// Obtain reference to an AudioSource that was setup as an input source -var inputAudioSource = GetComponent(); +`public void SelectDevice(CameraDeviceInfo device, bool enable, int fps = 30)` -inputAudioSource.clip = Microphone.Start(newMicrophoneDeviceName, true, 3, AudioSettings.outputSampleRate); +| Argument | Description | +| --- | --- | +| `device` | Camera to select | +| `enable` | Enabled device is capturing video input | +| `fps` | (OPTIONAL) How many frames per second should the video be captured. The default value is `30` | + +```csharp +_client.VideoDeviceManager.SelectDevice(camera, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, enable: true, requestedFPS: 24); ``` -In case you need to use a different reference to `AudioSource`, you can simply call the `_client.SetAudioInputSource` again, and the audio track will be updated with the new audio input: +**Declaration** + +`public void SelectDevice(CameraDeviceInfo device, VideoResolution requestedResolution, bool enable, int requestedFPS = 30)` + +| Argument | Description | +| --- | --- | +| `device` | Camera to select | +| `enable` | Enabled device is capturing video input | +| `requestedResolution` | At what resolution should the video be captured | +| `fps` | (OPTIONAL) How many frames per second should the video be captured. The default value is `30` | + ```csharp -_client.SetAudioInputSource(inputAudioSource); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true, requestedFPS: 30); ``` -#### Additional Notes +The `VideoResolution` argument type contains multiple predefined resolutions: + +| Value | Resolution | +| --- | --- | +| `VideoResolution.Res_144p` | 256x144 | +| `VideoResolution.Res_240p` | 320x240 | +| `VideoResolution.Res_360p` | 480x360 | +| `VideoResolution.Res_480p` | 640x480 | +| `VideoResolution.Res_720p` | 1280x720 | +| `VideoResolution.Res_1080p` | 1920x1080 | +| `new VideoResolution(int width, int height` | custom resolution | + +```csharp +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_144p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_240p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_360p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_480p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_720p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, VideoResolution.Res_1080p, enable: true); +_client.VideoDeviceManager.SelectDevice(camera, new VideoResolution(500, 500), enable: true); +``` -- For standalone platforms like **Windows** or **macOS** you'd usually implement a dropdown menu populated with `Microphone.devices` so that the user can pick which microphone device should be used. The reason for this is that on standalone platforms there can be multiple microphone devices plugged in. -- For mobile platforms like **Android** or **iOS** microphone devices are handled by the OS, so you usually just pick the first device. 
-- You should handle the case where user does not have a microphone device at all and the `Microphone.devices` array is empty. -- For mobile platforms like **Android** or **iOS** it's best to request a permission to access the microphone and handle the case where user did not grant the permission to use it. Read more in [Unity's docs](https://docs.unity3d.com/ScriptReference/Application.RequestUserAuthorization.html) +#### Requested resolution and FPS -Please refer to Unity's documentation for more information on how to use **Microphone** devices: -* [Microphone.devices](https://docs.unity3d.com/ScriptReference/Microphone-devices.html) -* [Microphone.Start](https://docs.unity3d.com/ScriptReference/Microphone.Start.html) -* [Microphone.End](https://docs.unity3d.com/ScriptReference/Microphone.End.html) +Please note that the video resolution and the FPS are the requested values passed to the camera device. Each device has its own limitations, and if the requested values are not supported by the device, the closest supported values will be selected. -## Setup sending Video +#### Get Selected Camera -In order to send video data you need to set an instance of [WebCamTexture](https://docs.unity3d.com/ScriptReference/WebCamTexture.html) as a **video source** by calling the `SetCameraInputSource` method on the instance of `StreamVideoClient`. +You can retrieve the currently selected camera device using `VideoDeviceManager.SelectedDevice`: ```csharp -_client.SetCameraInputSource(activeCamera); // activeCamera is of type WebCamTexture +public void GetSelectedCamera() +{ + var selectedCamera = _client.VideoDeviceManager.SelectedDevice; +} ``` -### Handle camera device input in Unity +#### Start/Stop Camera Capturing -They way you start streaming video from a camera device is by creating a `WebCamTexture` instance using the camera device name (obtained from `WebCamTexture.devices`) and calling `Play()` on the `WebCamTexture` instance. +Once a camera device is selected, you can start or stop the video capturing using the `Enable()`, `Disable()`, or `SetEnabled(bool isEnabled)` methods. ```csharp -// Obtain a camera device -var cameraDevice = WebCamTexture.devices.First(); +public void StartStopCamera() +{ + // Enable device to start capturing camera input + _client.VideoDeviceManager.Enable(); -// Use device name to create a new WebCamTexture instance -var activeCamera = new WebCamTexture(cameraDevice.name, 1920, 1080, 24); + // Disable device to stop capturing camera input + _client.VideoDeviceManager.Disable(); -// Call Play() in order to start capturing the video -activeCamera.Play(); + // Set the enabled state by passing a boolean argument + _client.VideoDeviceManager.SetEnabled(true); +} +``` + +#### Check if Camera is Enabled + +Check if the camera is enabled and actively capturing video input: -// Set WebCamTexture in Stream's Client - this WebCamTexture will be the video source in video calls -_client.SetCameraInputSource(activeCamera); +```csharp +public void CheckCameraStatus() +{ + // Check if currently selected device is enabled + var isDeviceEnabled = _client.VideoDeviceManager.IsEnabled; +} ``` -The video resolution and FPS parameters you set in your `WebCamTexture` instance will be used for the video publishing settings. In the above example, the video will aim to be streamed at 1080p (1920x1080) resolution and 24 frames per second. 
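+A quick way to verify what the camera actually delivers (as opposed to the requested values described in the note above) is to inspect the `WebCamTexture` backing the selected device. This is only an illustrative sketch; it assumes a device has already been selected and enabled, and uses `GetSelectedDeviceWebCamTexture()` (covered in the local preview section below):
+
+```csharp
+// Minimal sketch: log the actual capture parameters of the selected camera
+var webCamTexture = _client.VideoDeviceManager.GetSelectedDeviceWebCamTexture();
+if (webCamTexture != null)
+{
+    // Width and height report the real capture size once the camera starts delivering frames
+    Debug.Log($"Capturing at {webCamTexture.width}x{webCamTexture.height}, requested FPS: {webCamTexture.requestedFPS}");
+}
+```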
+#### Get Local Participant Camera Preview -:::note +For the local participant, the `IStreamVideoCallParticipant` object has no video or audio track defined because, unlike for remote participants, those streams are not received from the remote servers. If you wish to present the local participant's video stream, you can get a reference to the `WebCamTexture` instance associated with the selected device via `_client.VideoDeviceManager.GetSelectedDeviceWebCamTexture()`. -Stream service will dynamically adjust the video resolution and FPS parameters based on the network traffic. The ultimate goal is to ensure a smooth video experience without video stuttering. The settings you provide are the maximum aimed for if the network conditions allow it. +```csharp +var webCamTexture = _client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); -::: +// You can attach this texture to RawImage UI Component +GetComponent<RawImage>().texture = webCamTexture; +``` -#### Change camera device during the call +Please note that the instance of `WebCamTexture` may change every time a new device is selected; therefore, you should subscribe to the `SelectedDeviceChanged` event: +```csharp +public void GetLocalParticipantVideoPreviewFull() +{ + // Triggered when the selected device changes + _client.VideoDeviceManager.SelectedDeviceChanged += UpdateLocalParticipantPreview; +} + +private void UpdateLocalParticipantPreview(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) +{ + var webCamTexture = _client.VideoDeviceManager.GetSelectedDeviceWebCamTexture(); + + // You can attach this texture to RawImage UI Component + GetComponent<RawImage>().texture = webCamTexture; +} +``` -The most efficient way to change the camera device is to update the `deviceName` property on the instance of `WebCamTexture` that was previously set as an input source via `_client.SetCameraInputSource`: +#### Check if camera is enabled ```csharp -_activeCamera.Stop(); -_activeCamera.deviceName = newDeviceName; -_activeCamera.Play(); +public void CheckCameraStatus() +{ + // Check if currently selected device is enabled + var isDeviceEnabled = _client.VideoDeviceManager.IsEnabled; +} ``` -In case you need to use a different reference to `WebCamTexture`, you can simply call the `_client.SetCameraInputSource` again, and the video track will be updated with the new camera input: +#### Events + +`SelectedDeviceChanged` and `IsEnabledChanged` events occur when a new device is selected or the device's enabled state changes. + ```csharp -_client.SetCameraInputSource(activeCamera); +public void VideoDeviceManagerEvents() +{ + // Triggered when the selected device changes + _client.VideoDeviceManager.SelectedDeviceChanged += OnSelectedDeviceChanged; + + // Triggered when the IsEnabled property changes + _client.VideoDeviceManager.IsEnabledChanged += OnIsEnabledChanged; +} + +private void OnIsEnabledChanged(bool isEnabled) { } + +private void OnSelectedDeviceChanged(CameraDeviceInfo previousDevice, CameraDeviceInfo currentDevice) { } ``` -#### Additional Notes +### Android & iOS -- For standalone platforms like **Windows** or **macOS** you'd usually implement a dropdown menu populated with `WebCamTexture.devices` so that the user can pick which camera device should be used. -- For mobile platforms like **Android** or **iOS** there are usually two cameras available: `Front` and `Back` cameras. 
Depending on your use case you may either want to automatically select the `Front` camera or allow user to toggle between the `Front` and the `Back` cameras. -- You should handle the case where user does not have a camera device at all and the `WebCamTexture.devices` array is empty. -- For mobile platforms like **Android** or **iOS** it's best to request a permission to access the camera and handle the case where user did not grant the permission to use it. Read more in [Unity's docs](https://docs.unity3d.com/ScriptReference/Application.RequestUserAuthorization.html) +Users must grant permission to use the Camera device on Android and iOS platforms. Permissions are typically requested when the application starts or when a user attempts to enable video capturing. -:::note +#### iOS and WebGL -For Android, if you're setting the **WebCamTexture** resolution, you need to set the resolution as a multiple of 16x16 as required by webRTC +For iOS and WebGL platforms, you can request camera permission using Unity's `RequestUserAuthorization`: -::: +```csharp +public void CameraIOSPermissions() +{ + // Request permission to use the Camera + Application.RequestUserAuthorization(UserAuthorization.WebCam); + + // Check if user granted camera permission + if (!Application.HasUserAuthorization(UserAuthorization.WebCam)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } +} +``` + +#### Android -Please refer to Unity's documentation for more information on how to use **Camera** devices: -* [WebCamTexture](https://docs.unity3d.com/ScriptReference/WebCamTexture.html) -* [WebCamTexture.devices](https://docs.unity3d.com/ScriptReference/WebCamTexture-devices.html) -* [WebCamTexture.Play](https://docs.unity3d.com/ScriptReference/WebCamTexture.Play.html) -* [WebCamTexture.Stop](https://docs.unity3d.com/ScriptReference/WebCamTexture.Stop.html) \ No newline at end of file +On Android, request camera permissions using `Permission.RequestUserPermission`: + +```csharp +public void CameraAndroidPermissions() +{ + // Request camera permissions + Permission.RequestUserPermission(Permission.Camera); + + // Check if user granted camera permission + if (!Permission.HasUserAuthorizedPermission(Permission.Camera)) + { + // Notify user that camera permission was not granted and the camera capturing will not work. + } +} +``` \ No newline at end of file
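+For completeness, below is a minimal sketch that ties the permission check to the `VideoDeviceManager` described above. Treat it as an illustration rather than the recommended flow: it assumes `_client` is a connected `IStreamVideoClient`, that `System.Linq` and `UnityEngine.Android` are imported, and that the method is simply called again once the user responds to the asynchronous permission prompt.
+
+```csharp
+public void EnableCameraIfPermitted()
+{
+    if (!Permission.HasUserAuthorizedPermission(Permission.Camera))
+    {
+        // The request is asynchronous - call this method again (e.g. from a button) once the user responds
+        Permission.RequestUserPermission(Permission.Camera);
+        return;
+    }
+
+    // Pick the first available camera and start capturing immediately
+    var camera = _client.VideoDeviceManager.EnumerateDevices().FirstOrDefault();
+    if (camera == default)
+    {
+        Debug.LogError("No camera devices found.");
+        return;
+    }
+
+    _client.VideoDeviceManager.SelectDevice(camera, enable: true);
+}
+```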