Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[flutter_webrtc] Update libwebrtc to m114 version #625

Merged
merged 15 commits into from
Nov 22, 2023
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update flutter_webrtc to version 0.9.46.
wanchao-xu committed Nov 3, 2023
commit 5876bad335683eef874fa59ad39f3153965d964f
2 changes: 2 additions & 0 deletions packages/flutter_webrtc/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
## 0.1.3

* Increase the minimum Flutter version to 3.3.
* Update libwebrtc to m114 version.
* Update flutter_webrtc to 0.9.46.
* Support the empty candidate for 'addIceCandidate' api.

## 0.1.2
2 changes: 1 addition & 1 deletion packages/flutter_webrtc/README.md
Original file line number Diff line number Diff line change
@@ -40,7 +40,7 @@ For other Tizen devices :

```yaml
dependencies:
flutter_webrtc: ^0.9.28
flutter_webrtc: ^0.9.46
flutter_webrtc_tizen: ^0.1.3
```

Original file line number Diff line number Diff line change
@@ -11,7 +11,6 @@ import 'src/get_display_media_sample.dart';
import 'src/get_user_media_sample.dart'
if (dart.library.html) 'src/get_user_media_sample_web.dart';
import 'src/loopback_data_channel_sample.dart';
import 'src/loopback_sample.dart';
import 'src/loopback_sample_unified_tracks.dart';
import 'src/route_item.dart';

@@ -109,14 +108,6 @@ class _MyAppState extends State<MyApp> {
builder: (BuildContext context) =>
GetDisplayMediaSample()));
}),
RouteItem(
title: 'LoopBack Sample',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => LoopBackSample()));
}),
RouteItem(
title: 'LoopBack Sample (Unified Tracks)',
push: (BuildContext context) {
Original file line number Diff line number Diff line change
@@ -4,6 +4,7 @@ import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:permission_handler/permission_handler.dart';

class VideoSize {
VideoSize(this.width, this.height);
@@ -63,6 +64,7 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
@override
void initState() {
super.initState();

initRenderers();
loadDevices();
navigator.mediaDevices.ondevicechange = (event) {
@@ -130,6 +132,18 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
}

Future<void> loadDevices() async {
if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
//Ask for runtime permissions if necessary.
var status = await Permission.bluetooth.request();
if (status.isPermanentlyDenied) {
print('BLEpermdisabled');
}

status = await Permission.bluetoothConnect.request();
if (status.isPermanentlyDenied) {
print('ConnectPermdisabled');
}
}
final devices = await navigator.mediaDevices.enumerateDevices();
setState(() {
_devices = devices;
@@ -187,6 +201,14 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
await _localRenderer.audioOutput(deviceId!);
}

// Whether audio is currently routed to the loudspeaker.
bool _speakerphoneOn = false;

/// Toggles the audio route between earpiece and speakerphone.
///
/// Flips the cached [_speakerphoneOn] flag, asks the WebRTC helper to apply
/// the new route, then triggers a rebuild so the toolbar icon reflects it.
Future<void> _setSpeakerphoneOn() async {
  final enable = !_speakerphoneOn;
  _speakerphoneOn = enable;
  await Helper.setSpeakerphoneOn(enable);
  setState(() {});
}

Future<void> _selectVideoInput(String? deviceId) async {
_selectedVideoInputId = deviceId;
if (!_inCalling) {
@@ -281,6 +303,8 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
senders.clear();
_inCalling = false;
await stopPCs();
_speakerphoneOn = false;
await Helper.setSpeakerphoneOn(_speakerphoneOn);
setState(() {});
} catch (e) {
print(e.toString());
@@ -307,20 +331,29 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
}).toList();
},
),
PopupMenuButton<String>(
onSelected: _selectAudioOutput,
icon: Icon(Icons.volume_down_alt),
itemBuilder: (BuildContext context) {
return _devices
.where((device) => device.kind == 'audiooutput')
.map((device) {
return PopupMenuItem<String>(
value: device.deviceId,
child: Text(device.label),
);
}).toList();
},
),
if (!WebRTC.platformIsMobile)
PopupMenuButton<String>(
onSelected: _selectAudioOutput,
icon: Icon(Icons.volume_down_alt),
itemBuilder: (BuildContext context) {
return _devices
.where((device) => device.kind == 'audiooutput')
.map((device) {
return PopupMenuItem<String>(
value: device.deviceId,
child: Text(device.label),
);
}).toList();
},
),
if (!kIsWeb && WebRTC.platformIsMobile)
IconButton(
disabledColor: Colors.grey,
onPressed: _setSpeakerphoneOn,
icon: Icon(
_speakerphoneOn ? Icons.speaker_phone : Icons.phone_android),
tooltip: 'Switch SpeakerPhone',
),
PopupMenuButton<String>(
onSelected: _selectVideoInput,
icon: Icon(Icons.switch_camera),
@@ -364,7 +397,7 @@ class _DeviceEnumerationSampleState extends State<DeviceEnumerationSample> {
child: Text('Select Video Size ($_selectedVideoSize)'),
),
PopupMenuDivider(),
...['320x240', '640x480', '1280x720', '1920x1080']
...['320x180', '640x360', '1280x720', '1920x1080']
.map((fps) => PopupMenuItem<String>(
value: fps,
child: Text(fps),
Original file line number Diff line number Diff line change
@@ -2,6 +2,7 @@ import 'dart:core';

import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_background/flutter_background.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:flutter_webrtc_example/src/widgets/screen_select_dialog.dart';

@@ -50,6 +51,38 @@ class _GetDisplayMediaSampleState extends State<GetDisplayMediaSample> {
await _makeCall(source);
}
} else {
if (WebRTC.platformIsAndroid) {
// Android specific
/// Ensures the app may keep running in the background while screen-sharing.
///
/// Android kills backgrounded apps without a foreground service, so screen
/// capture needs `flutter_background` to be initialized with a persistent
/// notification before sharing starts. On failure the whole sequence is
/// retried exactly once after a one-second delay ([isRetry] guards against
/// further recursion); a second failure is only logged, best-effort.
Future<void> requestBackgroundPermission([bool isRetry = false]) async {
// Required for android screenshare.
try {
var hasPermissions = await FlutterBackground.hasPermissions;
if (!isRetry) {
// First attempt: (re)initialize with the notification shown while sharing.
// NOTE(review): title/text/icon still say "LiveKit" — presumably copied
// from the LiveKit example; confirm whether they should be rebranded.
const androidConfig = FlutterBackgroundAndroidConfig(
notificationTitle: 'Screen Sharing',
notificationText: 'LiveKit Example is sharing the screen.',
notificationImportance: AndroidNotificationImportance.Default,
notificationIcon: AndroidResource(
name: 'livekit_ic_launcher', defType: 'mipmap'),
);
// initialize() returns whether permissions were granted after setup.
hasPermissions = await FlutterBackground.initialize(
androidConfig: androidConfig);
}
// Only start background execution once, and only if permitted.
if (hasPermissions &&
!FlutterBackground.isBackgroundExecutionEnabled) {
await FlutterBackground.enableBackgroundExecution();
}
} catch (e) {
if (!isRetry) {
// Single delayed retry — e.g. the permission dialog may still be up.
return await Future<void>.delayed(const Duration(seconds: 1),
() => requestBackgroundPermission(true));
}
print('could not publish video: $e');
}
}

await requestBackgroundPermission();
}
await _makeCall(null);
}
}
Original file line number Diff line number Diff line change
@@ -22,6 +22,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
bool _inCalling = false;
bool _isTorchOn = false;
MediaRecorder? _mediaRecorder;

bool get _isRec => _mediaRecorder != null;

List<MediaDeviceInfo>? _mediaDevicesList;
@@ -143,6 +144,18 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
}
}

/// Applies [zoomLevel] to the local camera's video track.
///
/// Returns a [Future] (instead of the previous `void async`) so callers can
/// await completion and observe platform-channel errors rather than having
/// them silently dropped; existing `void`-expecting call sites (e.g. gesture
/// callbacks) remain compatible.
///
/// Throws an [Exception] if the local stream has not been initialized.
Future<void> setZoom(double zoomLevel) async {
  // Promote to a local instead of using `_localStream!` after the check.
  final stream = _localStream;
  if (stream == null) throw Exception('Stream is not initialized');
  // await videoTrack.setZoom(zoomLevel); //Use it after published webrtc_interface 1.1.1

  // Until that release lands, call the native method directly.
  final videoTrack =
      stream.getVideoTracks().firstWhere((track) => track.kind == 'video');
  await WebRTC.invokeMethod('mediaStreamTrackSetZoom',
      <String, dynamic>{'trackId': videoTrack.id, 'zoomLevel': zoomLevel});
}

void _toggleCamera() async {
if (_localStream == null) throw Exception('Stream is not initialized');

@@ -218,14 +231,21 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
body: OrientationBuilder(
builder: (context, orientation) {
return Center(
child: Container(
margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0),
width: MediaQuery.of(context).size.width,
height: MediaQuery.of(context).size.height,
decoration: BoxDecoration(color: Colors.black54),
child: Container(
margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0),
width: MediaQuery.of(context).size.width,
height: MediaQuery.of(context).size.height,
decoration: BoxDecoration(color: Colors.black54),
child: GestureDetector(
onScaleStart: (details) {},
onScaleUpdate: (details) {
if (details.scale != 1.0) {
setZoom(details.scale);
}
},
child: RTCVideoView(_localRenderer, mirror: true),
),
);
));
},
),
floatingActionButton: FloatingActionButton(
Original file line number Diff line number Diff line change
@@ -115,10 +115,12 @@ class _DataChannelLoopBackSampleState extends State<DataChannelLoopBackSample> {
});

Timer(const Duration(seconds: 1), () {
setState(() {
_dc1Status = '';
_dc2Status = '';
});
if (mounted) {
setState(() {
_dc1Status = '';
_dc2Status = '';
});
}
});
}

Loading