Skip to content

Commit acb906b

Browse files
authored
Merge pull request #333 from wer-mathurin/pedantic
Apply pedantic lint fixes to improve the package score on pub.dev
2 parents ddc7836 + a938452 commit acb906b

25 files changed

+340
-335
lines changed

analysis_options.yaml

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
include: package:pedantic/analysis_options.yaml

example/lib/src/get_user_media_sample_web.dart

+8-8
Original file line numberDiff line numberDiff line change
@@ -50,13 +50,13 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
5050
// Platform messages are asynchronous, so we initialize in an async method.
5151
_makeCall() async {
5252
final Map<String, dynamic> mediaConstraints = {
53-
"audio": true,
54-
"video": {
55-
"mandatory": {
56-
"minWidth":
53+
'audio': true,
54+
'video': {
55+
'mandatory': {
56+
'minWidth':
5757
'1280', // Provide your own width, height and frame rate here
58-
"minHeight": '720',
59-
"minFrameRate": '30',
58+
'minHeight': '720',
59+
'minFrameRate': '30',
6060
},
6161
}
6262
};
@@ -105,15 +105,15 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
105105
_captureFrame() async {
106106
final videoTrack = _localStream
107107
.getVideoTracks()
108-
.firstWhere((track) => track.kind == "video");
108+
.firstWhere((track) => track.kind == 'video');
109109
final frame = await videoTrack.captureFrame();
110110
showDialog(
111111
context: context,
112112
builder: (context) => AlertDialog(
113113
content: Image.network(frame, height: 720, width: 1280),
114114
actions: <Widget>[
115115
FlatButton(
116-
child: Text("OK"),
116+
child: Text('OK'),
117117
onPressed: Navigator.of(context, rootNavigator: true).pop,
118118
)
119119
],

lib/enums.dart

+21-21
Original file line numberDiff line numberDiff line change
@@ -43,65 +43,65 @@ enum RTCVideoViewObjectFit {
4343

4444
RTCIceConnectionState iceConnectionStateForString(String state) {
4545
switch (state) {
46-
case "new":
46+
case 'new':
4747
return RTCIceConnectionState.RTCIceConnectionStateNew;
48-
case "checking":
48+
case 'checking':
4949
return RTCIceConnectionState.RTCIceConnectionStateChecking;
50-
case "connected":
50+
case 'connected':
5151
return RTCIceConnectionState.RTCIceConnectionStateConnected;
52-
case "completed":
52+
case 'completed':
5353
return RTCIceConnectionState.RTCIceConnectionStateCompleted;
54-
case "failed":
54+
case 'failed':
5555
return RTCIceConnectionState.RTCIceConnectionStateFailed;
56-
case "disconnected":
56+
case 'disconnected':
5757
return RTCIceConnectionState.RTCIceConnectionStateDisconnected;
58-
case "closed":
58+
case 'closed':
5959
return RTCIceConnectionState.RTCIceConnectionStateClosed;
60-
case "count":
60+
case 'count':
6161
return RTCIceConnectionState.RTCIceConnectionStateCount;
6262
}
6363
return RTCIceConnectionState.RTCIceConnectionStateClosed;
6464
}
6565

6666
RTCIceGatheringState iceGatheringStateforString(String state) {
6767
switch (state) {
68-
case "new":
68+
case 'new':
6969
return RTCIceGatheringState.RTCIceGatheringStateNew;
70-
case "gathering":
70+
case 'gathering':
7171
return RTCIceGatheringState.RTCIceGatheringStateGathering;
72-
case "complete":
72+
case 'complete':
7373
return RTCIceGatheringState.RTCIceGatheringStateComplete;
7474
}
7575
return RTCIceGatheringState.RTCIceGatheringStateNew;
7676
}
7777

7878
RTCSignalingState signalingStateForString(String state) {
7979
switch (state) {
80-
case "stable":
80+
case 'stable':
8181
return RTCSignalingState.RTCSignalingStateStable;
82-
case "have-local-offer":
82+
case 'have-local-offer':
8383
return RTCSignalingState.RTCSignalingStateHaveLocalOffer;
84-
case "have-local-pranswer":
84+
case 'have-local-pranswer':
8585
return RTCSignalingState.RTCSignalingStateHaveLocalPrAnswer;
86-
case "have-remote-offer":
86+
case 'have-remote-offer':
8787
return RTCSignalingState.RTCSignalingStateHaveRemoteOffer;
88-
case "have-remote-pranswer":
88+
case 'have-remote-pranswer':
8989
return RTCSignalingState.RTCSignalingStateHaveRemotePrAnswer;
90-
case "closed":
90+
case 'closed':
9191
return RTCSignalingState.RTCSignalingStateClosed;
9292
}
9393
return RTCSignalingState.RTCSignalingStateClosed;
9494
}
9595

9696
RTCDataChannelState rtcDataChannelStateForString(String state) {
9797
switch (state) {
98-
case "connecting":
98+
case 'connecting':
9999
return RTCDataChannelState.RTCDataChannelConnecting;
100-
case "open":
100+
case 'open':
101101
return RTCDataChannelState.RTCDataChannelOpen;
102-
case "closing":
102+
case 'closing':
103103
return RTCDataChannelState.RTCDataChannelClosing;
104-
case "closed":
104+
case 'closed':
105105
return RTCDataChannelState.RTCDataChannelClosed;
106106
}
107107
return RTCDataChannelState.RTCDataChannelClosed;

lib/get_user_media.dart

+11-11
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,14 @@ import 'utils.dart';
66
class navigator {
77
static Future<MediaStream> getUserMedia(
88
Map<String, dynamic> mediaConstraints) async {
9-
MethodChannel channel = WebRTC.methodChannel();
9+
var channel = WebRTC.methodChannel();
1010
try {
11-
final Map<dynamic, dynamic> response = await channel.invokeMethod(
11+
final response = await channel.invokeMethod<Map<dynamic, dynamic>>(
1212
'getUserMedia',
1313
<String, dynamic>{'constraints': mediaConstraints},
1414
);
15-
String streamId = response["streamId"];
16-
MediaStream stream = new MediaStream(streamId, "local");
15+
String streamId = response['streamId'];
16+
var stream = MediaStream(streamId, 'local');
1717
stream.setMediaTracks(response['audioTracks'], response['videoTracks']);
1818
return stream;
1919
} on PlatformException catch (e) {
@@ -27,14 +27,14 @@ class navigator {
2727
* */
2828
static Future<MediaStream> getDisplayMedia(
2929
Map<String, dynamic> mediaConstraints) async {
30-
MethodChannel channel = WebRTC.methodChannel();
30+
var channel = WebRTC.methodChannel();
3131
try {
32-
final Map<dynamic, dynamic> response = await channel.invokeMethod(
32+
final response = await channel.invokeMethod<Map<dynamic, dynamic>>(
3333
'getDisplayMedia',
3434
<String, dynamic>{'constraints': mediaConstraints},
3535
);
36-
String streamId = response["streamId"];
37-
MediaStream stream = new MediaStream(streamId, "local");
36+
String streamId = response['streamId'];
37+
var stream = MediaStream(streamId, 'local');
3838
stream.setMediaTracks(response['audioTracks'], response['videoTracks']);
3939
return stream;
4040
} on PlatformException catch (e) {
@@ -43,13 +43,13 @@ class navigator {
4343
}
4444

4545
static Future<List<dynamic>> getSources() async {
46-
MethodChannel channel = WebRTC.methodChannel();
46+
var channel = WebRTC.methodChannel();
4747
try {
48-
final Map<dynamic, dynamic> response = await channel.invokeMethod(
48+
final response = await channel.invokeMethod<Map<dynamic, dynamic>>(
4949
'getSources',
5050
<String, dynamic>{},
5151
);
52-
List<dynamic> sources = response["sources"];
52+
List<dynamic> sources = response['sources'];
5353
return sources;
5454
} on PlatformException catch (e) {
5555
throw 'Unable to getSources: ${e.message}';

lib/media_recorder.dart

+8-4
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,13 @@ class MediaRecorder {
1414
{MediaStreamTrack videoTrack, RecorderAudioChannel audioChannel
1515
//TODO: add codec/quality options
1616
}) async {
17-
if (path == null) throw ArgumentError.notNull("path");
18-
if (audioChannel == null && videoTrack == null)
19-
throw Exception("Neither audio nor video track were provided");
17+
if (path == null) {
18+
throw ArgumentError.notNull('path');
19+
}
20+
21+
if (audioChannel == null && videoTrack == null) {
22+
throw Exception('Neither audio nor video track were provided');
23+
}
2024

2125
await WebRTC.methodChannel().invokeMethod('startRecordToFile', {
2226
'path': path,
@@ -29,7 +33,7 @@ class MediaRecorder {
2933
void startWeb(MediaStream stream,
3034
{Function(dynamic blob, bool isLastOne) onDataChunk,
3135
String mimeType = 'video/mp4;codecs=h264'}) {
32-
throw "It's for Flutter Web only";
36+
throw 'It\'s for Flutter Web only';
3337
}
3438

3539
Future<dynamic> stop() async => await WebRTC.methodChannel()

lib/media_stream.dart

+19-17
Original file line numberDiff line numberDiff line change
@@ -7,32 +7,30 @@ class MediaStream {
77
final MethodChannel _channel = WebRTC.methodChannel();
88
final String _streamId;
99
final String _ownerTag;
10-
List<MediaStreamTrack> _audioTracks = List<MediaStreamTrack>();
11-
List<MediaStreamTrack> _videoTracks = List<MediaStreamTrack>();
10+
final _audioTracks = <MediaStreamTrack>[];
11+
final _videoTracks = <MediaStreamTrack>[];
1212

1313
MediaStream(this._streamId, this._ownerTag);
1414

1515
String get ownerTag => _ownerTag;
1616
String get id => _streamId;
1717

1818
void setMediaTracks(List<dynamic> audioTracks, List<dynamic> videoTracks) {
19-
var newAudioTracks = List<MediaStreamTrack>();
19+
_audioTracks.clear();
2020
audioTracks.forEach((track) {
21-
newAudioTracks.add(MediaStreamTrack(
22-
track["id"], track["label"], track["kind"], track["enabled"]));
21+
_audioTracks.add(MediaStreamTrack(
22+
track['id'], track['label'], track['kind'], track['enabled']));
2323
});
24-
this._audioTracks = newAudioTracks;
2524

26-
var newVideoTracks = List<MediaStreamTrack>();
25+
_videoTracks.clear();
2726
videoTracks.forEach((track) {
28-
newVideoTracks.add(MediaStreamTrack(
29-
track["id"], track["label"], track["kind"], track["enabled"]));
27+
_videoTracks.add(MediaStreamTrack(
28+
track['id'], track['label'], track['kind'], track['enabled']));
3029
});
31-
this._videoTracks = newVideoTracks;
3230
}
3331

3432
Future<void> getMediaTracks() async {
35-
final Map<dynamic, dynamic> response = await _channel.invokeMethod(
33+
final response = await _channel.invokeMethod<Map<dynamic, dynamic>>(
3634
'mediaStreamGetTracks',
3735
<String, dynamic>{'streamId': _streamId},
3836
);
@@ -42,26 +40,30 @@ class MediaStream {
4240

4341
Future<void> addTrack(MediaStreamTrack track,
4442
{bool addToNative = true}) async {
45-
if (track.kind == 'audio')
43+
if (track.kind == 'audio') {
4644
_audioTracks.add(track);
47-
else
45+
} else {
4846
_videoTracks.add(track);
47+
}
4948

50-
if (addToNative)
49+
if (addToNative) {
5150
await _channel.invokeMethod('mediaStreamAddTrack',
5251
<String, dynamic>{'streamId': _streamId, 'trackId': track.id});
52+
}
5353
}
5454

5555
Future<void> removeTrack(MediaStreamTrack track,
5656
{bool removeFromNative = true}) async {
57-
if (track.kind == 'audio')
57+
if (track.kind == 'audio') {
5858
_audioTracks.removeWhere((it) => it.id == track.id);
59-
else
59+
} else {
6060
_videoTracks.removeWhere((it) => it.id == track.id);
61+
}
6162

62-
if (removeFromNative)
63+
if (removeFromNative) {
6364
await _channel.invokeMethod('mediaStreamRemoveTrack',
6465
<String, dynamic>{'streamId': _streamId, 'trackId': track.id});
66+
}
6567
}
6668

6769
List<MediaStreamTrack> getAudioTracks() {

lib/media_stream_track.dart

+10-10
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,11 @@
11
import 'dart:async';
2-
3-
import 'package:flutter/services.dart';
42
import 'utils.dart';
53

64
class MediaStreamTrack {
7-
MethodChannel _channel = WebRTC.methodChannel();
8-
String _trackId;
9-
String _label;
10-
String _kind;
5+
final _channel = WebRTC.methodChannel();
6+
final String _trackId;
7+
final String _label;
8+
final String _kind;
119
bool _enabled;
1210

1311
MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled);
@@ -64,10 +62,12 @@ class MediaStreamTrack {
6462
}
6563

6664
/// On Flutter Web returns Future<dynamic> which contains data url on success
67-
captureFrame([String filePath]) => _channel.invokeMethod(
68-
'captureFrame',
69-
<String, dynamic>{'trackId': _trackId, 'path': filePath},
70-
);
65+
Future<dynamic> captureFrame([String filePath]) {
66+
return _channel.invokeMethod<void>(
67+
'captureFrame',
68+
<String, dynamic>{'trackId': _trackId, 'path': filePath},
69+
);
70+
}
7171

7272
Future<void> dispose() async {
7373
await _channel.invokeMethod(

0 commit comments

Comments
 (0)