WebRTCWrapper.java

package eu.siacs.conversations.xmpp.jingle;

import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.CandidatePairChangeEvent;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RtpReceiver;
import org.webrtc.RtpTransceiver;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.voiceengine.WebRtcAudioEffects;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import eu.siacs.conversations.Config;
import eu.siacs.conversations.services.AppRTCAudioManager;

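//Thin wrapper around the native WebRTC stack. It owns the PeerConnection, the local audio and
//video tracks, the camera capturer and the AppRTCAudioManager, and reports ICE candidates,
//connection state and audio device changes back to its caller through the EventCallback interface.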
public class WebRTCWrapper {

    private static final String EXTENDED_LOGGING_TAG = WebRTCWrapper.class.getSimpleName();

    //we should probably keep this in sync with: https://github.com/signalapp/Signal-Android/blob/master/app/src/main/java/org/thoughtcrime/securesms/ApplicationContext.java#L296
    private static final Set<String> HARDWARE_AEC_BLACKLIST = new ImmutableSet.Builder<String>()
            .add("Pixel")
            .add("Pixel XL")
            .add("Moto G5")
            .add("Moto G (5S) Plus")
            .add("Moto G4")
            .add("TA-1053")
            .add("Mi A1")
            .add("Mi A2")
            .add("E5823") // Sony z5 compact
            .add("Redmi Note 5")
            .add("FP2") // Fairphone FP2
            .add("MI 5")
            .build();

    private static final int CAPTURING_RESOLUTION = 1920;
    private static final int CAPTURING_MAX_FRAME_RATE = 30;

    private final EventCallback eventCallback;
    private final AppRTCAudioManager.AudioManagerEvents audioManagerEvents = new AppRTCAudioManager.AudioManagerEvents() {
        @Override
        public void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) {
            eventCallback.onAudioDeviceChanged(selectedAudioDevice, availableAudioDevices);
        }
    };
    private final Handler mainHandler = new Handler(Looper.getMainLooper());
    private VideoTrack localVideoTrack = null;
    private VideoTrack remoteVideoTrack = null;
    private final PeerConnection.Observer peerConnectionObserver = new PeerConnection.Observer() {
        @Override
        public void onSignalingChange(PeerConnection.SignalingState signalingState) {
            Log.d(EXTENDED_LOGGING_TAG, "onSignalingChange(" + signalingState + ")");
            //this is called after removeTrack or addTrack
            //and should then trigger a content-add or content-remove or something
            //https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/removeTrack
        }

        @Override
        public void onConnectionChange(PeerConnection.PeerConnectionState newState) {
            eventCallback.onConnectionChange(newState);
        }

        @Override
        public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {

        }

        @Override
        public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
            Log.d(Config.LOGTAG, "remote candidate selected: " + event.remote);
            Log.d(Config.LOGTAG, "local candidate selected: " + event.local);
        }

        @Override
        public void onIceConnectionReceivingChange(boolean b) {

        }

        @Override
        public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
            Log.d(EXTENDED_LOGGING_TAG, "onIceGatheringChange(" + iceGatheringState + ")");
        }

        @Override
        public void onIceCandidate(IceCandidate iceCandidate) {
            eventCallback.onIceCandidate(iceCandidate);
        }

        @Override
        public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {

        }

        @Override
        public void onAddStream(MediaStream mediaStream) {
            Log.d(EXTENDED_LOGGING_TAG, "onAddStream(numAudioTracks=" + mediaStream.audioTracks.size() + ",numVideoTracks=" + mediaStream.videoTracks.size() + ")");
            final List<VideoTrack> videoTracks = mediaStream.videoTracks;
            if (videoTracks.size() > 0) {
                remoteVideoTrack = videoTracks.get(0);
                Log.d(Config.LOGTAG, "remote video track enabled?=" + remoteVideoTrack.enabled());
            } else {
                Log.d(Config.LOGTAG, "no remote video tracks found");
            }
        }

        @Override
        public void onRemoveStream(MediaStream mediaStream) {

        }

        @Override
        public void onDataChannel(DataChannel dataChannel) {

        }

        @Override
        public void onRenegotiationNeeded() {

        }

        @Override
        public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
            final MediaStreamTrack track = rtpReceiver.track();
            Log.d(EXTENDED_LOGGING_TAG, "onAddTrack(kind=" + (track == null ? "null" : track.kind()) + ",numMediaStreams=" + mediaStreams.length + ")");
        }

        @Override
        public void onTrack(RtpTransceiver transceiver) {
            Log.d(EXTENDED_LOGGING_TAG, "onTrack(mid=" + transceiver.getMid() + ",media=" + transceiver.getMediaType() + ")");
        }
    };
    @Nullable
    private PeerConnection peerConnection = null;
    private AudioTrack localAudioTrack = null;
    private AppRTCAudioManager appRTCAudioManager = null;
    private Context context = null;
    private EglBase eglBase = null;
    private CapturerChoice capturerChoice;

    WebRTCWrapper(final EventCallback eventCallback) {
        this.eventCallback = eventCallback;
    }

    private static void dispose(final PeerConnection peerConnection) {
        try {
            peerConnection.dispose();
        } catch (final IllegalStateException e) {
            Log.e(Config.LOGTAG, "unable to dispose of peer connection", e);
        }
    }

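    //picks the largest supported capture format whose width does not exceed CAPTURING_RESOLUTION;
    //returns null when no capturer could be created for the device or no suitable format exists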
    @Nullable
    private static CapturerChoice of(CameraEnumerator enumerator, final String deviceName, Set<String> availableCameras) {
        final CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
        if (capturer == null) {
            return null;
        }
        final ArrayList<CameraEnumerationAndroid.CaptureFormat> choices = new ArrayList<>(enumerator.getSupportedFormats(deviceName));
        Collections.sort(choices, (a, b) -> b.width - a.width);
        for (final CameraEnumerationAndroid.CaptureFormat captureFormat : choices) {
            if (captureFormat.width <= CAPTURING_RESOLUTION) {
                return new CapturerChoice(capturer, captureFormat, availableCameras);
            }
        }
        return null;
    }

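    //one-time initialization: load the native WebRTC library (which can fail with an
    //UnsatisfiedLinkError on some devices), create the shared EGL context and start the
    //AppRTCAudioManager on the main thread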
    public void setup(final Context context, final AppRTCAudioManager.SpeakerPhonePreference speakerPhonePreference) throws InitializationException {
        try {
            PeerConnectionFactory.initialize(
                    PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions()
            );
        } catch (final UnsatisfiedLinkError e) {
            throw new InitializationException(e);
        }
        this.eglBase = EglBase.create();
        this.context = context;
        mainHandler.post(() -> {
            appRTCAudioManager = AppRTCAudioManager.create(context, speakerPhonePreference);
            appRTCAudioManager.start(audioManagerEvents);
            eventCallback.onAudioDeviceChanged(appRTCAudioManager.getSelectedAudioDevice(), appRTCAudioManager.getAudioDevices());
        });
    }

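    //builds the PeerConnectionFactory (disabling the hardware AEC on known-bad models), creates the
    //local media stream with the requested audio and/or video tracks and finally creates the
    //PeerConnection itself; TCP candidates are disabled because XEP-0176 can't signal them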
    synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers) throws InitializationException {
        Preconditions.checkState(this.eglBase != null);
        Preconditions.checkNotNull(media);
        Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection");
        final boolean setUseHardwareAcousticEchoCanceler = WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
        Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s", setUseHardwareAcousticEchoCanceler, Build.MODEL));
        PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
                .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
                .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
                .setAudioDeviceModule(JavaAudioDeviceModule.builder(context)
                        .setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler)
                        .createAudioDeviceModule()
                )
                .createPeerConnectionFactory();

        final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");

        final Optional<CapturerChoice> optionalCapturerChoice = media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent();

        if (optionalCapturerChoice.isPresent()) {
            this.capturerChoice = optionalCapturerChoice.get();
            final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer;
            final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
            SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
            capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
            Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d", capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
            capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());

            this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);

            stream.addTrack(this.localVideoTrack);
        }

        if (media.contains(Media.AUDIO)) {
            //set up audio track
            final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
            this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
            stream.addTrack(this.localAudioTrack);
        }

        final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
        rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; //XEP-0176 doesn't support tcp
        final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
        if (peerConnection == null) {
            throw new InitializationException("Unable to create PeerConnection");
        }
        peerConnection.addStream(stream);
        peerConnection.setAudioPlayout(true);
        peerConnection.setAudioRecording(true);
        this.peerConnection = peerConnection;
    }

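    //tear-down: dispose the PeerConnection, stop the audio manager on the main thread, drop the
    //video track references, stop the camera capturer and release the EGL context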
    synchronized void close() {
        final PeerConnection peerConnection = this.peerConnection;
        final CapturerChoice capturerChoice = this.capturerChoice;
        final AppRTCAudioManager audioManager = this.appRTCAudioManager;
        final EglBase eglBase = this.eglBase;
        if (peerConnection != null) {
            dispose(peerConnection);
            this.peerConnection = null;
        }
        if (audioManager != null) {
            mainHandler.post(audioManager::stop);
        }
        this.localVideoTrack = null;
        this.remoteVideoTrack = null;
        if (capturerChoice != null) {
            try {
                capturerChoice.cameraVideoCapturer.stopCapture();
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt(); //don't swallow the interrupt status
                Log.e(Config.LOGTAG, "unable to stop capturing", e);
            }
        }
        if (eglBase != null) {
            eglBase.release();
            this.eglBase = null;
        }
    }

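    //sanity check; throws when close() has not released all native resources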
    synchronized void verifyClosed() {
        if (this.peerConnection != null
                || this.eglBase != null
                || this.localVideoTrack != null
                || this.remoteVideoTrack != null) {
            final IllegalStateException e = new IllegalStateException("WebRTCWrapper hasn't been closed properly");
            Log.e(Config.LOGTAG, "verifyClosed() failed. Going to throw", e);
            throw e;
        }
    }

    boolean isCameraSwitchable() {
        final CapturerChoice capturerChoice = this.capturerChoice;
        return capturerChoice != null && capturerChoice.availableCameras.size() > 1;
    }

    boolean isFrontCamera() {
        final CapturerChoice capturerChoice = this.capturerChoice;
        return capturerChoice == null || capturerChoice.isFrontCamera;
    }

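    //switches between front and back camera asynchronously; the returned future resolves to true
    //when the newly selected camera is the front facing one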
    ListenableFuture<Boolean> switchCamera() {
        final CapturerChoice capturerChoice = this.capturerChoice;
        if (capturerChoice == null) {
            return Futures.immediateFailedFuture(new IllegalStateException("CameraCapturer has not been initialized"));
        }
        final SettableFuture<Boolean> future = SettableFuture.create();
        capturerChoice.cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
            @Override
            public void onCameraSwitchDone(boolean isFrontCamera) {
                capturerChoice.isFrontCamera = isFrontCamera;
                future.set(isFrontCamera);
            }

            @Override
            public void onCameraSwitchError(final String message) {
                future.setException(new IllegalStateException(String.format("Unable to switch camera %s", message)));
            }
        });
        return future;
    }

    boolean isMicrophoneEnabled() {
        final AudioTrack audioTrack = this.localAudioTrack;
        if (audioTrack == null) {
            throw new IllegalStateException("Local audio track does not exist (yet)");
        }
        try {
            return audioTrack.enabled();
        } catch (final IllegalStateException e) {
            //sometimes UI might still be rendering the buttons when a background thread has already ended the call
            return false;
        }
    }

    void setMicrophoneEnabled(final boolean enabled) {
        final AudioTrack audioTrack = this.localAudioTrack;
        if (audioTrack == null) {
            throw new IllegalStateException("Local audio track does not exist (yet)");
        }
        audioTrack.setEnabled(enabled);
    }

    boolean isVideoEnabled() {
        final VideoTrack videoTrack = this.localVideoTrack;
        if (videoTrack == null) {
            throw new IllegalStateException("Local video track does not exist");
        }
        return videoTrack.enabled();
    }

    void setVideoEnabled(final boolean enabled) {
        final VideoTrack videoTrack = this.localVideoTrack;
        if (videoTrack == null) {
            throw new IllegalStateException("Local video track does not exist");
        }
        videoTrack.setEnabled(enabled);
    }

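    //createOffer() and createAnswer() adapt the callback based SdpObserver API into
    //ListenableFutures so the SDP exchange can be chained with Futures.transform()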
    ListenableFuture<SessionDescription> createOffer() {
        return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> {
            final SettableFuture<SessionDescription> future = SettableFuture.create();
            peerConnection.createOffer(new CreateSdpObserver() {
                @Override
                public void onCreateSuccess(SessionDescription sessionDescription) {
                    future.set(sessionDescription);
                }

                @Override
                public void onCreateFailure(String s) {
                    future.setException(new IllegalStateException("Unable to create offer: " + s));
                }
            }, new MediaConstraints());
            return future;
        }, MoreExecutors.directExecutor());
    }

    ListenableFuture<SessionDescription> createAnswer() {
        return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> {
            final SettableFuture<SessionDescription> future = SettableFuture.create();
            peerConnection.createAnswer(new CreateSdpObserver() {
                @Override
                public void onCreateSuccess(SessionDescription sessionDescription) {
                    future.set(sessionDescription);
                }

                @Override
                public void onCreateFailure(String s) {
                    future.setException(new IllegalStateException("Unable to create answer: " + s));
                }
            }, new MediaConstraints());
            return future;
        }, MoreExecutors.directExecutor());
    }

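    //setLocalDescription() and setRemoteDescription() log the SDP line by line for debugging and
    //likewise wrap the set operation in a ListenableFuture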
    ListenableFuture<Void> setLocalDescription(final SessionDescription sessionDescription) {
        Log.d(EXTENDED_LOGGING_TAG, "setting local description:");
        for (final String line : sessionDescription.description.split(eu.siacs.conversations.xmpp.jingle.SessionDescription.LINE_DIVIDER)) {
            Log.d(EXTENDED_LOGGING_TAG, line);
        }
        return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> {
            final SettableFuture<Void> future = SettableFuture.create();
            peerConnection.setLocalDescription(new SetSdpObserver() {
                @Override
                public void onSetSuccess() {
                    future.set(null);
                }

                @Override
                public void onSetFailure(final String s) {
                    future.setException(new IllegalArgumentException("unable to set local session description: " + s));
                }
            }, sessionDescription);
            return future;
        }, MoreExecutors.directExecutor());
    }

    ListenableFuture<Void> setRemoteDescription(final SessionDescription sessionDescription) {
        Log.d(EXTENDED_LOGGING_TAG, "setting remote description:");
        for (final String line : sessionDescription.description.split(eu.siacs.conversations.xmpp.jingle.SessionDescription.LINE_DIVIDER)) {
            Log.d(EXTENDED_LOGGING_TAG, line);
        }
        return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> {
            final SettableFuture<Void> future = SettableFuture.create();
            peerConnection.setRemoteDescription(new SetSdpObserver() {
                @Override
                public void onSetSuccess() {
                    future.set(null);
                }

                @Override
                public void onSetFailure(String s) {
                    future.setException(new IllegalArgumentException("unable to set remote session description: " + s));
                }
            }, sessionDescription);
            return future;
        }, MoreExecutors.directExecutor());
    }

    @Nonnull
    private ListenableFuture<PeerConnection> getPeerConnectionFuture() {
        final PeerConnection peerConnection = this.peerConnection;
        if (peerConnection == null) {
            return Futures.immediateFailedFuture(new IllegalStateException("initialize PeerConnection first"));
        } else {
            return Futures.immediateFuture(peerConnection);
        }
    }

    void addIceCandidate(IceCandidate iceCandidate) {
        requirePeerConnection().addIceCandidate(iceCandidate);
    }

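    //the Camera2 API is only available from API 21 (Lollipop) onwards; older devices fall back to
    //the legacy Camera1 API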
    private CameraEnumerator getCameraEnumerator() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            return new Camera2Enumerator(requireContext());
        } else {
            return new Camera1Enumerator();
        }
    }

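    //prefers a front facing camera and otherwise falls back to the first enumerated camera;
    //returns Optional.absent() when no usable capturer could be created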
    private Optional<CapturerChoice> getVideoCapturer() {
        final CameraEnumerator enumerator = getCameraEnumerator();
        final Set<String> deviceNames = ImmutableSet.copyOf(enumerator.getDeviceNames());
        for (final String deviceName : deviceNames) {
            if (enumerator.isFrontFacing(deviceName)) {
                final CapturerChoice capturerChoice = of(enumerator, deviceName, deviceNames);
                if (capturerChoice == null) {
                    return Optional.absent();
                }
                capturerChoice.isFrontCamera = true;
                return Optional.of(capturerChoice);
            }
        }
        if (deviceNames.size() == 0) {
            return Optional.absent();
        } else {
            return Optional.fromNullable(of(enumerator, Iterables.get(deviceNames, 0), deviceNames));
        }
    }

    public PeerConnection.PeerConnectionState getState() {
        return requirePeerConnection().connectionState();
    }

    EglBase.Context getEglBaseContext() {
        return this.eglBase.getEglBaseContext();
    }

    Optional<VideoTrack> getLocalVideoTrack() {
        return Optional.fromNullable(this.localVideoTrack);
    }

    Optional<VideoTrack> getRemoteVideoTrack() {
        return Optional.fromNullable(this.remoteVideoTrack);
    }

    private PeerConnection requirePeerConnection() {
        final PeerConnection peerConnection = this.peerConnection;
        if (peerConnection == null) {
            throw new IllegalStateException("initialize PeerConnection first");
        }
        return peerConnection;
    }

    private Context requireContext() {
        final Context context = this.context;
        if (context == null) {
            throw new IllegalStateException("call setup first");
        }
        return context;
    }

    AppRTCAudioManager getAudioManager() {
        return appRTCAudioManager;
    }

    public interface EventCallback {
        void onIceCandidate(IceCandidate iceCandidate);

        void onConnectionChange(PeerConnection.PeerConnectionState newState);

        void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices);
    }

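    //SdpObserver mixes the create and set callbacks into one interface; the two abstract classes
    //below split it so that callbacks that must never fire for a given operation fail loudly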
    private static abstract class SetSdpObserver implements SdpObserver {

        @Override
        public void onCreateSuccess(org.webrtc.SessionDescription sessionDescription) {
            throw new IllegalStateException("Not able to use SetSdpObserver");
        }

        @Override
        public void onCreateFailure(String s) {
            throw new IllegalStateException("Not able to use SetSdpObserver");
        }
    }

    private static abstract class CreateSdpObserver implements SdpObserver {

        @Override
        public void onSetSuccess() {
            throw new IllegalStateException("Not able to use CreateSdpObserver");
        }

        @Override
        public void onSetFailure(String s) {
            throw new IllegalStateException("Not able to use CreateSdpObserver");
        }
    }

    static class InitializationException extends Exception {

        private InitializationException(final Throwable throwable) {
            super(throwable);
        }

        private InitializationException(final String message) {
            super(message);
        }
    }

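    //bundles the selected camera capturer with its capture format and the set of available cameras
    //so that the capture parameters and the camera switching state stay in one place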
    private static class CapturerChoice {
        private final CameraVideoCapturer cameraVideoCapturer;
        private final CameraEnumerationAndroid.CaptureFormat captureFormat;
        private final Set<String> availableCameras;
        private boolean isFrontCamera = false;

        CapturerChoice(CameraVideoCapturer cameraVideoCapturer, CameraEnumerationAndroid.CaptureFormat captureFormat, Set<String> cameras) {
            this.cameraVideoCapturer = cameraVideoCapturer;
            this.captureFormat = captureFormat;
            this.availableCameras = cameras;
        }

        int getFrameRate() {
            return Math.max(captureFormat.framerate.min, Math.min(CAPTURING_MAX_FRAME_RATE, captureFormat.framerate.max));
        }
    }
}