Straight to the code: a WebRTC utility class that wraps PeerConnectionFactory setup, camera capture, offer/answer creation, and ICE candidate handling.
package com.example.mqttdome;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.WindowManager;
import android.widget.Toast;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RtpReceiver;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SessionDescription;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.ArrayList;
public class WebRTCUtils {
private static final String TAG = "webrtc_utils";
private final PeerConnectionFactory factory;
private final PeerConnection peer;
private final EglBase.Context eglBaseContext;
private final SurfaceTextureHelper surfaceTextureHelper;
private final VideoCapturer camera;
private final AudioTrack audioTrack;
private VideoTrack videoTrack;
private OnWebRtcListener webRtcListener;
public EglBase.Context getEglBaseContext() {
return eglBaseContext;
}
public void setWebRtcListener(OnWebRtcListener webRtcListener) {
this.webRtcListener = webRtcListener;
}
public VideoTrack getVideoTrack() {
return videoTrack;
}
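// Builds the whole WebRTC stack: EGL context, encoder/decoder factories, the
// PeerConnectionFactory, a PeerConnection with one STUN server, plus local audio and
// camera tracks, and starts capturing immediately. The Intent parameter is presumably
// the screen-capture permission result from startCaptor() (note the ScreenCapturerAndroid
// import); it is not used in this version of the class.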
public WebRTCUtils(Context context, Intent data) {
eglBaseContext = EglBase.create().getEglBaseContext();
surfaceTextureHelper = SurfaceTextureHelper.create("SurfaceTextureHelper_jason", eglBaseContext);
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
VideoEncoderFactory enc = new DefaultVideoEncoderFactory(eglBaseContext, true, false);
VideoDecoderFactory dec = new DefaultVideoDecoderFactory(eglBaseContext);
PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
.builder(context)
.createInitializationOptions());
factory = PeerConnectionFactory.builder()
.setOptions(options)
.setVideoDecoderFactory(dec)
.setVideoEncoderFactory(enc)
.createPeerConnectionFactory();
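// A single public Google STUN server is used for ICE; a TURN server would additionally
// be needed for peers behind symmetric NATs or restrictive firewalls.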
ArrayList<PeerConnection.IceServer> iceServers = new ArrayList<>();
iceServers.add(PeerConnection.IceServer.builder("stun:stun3.l.google.com:19302").createIceServer());
peer = factory.createPeerConnection(iceServers, new PeerObserver() {
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
super.onIceCandidate(iceCandidate);
if (webRtcListener != null)
webRtcListener.onIceCandidate(iceCandidate);
}
@Override
public void onAddStream(MediaStream mediaStream) {
super.onAddStream(mediaStream);
if (webRtcListener != null)
webRtcListener.onAddStream(mediaStream);
}
});
AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
audioTrack = factory.createAudioTrack("jason_audio_track", audioSource);
camera = createCameraDevices(true, context);
if (camera != null) {
VideoSource videoSource = factory.createVideoSource(camera.isScreencast());
camera.initialize(surfaceTextureHelper, context, videoSource.getCapturerObserver());
videoTrack = factory.createVideoTrack("VideoTrack_jason", videoSource);
}
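// Bundle the local tracks into one MediaStream and hand it to the peer connection.
// Note: addStream()/onAddStream() follow the legacy Plan B semantics; depending on the
// library version the default may be Unified Plan, where addTrack() is preferred.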
MediaStream stream = factory.createLocalMediaStream("jason_stream");
stream.addTrack(audioTrack);
if (videoTrack != null)
stream.addTrack(videoTrack);
if (peer != null)
peer.addStream(stream);
// Start local capture at 640x360, 25 fps.
if (camera != null)
camera.startCapture(640, 360, 25);
Log.i(TAG, "WebRTCUtils: camera:" + camera);
}
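// Caller side: create an SDP offer, set it as the local description, and hand it to the
// listener so it can be delivered over whatever signaling channel the app uses
// (presumably MQTT, given the package name).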
public void createOffer() {
if (peer == null)
return;
peer.createOffer(new SdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
super.onCreateSuccess(sessionDescription);
peer.setLocalDescription(new SdpObserver(), sessionDescription);
// Send the offer SDP to the remote peer via the listener / signaling channel.
String description = sessionDescription.description;
if (webRtcListener != null)
webRtcListener.onSdpSendRemoteOffer(description);
Log.i(TAG, "onCreateSuccess: sdp:" + description);
}
}, new MediaConstraints());
}
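// Callee side: store the remote offer, create an answer, set it as the local description,
// and send it back through the listener.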
public void receiveOffer(String sdp) {
if (peer == null)
return;
peer.setRemoteDescription(new SdpObserver(), new SessionDescription(SessionDescription.Type.OFFER, sdp));
peer.createAnswer(new SdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
super.onCreateSuccess(sessionDescription);
peer.setLocalDescription(new SdpObserver(), sessionDescription);
// Send the answer SDP back to the offering peer.
String description = sessionDescription.description;
if (webRtcListener != null)
webRtcListener.onSdpSendRemoteAnswer(description);
Log.i(TAG, "onCreateSuccess: sdp back:" + description);
}
}, new MediaConstraints());
}
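// Caller side, step 2: apply the answer received from the remote peer.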
public void saveAnswer(String sdp) {
if (peer == null)
return;
Log.i(TAG, "saveAnswer: " + sdp);
peer.setRemoteDescription(new SdpObserver(), new SessionDescription(SessionDescription.Type.ANSWER, sdp));
}
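// Apply a remote ICE candidate. IceCandidate's constructor takes (sdpMid, sdpMLineIndex,
// sdp), which is why mid comes before lineIndex here.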
public void saveCandidate(int lineIndex, String mid, String candidate) {
if (peer == null)
return;
Log.i(TAG, "saveCandidate: " + candidate);
peer.addIceCandidate(new IceCandidate(
mid,
lineIndex,
candidate
));
}
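// Tear everything down in roughly the reverse order of construction: stop and dispose the
// capturer, disable the tracks, close the peer connection, then release the factory and
// the SurfaceTextureHelper.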
public void release() {
try {
if (camera != null) {
camera.stopCapture();
camera.dispose();
}
} catch (Exception e) {
Log.i(TAG, "release: error:" + e.getMessage());
}
if (audioTrack != null)
audioTrack.setEnabled(false);
if (videoTrack != null)
videoTrack.setEnabled(false);
if (peer != null) {
peer.close();
peer.dispose();
}
factory.dispose();
surfaceTextureHelper.dispose();
}
// ----------------------------------------------------------------------------------------- helper classes
public interface OnWebRtcListener {
void onIceCandidate(IceCandidate iceCandidate);
void onAddStream(MediaStream mediaStream);
void onSdpSendRemoteOffer(String sdp);
void onSdpSendRemoteAnswer(String sdp);
}
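// No-op implementation of PeerConnection.Observer so call sites only override the
// callbacks they actually care about.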
private static class PeerObserver implements PeerConnection.Observer {
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
}
@Override
public void onIceConnectionReceivingChange(boolean b) {
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
}
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
}
@Override
public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
}
@Override
public void onAddStream(MediaStream mediaStream) {
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
}
@Override
public void onDataChannel(DataChannel dataChannel) {
}
@Override
public void onRenegotiationNeeded() {
}
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
}
}
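// No-op implementation of org.webrtc.SdpObserver, used both as a base class for the
// create-offer/answer callbacks and as a throwaway observer for set*Description calls.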
private static class SdpObserver implements org.webrtc.SdpObserver {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
}
@Override
public void onSetSuccess() {
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}
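// Ask the user for screen-capture permission. The result comes back in the Activity's
// onActivityResult() with request code 119; that returned Intent is what a
// ScreenCapturerAndroid would consume, and appears to be what the constructor's Intent
// parameter is meant for.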
public static void startCaptor(Activity activity) {
MediaProjectionManager manager = (MediaProjectionManager) activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
if (manager == null) {
Log.i(TAG, "startCaptor: manager is null");
return;
}
Intent intent = manager.createScreenCaptureIntent();
activity.startActivityForResult(intent, 119);
}
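// Pick a camera capturer: try the Camera2 API first, then fall back to Camera1.
// Returns null if no camera matching the requested facing could be opened.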
public static VideoCapturer createCameraDevices(boolean isFront, Context context) {
// Prefer the Camera2 API when the device supports it.
if (Camera2Enumerator.isSupported(context)) {
Camera2Enumerator camera2Enumerator = new Camera2Enumerator(context);
String[] deviceNames1 = camera2Enumerator.getDeviceNames();
for (String item : deviceNames1) {
if (isFront ? camera2Enumerator.isFrontFacing(item) : camera2Enumerator.isBackFacing(item)) {
CameraVideoCapturer cameraVideoCapturer = camera2Enumerator.createCapturer(item, null);
if (cameraVideoCapturer != null) {
Log.i(TAG, "createCameraDevices: use camera 2");
Toast.makeText(context, "camera 2", Toast.LENGTH_LONG).show();
return cameraVideoCapturer;
}
}
}
}
Camera1Enumerator enumerator = new Camera1Enumerator(false);
final String[] deviceNames = enumerator.getDeviceNames();
// Fall back to the Camera1 API and pick a device matching the requested facing.
for (String deviceName : deviceNames) {
if (isFront ? enumerator.isFrontFacing(deviceName) : enumerator.isBackFacing(deviceName)) {
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
if (videoCapturer != null) {
Log.i(TAG, "createCameraDevices: use camera 1 ");
Toast.makeText(context, "camera 1", Toast.LENGTH_LONG).show();
return videoCapturer;
}
}
}
return null;
}
}
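For reference, here is a minimal sketch of how an Activity might drive this class. The signaling object (and its sendOffer/sendAnswer/sendCandidate methods) and remoteRenderer are hypothetical placeholders for whatever transport and SurfaceViewRenderer the app actually uses; they are not part of the class above.
// Hypothetical wiring inside an Activity. `signaling` stands in for the app's MQTT (or other)
// transport and `remoteRenderer` for a SurfaceViewRenderer initialized with getEglBaseContext().
WebRTCUtils webRtc = new WebRTCUtils(this, null);
webRtc.setWebRtcListener(new WebRTCUtils.OnWebRtcListener() {
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
// Forward each local candidate to the remote peer; the message format is up to the app.
signaling.sendCandidate(iceCandidate.sdpMLineIndex, iceCandidate.sdpMid, iceCandidate.sdp);
}
@Override
public void onAddStream(MediaStream mediaStream) {
// Render the first remote video track.
if (!mediaStream.videoTracks.isEmpty())
mediaStream.videoTracks.get(0).addSink(remoteRenderer);
}
@Override
public void onSdpSendRemoteOffer(String sdp) {
signaling.sendOffer(sdp);
}
@Override
public void onSdpSendRemoteAnswer(String sdp) {
signaling.sendAnswer(sdp);
}
});
// Offering side:
webRtc.createOffer();
// Answering side, when an offer arrives over signaling: webRtc.receiveOffer(sdp);
// Offering side, when the answer comes back: webRtc.saveAnswer(sdp);
// Both sides, for each remote candidate: webRtc.saveCandidate(lineIndex, mid, candidate);
// When the call ends: webRtc.release();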