本文在《webRTC(十六):webrtc 实现 android 端对端的视频互通》的基础上,实现 android 端对端的视频➕聊天互通;基本配置请先阅读该篇端对端视频互通的文章。
web端实现
- 效果
- 在创建通道连接createPeerConnection时创建DataChannel
/*
DataChannel.Init configurable parameters:
ordered: whether in-order delivery is guaranteed;
maxRetransmitTimeMs: maximum time allowed for retransmission;
maxRetransmits: maximum number of retransmissions allowed;
*/
DataChannel.Init init = new DataChannel.Init();
init.ordered = true;
// Create the channel on the offering side and listen for its events.
mDataChannel = connection.createDataChannel("dataChannel", init);
mDataChannel.registerObserver(mDataChannelObserver);
- 在mPeerConnectionObserver的onDataChannel回调中注册DataChannel
// Attach the shared observer to the channel announced by the remote peer.
dataChannel.registerObserver(mDataChannelObserver);
- DataChannel 观察者,监听回调,接收数据
/**
 * DataChannel observer: receives channel state changes and incoming
 * chat messages from the remote peer.
 */
private DataChannel.Observer mDataChannelObserver = new DataChannel.Observer() {
    @Override
    public void onBufferedAmountChange(long l) {
    }

    @Override
    public void onStateChange() {
        Log.d(TAG, "onDataChannel onStateChange:" + mDataChannel.state());
    }

    @Override
    public void onMessage(DataChannel.Buffer buffer) {
        Log.d(TAG, "onDataChannel onMessage : " + buffer);
        ByteBuffer data = buffer.data;
        // Read only the bytes actually available: capacity() can exceed the
        // payload length, which would append garbage/NUL bytes to the text.
        byte[] bytes = new byte[data.remaining()];
        data.get(bytes);
        // Decode explicitly as UTF-8; new String(bytes) uses the platform
        // default charset and breaks non-ASCII text from the web peer.
        String msg = new String(bytes, StandardCharsets.UTF_8);
        Log.e(TAG, "msg============> : " + msg);
        if (webRtcListener != null) {
            webRtcListener.onReceiveDataChannelMessage(msg);
        }
    }
};
- 发送消息
/**
 * Sends a chat message to the remote peer over the data channel.
 *
 * @param message text to send; encoded as UTF-8 (binary=false marks the
 *                buffer as a text frame)
 */
public void sendDataChannelMessage(String message) {
    // Encode explicitly as UTF-8 so non-ASCII text survives the trip to
    // the web peer; getBytes() alone uses the platform-default charset.
    byte[] msg = message.getBytes(StandardCharsets.UTF_8);
    DataChannel.Buffer buffer = new DataChannel.Buffer(
            ByteBuffer.wrap(msg),
            false);
    // Guard: the channel may not exist yet (before createPeerConnection)
    // or may no longer be open; send() on it would NPE/fail silently.
    if (mDataChannel == null || mDataChannel.state() != DataChannel.State.OPEN) {
        Log.w(TAG, "sendDataChannelMessage: data channel not open, message dropped");
        return;
    }
    mDataChannel.send(buffer);
}
}
源码
- 布局
<?xml version="1.0" encoding="utf-8"?>
<!-- Chat screen: full-screen remote video, small local preview overlay, and a
     chat panel (input + send button + message view + on-screen log view). -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<!-- Remote peer's video, centered. -->
<org.webrtc.SurfaceViewRenderer
android:id="@+id/RemoteSurfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center" />
<!-- Local camera preview, small overlay on the right edge. -->
<org.webrtc.SurfaceViewRenderer
android:id="@+id/LocalSurfaceView"
android:layout_width="120dp"
android:layout_height="160dp"
android:layout_gravity="right|center_vertical" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<!-- Chat input row: text field plus send button. -->
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical">
<EditText
android:id="@+id/et_input"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<Button
android:id="@+id/btn_send"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="发送" />
</LinearLayout>
<!-- Scrollable chat history (tv_context). -->
<androidx.core.widget.NestedScrollView
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1">
<TextView
android:id="@+id/tv_context"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="top|start"
android:layout_margin="5dp"
android:textColor="@android:color/white" />
</androidx.core.widget.NestedScrollView>
<!-- Scrollable on-screen log console (mirrors logcat). -->
<androidx.core.widget.NestedScrollView
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1">
<TextView
android:id="@+id/LogcatView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="top|start"
android:layout_margin="5dp"
android:textColor="@android:color/white" />
</androidx.core.widget.NestedScrollView>
</LinearLayout>
</FrameLayout>
- 信息交互实现(代码内有详细注释)
package com.example.webrtcdemo;
import android.content.Context;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import androidx.appcompat.app.AppCompatActivity;
import com.example.webrtcdemo.signal.SignalClient;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon;
import org.webrtc.RtpReceiver;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
public class ChatActivity extends AppCompatActivity implements SignalClient.OnSignalEventListener {
// Signaling server address, read from the launch Intent ("ServerAddr").
private String serverAddr;
// Room name, read from the launch Intent ("RoomName").
private String roomName;
// On-screen log console that mirrors logcat output.
private TextView mLogcatView;
// Renders the local camera preview.
private SurfaceViewRenderer mLocalSurfaceView;
// Renders the remote peer's video.
private SurfaceViewRenderer mRemoteSurfaceView;
private static final String TAG = "ChatActivity----huang";
// Capture parameters requested from the camera.
private static final int VIDEO_RESOLUTION_WIDTH = 1280;
private static final int VIDEO_RESOLUTION_HEIGHT = 720;
private static final int VIDEO_FPS = 30;
// Chat UI: message history, input box and send button.
private TextView tvContext;
private EditText etInput;
private Button btnSend;
/**
 * Signaling state machine: init -> joined -> joined_conn / joined_unbind -> leaved.
 */
private String mState = "init";
public static final String VIDEO_TRACK_ID = "1";//"ARDAMSv0";
public static final String AUDIO_TRACK_ID = "2";//"ARDAMSa0";
// PeerConnection used for media and data transport.
private PeerConnection mPeerConnection;
private PeerConnectionFactory mPeerConnectionFactory;
// OpenGL ES context shared by capture and both renderers.
private EglBase mRootEglBase;
// Helper thread/surface for camera texture capture.
private SurfaceTextureHelper mSurfaceTextureHelper;
private VideoTrack mVideoTrack;
private AudioTrack mAudioTrack;
private VideoCapturer mVideoCapturer;
// Chat DataChannel (created locally in createPeerConnection).
private DataChannel mDataChannel;
/**
 * Leaves the current room: closes the PeerConnection and notifies the
 * signaling server.
 */
public void doLeave() {
logcatOnUI("Leave room, Wait ...");
hangup();
SignalClient.getInstance().leaveRoom();
}
@Override
protected void onDestroy() {
super.onDestroy();
doLeave();
// Release rendering/capture resources and the factory.
// NOTE(review): mDataChannel is never dispose()d anywhere - confirm whether
// this leaks the native channel object.
mLocalSurfaceView.release();
mRemoteSurfaceView.release();
mVideoCapturer.dispose();
mSurfaceTextureHelper.dispose();
PeerConnectionFactory.stopInternalTracingCapture();
PeerConnectionFactory.shutdownInternalTracer();
mPeerConnectionFactory.dispose();
}
@Override
protected void onResume() {
super.onResume();
// Restart camera capture when returning to the foreground.
mVideoCapturer.startCapture(VIDEO_RESOLUTION_WIDTH, VIDEO_RESOLUTION_HEIGHT, VIDEO_FPS);
}
@Override
protected void onPause() {
super.onPause();
// Stop the camera while in the background; stopCapture() may be interrupted.
try {
mVideoCapturer.stopCapture();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_chat);
mRootEglBase = EglBase.create();
initData();
initView();
initListener();
// Create the factory; PeerConnections are obtained from it.
mPeerConnectionFactory = createPeerConnectionFactory(this);
// NOTE: this _must_ happen while PeerConnectionFactory is alive!
Logging.enableLogToDebugOutput(Logging.Severity.LS_VERBOSE);
/**
 * Camera capture source.
 */
mVideoCapturer = createVideoCapturer();
/**
 * Texture-capture helper (dedicated thread + shared EGL context).
 */
mSurfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", mRootEglBase.getEglBaseContext());
/**
 * Create the video source; isScreencast: whether this is screen sharing.
 */
VideoSource videoSource = mPeerConnectionFactory.createVideoSource(false);
// Feed captured camera frames into the video source.
mVideoCapturer.initialize(mSurfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
mVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
mVideoTrack.setEnabled(true);
mVideoTrack.addSink(mLocalSurfaceView);
/**
 * Create the audio source.
 */
AudioSource audioSource = mPeerConnectionFactory.createAudioSource(new MediaConstraints());
mAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
mAudioTrack.setEnabled(true);
/**
 * Register for signaling events.
 */
SignalClient.getInstance().setSignalEventListener(this);
/**
 * Join the room.
 */
SignalClient.getInstance().joinRoom(serverAddr, roomName);
}
private void initData() {
/**
 * Signaling server address from the launch Intent.
 */
serverAddr = getIntent().getStringExtra("ServerAddr");
/**
 * Room name from the launch Intent.
 */
roomName = getIntent().getStringExtra("RoomName");
}
private void initView() {
tvContext = findViewById(R.id.tv_context);
etInput = findViewById(R.id.et_input);
btnSend = findViewById(R.id.btn_send);
// ---------------------------------------------
mLogcatView = findViewById(R.id.LogcatView);
mLocalSurfaceView = findViewById(R.id.LocalSurfaceView);
mRemoteSurfaceView = findViewById(R.id.RemoteSurfaceView);
/**
 * Initialize the renderer with the shared OpenGL context.
 */
mLocalSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
/**
 * Scaling type; SCALE_ASPECT_FILL: fill the view while keeping aspect ratio.
 */
mLocalSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
/**
 * Mirror the local camera preview.
 */
mLocalSurfaceView.setMirror(true);
/**
 * Whether to use a hardware scaler when scaling.
 */
mLocalSurfaceView.setEnableHardwareScaler(false /* enabled */);
mRemoteSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
mRemoteSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
mRemoteSurfaceView.setMirror(true);
mRemoteSurfaceView.setEnableHardwareScaler(true /* enabled */);
/**
 * Z-order: draw the local view on top where the two views overlap.
 */
mLocalSurfaceView.setZOrderMediaOverlay(true);
}
private void initListener() {
// Send button: append the message to the local history, then push it
// over the data channel.
btnSend.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String message = etInput.getText().toString();
if (TextUtils.isEmpty(message)){
return;
}
String text = "我: " + message;
tvContext.setText(tvContext.getText().toString() + "\n" + text);
sendDataChannelMessage(message);
etInput.setText("");
}
});
// Incoming messages presumably arrive off the UI thread (hence the
// runOnUiThread hop before touching the TextView).
this.setWebRtcListener(new WebRtcListener() {
@Override
public void onReceiveDataChannelMessage(String message) {
runOnUiThread(new Runnable() {
@Override
public void run() {
tvContext.setText(tvContext.getText().toString() + "\n" + "对方: "+message);
}
});
}
});
}
/**
 * Creates the PeerConnectionFactory; PeerConnections are obtained from it.
 *
 * @param context context used for the one-time factory initialization
 * @return a factory configured with the default video codec factories
 */
public PeerConnectionFactory createPeerConnectionFactory(Context context) {
final VideoEncoderFactory encoderFactory;
final VideoDecoderFactory decoderFactory;
/**
 * Video encoder.
 */
encoderFactory = new DefaultVideoEncoderFactory(
mRootEglBase.getEglBaseContext(),
false /* enableIntelVp8Encoder */,
true);//on Android only H264 gets hardware acceleration
/**
 * Video decoder.
 */
decoderFactory = new DefaultVideoDecoderFactory(mRootEglBase.getEglBaseContext());
PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions.builder(context)
.setEnableInternalTracer(true)//enables tracing logs; disable in production
.createInitializationOptions());
PeerConnectionFactory.Builder builder = PeerConnectionFactory.builder()
.setVideoEncoderFactory(encoderFactory)
.setVideoDecoderFactory(decoderFactory);
builder.setOptions(null);
return builder.createPeerConnectionFactory();
}
/**
 * Creates a camera-backed VideoCapturer, preferring the Camera2 API when the
 * device supports it.
 * Read more about Camera2 here:
 * https://developer.android.com/reference/android/hardware/camera2/package-summary.html
 **/
private VideoCapturer createVideoCapturer() {
    if (Camera2Enumerator.isSupported(this)) {
        return createCameraCapturer(new Camera2Enumerator(this));
    } else {
        return createCameraCapturer(new Camera1Enumerator(true));
    }
}

/**
 * Picks a camera device: front-facing preferred, then back-facing.
 *
 * @param enumerator camera API wrapper (Camera1 or Camera2)
 * @return a working capturer, or null when no camera could be opened
 */
private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
    // Try front-facing cameras first.
    Log.d(TAG, "使用前置摄像头.");
    VideoCapturer capturer = createCapturerWithFacing(enumerator, true, "创建前置摄像头捕获程序.");
    if (capturer != null) {
        return capturer;
    }
    // Fall back to back-facing cameras.
    Log.d(TAG, "使用后置摄像头.");
    return createCapturerWithFacing(enumerator, false, "创建后置摄像头捕获程序.");
}

/**
 * Tries every device matching the requested facing until one opens
 * successfully; shared by the front/back passes above.
 */
private VideoCapturer createCapturerWithFacing(CameraEnumerator enumerator, boolean frontFacing, String logMessage) {
    for (String deviceName : enumerator.getDeviceNames()) {
        if (enumerator.isFrontFacing(deviceName) == frontFacing) {
            Logging.d(TAG, logMessage);
            VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
            if (videoCapturer != null) {
                return videoCapturer;
            }
        }
    }
    return null;
}
/**
 * Connected to the signaling server.
 */
@Override
public void onConnected() {
logcatOnUI("连接服务器成功");
}
/**
 * Connecting to the signaling server.
 */
@Override
public void onConnecting() {
logcatOnUI("正在连接服务器");
}
/**
 * Disconnected from the signaling server.
 */
@Override
public void onDisconnected() {
logcatOnUI("断开连接");
}
/**
 * Local user joined the room.
 *
 * @param roomName room that was joined
 * @param userID   id of the local user
 */
@Override
public void onUserJoined(String roomName, String userID) {
logcatOnUI("本地用户加入房间");
mState = "joined";
// Create the PeerConnection as soon as we are in the room.
if (mPeerConnection == null) {
mPeerConnection = createPeerConnection();
}
}
/**
 * Local user left the room.
 *
 * @param roomName room that was left
 * @param userID   id of the local user
 */
@Override
public void onUserLeaved(String roomName, String userID) {
logcatOnUI("本地用户离开房间");
mState = "leaved";
}
/**
 * A remote user joined: recreate the PeerConnection if the previous peer
 * tore it down ("joined_unbind"), then start media negotiation.
 *
 * @param roomName room the remote user joined
 */
@Override
public void onRemoteUserJoined(String roomName) {
logcatOnUI("远端客户加入房间, room: " + roomName);
if (mState.equals("joined_unbind")) {
if (mPeerConnection == null) {
mPeerConnection = createPeerConnection();
}
}
mState = "joined_conn";
// Start the call (media negotiation).
doStartCall();
}
/**
 * The remote user left: tear down the PeerConnection and wait for a new peer.
 *
 * @param roomName room the remote user left
 * @param userID   id of the remote user
 */
@Override
public void onRemoteUserLeaved(String roomName, String userID) {
logcatOnUI("远端客户离开房间, room: " + roomName + "uid:" + userID);
mState = "joined_unbind";
if (mPeerConnection != null) {
mPeerConnection.close();
mPeerConnection = null;
}
}
/**
 * Room is full: release all local resources and finish the activity.
 *
 * @param roomName room that rejected us
 * @param userID   id of the local user
 */
@Override
public void onRoomFull(String roomName, String userID) {
logcatOnUI("房间满员, room: " + roomName + "uid:" + userID);
mState = "leaved";
if (mLocalSurfaceView != null) {
mLocalSurfaceView.release();
mLocalSurfaceView = null;
}
if (mRemoteSurfaceView != null) {
mRemoteSurfaceView.release();
mRemoteSurfaceView = null;
}
if (mVideoCapturer != null) {
mVideoCapturer.dispose();
mVideoCapturer = null;
}
if (mSurfaceTextureHelper != null) {
mSurfaceTextureHelper.dispose();
mSurfaceTextureHelper = null;
}
PeerConnectionFactory.stopInternalTracingCapture();
PeerConnectionFactory.shutdownInternalTracer();
if (mPeerConnectionFactory != null) {
mPeerConnectionFactory.dispose();
mPeerConnectionFactory = null;
}
finish();
}
/**
 * Dispatches an incoming signaling message on its "type" field:
 * "offer", "answer" or "candidate"; anything else is logged and dropped.
 *
 * @param message signaling payload received from the server
 */
@Override
public void onMessage(JSONObject message) {
    Log.i(TAG, "onMessage: " + message);
    try {
        String type = message.getString("type");
        switch (type) {
            case "offer":
                onRemoteOfferReceived(message);
                break;
            case "answer":
                onRemoteAnswerReceived(message);
                break;
            case "candidate":
                onRemoteCandidateReceived(message);
                break;
            default:
                Log.w(TAG, "the type is invalid: " + type);
                break;
        }
    } catch (JSONException e) {
        e.printStackTrace();
    }
}
/**
 * Creates and configures the PeerConnection: ICE/TURN server, policies,
 * local audio/video tracks and the chat DataChannel.
 *
 * @return the new connection, or null when the factory fails
 */
public PeerConnection createPeerConnection() {
Log.i(TAG, "创建 PeerConnection ...");
LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
// NOTE(review): TURN credentials are hard-coded; move them to secure
// configuration before shipping.
PeerConnection.IceServer ice_server =
PeerConnection.IceServer.builder("turn:www.huangxiaoguo.club:3478")
.setPassword("123456")
.setUsername("huang")
.createIceServer();
iceServers.add(ice_server);
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
// TCP candidates are only useful when connecting to a server that supports
// ICE-TCP.
rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
//rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
//rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
// Use ECDSA encryption.
//rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
// Enable DTLS for normal calls and disable for loopback calls.
rtcConfig.enableDtlsSrtp = true;
//rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
PeerConnection connection =
mPeerConnectionFactory.createPeerConnection(rtcConfig,
mPeerConnectionObserver);
if (connection == null) {
Log.e(TAG, "Failed to createPeerConnection !");
return null;
}
List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
connection.addTrack(mVideoTrack, mediaStreamLabels);
connection.addTrack(mAudioTrack, mediaStreamLabels);
/*
DataChannel.Init configurable parameters:
ordered: whether in-order delivery is guaranteed;
maxRetransmitTimeMs: maximum time allowed for retransmission;
maxRetransmits: maximum number of retransmissions allowed;
*/
DataChannel.Init init = new DataChannel.Init();
init.ordered = true;
mDataChannel = connection.createDataChannel("dataChannel", init);
mDataChannel.registerObserver(mDataChannelObserver);
return connection;
}
// Observes PeerConnection events: ICE negotiation, remote streams/tracks and
// the remotely-announced DataChannel.
private PeerConnection.Observer mPeerConnectionObserver = new PeerConnection.Observer() {
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
Log.i(TAG, "onSignalingChange: " + signalingState);
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
Log.i(TAG, "onIceConnectionChange: " + iceConnectionState);
}
@Override
public void onIceConnectionReceivingChange(boolean b) {
Log.i(TAG, "onIceConnectionChange: " + b);
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
Log.i(TAG, "onIceGatheringChange: " + iceGatheringState);
}
// Forward each locally gathered ICE candidate to the peer through the
// signaling server.
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
Log.i(TAG, "onIceCandidate: " + iceCandidate);
try {
JSONObject message = new JSONObject();
//message.put("userId", RTCSignalClient.getInstance().getUserId());
message.put("type", "candidate");
message.put("label", iceCandidate.sdpMLineIndex);
message.put("id", iceCandidate.sdpMid);
message.put("candidate", iceCandidate.sdp);
SignalClient.getInstance().sendMessage(message);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
for (int i = 0; i < iceCandidates.length; i++) {
Log.i(TAG, "onIceCandidatesRemoved: " + iceCandidates[i]);
}
// NOTE(review): mPeerConnection may be null here after hangup() - confirm.
mPeerConnection.removeIceCandidates(iceCandidates);
}
@Override
public void onAddStream(MediaStream mediaStream) {
Log.i(TAG, "onAddStream: " + mediaStream.videoTracks.size());
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
Log.i(TAG, "onRemoveStream");
}
// The remote side created the chat channel; attach the shared observer so
// its incoming messages are delivered.
@Override
public void onDataChannel(DataChannel dataChannel) {
Log.i(TAG, "onDataChannel");
dataChannel.registerObserver(mDataChannelObserver);
}
@Override
public void onRenegotiationNeeded() {
Log.i(TAG, "onRenegotiationNeeded");
}
// A remote track arrived: render remote video into mRemoteSurfaceView.
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
MediaStreamTrack track = rtpReceiver.track();
if (track instanceof VideoTrack) {
Log.i(TAG, "onAddVideoTrack");
VideoTrack remoteVideoTrack = (VideoTrack) track;
remoteVideoTrack.setEnabled(true);
remoteVideoTrack.addSink(mRemoteSurfaceView);
}
/**
 * Setting false here disables (mutes) the remote audio track.
 */
for (MediaStream mediaStream : mediaStreams) {
try {
mediaStream.audioTracks.get(0).setEnabled(false);
}catch (Exception e){
e.printStackTrace();
}
}
}
};
/**
 * DataChannel observer: receives channel state changes and incoming chat
 * messages (on both the locally-created and remotely-announced channel).
 */
private DataChannel.Observer mDataChannelObserver = new DataChannel.Observer() {
    @Override
    public void onBufferedAmountChange(long l) {
    }

    @Override
    public void onStateChange() {
        Log.d(TAG, "onDataChannel onStateChange:" + mDataChannel.state());
    }

    @Override
    public void onMessage(DataChannel.Buffer buffer) {
        Log.d(TAG, "onDataChannel onMessage : " + buffer);
        ByteBuffer data = buffer.data;
        // Read only the bytes actually available: capacity() can exceed the
        // payload length, which would append garbage/NUL bytes to the text.
        byte[] bytes = new byte[data.remaining()];
        data.get(bytes);
        // Decode explicitly as UTF-8; new String(bytes) uses the platform
        // default charset and breaks non-ASCII text from the web peer.
        String msg = new String(bytes, StandardCharsets.UTF_8);
        Log.e(TAG, "msg============> : " + msg);
        if (webRtcListener != null) {
            webRtcListener.onReceiveDataChannelMessage(msg);
        }
    }
};
// Callback that delivers chat messages received over the data channel.
private WebRtcListener webRtcListener;
/**
 * Registers the listener notified when a data-channel message arrives.
 * NOTE(review): appears to be invoked off the UI thread (the registered
 * implementation hops via runOnUiThread) - confirm before touching views.
 */
public void setWebRtcListener(WebRtcListener webRtcListener) {
this.webRtcListener = webRtcListener;
}
/** Observer interface for incoming data-channel chat messages. */
public interface WebRtcListener {
void onReceiveDataChannelMessage(String message);
}
// ----------------------------------Media negotiation--------------------------------------//
/**
 * Starts media negotiation: creates an SDP offer, applies it as the local
 * description, and sends it to the peer via the signaling server.
 */
public void doStartCall() {
logcatOnUI("Start Call, Wait ...");
if (mPeerConnection == null) {
mPeerConnection = createPeerConnection();
}
MediaConstraints mediaConstraints = new MediaConstraints();
// Echo cancellation.
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
// Automatic gain control.
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googAutoGainControl", "true"));
// High-pass filter.
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googHighpassFilter", "true"));
// Noise suppression.
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
// Enable DTLS; without it interop with browsers does not work.
mediaConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
mPeerConnection.createOffer(new SimpleSdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
Log.i(TAG, "Create local offer success: \n" + sessionDescription.description);
mPeerConnection.setLocalDescription(this, sessionDescription);
JSONObject message = new JSONObject();
try {
message.put("type", "offer");
message.put("sdp", sessionDescription.description);
SignalClient.getInstance().sendMessage(message);
} catch (JSONException e) {
e.printStackTrace();
}
}
}, mediaConstraints);
}
/**
 * Minimal SdpObserver base class. The default onCreateSuccess sends the SDP
 * to the peer and applies it locally; doStartCall/doAnswerCall override it.
 * When used as a set-description observer only onSetSuccess/onSetFailure run.
 */
public class SimpleSdpObserver implements SdpObserver {
@Override
public void onCreateSuccess(SessionDescription sdp) {
Log.i(TAG, "SdpObserver: onCreateSuccess !");
try {
JSONObject message = new JSONObject();
message.put("type", sdp.type.canonicalForm());
message.put("sdp", sdp.description);
SignalClient.getInstance().sendMessage(message);
mPeerConnection.setLocalDescription(this, sdp);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onSetSuccess() {
Log.i(TAG, "SdpObserver: onSetSuccess");
}
@Override
public void onCreateFailure(String msg) {
Log.e(TAG, "SdpObserver onCreateFailure: " + msg);
}
@Override
public void onSetFailure(String msg) {
Log.e(TAG, "SdpObserver onSetFailure: " + msg);
}
}
/**
 * Handles a remote offer: applies it as the remote description, then answers.
 *
 * @param message signaling payload containing the offer "sdp"
 */
private void onRemoteOfferReceived(JSONObject message) {
logcatOnUI("Receive Remote Call ...");
if (mPeerConnection == null) {
mPeerConnection = createPeerConnection();
}
try {
String description = message.getString("sdp");
mPeerConnection.setRemoteDescription(
new SimpleSdpObserver(),
new SessionDescription(
SessionDescription.Type.OFFER,
description));
doAnswerCall();
} catch (JSONException e) {
e.printStackTrace();
}
}
/**
 * Creates the SDP answer, applies it locally and sends it to the peer.
 */
public void doAnswerCall() {
logcatOnUI("Answer Call, Wait ...");
if (mPeerConnection == null) {
mPeerConnection = createPeerConnection();
}
MediaConstraints sdpMediaConstraints = new MediaConstraints();
// Enable DTLS; without it interop with browsers does not work.
sdpMediaConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
Log.i(TAG, "Create answer ...");
mPeerConnection.createAnswer(new SimpleSdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
Log.i(TAG, "Create answer success !");
mPeerConnection.setLocalDescription(this,
sessionDescription);
JSONObject message = new JSONObject();
try {
message.put("type", "answer");
message.put("sdp", sessionDescription.description);
SignalClient.getInstance().sendMessage(message);
} catch (JSONException e) {
e.printStackTrace();
}
}
}, sdpMediaConstraints);
updateCallState(false);
}
// Shows or hides the remote video view depending on call state.
private void updateCallState(boolean idle) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (idle) {
mRemoteSurfaceView.setVisibility(View.GONE);
} else {
mRemoteSurfaceView.setVisibility(View.VISIBLE);
}
}
});
}
/**
 * Handles the remote answer to our offer.
 *
 * @param message signaling payload containing the answer "sdp"
 */
private void onRemoteAnswerReceived(JSONObject message) {
logcatOnUI("Receive Remote Answer ...");
try {
String description = message.getString("sdp");
mPeerConnection.setRemoteDescription(
new SimpleSdpObserver(),
new SessionDescription(
SessionDescription.Type.ANSWER,
description));
} catch (JSONException e) {
e.printStackTrace();
}
updateCallState(false);
}
/**
 * Handles a remote ICE candidate.
 *
 * @param message signaling payload with "id", "label" and "candidate"
 */
private void onRemoteCandidateReceived(JSONObject message) {
logcatOnUI("Receive Remote Candidate ...");
try {
IceCandidate remoteIceCandidate =
new IceCandidate(message.getString("id"),
message.getInt("label"),
message.getString("candidate"));
mPeerConnection.addIceCandidate(remoteIceCandidate);
} catch (JSONException e) {
e.printStackTrace();
}
}
/**
 * Handles a hangup event from the remote side.
 */
private void onRemoteHangup() {
logcatOnUI("Receive Remote Hangup Event ...");
hangup();
}
/**
 * Hangs up: closes and discards the PeerConnection, hides the remote view.
 */
private void hangup() {
logcatOnUI("Hangup Call, Wait ...");
if (mPeerConnection == null) {
return;
}
mPeerConnection.close();
mPeerConnection = null;
logcatOnUI("Hangup Done.");
updateCallState(true);
}
// Logs to logcat and appends the line to the on-screen log view.
private void logcatOnUI(String msg) {
Log.i(TAG, msg);
runOnUiThread(new Runnable() {
@Override
public void run() {
String output = mLogcatView.getText() + "\n" + msg;
mLogcatView.setText(output);
}
});
}
/********************************************************************/
/**
 * Sends a chat message to the remote peer over the data channel.
 *
 * @param message text to send; encoded as UTF-8 (binary=false marks the
 *                buffer as a text frame)
 */
public void sendDataChannelMessage(String message) {
    // Encode explicitly as UTF-8 so non-ASCII text survives the trip to
    // the web peer; getBytes() alone uses the platform-default charset.
    byte[] msg = message.getBytes(StandardCharsets.UTF_8);
    DataChannel.Buffer buffer = new DataChannel.Buffer(
            ByteBuffer.wrap(msg),
            false);
    // Guard: the channel may not exist yet (before createPeerConnection)
    // or may no longer be open; send() would NPE or silently fail.
    if (mDataChannel == null || mDataChannel.state() != DataChannel.State.OPEN) {
        Log.w(TAG, "sendDataChannelMessage: data channel not open, message dropped");
        return;
    }
    mDataChannel.send(buffer);
}
}
web端测试地址:https://www.huangxiaoguo.club/textchat/room.html
服务器地址:https://www.huangxiaoguo.club
房间号:555555 (建议,因为web端写死为555555了)
这样web端和web端,web端和android端,android端和android端视频➕聊天互通就完成了
源码地址:https://gitee.com/huangxiaoguo/webRTCdemo