Android 基于kotlin的 webRtc音视频通信
1.环境配置AndroidStudio 3.2gradle引入implementation 'org.webrtc:google-webrtc:1.0.26131'权限列表<uses-permission android:name="android.permission.CAMERA" /><uses-permission android:name=...
·
1.环境配置
Android Studio 3.2
gradle引入
implementation 'org.webrtc:google-webrtc:1.0.26131'
权限列表
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
2、webRtc连接流程简述
1.webrtc总体可以看做两个端(offer发送端,answer响应端)
2.webrtc通信需要一个独立于turn/stun服务之外的信令服务。turn/stun服务只负责offer端和answer端之间媒体数据的传输;信令服务则处理业务层逻辑,主要用于转发offer端发送的offer信息和answer端响应的answer信息,从而实现"谁呼叫谁、谁响应谁"的呼叫流程。信令通道常用websocket、TCP、UDP、http、https等网络通信方式实现,具体协议可根据业务场景选择。
3.webrtc响应流程
3、基本代码封装实现
package com.linzi.mysignapp.webrtc
import android.content.Context
import android.media.AudioManager
import android.util.Log
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.async
import org.webrtc.*
/**
* @author linzi
* @date 2019/12/20
*/
/**
 * Thin wrapper around the Google WebRTC Android SDK that manages a pool of
 * [PeerConnection]s (one per remote peer id), local audio/video capture, and
 * speaker/microphone state. Signaling (offer/answer/ICE exchange) is delegated
 * to the caller through [RtcCallBack].
 *
 * Threading: [getOrCreatePeerConnection] is synchronized; everything else is
 * expected to be driven from a single thread by the caller.
 */
class RtcClient {
    private var peerConnectionFactory: PeerConnectionFactory? = null
    private var videoCapturer: VideoCapturer? = null
    // Local video stream; exposed so callers can render/inspect it.
    var mediaStream: MediaStream? = null
    private var audioStream: MediaStream? = null
    private var audioTrack: AudioTrack? = null
    private var videoTrack: VideoTrack? = null
    private var localPreview: SurfaceViewRenderer? = null
    // Shared EGL context used by capture, encode and rendering.
    var eglBaseContext: EglBase.Context? = null
    private val TAG = this.javaClass.name
    private var iceServers = ArrayList<PeerConnection.IceServer>()
    private var call: RtcCallBack? = null
    private var context: Context? = null
    // Connection pool keyed by remote peer id (supports multi-party calls).
    var peerConnectionMap: HashMap<String, PeerConnection?>? = null
    private var pcConstraints = MediaConstraints()
    private var channels = ArrayList<DataChannel?>()

    constructor(context: Context) {
        this.context = context
        init(context)
    }

    /**
     * One-time SDK initialization: EGL context, encoder/decoder factories,
     * SDP constraints and the [PeerConnectionFactory].
     */
    private fun init(app: Context) {
        eglBaseContext = EglBase.create().eglBaseContext
        PeerConnectionFactory.initialize(
            PeerConnectionFactory.InitializationOptions
                .builder(app)
                .createInitializationOptions()
        )
        val options = PeerConnectionFactory.Options()
        val defaultVideoEncoderFactory = DefaultVideoEncoderFactory(eglBaseContext, true, true)
        val defaultVideoDecoderFactory = DefaultVideoDecoderFactory(eglBaseContext)
        // Receive both audio and video; DTLS-SRTP key agreement for encryption.
        pcConstraints.mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
        pcConstraints.mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
        pcConstraints.optional.add(MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"))
        peerConnectionFactory = PeerConnectionFactory.builder()
            .setVideoEncoderFactory(defaultVideoEncoderFactory)
            .setVideoDecoderFactory(defaultVideoDecoderFactory)
            .setOptions(options)
            .createPeerConnectionFactory()
    }

    /** Registers the signaling callback used to forward SDP/ICE to the remote side. */
    fun setCallBack(call: RtcCallBack) {
        this.call = call
    }

    /**
     * Registers the STUN server used for ICE gathering.
     * NOTE(review): calling this repeatedly appends duplicate entries — the
     * caller is expected to invoke it once per session.
     */
    fun call() {
        iceServers.add(PeerConnection.IceServer.builder("stun:stun.schlund.de").createIceServer())
    }

    /**
     * Creates (or reuses) the connection for [id] and generates an SDP offer,
     * applying it as the local description and forwarding it via [RtcCallBack.offer].
     */
    fun createOffer(id: String) {
        val peerConnection = getOrCreatePeerConnection(id)
        peerConnection?.createOffer(object : SdpAdapter() {
            override fun onCreateSuccess(sessionDescription: SessionDescription?) {
                super.onCreateSuccess(sessionDescription)
                peerConnection.setLocalDescription(SdpAdapter(), sessionDescription)
                call?.offer(sessionDescription, id)
            }
        }, pcConstraints)
    }

    /**
     * Creates an SDP answer for peer [id] (after its offer has been applied via
     * [responseOffer]) and forwards it via [RtcCallBack.answer].
     */
    fun createAnswer(id: String) {
        val peerConnection = getOrCreatePeerConnection(id)
        peerConnection?.createAnswer(object : SdpAdapter() {
            override fun onCreateSuccess(sdp: SessionDescription?) {
                super.onCreateSuccess(sdp)
                peerConnection.setLocalDescription(SdpAdapter(), sdp)
                call?.answer(sdp, id)
            }

            override fun onCreateFailure(p0: String?) {
                super.onCreateFailure(p0)
                // Interpolate the nullable reason: Log.e(tag, null) throws NPE.
                Log.e(TAG, "createAnswer failed: $p0")
            }
        }, pcConstraints)
    }

    /** Applies a remote OFFER for peer [id]; used on the answering side. */
    fun responseOffer(id: String, sdp: String?) {
        val peerConnection = getOrCreatePeerConnection(id)
        peerConnection?.setRemoteDescription(
            SdpAdapter(),
            SessionDescription(SessionDescription.Type.OFFER, sdp)
        )
    }

    /** Applies a remote ANSWER for peer [uid]; used on the offering side. */
    fun responseAnswer(id: String, sdp: String?) {
        val peerConnection = getOrCreatePeerConnection(id)
        peerConnection?.setRemoteDescription(
            SdpAdapter(),
            SessionDescription(SessionDescription.Type.ANSWER, sdp)
        )
    }

    /** Adds a remote ICE candidate to the connection for peer [uid]. */
    fun response(uid: String, id: String?, lable: Int, candidate: String?) {
        val peerConnection = getOrCreatePeerConnection(uid)
        peerConnection?.addIceCandidate(IceCandidate(id, lable, candidate))
    }

    /**
     * Starts local audio capture with echo cancellation, auto gain, high-pass
     * filtering and noise suppression, and wraps the track in [audioStream].
     */
    fun startLocalAudioCapture() {
        val audioConstraints = MediaConstraints()
        audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googEchoCancellation", "true"))
        audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googAutoGainControl", "true"))
        audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googHighpassFilter", "true"))
        audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googNoiseSuppression", "true"))
        val audioSource = peerConnectionFactory?.createAudioSource(audioConstraints)
        audioTrack = peerConnectionFactory?.createAudioTrack("ARDAMSa0", audioSource)
        audioTrack?.setVolume(100.0)
        audioStream = peerConnectionFactory?.createLocalMediaStream("audioStream")
        audioStream?.addTrack(audioTrack)
    }

    /** Starts front-camera capture and attaches the stream to peer [id]'s connection. */
    fun startLocalVideoCapture(id: String?) {
        startVideoCapture(id, isFront = true)
    }

    /** Starts back-camera capture and attaches the stream to peer [id]'s connection. */
    fun startLocalVideoBackCapture(id: String?) {
        startVideoCapture(id, isFront = false)
    }

    /**
     * Shared implementation for front/back camera capture: tears down any
     * previous capturer/track, starts a new capture session, wraps the track
     * in [mediaStream] and swaps it onto peer [id]'s connection.
     */
    private fun startVideoCapture(id: String?, isFront: Boolean) {
        val surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext)
        // Release any previous capture session before starting a new one.
        try {
            videoCapturer?.stopCapture()
        } catch (e: InterruptedException) {
            e.printStackTrace()
        }
        videoCapturer = null
        videoTrack?.dispose()
        videoTrack = null
        val capturer = createCameraCapturer(isFront)
            ?: throw IllegalStateException("No ${if (isFront) "front" else "back"} camera available")
        videoCapturer = capturer
        val videoSource = peerConnectionFactory!!.createVideoSource(capturer.isScreencast)
        // Capture at 1080x1920 but downscale the outgoing stream to 480x854 @ 20fps.
        videoSource.adaptOutputFormat(480, 854, 20)
        capturer.initialize(surfaceTextureHelper, context, videoSource.capturerObserver)
        capturer.startCapture(1080, 1920, 20)
        videoTrack = peerConnectionFactory!!.createVideoTrack("100", videoSource)
        // Swap the (re)created stream onto the target peer connection.
        val peerConnection = peerConnectionMap?.get(id)
        if (mediaStream != null) {
            peerConnection?.removeStream(mediaStream)
        }
        mediaStream = peerConnectionFactory?.createLocalMediaStream("mediaStream")
        mediaStream?.addTrack(videoTrack)
        peerConnection?.addStream(mediaStream)
    }

    /**
     * Binds the local camera preview to [localView].
     * @throws Exception if capture has not been started yet.
     */
    fun setLocalPreview(localView: SurfaceViewRenderer?) {
        if (videoTrack == null) {
            throw Exception("请先初始化相机")
        }
        if (localPreview != null) {
            videoTrack?.removeSink(localPreview)
        }
        localPreview = localView
        if (localPreview != null) {
            // Best effort: init may fail if the renderer was already initialized.
            try {
                localPreview?.release()
                localPreview?.setMirror(true)
                localPreview?.init(eglBaseContext, null)
            } catch (e: java.lang.Exception) {
                e.printStackTrace()
            }
            videoTrack?.addSink(localPreview)
        }
    }

    /**
     * Returns a capturer for the first matching camera, or null if the device
     * has no camera facing the requested direction.
     *
     * @param isFront true for the front-facing camera, false for the back camera
     */
    private fun createCameraCapturer(isFront: Boolean): VideoCapturer? {
        val enumerator = Camera1Enumerator(false)
        for (deviceName in enumerator.deviceNames) {
            val matches = if (isFront) {
                enumerator.isFrontFacing(deviceName)
            } else {
                enumerator.isBackFacing(deviceName)
            }
            if (matches) {
                enumerator.createCapturer(deviceName, null)?.let { return it }
            }
        }
        return null
    }

    /**
     * Returns the pooled connection for [id], creating it (with the local
     * media/audio streams attached) on first use. Connections remove
     * themselves from the pool when ICE disconnects or closes.
     */
    @Synchronized
    private fun getOrCreatePeerConnection(id: String): PeerConnection? {
        val map = peerConnectionMap
            ?: HashMap<String, PeerConnection?>().also { peerConnectionMap = it }
        map[id]?.let { return it }
        val peerConnection = peerConnectionFactory!!.createPeerConnection(iceServers, object : PeerConnectionAdapter() {
            override fun onIceCandidate(p0: IceCandidate?) {
                super.onIceCandidate(p0)
                call?.iceData(p0, id)
            }

            override fun onAddStream(p0: MediaStream?) {
                super.onAddStream(p0)
                call?.stream(p0, id)
            }

            override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
                super.onAddTrack(p0, p1)
                call?.track(p0?.track(), id)
            }

            override fun onIceConnectionChange(p0: PeerConnection.IceConnectionState?) {
                super.onIceConnectionChange(p0)
                // Evict dead connections so a later call can reconnect cleanly.
                if (p0 == PeerConnection.IceConnectionState.DISCONNECTED ||
                    p0 == PeerConnection.IceConnectionState.CLOSED
                ) {
                    peerConnectionMap?.remove(id)
                }
            }

            override fun onDataChannel(p0: DataChannel?) {
                super.onDataChannel(p0)
                channels.add(p0)
            }
        })
        if (mediaStream != null) {
            peerConnection?.addStream(mediaStream)
        }
        if (audioStream != null) {
            peerConnection?.addStream(audioStream)
        }
        map[id] = peerConnection
        return peerConnection
    }

    /** Unmutes the microphone. */
    fun speak() {
        try {
            val audioManager = context?.getSystemService(Context.AUDIO_SERVICE) as AudioManager?
            audioManager?.isMicrophoneMute = false
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }

    /** Mutes the microphone. */
    fun noSpeak() {
        try {
            val audioManager = context?.getSystemService(Context.AUDIO_SERVICE) as AudioManager?
            audioManager?.isMicrophoneMute = true
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }

    /**
     * No-op implementation of [SdpObserver]; subclass and override only the
     * callbacks you care about (offer/answer creation and SDP set results).
     */
    open class SdpAdapter : SdpObserver {
        override fun onSetFailure(p0: String?) {
        }

        override fun onSetSuccess() {
        }

        override fun onCreateSuccess(p0: SessionDescription?) {
        }

        override fun onCreateFailure(p0: String?) {
        }
    }

    /**
     * No-op implementation of [PeerConnection.Observer]; subclass and override
     * only the connection events you need.
     */
    open class PeerConnectionAdapter : PeerConnection.Observer {
        override fun onIceCandidate(p0: IceCandidate?) {
        }

        override fun onDataChannel(p0: DataChannel?) {
        }

        override fun onIceConnectionReceivingChange(p0: Boolean) {
        }

        override fun onIceConnectionChange(p0: PeerConnection.IceConnectionState?) {
        }

        override fun onIceGatheringChange(p0: PeerConnection.IceGatheringState?) {
        }

        override fun onAddStream(p0: MediaStream?) {
        }

        override fun onSignalingChange(p0: PeerConnection.SignalingState?) {
        }

        override fun onIceCandidatesRemoved(p0: Array<out IceCandidate>?) {
        }

        override fun onRemoveStream(p0: MediaStream?) {
        }

        override fun onRenegotiationNeeded() {
        }

        override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
        }
    }

    // Voice-call volume saved by openSpeaker() and restored by closeSpeaker().
    private var currVolume = 0

    /**
     * Routes audio to the loudspeaker. setSpeakerphoneOn() only takes effect
     * in MODE_IN_CALL, so the mode is set first.
     */
    fun openSpeaker() {
        try {
            val audioManager = context?.getSystemService(Context.AUDIO_SERVICE) as AudioManager? ?: return
            audioManager.mode = AudioManager.MODE_IN_CALL
            currVolume = audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
            audioManager.isSpeakerphoneOn = true
            // NOTE(review): muting the mic while enabling the speaker looks
            // intentional in the original code — confirm against the call flow.
            audioManager.isMicrophoneMute = true
            // Third argument is the *flags* parameter (0 = none). The original
            // passed STREAM_VOICE_CALL here, which only worked because that
            // constant happens to be 0.
            audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, 10, 0)
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }

    /** Disables the loudspeaker and restores the volume saved by [openSpeaker]. */
    fun closeSpeaker() {
        try {
            val audioManager = context?.getSystemService(Context.AUDIO_SERVICE) as AudioManager? ?: return
            audioManager.mode = AudioManager.MODE_IN_CALL
            audioManager.isSpeakerphoneOn = false
            // Flags parameter is 0 (no flags); see openSpeaker().
            audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, currVolume, 0)
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }

    /** Closes and removes the connection for [id]; safe to call for unknown ids. */
    fun closeOldConnect(id: String) {
        peerConnectionMap?.get(id)?.close()
        peerConnectionMap?.remove(id)
    }

    /**
     * Tears down all connections, streams and the capturer. Safe to call more
     * than once; individual failures are logged and do not abort the teardown.
     */
    fun destroy() {
        // Synchronous and cheap — no need for the GlobalScope coroutine the
        // original spawned here.
        closeSpeaker()
        try {
            peerConnectionMap?.let { map ->
                for (pc in map.values) {
                    pc?.close()
                }
                map.clear()
            }
            audioStream?.dispose()
            mediaStream?.dispose()
        } catch (e: Exception) {
            e.printStackTrace()
        }
        // stopCapture() declares InterruptedException; keep teardown going.
        try {
            videoCapturer?.stopCapture()
        } catch (e: Exception) {
            e.printStackTrace()
        }
        videoCapturer?.dispose()
    }

    /**
     * Signaling bridge implemented by the host app: forwards local SDP/ICE to
     * the remote peer and surfaces incoming remote media.
     */
    interface RtcCallBack {
        fun offer(sessionDescription: SessionDescription?, id: String)
        fun answer(sessionDescription: SessionDescription?, id: String)
        fun iceData(data: IceCandidate?, id: String)
        fun stream(stream: MediaStream?, id: String)
        fun track(track: MediaStreamTrack?, id: String)
    }
}
这里是一份国内可免费使用的 STUN 服务器列表:https://www.jianshu.com/p/0943038b51de
android端代码已上传到码云
更多推荐
所有评论(0)