mirror of https://gitlab.crans.org/nounous/ghostream.git synced 2024-12-22 15:02:19 +00:00

Proper multi client WebRTC

Alexandre Iooss 2020-09-25 15:12:28 +02:00
parent ef760ae4cc
commit 6b5fea66e6
No known key found for this signature in database
GPG Key ID: 6C79278F3FCDCC02


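The diff below drops the single shared iceConnectedCtx wait and the per-connection tracks that were passed into newPeerHandler, and instead keeps package-level track slices that the media-reading loops broadcast to. A minimal sketch of that fan-out pattern (assuming pion/webrtc v2, which the webrtc.NewTrack and PopulateFromSDP calls indicate; broadcastVideo is an illustrative helper, not a function from this commit, which inlines the same loop in playVideo):

package stream

import (
    "log"

    "github.com/pion/webrtc/v2"
    "github.com/pion/webrtc/v2/pkg/media"
)

// Tracks registered by currently connected peers (as in the diff below).
var videoTracks []*webrtc.Track

// broadcastVideo writes one sample to every registered track, so a single
// file-reading goroutine can serve any number of viewers.
func broadcastVideo(sample media.Sample) {
    for _, t := range videoTracks {
        if err := t.WriteSample(sample); err != nil {
            log.Println("Failed to write video sample:", err)
        }
    }
}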
@@ -1,7 +1,6 @@
 package stream

 import (
-    "context"
     "fmt"
     "io"
     "log"
@@ -21,13 +20,32 @@ const (
 )

 var (
-    iceConnectedCtx, iceConnectedCtxCancel = context.WithCancel(context.Background())
+    videoTracks []*webrtc.Track
+    audioTracks []*webrtc.Track
 )

+// Helper to reslice tracks
+func removeTrack(tracks []*webrtc.Track, track *webrtc.Track) []*webrtc.Track {
+    for i, t := range tracks {
+        if t == track {
+            return append(tracks[:i], tracks[i+1:]...)
+        }
+    }
+    return nil
+}
+
 // newPeerHandler is called when server receive a new session description
 // this initiates a WebRTC connection and return server description
-func newPeerHandler(api *webrtc.API, remoteSdp webrtc.SessionDescription, audioTrack *webrtc.Track, videoTrack *webrtc.Track) webrtc.SessionDescription {
+func newPeerHandler(remoteSdp webrtc.SessionDescription) webrtc.SessionDescription {
+    // Create media engine using client SDP
+    mediaEngine := webrtc.MediaEngine{}
+    if err := mediaEngine.PopulateFromSDP(remoteSdp); err != nil {
+        log.Println("Failed to create new media engine", err)
+        return webrtc.SessionDescription{}
+    }
+
     // Create a new PeerConnection
+    api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
     peerConnection, err := api.NewPeerConnection(webrtc.Configuration{
         ICEServers: []webrtc.ICEServer{
             {
@@ -40,18 +58,11 @@ func newPeerHandler(api *webrtc.API, remoteSdp webrtc.SessionDescription, audioT
         return webrtc.SessionDescription{}
     }

-    // Set the handler for ICE connection state
-    // This will notify you when the peer has connected/disconnected
-    peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
-        log.Printf("Connection State has changed %s \n", connectionState.String())
-        if connectionState == webrtc.ICEConnectionStateConnected {
-            iceConnectedCtxCancel()
-        }
-    })
-
-    // Add audio and video tracks
-    if _, err = peerConnection.AddTrack(audioTrack); err != nil {
-        log.Println("Failed to add audio track", err)
+    // Create video track
+    codec, payloadType := getPayloadType(mediaEngine, webrtc.RTPCodecTypeVideo, "VP8")
+    videoTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "video", "pion", codec)
+    if err != nil {
+        log.Println("Failed to create new video track", err)
         return webrtc.SessionDescription{}
     }
     if _, err = peerConnection.AddTrack(videoTrack); err != nil {
@@ -59,6 +70,18 @@ func newPeerHandler(api *webrtc.API, remoteSdp webrtc.SessionDescription, audioT
         return webrtc.SessionDescription{}
     }

+    // Create audio track
+    codec, payloadType = getPayloadType(mediaEngine, webrtc.RTPCodecTypeAudio, "opus")
+    audioTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "audio", "pion", codec)
+    if err != nil {
+        log.Println("Failed to create new audio track", err)
+        return webrtc.SessionDescription{}
+    }
+    if _, err = peerConnection.AddTrack(audioTrack); err != nil {
+        log.Println("Failed to add audio track", err)
+        return webrtc.SessionDescription{}
+    }
+
     // Set the remote SessionDescription
     if err = peerConnection.SetRemoteDescription(remoteSdp); err != nil {
         log.Println("Failed to set remote description", err)
@@ -81,6 +104,21 @@ func newPeerHandler(api *webrtc.API, remoteSdp webrtc.SessionDescription, audioT
         return webrtc.SessionDescription{}
     }

+    // Set the handler for ICE connection state
+    // This will notify you when the peer has connected/disconnected
+    peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
+        log.Printf("Connection State has changed %s \n", connectionState.String())
+        if connectionState == webrtc.ICEConnectionStateConnected {
+            // Register tracks
+            videoTracks = append(videoTracks, videoTrack)
+            audioTracks = append(audioTracks, audioTrack)
+        } else if connectionState == webrtc.ICEConnectionStateDisconnected {
+            // Unregister tracks
+            videoTracks = removeTrack(videoTracks, videoTrack)
+            audioTracks = removeTrack(audioTracks, audioTrack)
+        }
+    })
+
     // Block until ICE Gathering is complete, disabling trickle ICE
     // we do this because we only can exchange one signaling message
     // in a production application you should exchange ICE Candidates via OnICECandidate
@@ -90,130 +128,81 @@ func newPeerHandler(api *webrtc.API, remoteSdp webrtc.SessionDescription, audioT
     return *peerConnection.LocalDescription()
 }

-// Serve WebRTC media streaming server
-func Serve(remoteSdpChan chan webrtc.SessionDescription, localSdpChan chan webrtc.SessionDescription) {
-    // Assert that we have an audio or video file
-    _, err := os.Stat(videoFileName)
-    haveVideoFile := !os.IsNotExist(err)
-    _, err = os.Stat(audioFileName)
-    haveAudioFile := !os.IsNotExist(err)
-    if !haveAudioFile || !haveVideoFile {
-        panic("Could not find `" + audioFileName + "` or `" + videoFileName + "`")
-    }
-
-    // Create media engine
-    // Only support VP8 and Opus
-    mediaEngine := webrtc.MediaEngine{}
-    offer := <-remoteSdpChan
-    if err = mediaEngine.PopulateFromSDP(offer); err != nil {
-        panic(err)
-    }
-
-    // Create a new API object
-    api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
-
-    // Create video track
-    codec, payloadType := getPayloadType(mediaEngine, webrtc.RTPCodecTypeVideo, "VP8")
-    videoTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "video", "pion", codec)
-    if err != nil {
-        panic(err)
-    }
-
-    // Create audio track
-    codec, payloadType = getPayloadType(mediaEngine, webrtc.RTPCodecTypeAudio, "opus")
-    audioTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "audio", "pion", codec)
-    if err != nil {
-        panic(err)
-    }
-
-    localSdpChan <- newPeerHandler(api, offer, audioTrack, videoTrack)
-
-    go func() {
-        // Open a IVF file and start reading using our IVFReader
-        file, ivfErr := os.Open(videoFileName)
-        if ivfErr != nil {
-            panic(ivfErr)
-        }
-
-        ivf, header, ivfErr := ivfreader.NewWith(file)
-        if ivfErr != nil {
-            panic(ivfErr)
-        }
-
-        // Wait for connection established
-        <-iceConnectedCtx.Done()
-
-        // Send our video file frame at a time. Pace our sending so we send it at the same speed it should be played back as.
-        // This isn't required since the video is timestamped, but we will such much higher loss if we send all at once.
-        sleepTime := time.Millisecond * time.Duration((float32(header.TimebaseNumerator)/float32(header.TimebaseDenominator))*1000)
-        for {
-            // Need at least one client
-            frame, _, ivfErr := ivf.ParseNextFrame()
-            if ivfErr == io.EOF {
-                fmt.Printf("All video frames parsed and sent")
-                os.Exit(0)
-            }
-            if ivfErr != nil {
-                panic(ivfErr)
-            }
-            time.Sleep(sleepTime)
-            if ivfErr = videoTrack.WriteSample(media.Sample{Data: frame, Samples: 90000}); ivfErr != nil {
-                log.Fatalln("Failed to write video stream:", ivfErr)
-            }
-        }
-    }()
-
-    go func() {
-        // Open a IVF file and start reading using our IVFReader
-        file, oggErr := os.Open(audioFileName)
-        if oggErr != nil {
-            panic(oggErr)
-        }
-
-        // Open on oggfile in non-checksum mode.
-        ogg, _, oggErr := oggreader.NewWith(file)
-        if oggErr != nil {
-            panic(oggErr)
-        }
-
-        // Wait for connection established
-        <-iceConnectedCtx.Done()
-
-        // Keep track of last granule, the difference is the amount of samples in the buffer
-        var lastGranule uint64
-        for {
-            // Need at least one client
-            pageData, pageHeader, oggErr := ogg.ParseNextPage()
-            if oggErr == io.EOF {
-                fmt.Printf("All audio pages parsed and sent")
-                os.Exit(0)
-            }
-            if oggErr != nil {
-                panic(oggErr)
-            }
-
-            // The amount of samples is the difference between the last and current timestamp
-            sampleCount := float64(pageHeader.GranulePosition - lastGranule)
-            lastGranule = pageHeader.GranulePosition
-
-            if oggErr = audioTrack.WriteSample(media.Sample{Data: pageData, Samples: uint32(sampleCount)}); oggErr != nil {
-                log.Fatalln("Failed to write audio stream:", oggErr)
-            }
-
-            // Convert seconds to Milliseconds, Sleep doesn't accept floats
-            time.Sleep(time.Duration((sampleCount/48000)*1000) * time.Millisecond)
-        }
-    }()
-
-    // Handle new connections
-    for {
-        // Wait for incoming session description
-        // then send the local description to browser
-        offer := <-remoteSdpChan
-        localSdpChan <- newPeerHandler(api, offer, audioTrack, videoTrack)
-    }
+func playVideo() {
+    // Open a IVF file and start reading using our IVFReader
+    file, ivfErr := os.Open(videoFileName)
+    if ivfErr != nil {
+        panic(ivfErr)
+    }
+
+    ivf, header, ivfErr := ivfreader.NewWith(file)
+    if ivfErr != nil {
+        panic(ivfErr)
+    }
+
+    // Send our video file frame at a time. Pace our sending so we send it at the same speed it should be played back as.
+    // This isn't required since the video is timestamped, but we will such much higher loss if we send all at once.
+    sleepTime := time.Millisecond * time.Duration((float32(header.TimebaseNumerator)/float32(header.TimebaseDenominator))*1000)
+    for {
+        // Need at least one client
+        frame, _, ivfErr := ivf.ParseNextFrame()
+        if ivfErr == io.EOF {
+            fmt.Printf("All video frames parsed and sent")
+            os.Exit(0)
+        }
+        if ivfErr != nil {
+            panic(ivfErr)
+        }
+        time.Sleep(sleepTime)
+        for _, t := range videoTracks {
+            if ivfErr = t.WriteSample(media.Sample{Data: frame, Samples: 90000}); ivfErr != nil {
+                log.Fatalln("Failed to write video stream:", ivfErr)
+            }
+        }
+    }
+}
+
+func playAudio() {
+    // Open a IVF file and start reading using our IVFReader
+    file, oggErr := os.Open(audioFileName)
+    if oggErr != nil {
+        panic(oggErr)
+    }
+
+    // Open on oggfile in non-checksum mode.
+    ogg, _, oggErr := oggreader.NewWith(file)
+    if oggErr != nil {
+        panic(oggErr)
+    }
+
+    // Keep track of last granule, the difference is the amount of samples in the buffer
+    var lastGranule uint64
+    for {
+        // Need at least one client
+        pageData, pageHeader, oggErr := ogg.ParseNextPage()
+        if oggErr == io.EOF {
+            fmt.Printf("All audio pages parsed and sent")
+            os.Exit(0)
+        }
+        if oggErr != nil {
+            panic(oggErr)
+        }
+
+        // The amount of samples is the difference between the last and current timestamp
+        sampleCount := float64(pageHeader.GranulePosition - lastGranule)
+        lastGranule = pageHeader.GranulePosition
+
+        for _, t := range audioTracks {
+            if oggErr = t.WriteSample(media.Sample{Data: pageData, Samples: uint32(sampleCount)}); oggErr != nil {
+                log.Fatalln("Failed to write audio stream:", oggErr)
+            }
+        }
+
+        // Convert seconds to Milliseconds, Sleep doesn't accept floats
+        time.Sleep(time.Duration((sampleCount/48000)*1000) * time.Millisecond)
+    }
 }
@@ -228,3 +217,17 @@ func getPayloadType(m webrtc.MediaEngine, codecType webrtc.RTPCodecType, codecNa
     }
     panic(fmt.Sprintf("Remote peer does not support %s", codecName))
 }
+
+// Serve WebRTC media streaming server
+func Serve(remoteSdpChan chan webrtc.SessionDescription, localSdpChan chan webrtc.SessionDescription) {
+    go playVideo()
+    go playAudio()
+
+    // Handle new connections
+    for {
+        // Wait for incoming session description
+        // then send the local description to browser
+        offer := <-remoteSdpChan
+        localSdpChan <- newPeerHandler(offer)
+    }
+}
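With Serve now spawning the playback goroutines itself and answering offers in a loop, the caller only has to push each client's SDP offer into remoteSdpChan and return the answer read from localSdpChan. A sketch of that wiring under stated assumptions (the HTTP endpoint, the JSON offer/answer exchange, and the gitlab.crans.org/nounous/ghostream/stream import path are assumptions for illustration; ghostream's real signaling code is not part of this diff):

package main

import (
    "encoding/json"
    "log"
    "net/http"

    "github.com/pion/webrtc/v2"

    "gitlab.crans.org/nounous/ghostream/stream" // assumed import path
)

func main() {
    remoteSdpChan := make(chan webrtc.SessionDescription)
    localSdpChan := make(chan webrtc.SessionDescription)

    // Serve blocks, answering one offer at a time from remoteSdpChan.
    go stream.Serve(remoteSdpChan, localSdpChan)

    // Hypothetical signaling endpoint: the browser POSTs its SDP offer as JSON
    // and receives the server's SDP answer in the response.
    http.HandleFunc("/offer", func(w http.ResponseWriter, r *http.Request) {
        var offer webrtc.SessionDescription
        if err := json.NewDecoder(r.Body).Decode(&offer); err != nil {
            http.Error(w, err.Error(), http.StatusBadRequest)
            return
        }
        remoteSdpChan <- offer   // hand the offer to the streaming loop
        answer := <-localSdpChan // wait for the local description
        if err := json.NewEncoder(w).Encode(answer); err != nil {
            log.Println("Failed to encode answer:", err)
        }
    })

    log.Fatal(http.ListenAndServe(":8080", nil))
}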