add webrtc
parent fa460ce101
commit 50a1dc0f34
@@ -0,0 +1,76 @@
var pc = null;

function negotiate() {
    pc.addTransceiver('video', { direction: 'recvonly' });
    pc.addTransceiver('audio', { direction: 'recvonly' });
    return pc.createOffer().then((offer) => {
        return pc.setLocalDescription(offer);
    }).then(() => {
        // wait for ICE gathering to complete
        return new Promise((resolve) => {
            if (pc.iceGatheringState === 'complete') {
                resolve();
            } else {
                const checkState = () => {
                    if (pc.iceGatheringState === 'complete') {
                        pc.removeEventListener('icegatheringstatechange', checkState);
                        resolve();
                    }
                };
                pc.addEventListener('icegatheringstatechange', checkState);
            }
        });
    }).then(() => {
        var offer = pc.localDescription;
        return fetch('/offer', {
            body: JSON.stringify({
                sdp: offer.sdp,
                type: offer.type,
            }),
            headers: {
                'Content-Type': 'application/json'
            },
            method: 'POST'
        });
    }).then((response) => {
        return response.json();
    }).then((answer) => {
        return pc.setRemoteDescription(answer);
    }).catch((e) => {
        alert(e);
    });
}

function start() {
    var config = {
        sdpSemantics: 'unified-plan'
    };

    if (document.getElementById('use-stun').checked) {
        config.iceServers = [{ urls: ['stun:stun.l.google.com:19302'] }];
    }

    pc = new RTCPeerConnection(config);

    // connect audio / video
    pc.addEventListener('track', (evt) => {
        if (evt.track.kind == 'video') {
            document.getElementById('video').srcObject = evt.streams[0];
        } else {
            document.getElementById('audio').srcObject = evt.streams[0];
        }
    });

    document.getElementById('start').style.display = 'none';
    negotiate();
    document.getElementById('stop').style.display = 'inline-block';
}

function stop() {
    document.getElementById('stop').style.display = 'none';

    // close peer connection
    setTimeout(() => {
        pc.close();
    }, 500);
}
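Note: the fetch('/offer', …) call above expects the server to answer with JSON containing sdp and type. The server-side handler is not part of this hunk; the following is only a sketch of what such an aiohttp/aiortc endpoint could look like, with the route name taken from client.js and everything else (build_nerfreal(), the app wiring) assumed.

    import json
    from aiohttp import web
    from aiortc import RTCPeerConnection, RTCSessionDescription

    pcs = set()

    async def offer(request):
        # parse the offer posted by negotiate() in client.js
        params = await request.json()
        offer = RTCSessionDescription(sdp=params["sdp"], type=params["type"])

        pc = RTCPeerConnection()
        pcs.add(pc)

        @pc.on("connectionstatechange")
        async def on_connectionstatechange():
            if pc.connectionState == "failed":
                await pc.close()
                pcs.discard(pc)

        # attach the HumanPlayer tracks defined in the Python module below;
        # build_nerfreal() is a hypothetical factory for the renderer object
        player = HumanPlayer(build_nerfreal())
        pc.addTrack(player.audio)
        pc.addTrack(player.video)

        await pc.setRemoteDescription(offer)
        answer = await pc.createAnswer()
        await pc.setLocalDescription(answer)

        # reply with the JSON shape that the browser-side setRemoteDescription() expects
        return web.Response(
            content_type="application/json",
            text=json.dumps(
                {"sdp": pc.localDescription.sdp, "type": pc.localDescription.type}
            ),
        )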
@@ -0,0 +1,83 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8"/>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>WebRTC webcam</title>
    <style>
        button {
            padding: 8px 16px;
        }

        video {
            width: 100%;
        }

        .option {
            margin-bottom: 8px;
        }

        #media {
            max-width: 1280px;
        }
    </style>
</head>
<body>

<div class="option">
    <input id="use-stun" type="checkbox"/>
    <label for="use-stun">Use STUN server</label>
</div>
<button id="start" onclick="start()">Start</button>
<button id="stop" style="display: none" onclick="stop()">Stop</button>
<form class="form-inline" id="echo-form">
    <div class="form-group">
        <p>input text</p>

        <textarea cols="2" rows="3" style="width:600px;height:50px;" class="form-control" id="message">test</textarea>
    </div>
    <button type="submit" class="btn btn-default">Send</button>
</form>

<div id="media">
    <h2>Media</h2>

    <audio id="audio" autoplay="true"></audio>
    <video id="video" autoplay="true" playsinline="true"></video>
</div>

<script src="client.js"></script>
<script type="text/javascript" src="http://cdn.sockjs.org/sockjs-0.3.4.js"></script>
<script src="http://code.jquery.com/jquery-2.1.1.min.js"></script>
<script type="text/javascript" charset="utf-8">

    $(document).ready(function() {
        var host = window.location.hostname;
        var ws = new WebSocket("ws://" + host + ":8000/humanecho");
        //document.getElementsByTagName("video")[0].setAttribute("src", aa["video"]);
        ws.onopen = function() {
            console.log('Connected');
        };
        ws.onmessage = function(e) {
            console.log('Received: ' + e.data);
            var vid = JSON.parse(e.data);
            console.log(typeof(vid), vid);
            //document.getElementsByTagName("video")[0].setAttribute("src", vid["video"]);
        };
        ws.onclose = function(e) {
            console.log('Closed');
        };

        $('#echo-form').on('submit', function(e) {
            e.preventDefault();
            var message = $('#message').val();
            console.log('Sending: ' + message);
            ws.send(message);
            $('#message').val('');
        });
    });
</script>
</body>
</html>
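Note: the inline script above opens ws://<host>:8000/humanecho, sends the textarea content, and JSON-parses every message it gets back. That endpoint is not included in this commit; below is a minimal aiohttp sketch of what the page appears to assume (handler body, app wiring and the reply shape are guesses, not the project's actual server code).

    from aiohttp import web, WSMsgType

    async def humanecho(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                # hand msg.data to the talking-head pipeline here, then reply with
                # JSON, since the page calls JSON.parse() on everything it receives
                await ws.send_json({"video": ""})
        return ws

    app = web.Application()
    app.router.add_get("/humanecho", humanecho)
    # web.run_app(app, port=8000)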
@@ -0,0 +1,158 @@
import asyncio
import fractions
import json
import logging
import threading
import time
from typing import Tuple, Dict, Optional, Set, Union

from av.frame import Frame
from av.packet import Packet

AUDIO_PTIME = 0.020  # 20ms audio packetization
VIDEO_CLOCK_RATE = 90000
VIDEO_PTIME = 1 / 25  # 25fps
VIDEO_TIME_BASE = fractions.Fraction(1, VIDEO_CLOCK_RATE)
SAMPLE_RATE = 16000
AUDIO_TIME_BASE = fractions.Fraction(1, SAMPLE_RATE)

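# Derived from the constants above: each video frame advances the RTP timestamp by
# int(VIDEO_PTIME * VIDEO_CLOCK_RATE) = int(0.04 * 90000) = 3600 ticks, and each
# audio chunk by int(AUDIO_PTIME * SAMPLE_RATE) = int(0.020 * 16000) = 320 samples.
# PlayerStreamTrack.next_timestamp() below sleeps so that frames are handed to
# aiortc at exactly this cadence.
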
#from aiortc.contrib.media import MediaPlayer, MediaRelay
#from aiortc.rtcrtpsender import RTCRtpSender
from aiortc import (
    MediaStreamTrack,
)

logging.basicConfig()
logger = logging.getLogger(__name__)

class PlayerStreamTrack(MediaStreamTrack):
    """
    An audio or video track that relays frames queued by HumanPlayer.
    """

    def __init__(self, player, kind):
        super().__init__()  # don't forget this!
        self.kind = kind
        self._player = player
        self._queue = asyncio.Queue()

    _start: float
    _timestamp: int

    async def next_timestamp(self) -> Tuple[int, fractions.Fraction]:
        if self.readyState != "live":
            raise Exception("track is not live")

        if self.kind == 'video':
            if hasattr(self, "_timestamp"):
                self._timestamp += int(VIDEO_PTIME * VIDEO_CLOCK_RATE)
                wait = self._start + (self._timestamp / VIDEO_CLOCK_RATE) - time.time()
                await asyncio.sleep(wait)
            else:
                self._start = time.time()
                self._timestamp = 0
            return self._timestamp, VIDEO_TIME_BASE
        else:  # audio
            if hasattr(self, "_timestamp"):
                self._timestamp += int(AUDIO_PTIME * SAMPLE_RATE)
                wait = self._start + (self._timestamp / SAMPLE_RATE) - time.time()
                await asyncio.sleep(wait)
            else:
                self._start = time.time()
                self._timestamp = 0
            return self._timestamp, AUDIO_TIME_BASE

    async def recv(self) -> Union[Frame, Packet]:
        # frame = self.frames[self.counter % 30]
        self._player._start(self)
        frame = await self._queue.get()
        if frame is None:
            self.stop()
            raise Exception("media stream ended")
        pts, time_base = await self.next_timestamp()
        frame.pts = pts
        frame.time_base = time_base
        return frame

    def stop(self):
        super().stop()
        if self._player is not None:
            self._player._stop(self)
            self._player = None

def player_worker_thread(
    quit_event,
    loop,
    container,
    audio_track,
    video_track,
):
    container.render(quit_event, loop, audio_track, video_track)

class HumanPlayer:

    def __init__(
        self, nerfreal, format=None, options=None, timeout=None, loop=False, decode=True
    ):
        self.__thread: Optional[threading.Thread] = None
        self.__thread_quit: Optional[threading.Event] = None

        # media tracks exposed to the peer connection
        self.__started: Set[PlayerStreamTrack] = set()
        self.__audio: Optional[PlayerStreamTrack] = None
        self.__video: Optional[PlayerStreamTrack] = None

        self.__audio = PlayerStreamTrack(self, kind="audio")
        self.__video = PlayerStreamTrack(self, kind="video")

        self.__container = nerfreal

    @property
    def audio(self) -> MediaStreamTrack:
        """
        The audio :class:`aiortc.MediaStreamTrack` produced by this player.
        """
        return self.__audio

    @property
    def video(self) -> MediaStreamTrack:
        """
        The video :class:`aiortc.MediaStreamTrack` produced by this player.
        """
        return self.__video

    def _start(self, track: PlayerStreamTrack) -> None:
        self.__started.add(track)
        if self.__thread is None:
            self.__log_debug("Starting worker thread")
            self.__thread_quit = threading.Event()
            self.__thread = threading.Thread(
                name="media-player",
                target=player_worker_thread,
                args=(
                    self.__thread_quit,
                    asyncio.get_event_loop(),
                    self.__container,
                    self.__audio,
                    self.__video,
                ),
            )
            self.__thread.start()

    def _stop(self, track: PlayerStreamTrack) -> None:
        self.__started.discard(track)

        if not self.__started and self.__thread is not None:
            self.__log_debug("Stopping worker thread")
            self.__thread_quit.set()
            self.__thread.join()
            self.__thread = None

        if not self.__started and self.__container is not None:
            #self.__container.close()
            self.__container = None

    def __log_debug(self, msg: str, *args) -> None:
        logger.debug(f"HumanPlayer {msg}", *args)
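Note: player_worker_thread() hands the renderer (nerfreal) the quit event, the asyncio loop and both tracks, so the renderer is expected to expose a render() method that pushes PyAV frames into each track's _queue from that worker thread. The real implementation is not in this diff; below is only a rough sketch of the expected producer contract, reusing the module's constants (the class name, frame sources and pacing are assumptions).

    import asyncio
    import numpy as np
    from av import AudioFrame, VideoFrame

    class NerfRealStub:  # hypothetical stand-in for the real nerfreal object
        def render(self, quit_event, loop, audio_track, video_track):
            """Runs in the media-player thread and feeds both PlayerStreamTrack queues."""
            while not quit_event.is_set():
                # whatever produces the talking-head image and its audio goes here
                img = np.zeros((480, 640, 3), dtype=np.uint8)                       # dummy video frame
                pcm = np.zeros((1, int(AUDIO_PTIME * SAMPLE_RATE)), dtype=np.int16)  # dummy 20ms of audio

                video_frame = VideoFrame.from_ndarray(img, format="bgr24")
                audio_frame = AudioFrame.from_ndarray(pcm, format="s16", layout="mono")
                audio_frame.sample_rate = SAMPLE_RATE

                # recv() awaits these queues on the asyncio side, so hand frames over
                # with run_coroutine_threadsafe from this worker thread
                asyncio.run_coroutine_threadsafe(video_track._queue.put(video_frame), loop)
                asyncio.run_coroutine_threadsafe(audio_track._queue.put(audio_frame), loop)

                quit_event.wait(VIDEO_PTIME)  # crude pacing; a real renderer drives its own timing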