Commit c07480c6 authored by nextime

Add examples

parent 85be6bd0
from __future__ import annotations
import cv2
import acapture
import asyncio
import logging
import os
from asyncio import StreamReader
from pyrtmp import StreamClosedException
from pyrtmp.flv import FLVMediaType, FLVWriter
from pyrtmp.rtmp import RTMPProtocol, SimpleRTMPController, SimpleRTMPServer
from pyrtmp.session_manager import SessionManager
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class RTMP2SocketController(SimpleRTMPController):
def __init__(self, output_directory: str):
self.output_directory = output_directory
super().__init__()
async def on_ns_publish(self, session, message) -> None:
publishing_name = message.publishing_name
prefix = os.path.join(self.output_directory, f"{publishing_name}")
session.state = RemoteProcessFLVWriter()
logger.debug(f"output to {prefix}.flv")
await session.state.initialize(
command=f"ffmpeg -y -i pipe:0 -c:v copy -c:a copy -f flv {prefix}.flv",
stdout_log=f"{prefix}.stdout.log",
stderr_log=f"{prefix}.stderr.log",
)
session.state.write_header()
await super().on_ns_publish(session, message)
async def on_metadata(self, session, message) -> None:
session.state.write(0, message.to_raw_meta(), FLVMediaType.OBJECT)
await super().on_metadata(session, message)
async def on_video_message(self, session, message) -> None:
session.state.write(message.timestamp, message.payload, FLVMediaType.VIDEO)
await super().on_video_message(session, message)
async def on_audio_message(self, session, message) -> None:
session.state.write(message.timestamp, message.payload, FLVMediaType.AUDIO)
await super().on_audio_message(session, message)
async def on_stream_closed(self, session: SessionManager, exception: StreamClosedException) -> None:
await session.state.close()
await super().on_stream_closed(session, exception)
class OpenCVFLVWriter:
    def __init__(self):
        self.writer = FLVWriter()

    async def initialize(self):
        # Stub only: OpenCV-based capture is not implemented in this example yet.
        pass
class RemoteProcessFLVWriter:
def __init__(self):
self.proc = None
self.stdout = None
self.stderr = None
self.writer = FLVWriter()
async def initialize(self, command: str, stdout_log: str, stderr_log: str):
self.proc = await asyncio.create_subprocess_shell(
command,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
self.stdout = asyncio.create_task(self._read_to_file(stdout_log, self.proc.stdout))
self.stderr = asyncio.create_task(self._read_to_file(stderr_log, self.proc.stderr))
async def _read_to_file(self, filename: str, stream: StreamReader):
fp = open(filename, "w")
while not stream.at_eof():
data = await stream.readline()
fp.write(data.decode())
fp.flush()
fp.close()
def write_header(self):
buffer = self.writer.write_header()
self.proc.stdin.write(buffer)
def write(self, timestamp: int, payload: bytes, media_type: FLVMediaType):
buffer = self.writer.write(timestamp, payload, media_type)
self.proc.stdin.write(buffer)
async def close(self):
await self.proc.stdin.drain()
self.proc.stdin.close()
await self.proc.wait()
class SimpleServer(SimpleRTMPServer):
def __init__(self, output_directory: str):
self.output_directory = output_directory
super().__init__()
async def create(self, host: str, port: int):
loop = asyncio.get_event_loop()
self.server = await loop.create_server(
lambda: RTMPProtocol(controller=RTMP2SocketController(self.output_directory)),
host=host,
port=port,
)
async def main():
current_dir = os.path.dirname(os.path.abspath(__file__))
server = SimpleServer(output_directory=current_dir)
await server.create(host="0.0.0.0", port=1935)
await server.start()
await server.wait_closed()
if __name__ == "__main__":
asyncio.run(main())
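# A quick way to exercise this example (a sketch, assuming ffmpeg is installed and
# an H.264/AAC input file such as input.mp4 is available; the stream key
# "mystream" is arbitrary) is to publish to the running server:
#
#   ffmpeg -re -i input.mp4 -c:v copy -c:a copy -f flv rtmp://127.0.0.1:1935/live/mystream
#
# The publishing name ("mystream") is used as the output prefix, so the re-muxed
# mystream.flv plus mystream.stdout.log / mystream.stderr.log are written next to
# this script.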
import asyncio
import logging
import os
from pyrtmp import StreamClosedException
from pyrtmp.flv import FLVFileWriter, FLVMediaType
from pyrtmp.rtmp import RTMPProtocol, SimpleRTMPController, SimpleRTMPServer
from pyrtmp.session_manager import SessionManager
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class RTMP2FLVController(SimpleRTMPController):
def __init__(self, output_directory: str):
self.output_directory = output_directory
super().__init__()
async def on_ns_publish(self, session, message) -> None:
publishing_name = message.publishing_name
file_path = os.path.join(self.output_directory, f"{publishing_name}.flv")
session.state = FLVFileWriter(output=file_path)
await super().on_ns_publish(session, message)
async def on_metadata(self, session, message) -> None:
session.state.write(0, message.to_raw_meta(), FLVMediaType.OBJECT)
await super().on_metadata(session, message)
async def on_video_message(self, session, message) -> None:
session.state.write(message.timestamp, message.payload, FLVMediaType.VIDEO)
await super().on_video_message(session, message)
async def on_audio_message(self, session, message) -> None:
session.state.write(message.timestamp, message.payload, FLVMediaType.AUDIO)
await super().on_audio_message(session, message)
async def on_stream_closed(self, session: SessionManager, exception: StreamClosedException) -> None:
session.state.close()
await super().on_stream_closed(session, exception)
class SimpleServer(SimpleRTMPServer):
def __init__(self, output_directory: str):
self.output_directory = output_directory
super().__init__()
async def create(self, host: str, port: int):
loop = asyncio.get_event_loop()
self.server = await loop.create_server(
lambda: RTMPProtocol(controller=RTMP2FLVController(self.output_directory)),
host=host,
port=port,
)
async def main():
current_dir = os.path.dirname(os.path.abspath(__file__))
server = SimpleServer(output_directory=current_dir)
await server.create(host="0.0.0.0", port=1935)
await server.start()
await server.wait_closed()
if __name__ == "__main__":
asyncio.run(main())
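# Unlike the previous example, this controller writes the FLV container straight
# to disk with FLVFileWriter instead of piping it through an ffmpeg subprocess, so
# it has no external dependencies. The same publish command works here; the result
# is <publishing name>.flv in this script's directory.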
from __future__ import annotations
import abc
import asyncio
import logging
from asyncio import StreamReader, StreamWriter, events
from pyrtmp import StreamClosedException
from pyrtmp.messages import Chunk
from pyrtmp.messages.audio import AudioMessage
from pyrtmp.messages.command import NCConnect, NCCreateStream, NSCloseStream, NSDeleteStream, NSPublish
from pyrtmp.messages.data import MetaDataMessage
from pyrtmp.messages.factory import MessageFactory
from pyrtmp.messages.protocol_control import SetChunkSize, SetPeerBandwidth, WindowAcknowledgementSize
from pyrtmp.messages.user_control import StreamBegin
from pyrtmp.messages.video import VideoMessage
from pyrtmp.session_manager import SessionManager
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class BaseRTMPController(abc.ABC):
async def client_callback(self, reader: StreamReader, writer: StreamWriter) -> None:
raise NotImplementedError()
async def on_handshake(self, session: SessionManager) -> None:
raise NotImplementedError()
async def on_nc_connect(self, session: SessionManager, message: NCConnect) -> None:
raise NotImplementedError()
async def on_window_acknowledgement_size(self, session: SessionManager, message: WindowAcknowledgementSize) -> None:
raise NotImplementedError()
async def on_nc_create_stream(self, session: SessionManager, message: NCCreateStream) -> None:
raise NotImplementedError()
async def on_ns_publish(self, session: SessionManager, message: NSPublish) -> None:
raise NotImplementedError()
async def on_metadata(self, session: SessionManager, message: MetaDataMessage) -> None:
raise NotImplementedError()
async def on_set_chunk_size(self, session: SessionManager, message: SetChunkSize) -> None:
raise NotImplementedError()
async def on_video_message(self, session: SessionManager, message: VideoMessage) -> None:
raise NotImplementedError()
async def on_audio_message(self, session: SessionManager, message: AudioMessage) -> None:
raise NotImplementedError()
async def on_ns_close_stream(self, session: SessionManager, message: NSCloseStream) -> None:
raise NotImplementedError()
async def on_ns_delete_stream(self, session: SessionManager, message: NSDeleteStream) -> None:
raise NotImplementedError()
async def on_unknown_message(self, session: SessionManager, message: Chunk) -> None:
raise NotImplementedError()
async def on_stream_closed(self, session: SessionManager, exception: StreamClosedException) -> None:
raise NotImplementedError()
async def cleanup(self, session: SessionManager) -> None:
raise NotImplementedError()
class SimpleRTMPController(BaseRTMPController):
async def client_callback(self, reader: StreamReader, writer: StreamWriter) -> None:
# create session per client
session = SessionManager(reader=reader, writer=writer)
logger.debug(f"Client connected {session.peername}")
try:
# do handshake
await self.on_handshake(session)
logger.debug(f"Handshake! {session.peername}")
# read chunks
async for chunk in session.read_chunks_from_stream():
message = MessageFactory.from_chunk(chunk)
# logger.debug(f"Receiving {str(message)} {message.chunk_id}")
if isinstance(message, NCConnect):
await self.on_nc_connect(session, message)
elif isinstance(message, WindowAcknowledgementSize):
await self.on_window_acknowledgement_size(session, message)
elif isinstance(message, NCCreateStream):
await self.on_nc_create_stream(session, message)
elif isinstance(message, NSPublish):
await self.on_ns_publish(session, message)
elif isinstance(message, MetaDataMessage):
await self.on_metadata(session, message)
elif isinstance(message, SetChunkSize):
await self.on_set_chunk_size(session, message)
elif isinstance(message, VideoMessage):
await self.on_video_message(session, message)
elif isinstance(message, AudioMessage):
await self.on_audio_message(session, message)
elif isinstance(message, NSCloseStream):
await self.on_ns_close_stream(session, message)
elif isinstance(message, NSDeleteStream):
await self.on_ns_delete_stream(session, message)
else:
await self.on_unknown_message(session, message)
except StreamClosedException as ex:
logger.debug(f"Client disconnected {session.peername}")
await self.on_stream_closed(session, ex)
except Exception as ex:
logger.exception(ex)
finally:
await self.cleanup(session)
writer.close()
async def on_handshake(self, session: SessionManager) -> None:
await session.handshake()
async def on_nc_connect(self, session: SessionManager, message: NCConnect) -> None:
session.write_chunk_to_stream(WindowAcknowledgementSize(ack_window_size=5000000))
session.write_chunk_to_stream(SetPeerBandwidth(ack_window_size=5000000, limit_type=2))
session.write_chunk_to_stream(StreamBegin(stream_id=0))
session.write_chunk_to_stream(SetChunkSize(chunk_size=8192))
session.writer_chunk_size = 8192
session.write_chunk_to_stream(message.create_response())
await session.drain()
async def on_window_acknowledgement_size(self, session: SessionManager, message: WindowAcknowledgementSize) -> None:
pass
async def on_nc_create_stream(self, session: SessionManager, message: NCCreateStream) -> None:
session.write_chunk_to_stream(message.create_response())
await session.drain()
async def on_ns_publish(self, session: SessionManager, message: NSPublish) -> None:
session.write_chunk_to_stream(StreamBegin(stream_id=1))
session.write_chunk_to_stream(message.create_response())
await session.drain()
async def on_metadata(self, session: SessionManager, message: MetaDataMessage) -> None:
pass
async def on_set_chunk_size(self, session: SessionManager, message: SetChunkSize) -> None:
session.reader_chunk_size = message.chunk_size
async def on_video_message(self, session: SessionManager, message: VideoMessage) -> None:
pass
async def on_audio_message(self, session: SessionManager, message: AudioMessage) -> None:
pass
async def on_ns_close_stream(self, session: SessionManager, message: NSCloseStream) -> None:
pass
async def on_ns_delete_stream(self, session: SessionManager, message: NSDeleteStream) -> None:
pass
async def on_unknown_message(self, session: SessionManager, message: Chunk) -> None:
logger.warning(f"Unknown message {str(message)}")
async def on_stream_closed(self, session: SessionManager, exception: StreamClosedException) -> None:
pass
async def cleanup(self, session: SessionManager) -> None:
logger.debug(f"Clean up {session.peername}")
class RTMPProtocol(asyncio.StreamReaderProtocol):
def __init__(self, controller: BaseRTMPController) -> None:
self.callback = controller.client_callback
self.loop = events.get_event_loop()
super().__init__(
StreamReader(loop=self.loop),
self.callback,
loop=self.loop,
)
class SimpleRTMPServer:
def __init__(self) -> None:
self.server = None
self.on_start = None
self.on_stop = None
def _signal_on_start(self) -> None:
if self.on_start:
self.on_start()
def _signal_on_stop(self) -> None:
if self.on_stop:
self.on_stop()
async def create(self, host: str, port: int) -> None:
loop = asyncio.get_event_loop()
self.server = await loop.create_server(
lambda: RTMPProtocol(controller=SimpleRTMPController()),
host=host,
port=port,
)
async def start(self) -> None:
addr = self.server.sockets[0].getsockname()
await self.server.start_serving()
self._signal_on_start()
logger.info(f"Serving on {addr}")
async def wait_closed(self) -> None:
await self.server.wait_closed()
async def stop(self) -> None:
self.server.close()
self._signal_on_stop()
async def main() -> None:
server = SimpleRTMPServer()
await server.create(host="0.0.0.0", port=1935)
await server.start()
await server.wait_closed()
if __name__ == "__main__":
asyncio.run(main())
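# SimpleRTMPServer above exposes optional on_start/on_stop callbacks that are
# invoked with no arguments from start() and stop(). A minimal sketch of wiring
# them up (the print messages are illustrative only):
#
#   server = SimpleRTMPServer()
#   server.on_start = lambda: print("accepting RTMP connections")
#   server.on_stop = lambda: print("server stopped")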
# This script plays back a video file on the virtual camera.
# It also shows how to:
# - select a specific camera device
# - use BGR as pixel format
import argparse
import pyvirtualcam
from pyvirtualcam import PixelFormat
import cv2
parser = argparse.ArgumentParser()
parser.add_argument("video_path", help="path to input video file")
parser.add_argument("--fps", action="store_true", help="output fps every second")
parser.add_argument("--device", help="virtual camera device, e.g. /dev/video0 (optional)")
args = parser.parse_args()
video = cv2.VideoCapture(args.video_path)
if not video.isOpened():
raise ValueError("error opening video")
length = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = video.get(cv2.CAP_PROP_FPS)
with pyvirtualcam.Camera(width, height, fps, fmt=PixelFormat.BGR,
device=args.device, print_fps=args.fps) as cam:
print(f'Virtual cam started: {cam.device} ({cam.width}x{cam.height} @ {cam.fps}fps)')
count = 0
while True:
# Restart video on last frame.
if count == length:
count = 0
video.set(cv2.CAP_PROP_POS_FRAMES, 0)
# Read video frame.
ret, frame = video.read()
if not ret:
raise RuntimeError('Error fetching frame')
# Send to virtual cam.
cam.send(frame)
# Wait until it's time for the next frame
cam.sleep_until_next_frame()
count += 1
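# Example invocation (a sketch; "play_video.py" stands for whatever name this
# script is saved under):
#
#   python play_video.py input.mp4 --fps --device /dev/video2
#
# --device is optional; without it pyvirtualcam picks the first available virtual
# camera. On Linux that requires a v4l2loopback device to exist before the script
# runs (e.g. created with `sudo modprobe v4l2loopback`).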