Example for the RTMP server

This example renders a scene and serves an interactive video stream over RTMP.

New Topics

  • Running an RTMP server together with an HTTP server and rendering a scene with mouse interaction.

Detailed Description

Running an RTMP and HTTP server with mouse interaction


This example first starts an HTTP server from which a browser or a standalone Flash client fetches a Flash application. The client then uses this application to view and interact with the video stream.
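
In condensed form (adapted from the full listing below; neuray, swf_file, and the port are assumed to be in scope), serving the .swf file amounts to obtaining the mi::http::IFactory API component, creating a server, and installing the Request_handler and Response_handler classes defined in this example:

mi::base::Handle<mi::http::IFactory> http_factory(
    neuray->get_api_component<mi::http::IFactory>());
mi::base::Handle<mi::http::IServer> http_server( http_factory->create_server());
// Serve the .swf file on every request and mark it with the Flash content type.
mi::base::Handle<mi::http::IRequest_handler> request_handler( new Request_handler( swf_file));
http_server->install( request_handler.get());
mi::base::Handle<mi::http::IResponse_handler> response_handler( new Response_handler());
http_server->install( response_handler.get());
// The full example assembles "0.0.0.0:<port>" from the port command line argument.
http_server->start( "0.0.0.0:8080");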

By default, the stream uses the "screen video" codec provided with the library to encode the canvas produced by rendering the scene. Each encoded frame is then sent over the RTMP stream to the Flash client.
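
Inside the play event handler, this selection boils down to the following lines (condensed from the Play_event_handler class in the listing; stream and out are the handler's parameters):

check_success( stream->use_codec( "screen video"));
mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec());
check_success( codec->init( 512, 384, out) == 0); // the example streams at a fixed 512x384 resolution

For every frame, the frame event handler then hands the most recently rendered canvas to codec->encode_canvas(), and the RTMP server sends the encoded result to the client.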

On the connection, a Remote Procedure Call (RPC) handler is installed, which is invoked whenever the client interacts with the video stream using the mouse.
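
Condensed from the Connect_event_handler and Call_event_handler classes below (connection and scope are provided by the surrounding handler), the registration looks like this:

mi::base::Handle<mi::rtmp::ICall_event_handler> call_event_handler(
    new Call_event_handler( scope.get()));
connection->register_remote_call_handler( call_event_handler.get(), "moveCamera");

The Flash client triggers the handler with NetConnection.call("moveCamera", null, args), where args carries the pan_x and pan_y deltas computed from mouse movement; the handler applies them to the camera offsets.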

The provided Flash file (the .swf file) can be rebuilt by running the free Adobe Flex SDK compiler mxmlc on the included .mxml file (for example, mxmlc example_rtmp_server.mxml).

Example Source

Source Code Location: examples/example_rtmp_server.cpp

/******************************************************************************
* Copyright 2023 NVIDIA Corporation. All rights reserved.
*****************************************************************************/
// examples/example_rtmp_server.cpp
//
// Serves a flash player (.swf file) over HTTP to a client browser which then connects to the RTMP
// server which produces a video stream from the rendering of a scene.
//
// The example expects the following command line arguments:
//
// example_rtmp_server <swf_file> <scene_file> <mdl_path> <port>
//
// swf_file the flash player, the .swf-file included in the examples directory
// scene_file some scene file, e.g., main.mi
// mdl_path path to the MDL modules, e.g., iray-<version>/mdl
// port port for the HTTP server
#include <mi/neuraylib.h>
// Include code shared by all examples.
#include "example_shared.h"
// Include an implementation of IRender_target.
#include "example_render_target_simple.h"
#include <cstring>
#include <fstream>
#include <iostream>
#include <vector>
// HTTP server implementation
//
// The HTTP server just serves the .swf file.
// A simple implementation of the IBuffer interface.
class Buffer : public mi::base::Interface_implement<mi::neuraylib::IBuffer>
{
public:
const mi::Uint8* get_data() const { return &m_buffer[0]; }
mi::Size get_data_size() const { return m_buffer.size(); }
Buffer( const std::vector<mi::Uint8>& content) { m_buffer = content; }
private:
std::vector<mi::Uint8> m_buffer;
};
// An HTTP response handler which always sets the content type for flash.
class Response_handler : public mi::base::Interface_implement<mi::http::IResponse_handler>
{
public:
void handle( mi::http::IConnection* connection)
{
mi::http::IResponse* iresponse( connection->get_response());
iresponse->set_header( "Content-Type", "application/x-shockwave-flash");
}
};
// An HTTP request handler which always sends the .swf file.
class Request_handler : public mi::base::Interface_implement<mi::http::IRequest_handler>
{
public:
Request_handler( const char* swf_file) : m_swf_file( swf_file) { }
bool handle( mi::http::IConnection* connection)
{
std::ifstream file( m_swf_file, std::ios::in|std::ios::binary|std::ios::ate);
check_success( file);
std::ifstream::pos_type size = file.tellg();
std::vector<mi::Uint8> data( static_cast<mi::Size>( size));
file.seekg( 0, std::ios::beg);
file.read( reinterpret_cast<char*>( &data[0]), size);
file.close();
mi::base::Handle<mi::neuraylib::IBuffer> buffer( new Buffer( data));
connection->enqueue( buffer.get());
return true;
}
private:
const char* m_swf_file;
};
// RTMP server implementation
//
// The RTMP server renders a given scene and interprets mouse movements as camera movements.
// An RTMP play event handler that chooses the screen video codec and initializes it with a
// predefined window size.
class Play_event_handler : public mi::base::Interface_implement<mi::rtmp::IPlay_event_handler>
{
public:
bool handle( bool is_start, mi::rtmp::IStream* stream, mi::neuraylib::IVideo_data** out)
{
if( is_start) {
check_success( stream->use_codec( "screen video"));
mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec());
check_success( codec->init( 512, 384, out) == 0);
}
else {
mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec());
check_success( codec->close( out) == 0);
}
return true;
}
};
// An RTMP frame event handler that encodes a frame and gives it to the RTMP server for
// sending. Note that this event runs in another thread than the other event handlers, most
// importantly the render handler, so care needs to be taken to avoid synchronization issues.
class Frame_event_handler : public mi::base::Interface_implement<mi::rtmp::IFrame_event_handler>
{
public:
bool handle(
mi::rtmp::IStream* stream, mi::neuraylib::IVideo_data** out, bool send_queue_is_full)
{
if (send_queue_is_full) // we do not want to increase buffering
return true;
mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec());
mi::neuraylib::ICanvas* canvas = 0;
{
mi::base::Lock::Block block( &m_cached_canvas_lock);
canvas = m_cached_canvas.get();
if ( !canvas)
return true;
canvas->retain();
}
bool result = (codec->encode_canvas( canvas, out) == 0);
canvas->release();
return result;
}
void update_canvas( mi::neuraylib::ICanvas* new_canvas)
{
mi::base::Lock::Block block( &m_cached_canvas_lock);
m_cached_canvas = make_handle_dup( new_canvas);
}
private:
mi::base::Lock m_cached_canvas_lock;
mi::base::Handle<mi::neuraylib::ICanvas> m_cached_canvas;
};
// An RTMP render event handler that renders a given scene into a canvas and saves it for the
// frame event handler to encode.
class Render_event_handler : public mi::base::Interface_implement<mi::rtmp::IRender_event_handler>
{
public:
Render_event_handler(
mi::neuraylib::INeuray* neuray, mi::neuraylib::IScope* scope, Frame_event_handler* handler)
: m_neuray( neuray, mi::base::DUP_INTERFACE),
m_scope( scope, mi::base::DUP_INTERFACE),
m_frame_handler( handler, mi::base::DUP_INTERFACE)
{
mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
{
mi::base::Handle<mi::neuraylib::IScene> scene(
transaction->edit<mi::neuraylib::IScene>( "the_scene"));
m_render_context = scene->create_render_context( transaction.get(), "iray");
check_success( m_render_context.is_valid_interface());
mi::base::Handle<mi::IString> scheduler_mode( transaction->create<mi::IString>());
scheduler_mode->set_c_str( "interactive");
m_render_context->set_option( "scheduler_mode", scheduler_mode.get());
mi::base::Handle<mi::IFloat32> interval( transaction->create<mi::IFloat32>());
interval->set_value( 0.1f);
m_render_context->set_option( "interactive_update_interval", interval.get());
}
transaction->commit();
}
bool handle( mi::rtmp::IStream* /*stream*/)
{
mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
{
mi::base::Handle<mi::neuraylib::IImage_api> image_api(
m_neuray->get_api_component<mi::neuraylib::IImage_api>());
mi::base::Handle<mi::neuraylib::IRender_target> render_target(
new Render_target( image_api.get(), "Color", 512, 384));
check_success(
m_render_context->render( transaction.get(), render_target.get(), 0) >= 0);
mi::base::Handle<mi::neuraylib::ICanvas> canvas( render_target->get_canvas( 0));
m_frame_handler->update_canvas( canvas.get());
}
transaction->commit();
return true;
}
private:
mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
mi::base::Handle<mi::neuraylib::IScope> m_scope;
mi::base::Handle<Frame_event_handler> m_frame_handler;
mi::base::Handle<mi::neuraylib::IRender_context> m_render_context;
};
// An RTMP stream event handler that registers the play and render event handlers above.
class Stream_event_handler : public mi::base::Interface_implement<mi::rtmp::IStream_event_handler>
{
public:
Stream_event_handler( mi::neuraylib::INeuray* neuray, mi::neuraylib::IScope* scope)
: m_neuray( neuray, mi::base::DUP_INTERFACE), m_scope( scope, mi::base::DUP_INTERFACE) { }
bool handle(
bool is_create, mi::rtmp::IStream* stream,
const mi::IData* /*command_arguments*/)
{
if( is_create) {
mi::base::Handle<mi::rtmp::IPlay_event_handler> play_event_handler(
new Play_event_handler());
stream->register_play_event_handler( play_event_handler.get());
mi::base::Handle<Frame_event_handler> frame_event_handler( new Frame_event_handler());
mi::base::Handle<mi::rtmp::IRender_event_handler> render_event_handler(
new Render_event_handler( m_neuray.get(), m_scope.get(), frame_event_handler.get()));
stream->register_render_event_handler( render_event_handler.get());
stream->register_frame_event_handler( frame_event_handler.get());
}
return true;
}
private:
mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
mi::base::Handle<mi::neuraylib::IScope> m_scope;
};
// An RTMP call event handler that moves the camera according to the arguments 'pan_x' and 'pan_y'.
class Call_event_handler : public mi::base::Interface_implement<mi::rtmp::ICall_event_handler>
{
public:
Call_event_handler( mi::neuraylib::IScope* scope) : m_scope( scope, mi::base::DUP_INTERFACE) { }
bool handle(
mi::rtmp::IConnection* /*connection*/,
const char* /*procedure_name*/,
const mi::IData* /*command_arguments*/,
const mi::IData* user_arguments,
mi::IData** /*response_arguments*/)
{
mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
{
// The "camera" name matches the camera in main.mi in the examples directory.
mi::base::Handle<mi::neuraylib::ICamera> camera(
transaction->edit<mi::neuraylib::ICamera>( "camera"));
check_success( camera.is_valid_interface());
mi::base::Handle<const mi::IMap> imap( user_arguments->get_interface<const mi::IMap>());
check_success( imap.is_valid_interface());
mi::base::Handle<const mi::ISint32> pan_x( imap->get_value<mi::ISint32>( "pan_x"));
if ( pan_x) {
mi::Float64 x = camera->get_offset_x();
camera->set_offset_x( x - pan_x->get_value<mi::Sint32>());
// The example client also demonstrates how to send/parse a double.
mi::base::Handle<const mi::IFloat64> pan_xd(
imap->get_value<mi::IFloat64>( "pan_xd"));
if( pan_xd) {
mi::Float64 xd = pan_xd->get_value<mi::Float64>();
check_success( mi::Sint32(xd) == pan_x->get_value<mi::Sint32>());
}
}
mi::base::Handle<const mi::ISint32> pan_y( imap->get_value<mi::ISint32>( "pan_y"));
if( pan_y) {
mi::Float64 y = camera->get_offset_y();
camera->set_offset_y( y - pan_y->get_value<mi::Sint32>());
}
// Demonstrate getting a bool from the example client
mi::base::Handle<const mi::IBoolean> dir(
imap->get_value<mi::IBoolean>( "going_right"));
if ( dir) {
bool going_right = dir->get_value<bool>();
going_right = !going_right; // avoid compiler warning
}
}
transaction->commit();
return true;
}
private:
mi::base::Handle<mi::neuraylib::IScope> m_scope;
};
// An RTMP connect event handler that registers the stream and call event handlers above.
class Connect_event_handler : public mi::base::Interface_implement<mi::rtmp::IConnect_event_handler>
{
public:
Connect_event_handler( mi::neuraylib::INeuray* neuray, mi::neuraylib::IScope* scope)
: m_neuray( neuray, mi::base::DUP_INTERFACE), m_scope( scope, mi::base::DUP_INTERFACE) { }
bool handle(
bool is_create, mi::rtmp::IConnection* connection,
const mi::IData* /*command_arguments*/,
const mi::IData* /*user_arguments*/)
{
if( is_create) {
mi::base::Handle<mi::rtmp::IStream_event_handler> stream_event_handler(
new Stream_event_handler( m_neuray.get(), m_scope.get()));
connection->register_stream_event_handler( stream_event_handler.get());
mi::base::Handle<mi::rtmp::ICall_event_handler> call_event_handler(
new Call_event_handler( m_scope.get()));
connection->register_remote_call_handler( call_event_handler.get(), "moveCamera");
}
return true;
}
private:
mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
mi::base::Handle<mi::neuraylib::IScope> m_scope;
};
void configuration( mi::neuraylib::INeuray* neuray, const char* mdl_path)
{
// Configure the neuray library. Here we set the search path for .mdl files.
mi::base::Handle<mi::neuraylib::IRendering_configuration> rc(
neuray->get_api_component<mi::neuraylib::IRendering_configuration>());
check_success( rc->add_mdl_path( mdl_path) == 0);
check_success( rc->add_mdl_path( ".") == 0);
// Load the OpenImageIO, Iray Photoreal, and .mi importer plugins.
// Also load the default video codec plugin which will be used to encode the rendered frames.
mi::base::Handle<mi::neuraylib::IPlugin_configuration> pc(
neuray->get_api_component<mi::neuraylib::IPlugin_configuration>());
check_success( pc->load_plugin_library( "nv_openimageio" MI_BASE_DLL_FILE_EXT) == 0);
check_success( pc->load_plugin_library( "libiray" MI_BASE_DLL_FILE_EXT) == 0);
check_success( pc->load_plugin_library( "mi_importer" MI_BASE_DLL_FILE_EXT) == 0);
check_success( pc->load_plugin_library( "screen_video" MI_BASE_DLL_FILE_EXT) == 0);
}
void prepare_rendering(
mi::neuraylib::INeuray* neuray, const char* scene_file)
{
// Get the database, the global scope of the database, and create a transaction in the global
// scope for importing the scene file and storing the scene.
mi::base::Handle<mi::neuraylib::IDatabase> database(
neuray->get_api_component<mi::neuraylib::IDatabase>());
check_success( database.is_valid_interface());
mi::base::Handle<mi::neuraylib::IScope> scope( database->get_global_scope());
mi::base::Handle<mi::neuraylib::ITransaction> transaction( scope->create_transaction());
check_success( transaction.is_valid_interface());
// Import the scene file
mi::base::Handle<mi::neuraylib::IImport_api> import_api(
neuray->get_api_component<mi::neuraylib::IImport_api>());
check_success( import_api.is_valid_interface());
mi::base::Handle<const mi::IString> uri( import_api->convert_filename_to_uri( scene_file));
mi::base::Handle<const mi::neuraylib::IImport_result> import_result(
import_api->import_elements( transaction.get(), uri->get_c_str()));
check_success( import_result->get_error_number() == 0);
// Create the scene object
mi::base::Handle<mi::neuraylib::IScene> scene(
transaction->create<mi::neuraylib::IScene>( "Scene"));
scene->set_rootgroup( import_result->get_rootgroup());
scene->set_options( import_result->get_options());
scene->set_camera_instance( import_result->get_camera_inst());
// And store it in the database such that the render loop can later access it
transaction->store( scene.get(), "the_scene");
transaction->commit();
}
void run_http_and_rtmp_server(
mi::neuraylib::INeuray* neuray, const char* port, const char* swf_file)
{
// Create an HTTP server instance
mi::base::Handle<mi::http::IFactory> http_factory(
neuray->get_api_component<mi::http::IFactory>());
mi::base::Handle<mi::http::IServer> http_server( http_factory->create_server());
// Install our HTTP request and response handlers
mi::base::Handle<mi::http::IRequest_handler> request_handler( new Request_handler( swf_file));
http_server->install( request_handler.get());
mi::base::Handle<mi::http::IResponse_handler> response_handler( new Response_handler());
http_server->install( response_handler.get());
// Assemble HTTP server address
const char* ip = "0.0.0.0:";
char address[255];
address[0] = '\0';
strncat( address, ip, sizeof(address) - 1);
strncat( address, port, sizeof(address) - 1 - strlen(address));
// Start HTTP server
http_server->start( address);
// Create an RTMP server instance
mi::base::Handle<mi::rtmp::IFactory> rtmp_factory(
neuray->get_api_component<mi::rtmp::IFactory>());
mi::base::Handle<mi::rtmp::IServer> rtmp_server( rtmp_factory->create_server());
// Install our RTMP connect handler
mi::base::Handle<mi::neuraylib::IDatabase> database(
neuray->get_api_component<mi::neuraylib::IDatabase>());
mi::base::Handle<mi::neuraylib::IScope> scope( database->get_global_scope());
mi::base::Handle<mi::rtmp::IConnect_event_handler> connect_handler(
new Connect_event_handler( neuray, scope.get()));
rtmp_server->install( connect_handler.get());
// Start RTMP server
rtmp_server->start( "0.0.0.0:1935");
// Run both servers for fixed time interval
sleep_seconds( 30);
http_server->shutdown();
rtmp_server->shutdown();
}
int main( int argc, char* argv[])
{
// Collect command line parameters
if( argc != 5) {
std::cerr << "Usage: example_rtmp_server <swf_file> <scene_file> <mdl_path> <port>"
<< std::endl;
keep_console_open();
return EXIT_FAILURE;
}
const char* swf_file = argv[1];
const char* scene_file = argv[2];
const char* mdl_path = argv[3];
const char* port = argv[4];
// Access the neuray library
mi::base::Handle<mi::neuraylib::INeuray> neuray( load_and_get_ineuray());
check_success( neuray.is_valid_interface());
// Configure the neuray library
configuration( neuray.get(), mdl_path);
// Start the neuray library
mi::Sint32 result = neuray->start();
check_start_success( result);
// Set up the scene
prepare_rendering( neuray.get(), scene_file);
// Serve video stream via RTMP server
run_http_and_rtmp_server( neuray.get(), port, swf_file);
// Shut down the neuray library
check_success( neuray->shutdown() == 0);
neuray = 0;
// Unload the neuray library
check_success( unload());
keep_console_open();
return EXIT_SUCCESS;
}

Flex Source Code Location: examples/example_rtmp_server.mxml and examples/example_rtmp_server_actionscript.as

<?xml version="1.0" encoding="utf-8"?>
<!--
/******************************************************************************
* Copyright 2023 NVIDIA Corporation. All rights reserved.
*****************************************************************************/
-->
<mx:Application
xmlns:mx="http://www.adobe.com/2006/mxml"
layout="horizontal"
initialize="init()" xmlns:local="*">
<mx:Script>
<![CDATA[
import mx.core.Application;
public function init():void {
vidplayer.makeConnection("rtmp://" + getHost());
}
public function getHost():String {
var location:String = Application.application.url;
var components:Array = location.split("/");
if (components.length < 3)
return "localhost";
var host_port:Array = components[2].split(":");
if (host_port.length <= 1)
return "localhost";
return host_port[0];
}
]]>
</mx:Script>
<!-- refer to the actionscript object -->
<local:example_rtmp_server_actionscript includeInLayout="true" id="vidplayer" width="1024" height="786" />
</mx:Application>
/******************************************************************************
* Copyright 2023 NVIDIA Corporation. All rights reserved.
*****************************************************************************/
package {
import flash.events.MouseEvent;
import flash.events.NetStatusEvent;
import flash.events.SecurityErrorEvent;
import flash.media.Video;
import flash.net.NetConnection;
import flash.net.NetStream;
import mx.core.Application;
import mx.core.UIComponent;
public class example_rtmp_server_actionscript extends UIComponent {
private var streamName:String = "example_rtmp_server";
public var connection:NetConnection = null;
private var video:Video = null;
private var mystream:NetStream = null;
private var client:Object = null;
private var mouseButton:Boolean = false;
private var mousePosX:int = 0;
private var mousePosY:int = 0;
public function example_rtmp_server_actionscript() {
super();
this.addEventListener(MouseEvent.MOUSE_DOWN, this.onMouseDown);
}
public function makeConnection(url:String):void {
if (connection != null) {
mystream = null;
connection.close();
} else {
connection = new NetConnection();
}
connection.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
connection.addEventListener(SecurityErrorEvent.SECURITY_ERROR, securityErrorHandler);
var args:Object = new Object();
args["resolution_x"] = floor16(this.width).toString();
args["resolution_y"] = floor16(this.height).toString();
connection.connect(url,args);
}
private function floor16(val:int):int {
return int(val/16) * 16;
}
public function closeConnection():void {
if (connection != null) {
mystream = null;
connection.close();
}
}
private function netStatusHandler(event:NetStatusEvent):void {
switch (event.info.code) {
case "NetConnection.Connect.Success":
connectStream();
break;
case "NetStream.Play.StreamNotFound":
trace("Stream not found: " + streamName);
break;
}
}
private function securityErrorHandler(event:SecurityErrorEvent):void {
trace("securityErrorHandler: " + event);
}
private function connectStream():void {
mystream = new NetStream(connection);
mystream.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
if (video == null) {
video = new Video(this.width,this.height);
video.smoothing = true;
}
video.attachNetStream(mystream);
addChild(video);
mystream.play(streamName);
}
public function onMouseDown(event: MouseEvent):void {
var x: int = event.stageX - (event.target as UIComponent).parent.x;
var y: int = event.stageY - (event.target as UIComponent).parent.y;
mousePosX = x;
mousePosY = y;
Application.application.addEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
Application.application.addEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
mouseButton = true;
}
public function onMouseUp(event: MouseEvent):void {
if (mouseButton) {
mouseButton = false;
Application.application.removeEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
Application.application.removeEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
}
}
public function onMouseMove(event: MouseEvent):void
{
var x: int = event.stageX - (event.target as UIComponent).parent.x;
var y: int = event.stageY - (event.target as UIComponent).parent.y;
if (mouseButton && connection && connection.connected && mystream) {
var diff_x:int = x-mousePosX;
var diff_y:int = y-mousePosY;
var args:Object = new Object();
if (diff_x != 0) args["pan_x"] = diff_x;
if (diff_y != 0) args["pan_y"] = -diff_y;
if (diff_x || diff_y) {
// For demonstration purposes also send a double..
args["pan_xd"] = (diff_x < 0) ? diff_x - 0.1 : diff_x + 0.1
// ..and some bool
args["going_right"] = diff_x > 0 ? true : false;
connection.call("moveCamera",null,args);
}
mousePosX = x;
mousePosY = y;
}
}
}
}