Iray Programmer's Manual

Running an RTMP and HTTP server with mouse interaction

The example program renders a scene and serves an interactive video stream over RTMP. The provided Flash file (the .swf file) can be reproduced by running the free Adobe Flex SDK compiler mxmlc on the included .mxml file.

Note the following:

  • The example_rtmp_server.cpp starts an HTTP server that serves a Flash application to a browser or a standalone Flash client, which the client then uses to view and interact with the video stream.
  • By default, the stream uses the screen video codec, which is provided with the library and encodes the canvas produced by scene rendering. The encoded frame is sent over the RTMP stream to the Flash client.
  • On the connection, a Remote Procedure Call (RPC) is installed. The RPC is called when the client interacts with the video stream using the mouse.

The example program consists of three files:

example_rtmp_server.cpp

001 /******************************************************************************
002  * © 1986, 2014 NVIDIA Corporation. All rights reserved.
003  *****************************************************************************/
004 
005 // examples/example_rtmp_server.cpp
006 //
007 // Serves a flash player (.swf file) over HTTP to a client browser which then connects to the RTMP
008 // server which produces a video stream from the rendering of a scene.
009 //
010 // The example expects the following command line arguments:
011 //
012 //   example_rtmp_server <swf_file> <scene_file> <mdl_path> <port>
013 //
014 // swf_file         the flash player, the .swf-file included in the examples directory
015 // scene_file       some scene file, e.g., main.mi
016 // mdl_path         path to the MDL modules, e.g., iray-<version>/mdl
017 // port             port for the HTTP server
018 
019 #include <mi/neuraylib.h>
020 
021 // Include code shared by all examples.
022 #include "example_shared.h"
023 // Include an implementation of IRender_target.
024 #include "example_render_target_simple.h"
025 
026 #include <cstdio>
027 #include <fstream>
028 #include <vector>
029 
030 // HTTP server implementation
031 //
032 // The HTTP server just serves the .swf file.
033 
034 // A simple implementation of the IBuffer interface.
035 class Buffer : public mi::base::Interface_implement<mi::IBuffer>
036 {
037 public:
038     const mi::Uint8* get_data() const { return &m_buffer[0]; }
039 
040     mi::Size get_data_size() const { return m_buffer.size(); }
041 
042     Buffer( const std::vector<mi::Uint8>& content) { m_buffer = content; }
043 
044 private:
045     std::vector<mi::Uint8> m_buffer;
046 };
047 
048 // An HTTP response handler which always sets the content type for flash.
049 class Response_handler : public mi::base::Interface_implement<mi::http::IResponse_handler>
050 {
051 public:
052     void handle( mi::http::IConnection* connection)
053     {
054         mi::http::IResponse* iresponse( connection->get_response());
055         iresponse->set_header( "Content-Type", "application/x-shockwave-flash");
056     }
057 };
058 
059 // An HTTP request handler which always sends the .swf file.
060 class Request_handler : public mi::base::Interface_implement<mi::http::IRequest_handler>
061 {
062 public:
063     Request_handler( const char* swf_file) : m_swf_file( swf_file) { }
064 
065     bool handle( mi::http::IConnection* connection)
066     {
067         std::ifstream file( m_swf_file, std::ios::in|std::ios::binary|std::ios::ate);
068         check_success( file);
069 
070         std::ifstream::pos_type size = file.tellg();
071         std::vector<mi::Uint8> data( size);
072         file.seekg( 0, std::ios::beg);
073         file.read( (char*) &data[0], size);
074         file.close();
075 
076         mi::base::Handle<mi::IBuffer> buffer( new Buffer( data));
077         connection->enqueue( buffer.get());
078         return true;
079     }
080 
081 private:
082     const char* m_swf_file;
083 };
084 
085 // RTMP server implementation
086 //
087 // The RTMP server renders a given scene and interprets mouse movements as camera movements.
088 
089 // An RTMP play event handler that chooses the screen video codec and initializes it with a
090 // predefined window size.
091 class Play_event_handler : public mi::base::Interface_implement<mi::rtmp::IPlay_event_handler>
092 {
093 public:
094     bool handle( bool is_start, mi::rtmp::IStream* stream, mi::IVideo_data** out)
095     {
096         if( is_start) {
097             check_success( stream->use_codec( "screen video"));
098             mi::base::Handle<mi::IVideo_encoder> codec( stream->get_video_codec());
099             check_success( codec->init( 512, 384, out));
100         }
101         else {
102             mi::base::Handle<mi::IVideo_encoder> codec( stream->get_video_codec());
103             check_success( codec->close( out));
104         }
105         return true;
106     }
107 };
108 
109 // An RTMP frame event handler that encodes a frame and gives it to the RTMP server for
110 // sending. Note that this event handler runs in a different thread than the other event
111 // handlers, most importantly the render handler, so care needs to be taken to avoid synchronization issues.
112 class Frame_event_handler : public mi::base::Interface_implement<mi::rtmp::IFrame_event_handler>
113 {
114 public:
115     bool handle( mi::rtmp::IStream* stream, mi::IVideo_data** out, bool send_queue_is_full)
116     {
117         if (send_queue_is_full) // we do not want to increase buffering
118             return true;
119         mi::base::Handle<mi::IVideo_encoder> codec( stream->get_video_codec());
120         mi::neuraylib::ICanvas* canvas = 0;
121         {
122             mi::base::Lock::Block block( &m_cached_canvas_lock);
123             canvas = m_cached_canvas.get();
124             if ( !canvas)
125                 return true;
126             canvas->retain();
127         }
128         bool result = codec->encode_canvas( canvas, out);
129         canvas->release();
130         return result;
131     }
132     void update_canvas(mi::base::Handle<mi::neuraylib::ICanvas> new_canvas)
133     {
134         mi::base::Lock::Block block( &m_cached_canvas_lock);
135         m_cached_canvas = new_canvas;
136     }
137 private:
138     mi::base::Lock m_cached_canvas_lock;
139     mi::base::Handle<mi::neuraylib::ICanvas> m_cached_canvas;
140 };
141 
142 
143 // An RTMP render event handler that renders a given scene into a canvas and saves it for the
144 // frame event handler to encode.
145 class Render_event_handler : public mi::base::Interface_implement<mi::rtmp::IRender_event_handler>
146 {
147 public:
148     Render_event_handler(
149         mi::base::Handle<mi::neuraylib::INeuray> neuray,
150         mi::base::Handle<mi::neuraylib::IScope> scope,
151         mi::base::Handle<Frame_event_handler> handler)
152       : m_neuray( neuray), m_scope( scope), m_frame_handler( handler)
153     {
154         mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
155         {
156             mi::base::Handle<mi::neuraylib::IScene> scene(
157                 transaction->edit<mi::neuraylib::IScene>( "the_scene"));
158             m_render_context = scene->create_render_context( transaction.get(), "iray");
159             check_success( m_render_context.is_valid_interface());
160             mi::base::Handle<mi::IString> scheduler_mode( transaction->create<mi::IString>());
161             scheduler_mode->set_c_str( "interactive");
162             m_render_context->set_option( "scheduler_mode", scheduler_mode.get());
163             mi::base::Handle<mi::IFloat32> interval( transaction->create<mi::IFloat32>());
164             interval->set_value( 0.1f);
165             m_render_context->set_option( "interactive_update_interval", interval.get());
166         }
167         transaction->commit();
168     }
169 
170     bool handle( mi::rtmp::IStream* /*stream*/)
171     {
172         mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
173         {
174             mi::base::Handle<mi::neuraylib::IImage_api> image_api(
175                 m_neuray->get_api_component<mi::neuraylib::IImage_api>());
176             mi::base::Handle<mi::neuraylib::IRender_target> render_target(
177                 new Render_target( image_api.get(), "Color", 512, 384));
178             check_success(
179                 m_render_context->render( transaction.get(), render_target.get(), 0) >= 0);
180 
181             mi::base::Handle<mi::neuraylib::ICanvas> canvas( render_target->get_canvas( 0));
182             m_frame_handler->update_canvas( canvas);
183         }
184         transaction->commit();
185         return true;
186     }
187 
188 private:
189     mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
190     mi::base::Handle<mi::neuraylib::IScope> m_scope;
191     mi::base::Handle<Frame_event_handler> m_frame_handler;
192     mi::base::Handle<mi::neuraylib::IRender_context> m_render_context;
193 };
194 
195 // An RTMP stream event handler that registers the play and render event handlers above.
196 class Stream_event_handler : public mi::base::Interface_implement<mi::rtmp::IStream_event_handler>
197 {
198 public:
199     Stream_event_handler(
200         mi::base::Handle<mi::neuraylib::INeuray> neuray,
201         mi::base::Handle<mi::neuraylib::IScope> scope)
202       : m_neuray( neuray), m_scope( scope) { }
203 
204     bool handle(
205         bool is_create, mi::rtmp::IStream* stream,
206         const mi::IData* /*command_arguments*/)
207     {
208         if( is_create) {
209             mi::base::Handle<mi::rtmp::IPlay_event_handler> play_event_handler(
210                 new Play_event_handler());
211             stream->register_play_event_handler( play_event_handler.get());
212             mi::base::Handle<Frame_event_handler> frame_event_handler( new Frame_event_handler());
213             mi::base::Handle<mi::rtmp::IRender_event_handler> render_event_handler(
214                 new Render_event_handler( m_neuray, m_scope, frame_event_handler));
215             stream->register_render_event_handler( render_event_handler.get());
216             stream->register_frame_event_handler( frame_event_handler.get());
217         }
218         return true;
219     }
220 
221 private:
222     mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
223     mi::base::Handle<mi::neuraylib::IScope> m_scope;
224 };
225 
226 // An RTMP call event handler that moves the camera according to the arguments 'pan_x' and 'pan_y'.
227 class Call_event_handler : public mi::base::Interface_implement<mi::rtmp::ICall_event_handler>
228 {
229 public:
230     Call_event_handler( mi::base::Handle<mi::neuraylib::IScope> scope) : m_scope( scope) { }
231 
232     bool handle(
233         mi::rtmp::IConnection* /*connection*/,
234         const char* /*procedure_name*/,
235         const mi::IData* /*command_arguments*/,
236         const mi::IData* user_arguments,
237         mi::IData** /*response_arguments*/)
238     {
239         mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
240         {
241             // The "camera" name matches the camera in main.mi in the examples directory.
242             mi::base::Handle<mi::ICamera> camera( transaction->edit<mi::ICamera>( "camera"));
243             check_success( camera.is_valid_interface());
244             mi::base::Handle<const mi::IMap> imap( user_arguments->get_interface<const mi::IMap>());
245             check_success( imap.is_valid_interface());
246             mi::base::Handle<const mi::ISint32> pan_x( imap->get_value<mi::ISint32>( "pan_x"));
247             if ( pan_x) {
248                 mi::Float64 x = camera->get_offset_x();
249                 camera->set_offset_x( x - pan_x->get_value<mi::Sint32>());
250                 // The example client also demonstrates how to send/parse a double.
251                 mi::base::Handle<const mi::IFloat64> pan_xd(
252                     imap->get_value<mi::IFloat64>( "pan_xd"));
253                 if( pan_xd) {
254                     mi::Float64 xd = pan_xd->get_value<mi::Float64>();
255                     check_success( mi::Sint32(xd) == pan_x->get_value<mi::Sint32>());
256                 }
257             }
258             mi::base::Handle<const mi::ISint32> pan_y( imap->get_value<mi::ISint32>( "pan_y"));
259             if( pan_y) {
260                 mi::Float64 y = camera->get_offset_y();
261                 camera->set_offset_y( y - pan_y->get_value<mi::Sint32>());
262             }
263             // Demonstrate getting a bool from the example client
264             mi::base::Handle<const mi::IBoolean> dir(
265                 imap->get_value<mi::IBoolean>( "going_right"));
266             if ( dir) {
267                 bool going_right = dir->get_value<bool>();
268                 going_right = !going_right; // avoid compiler warning
269             }
270         }
271         transaction->commit();
272         return true;
273     }
274 
275 private:
276     mi::base::Handle<mi::neuraylib::IScope> m_scope;
277 };
278 
279 // An RTMP connect event handler that registers the stream and call event handlers above.
280 class Connect_event_handler : public mi::base::Interface_implement<mi::rtmp::IConnect_event_handler>
281 {
282 public:
283     Connect_event_handler(
284         mi::base::Handle<mi::neuraylib::INeuray> neuray,
285         mi::base::Handle<mi::neuraylib::IScope> scope)
286       : m_neuray( neuray), m_scope( scope) { }
287 
288     bool handle(
289         bool is_create, mi::rtmp::IConnection* connection,
290         const mi::IData* /*command_arguments*/,
291         const mi::IData* /*user_arguments*/)
292     {
293         if( is_create) {
294             mi::base::Handle<mi::rtmp::IStream_event_handler> stream_event_handler(
295                 new Stream_event_handler( m_neuray, m_scope));
296             connection->register_stream_event_handler( stream_event_handler.get());
297             mi::base::Handle<mi::rtmp::ICall_event_handler> call_event_handler(
298                 new Call_event_handler( m_scope));
299             connection->register_remote_call_handler( call_event_handler.get(), "moveCamera");
300         }
301         return true;
302     }
303 
304 private:
305     mi::base::Handle<mi::neuraylib::INeuray> m_neuray;
306     mi::base::Handle<mi::neuraylib::IScope> m_scope;
307 };
308 
309 void configuration( mi::base::Handle<mi::neuraylib::INeuray> neuray, const char* mdl_path)
310 {
311     // Configure the neuray library. Here we set the search path for .mdl files.
312     mi::base::Handle<mi::neuraylib::IRendering_configuration> rendering_configuration(
313         neuray->get_api_component<mi::neuraylib::IRendering_configuration>());
314     check_success( rendering_configuration.is_valid_interface());
315     check_success( rendering_configuration->add_mdl_path( mdl_path) == 0);
316 
317     // Load the FreeImage, Iray Photoreal, and .mi importer plugins.
318     // Also load the default video codec plugin which will be used to encode the rendered frames.
319     mi::base::Handle<mi::neuraylib::IPlugin_configuration> plugin_configuration(
320         neuray->get_api_component<mi::neuraylib::IPlugin_configuration>());
321 #ifndef MI_PLATFORM_WINDOWS
322     check_success( plugin_configuration->load_plugin_library( "freeimage.so") == 0);
323     check_success( plugin_configuration->load_plugin_library( "libiray.so") == 0);
324     check_success( plugin_configuration->load_plugin_library( "mi_importer.so") == 0);
325     check_success( plugin_configuration->load_plugin_library( "screen_video.so") == 0);
326 #else
327     check_success( plugin_configuration->load_plugin_library( "freeimage.dll") == 0);
328     check_success( plugin_configuration->load_plugin_library( "libiray.dll") == 0);
329     check_success( plugin_configuration->load_plugin_library( "mi_importer.dll") == 0);
330     check_success( plugin_configuration->load_plugin_library( "screen_video.dll") == 0);
331 #endif
332 }
333 
334 void prepare_rendering( mi::base::Handle<mi::neuraylib::INeuray> neuray,
335                         const char* scene_file)
336 {
337     // Get the database, the global scope, which is the root for all transactions,
338     // and create a transaction for importing the scene file and storing the scene.
339     mi::base::Handle<mi::neuraylib::IDatabase> database(
340         neuray->get_api_component<mi::neuraylib::IDatabase>());
341     check_success( database.is_valid_interface());
342     mi::base::Handle<mi::neuraylib::IScope> scope(
343         database->get_global_scope());
344     mi::base::Handle<mi::neuraylib::ITransaction> transaction(
345         scope->create_transaction());
346     check_success( transaction.is_valid_interface());
347 
348     // Import the scene file
349     mi::base::Handle<mi::neuraylib::IImport_api> import_api(
350         neuray->get_api_component<mi::neuraylib::IImport_api>());
351     check_success( import_api.is_valid_interface());
352     mi::base::Handle<const mi::IString> uri( import_api->convert_filename_to_uri( scene_file));
353     mi::base::Handle<const mi::IImport_result> import_result(
354         import_api->import_elements( transaction.get(), uri->get_c_str()));
355     check_success( import_result->get_error_number() == 0);
356 
357     // Create the scene object
358     mi::base::Handle<mi::neuraylib::IScene> scene(
359         transaction->create<mi::neuraylib::IScene>( "Scene"));
360     scene->set_rootgroup(       import_result->get_rootgroup());
361     scene->set_options(         import_result->get_options());
362     scene->set_camera_instance( import_result->get_camera_inst());
363 
364     // And store it in the database such that the render loop can later access it
365     transaction->store( scene.get(), "the_scene");
366     transaction->commit();
367 }
368 
369 void run_http_and_rtmp_server( mi::base::Handle<mi::neuraylib::INeuray> neuray,
370                                const char* port, const char* swf_file)
371 {
372     // Create an HTTP server instance
373     mi::base::Handle<mi::http::IFactory> http_factory(
374         neuray->get_api_component<mi::http::IFactory>());
375     mi::base::Handle<mi::http::IServer> http_server(
376         http_factory->create_server());
377 
378     // Install our HTTP request and response handlers
379     mi::base::Handle<mi::http::IRequest_handler> request_handler(
380         new Request_handler( swf_file));
381     http_server->install( request_handler.get());
382     mi::base::Handle<mi::http::IResponse_handler> response_handler(
383         new Response_handler());
384     http_server->install( response_handler.get());
385 
386     // Assemble HTTP server address
387     const char* ip = "0.0.0.0:";
388     char address[255];
389     address[0] = '\0';
390     strncat( address, ip, sizeof(address) - 1);
391     strncat( address, port, sizeof(address) - 1 - strlen(address));
392 
393     // Start HTTP server
394     http_server->start( address);
395 
396     // Create an RTMP server instance
397     mi::base::Handle<mi::rtmp::IFactory> rtmp_factory(
398         neuray->get_api_component<mi::rtmp::IFactory>());
399     mi::base::Handle<mi::rtmp::IServer> rtmp_server( rtmp_factory->create_server());
400 
401     // Install our HTTP connect handler
402     mi::base::Handle<mi::neuraylib::IDatabase> database(
403         neuray->get_api_component<mi::neuraylib::IDatabase>());
404     mi::base::Handle<mi::neuraylib::IScope> scope(
405         database->get_global_scope());
406     mi::base::Handle<mi::rtmp::IConnect_event_handler> connect_handler(
407         new Connect_event_handler( neuray, scope));
408     rtmp_server->install( connect_handler.get());
409 
410     // Start RTMP server
411     rtmp_server->start( "0.0.0.0:1935");
412 
413     // Run both servers for fixed time interval
414     sleep_seconds( 30);
415     http_server->shutdown();
416     rtmp_server->shutdown();
417 }
418 
419 int main( int argc, char* argv[])
420 {
421     // Collect command line parameters
422     if( argc != 5) {
423         fprintf( stderr,
424             "Usage: example_rtmp_server <swf_file> <scene_file> <mdl_path> <port>\n");
425         keep_console_open();
426         return EXIT_FAILURE;
427     }
428     const char* swf_file    = argv[1];
429     const char* scene_file  = argv[2];
430     const char* mdl_path = argv[3];
431     const char* port        = argv[4];
432 
433     // Access the neuray library
434     mi::base::Handle<mi::neuraylib::INeuray> neuray( load_and_get_ineuray());
435     check_success( neuray.is_valid_interface());
436 
437     // Configure the neuray library
438     configuration( neuray, mdl_path);
439 
440     // Start the neuray library
441     check_success( neuray->start() == 0);
442 
443     // Set up the scene
444     prepare_rendering( neuray, scene_file);
445 
446     // Serve video stream via RTMP server
447     run_http_and_rtmp_server( neuray, port, swf_file);
448 
449     // Shut down the neuray library
450     check_success( neuray->shutdown() == 0);
451     neuray = 0;
452 
453     // Unload the neuray library
454     check_success( unload());
455 
456     keep_console_open();
457     return EXIT_SUCCESS;
458 }

example_rtmp_server.mxml

001 <?xml version="1.0" encoding="utf-8"?>
002 <!--
003 /******************************************************************************
004  * © 1986, 2014 NVIDIA Corporation. All rights reserved.
005  *****************************************************************************/
006 -->
007 
008 <mx:Application
009     xmlns:mx="http://www.adobe.com/2006/mxml"
010     layout="horizontal"
011     initialize="init()" xmlns:local="*">
012 
013 <mx:Script>
014     <![CDATA[
015         import mx.core.Application;
016 
017         public function init():void {
018             vidplayer.makeConnection("rtmp://" + getHost());
019         }
020 
021         public function getHost():String  {
022             var location:String = Application.application.url;
023             var components:Array = location.split("/");
024             if (components.length < 3)
025                 return "localhost";
026             var host_port:Array = components[2].split(":");
027             if (host_port.length <= 1)
028                return "localhost";
029             return host_port[0];
030         }
031     ]]>
032 </mx:Script>
033 
034 <!-- refer to the actionscript object -->
035 <local:example_rtmp_server_actionscript includeInLayout="true" id="vidplayer" width="1024" height="786" />
036 </mx:Application>

example_rtmp_server_actionscript.as

001 /******************************************************************************
002  * © 1986, 2014 NVIDIA ARC GmbH, Berlin,
003  * Germany. All rights reserved
004  *****************************************************************************/
005 
006 package {
007     import flash.events.MouseEvent;
008     import flash.events.NetStatusEvent;
009     import flash.events.SecurityErrorEvent;
010     import flash.media.Video;
011     import flash.net.NetConnection;
012     import flash.net.NetStream;
013     import mx.core.Application;
014     import mx.core.UIComponent;
015 
016     public class example_rtmp_server_actionscript extends UIComponent {
017         private var streamName:String = "example_rtmp_server";
018         public var connection:NetConnection = null;
019         private var video:Video = null;
020         private var mystream:NetStream = null;
021         private var client:Object = null;
022         private var mouseButton:Boolean = false;
023         private var mousePosX:int = 0;
024         private var mousePosY:int = 0;
025 
026         public function example_rtmp_server_actionscript() {
027             super();
028             this.addEventListener(MouseEvent.MOUSE_DOWN, this.onMouseDown);
029         }
030 
031         public function makeConnection(url:String):void {
032             if (connection != null) {
033                 mystream = null;
034                 connection.close();
035             } else {
036                 connection = new NetConnection();
037             }
038             connection.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
039             connection.addEventListener(SecurityErrorEvent.SECURITY_ERROR, securityErrorHandler);
040             var args:Object = new Object();
041             args["resolution_x"] = floor16(this.width).toString();
042             args["resolution_y"] = floor16(this.height).toString();
043             connection.connect(url,args);
044         }
045 
046         private function floor16(val:int):int  {
047             return int(val/16) * 16;
048         }
049 
050         public function closeConnection():void {
051             if (connection != null) {
052                 mystream = null;
053                 connection.close();
054             }
055         }
056 
057         private function netStatusHandler(event:NetStatusEvent):void {
058             switch (event.info.code) {
059                 case "NetConnection.Connect.Success":
060                     connectStream();
061                     break;
062                 case "NetStream.Play.StreamNotFound":
063                     trace("Stream not found: " + streamName);
064                     break;
065             }
066         }
067 
068         private function securityErrorHandler(event:SecurityErrorEvent):void {
069             trace("securityErrorHandler: " + event);
070         }
071 
072         private function connectStream():void {
073             mystream = new NetStream(connection);
074             mystream.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
075             if (video == null) {
076                 video = new Video(this.width,this.height);
077                 video.smoothing = true;
078             }
079             video.attachNetStream(mystream);
080             addChild(video);
081             mystream.play(streamName);
082         }
083 
084         public function onMouseDown(event: MouseEvent):void {
085             var x: int = event.stageX - (event.target as UIComponent).parent.x;
086             var y: int = event.stageY - (event.target as UIComponent).parent.y;
087             mousePosX = x;
088             mousePosY = y;
089             Application.application.addEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
090             Application.application.addEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
091             mouseButton = true;
092         }
093 
094         public function onMouseUp(event: MouseEvent):void {
095             if (mouseButton) {
096                 mouseButton = false;
097                 Application.application.removeEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
098                 Application.application.removeEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
099             }
100         }
101 
102         public function onMouseMove(event: MouseEvent):void
103         {
104             var x: int = event.stageX - (event.target as UIComponent).parent.x;
105             var y: int = event.stageY - (event.target as UIComponent).parent.y;
106             if (mouseButton && connection && connection.connected && mystream) {
107                 var diff_x:int = x-mousePosX;
108                 var diff_y:int = y-mousePosY;
109                 var args:Object = new Object();
110                 if (diff_x != 0) args["pan_x"] = diff_x;
111                 if (diff_y != 0) args["pan_y"] = -diff_y;
112                 if (diff_x || diff_y) {
113                    // For demonstration purposes also send a double..
114                    args["pan_xd"] = (diff_x < 0) ? diff_x - 0.1 : diff_x + 0.1
115                    // ..and some bool
116                    args["going_right"] = diff_x > 0 ? true : false;
117                    connection.call("moveCamera",null,args);
118                 }
119                 mousePosX = x;
120                 mousePosY = y;
121             }
122         }
123     }
124 }