Running an RTMP and HTTP server with mouse interaction
The example program renders a scene and serves an interactive video stream over RTMP. The provided Flash file (the .swf file) can be reproduced by running the free Adobe Flex SDK compiler, mxmlc, on the included .mxml file.
Note the following:
- The example_rtmp_server.cpp starts an HTTP server. The client — a browser or a standalone Flash player — fetches a Flash application from it and then uses that application to view and interact with the video stream.
- By default, the stream uses the screen video codec, which is provided with the library and encodes the canvas produced by scene rendering. The encoded frame is sent over the RTMP stream to the Flash client.
- On the connection, a Remote Procedure Call (RPC) is installed. The RPC is called when the client interacts with the video stream using the mouse.
The example program consists of three files:
example_rtmp_server.cpp
001 /****************************************************************************** 002 * © 1986, 2016 NVIDIA Corporation. All rights reserved. 003 *****************************************************************************/ 004 005 // examples/example_rtmp_server.cpp 006 // 007 // Serves a flash player (.swf file) over HTTP to a client browser which then connects to the RTMP 008 // server which produces a video stream from the rendering of a scene. 009 // 010 // The example expects the following command line arguments: 011 // 012 // example_rtmp_server <swf_file> <scene_file> <mdl_path> <port> 013 // 014 // swf_file the flash player, the .swf-file included in the examples directory 015 // scene_file some scene file, e.g., main.mi 016 // mdl_path path to the MDL modules, e.g., iray-<version>/mdl 017 // port port for the HTTP server 018 019 #include <mi/neuraylib.h> 020 021 // Include code shared by all examples. 022 #include "example_shared.h" 023 // Include an implementation of IRender_target. 024 #include "example_render_target_simple.h" 025 026 #include <cstdio> 027 #include <fstream> 028 #include <vector> 029 030 // HTTP server implementation 031 // 032 // The HTTP servers just serves the .swf file. 033 034 // A simple implementation of the IBuffer interface. 035 class Buffer : public mi::base::Interface_implement<mi::neuraylib::IBuffer> 036 { 037 public: 038 const mi::Uint8* get_data() const { return &m_buffer[0]; } 039 040 mi::Size get_data_size() const { return m_buffer.size(); } 041 042 Buffer( const std::vector<mi::Uint8>& content) { m_buffer = content; } 043 044 private: 045 std::vector<mi::Uint8> m_buffer; 046 }; 047 048 // An HTTP response handler which always sets the content type for flash. 
049 class Response_handler : public mi::base::Interface_implement<mi::http::IResponse_handler> 050 { 051 public: 052 void handle( mi::http::IConnection* connection) 053 { 054 mi::http::IResponse* iresponse( connection->get_response()); 055 iresponse->set_header( "Content-Type", "application/x-shockwave-flash"); 056 } 057 }; 058 059 // An HTTP request handler which always sends the .swf file. 060 class Request_handler : public mi::base::Interface_implement<mi::http::IRequest_handler> 061 { 062 public: 063 Request_handler( const char* swf_file) : m_swf_file( swf_file) { } 064 065 bool handle( mi::http::IConnection* connection) 066 { 067 std::ifstream file( m_swf_file, std::ios::in|std::ios::binary|std::ios::ate); 068 check_success( file); 069 070 std::ifstream::pos_type size = file.tellg(); 071 std::vector<mi::Uint8> data( static_cast<mi::Size>( size)); 072 file.seekg( 0, std::ios::beg); 073 file.read( (char*) &data[0], size); 074 file.close(); 075 076 mi::base::Handle<mi::neuraylib::IBuffer> buffer( new Buffer( data)); 077 connection->enqueue( buffer.get()); 078 return true; 079 } 080 081 private: 082 const char* m_swf_file; 083 }; 084 085 // RTMP server implementation 086 // 087 // The RTMP renders a given scene and interprets mouse movements as camera movements. 088 089 // An RTMP play event handler that chooses the screen video codec and initializes it with a 090 // predefined window size. 
091 class Play_event_handler : public mi::base::Interface_implement<mi::rtmp::IPlay_event_handler> 092 { 093 public: 094 bool handle( bool is_start, mi::rtmp::IStream* stream, mi::neuraylib::IVideo_data** out) 095 { 096 if( is_start) { 097 check_success( stream->use_codec( "screen video")); 098 mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec()); 099 check_success( codec->init( 512, 384, out)); 100 } 101 else { 102 mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec()); 103 check_success( codec->close( out)); 104 } 105 return true; 106 } 107 }; 108 109 // An RTMP frame event handler that encodes a frame and gives it to the RTMP server for 110 // sending. Note that this event runs in another thread than the other event handlers, most 111 // importantly the render handler, so care needs to be taken to avoid synchronization issues. 112 class Frame_event_handler : public mi::base::Interface_implement<mi::rtmp::IFrame_event_handler> 113 { 114 public: 115 bool handle( 116 mi::rtmp::IStream* stream, mi::neuraylib::IVideo_data** out, bool send_queue_is_full) 117 { 118 if (send_queue_is_full) // we do not want to increase buffering 119 return true; 120 mi::base::Handle<mi::neuraylib::IVideo_encoder> codec( stream->get_video_codec()); 121 mi::neuraylib::ICanvas* canvas = 0; 122 { 123 mi::base::Lock::Block block( &m_cached_canvas_lock); 124 canvas = m_cached_canvas.get(); 125 if ( !canvas) 126 return true; 127 canvas->retain(); 128 } 129 bool result = codec->encode_canvas( canvas, out); 130 canvas->release(); 131 return result; 132 } 133 void update_canvas(mi::base::Handle<mi::neuraylib::ICanvas> new_canvas) 134 { 135 mi::base::Lock::Block block( &m_cached_canvas_lock); 136 m_cached_canvas = new_canvas; 137 } 138 private: 139 mi::base::Lock m_cached_canvas_lock; 140 mi::base::Handle<mi::neuraylib::ICanvas> m_cached_canvas; 141 }; 142 143 144 // An RTMP render event handler that renders a given scene into a canvas and 
saves it for the 145 // frame event handler to encode. 146 class Render_event_handler : public mi::base::Interface_implement<mi::rtmp::IRender_event_handler> 147 { 148 public: 149 Render_event_handler( 150 mi::base::Handle<mi::neuraylib::INeuray> neuray, 151 mi::base::Handle<mi::neuraylib::IScope> scope, 152 mi::base::Handle<Frame_event_handler> handler) 153 : m_neuray( neuray), m_scope( scope), m_frame_handler( handler) 154 { 155 mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction()); 156 { 157 mi::base::Handle<mi::neuraylib::IScene> scene( 158 transaction->edit<mi::neuraylib::IScene>( "the_scene")); 159 m_render_context = scene->create_render_context( transaction.get(), "iray"); 160 check_success( m_render_context.is_valid_interface()); 161 mi::base::Handle<mi::IString> scheduler_mode( transaction->create<mi::IString>()); 162 scheduler_mode->set_c_str( "interactive"); 163 m_render_context->set_option( "scheduler_mode", scheduler_mode.get()); 164 mi::base::Handle<mi::IFloat32> interval( transaction->create<mi::IFloat32>()); 165 interval->set_value( 0.1f); 166 m_render_context->set_option( "interactive_update_interval", interval.get()); 167 } 168 transaction->commit(); 169 } 170 171 bool handle( mi::rtmp::IStream* /*stream*/) 172 { 173 mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction()); 174 { 175 mi::base::Handle<mi::neuraylib::IImage_api> image_api( 176 m_neuray->get_api_component<mi::neuraylib::IImage_api>()); 177 mi::base::Handle<mi::neuraylib::IRender_target> render_target( 178 new Render_target( image_api.get(), "Color", 512, 384)); 179 check_success( 180 m_render_context->render( transaction.get(), render_target.get(), 0) >= 0); 181 182 mi::base::Handle<mi::neuraylib::ICanvas> canvas( render_target->get_canvas( 0)); 183 m_frame_handler->update_canvas( canvas); 184 } 185 transaction->commit(); 186 return true; 187 } 188 189 private: 190 mi::base::Handle<mi::neuraylib::INeuray> m_neuray; 
191 mi::base::Handle<mi::neuraylib::IScope> m_scope; 192 mi::base::Handle<Frame_event_handler> m_frame_handler; 193 mi::base::Handle<mi::neuraylib::IRender_context> m_render_context; 194 }; 195 196 // An RTMP stream event handler that registers the play and render event handlers above. 197 class Stream_event_handler : public mi::base::Interface_implement<mi::rtmp::IStream_event_handler> 198 { 199 public: 200 Stream_event_handler( 201 mi::base::Handle<mi::neuraylib::INeuray> neuray, 202 mi::base::Handle<mi::neuraylib::IScope> scope) 203 : m_neuray( neuray), m_scope( scope) { } 204 205 bool handle( 206 bool is_create, mi::rtmp::IStream* stream, 207 const mi::IData* /*command_arguments*/) 208 { 209 if( is_create) { 210 mi::base::Handle<mi::rtmp::IPlay_event_handler> play_event_handler( 211 new Play_event_handler()); 212 stream->register_play_event_handler( play_event_handler.get()); 213 mi::base::Handle<Frame_event_handler> frame_event_handler( new Frame_event_handler()); 214 mi::base::Handle<mi::rtmp::IRender_event_handler> render_event_handler( 215 new Render_event_handler( m_neuray, m_scope, frame_event_handler)); 216 stream->register_render_event_handler( render_event_handler.get()); 217 stream->register_frame_event_handler( frame_event_handler.get()); 218 } 219 return true; 220 } 221 222 private: 223 mi::base::Handle<mi::neuraylib::INeuray> m_neuray; 224 mi::base::Handle<mi::neuraylib::IScope> m_scope; 225 }; 226 227 // An RTMP call event handler that moves the camera according to the arguments 'pan_x' and 'pan_y'. 
228 class Call_event_handler : public mi::base::Interface_implement<mi::rtmp::ICall_event_handler> 229 { 230 public: 231 Call_event_handler( mi::base::Handle<mi::neuraylib::IScope> scope) : m_scope( scope) { } 232 233 bool handle( 234 mi::rtmp::IConnection* /*connection*/, 235 const char* /*procedure_name*/, 236 const mi::IData* /*command_arguments*/, 237 const mi::IData* user_arguments, 238 mi::IData** /*response_arguments*/) 239 { 240 mi::base::Handle<mi::neuraylib::ITransaction> transaction( m_scope->create_transaction()); 241 { 242 // The "camera" name matches the camera in main.mi in the examples directory. 243 mi::base::Handle<mi::neuraylib::ICamera> camera( 244 transaction->edit<mi::neuraylib::ICamera>( "camera")); 245 check_success( camera.is_valid_interface()); 246 mi::base::Handle<const mi::IMap> imap( user_arguments->get_interface<const mi::IMap>()); 247 check_success( imap.is_valid_interface()); 248 mi::base::Handle<const mi::ISint32> pan_x( imap->get_value<mi::ISint32>( "pan_x")); 249 if ( pan_x) { 250 mi::Float64 x = camera->get_offset_x(); 251 camera->set_offset_x( x - pan_x->get_value<mi::Sint32>()); 252 // The example client also demonstrates how to send/parse a double. 
253 mi::base::Handle<const mi::IFloat64> pan_xd( 254 imap->get_value<mi::IFloat64>( "pan_xd")); 255 if( pan_xd) { 256 mi::Float64 xd = pan_xd->get_value<mi::Float64>(); 257 check_success( mi::Sint32(xd) == pan_x->get_value<mi::Sint32>()); 258 } 259 } 260 mi::base::Handle<const mi::ISint32> pan_y( imap->get_value<mi::ISint32>( "pan_y")); 261 if( pan_y) { 262 mi::Float64 y = camera->get_offset_y(); 263 camera->set_offset_y( y - pan_y->get_value<mi::Sint32>()); 264 } 265 // Demonstrate getting a bool from the example client 266 mi::base::Handle<const mi::IBoolean> dir( 267 imap->get_value<mi::IBoolean>( "going_right")); 268 if ( dir) { 269 bool going_right = dir->get_value<bool>(); 270 going_right = !going_right; // avoid compiler warning 271 } 272 } 273 transaction->commit(); 274 return true; 275 } 276 277 private: 278 mi::base::Handle<mi::neuraylib::IScope> m_scope; 279 }; 280 281 // An RTMP connect event handler that registers the stream and call event handlers above. 282 class Connect_event_handler : public mi::base::Interface_implement<mi::rtmp::IConnect_event_handler> 283 { 284 public: 285 Connect_event_handler( 286 mi::base::Handle<mi::neuraylib::INeuray> neuray, 287 mi::base::Handle<mi::neuraylib::IScope> scope) 288 : m_neuray( neuray), m_scope( scope) { } 289 290 bool handle( 291 bool is_create, mi::rtmp::IConnection* connection, 292 const mi::IData* /*command_arguments*/, 293 const mi::IData* /*user_arguments*/) 294 { 295 if( is_create) { 296 mi::base::Handle<mi::rtmp::IStream_event_handler> stream_event_handler( 297 new Stream_event_handler( m_neuray, m_scope)); 298 connection->register_stream_event_handler( stream_event_handler.get()); 299 mi::base::Handle<mi::rtmp::ICall_event_handler> call_event_handler( 300 new Call_event_handler( m_scope)); 301 connection->register_remote_call_handler( call_event_handler.get(), "moveCamera"); 302 } 303 return true; 304 } 305 306 private: 307 mi::base::Handle<mi::neuraylib::INeuray> m_neuray; 308 
mi::base::Handle<mi::neuraylib::IScope> m_scope; 309 }; 310 311 void configuration( mi::base::Handle<mi::neuraylib::INeuray> neuray, const char* mdl_path) 312 { 313 // Configure the neuray library. Here we set the search path for .mdl files. 314 mi::base::Handle<mi::neuraylib::IRendering_configuration> rendering_configuration( 315 neuray->get_api_component<mi::neuraylib::IRendering_configuration>()); 316 check_success( rendering_configuration.is_valid_interface()); 317 check_success( rendering_configuration->add_mdl_path( mdl_path) == 0); 318 319 // Load the FreeImage, Iray Photoreal, and .mi importer plugins. 320 // Also load the default video codec plugin which will be used to encode the rendered frames. 321 mi::base::Handle<mi::neuraylib::IPlugin_configuration> plugin_configuration( 322 neuray->get_api_component<mi::neuraylib::IPlugin_configuration>()); 323 #ifndef MI_PLATFORM_WINDOWS 324 check_success( plugin_configuration->load_plugin_library( "freeimage.so") == 0); 325 check_success( plugin_configuration->load_plugin_library( "libiray.so") == 0); 326 check_success( plugin_configuration->load_plugin_library( "mi_importer.so") == 0); 327 check_success( plugin_configuration->load_plugin_library( "screen_video.so") == 0); 328 #else 329 check_success( plugin_configuration->load_plugin_library( "freeimage.dll") == 0); 330 check_success( plugin_configuration->load_plugin_library( "libiray.dll") == 0); 331 check_success( plugin_configuration->load_plugin_library( "mi_importer.dll") == 0); 332 check_success( plugin_configuration->load_plugin_library( "screen_video.dll") == 0); 333 #endif 334 } 335 336 void prepare_rendering( mi::base::Handle<mi::neuraylib::INeuray> neuray, 337 const char* scene_file) 338 { 339 // Get the database, the global scope of the database, and create a transaction in the global 340 // scope for importing the scene file and storing the scene. 
341 mi::base::Handle<mi::neuraylib::IDatabase> database( 342 neuray->get_api_component<mi::neuraylib::IDatabase>()); 343 check_success( database.is_valid_interface()); 344 mi::base::Handle<mi::neuraylib::IScope> scope( 345 database->get_global_scope()); 346 mi::base::Handle<mi::neuraylib::ITransaction> transaction( 347 scope->create_transaction()); 348 check_success( transaction.is_valid_interface()); 349 350 // Import the scene file 351 mi::base::Handle<mi::neuraylib::IImport_api> import_api( 352 neuray->get_api_component<mi::neuraylib::IImport_api>()); 353 check_success( import_api.is_valid_interface()); 354 mi::base::Handle<const mi::IString> uri( import_api->convert_filename_to_uri( scene_file)); 355 mi::base::Handle<const mi::neuraylib::IImport_result> import_result( 356 import_api->import_elements( transaction.get(), uri->get_c_str())); 357 check_success( import_result->get_error_number() == 0); 358 359 // Create the scene object 360 mi::base::Handle<mi::neuraylib::IScene> scene( 361 transaction->create<mi::neuraylib::IScene>( "Scene")); 362 scene->set_rootgroup( import_result->get_rootgroup()); 363 scene->set_options( import_result->get_options()); 364 scene->set_camera_instance( import_result->get_camera_inst()); 365 366 // And store it in the database such that the render loop can later access it 367 transaction->store( scene.get(), "the_scene"); 368 transaction->commit(); 369 } 370 371 void run_http_and_rtmp_server( mi::base::Handle<mi::neuraylib::INeuray> neuray, 372 const char* port, const char* swf_file) 373 { 374 // Create an HTTP server instance 375 mi::base::Handle<mi::http::IFactory> http_factory( 376 neuray->get_api_component<mi::http::IFactory>()); 377 mi::base::Handle<mi::http::IServer> http_server( 378 http_factory->create_server()); 379 380 // Install our HTTP request and response handlers 381 mi::base::Handle<mi::http::IRequest_handler> request_handler( 382 new Request_handler( swf_file)); 383 http_server->install( request_handler.get()); 384 
mi::base::Handle<mi::http::IResponse_handler> response_handler( 385 new Response_handler()); 386 http_server->install( response_handler.get()); 387 388 // Assemble HTTP server address 389 const char* ip = "0.0.0.0:"; 390 char address[255]; 391 address[0] = '\0'; 392 strncat( address, ip, sizeof(address) - 1); 393 strncat( address, port, sizeof(address) - 1 - strlen(address)); 394 395 // Start HTTP server 396 http_server->start( address); 397 398 // Create an RTMP server instance 399 mi::base::Handle<mi::rtmp::IFactory> rtmp_factory( 400 neuray->get_api_component<mi::rtmp::IFactory>()); 401 mi::base::Handle<mi::rtmp::IServer> rtmp_server( rtmp_factory->create_server()); 402 403 // Install our HTTP connect handler 404 mi::base::Handle<mi::neuraylib::IDatabase> database( 405 neuray->get_api_component<mi::neuraylib::IDatabase>()); 406 mi::base::Handle<mi::neuraylib::IScope> scope( 407 database->get_global_scope()); 408 mi::base::Handle<mi::rtmp::IConnect_event_handler> connect_handler( 409 new Connect_event_handler( neuray, scope)); 410 rtmp_server->install( connect_handler.get()); 411 412 // Start RTMP server 413 rtmp_server->start( "0.0.0.0:1935"); 414 415 // Run both servers for fixed time interval 416 sleep_seconds( 30); 417 http_server->shutdown(); 418 rtmp_server->shutdown(); 419 } 420 421 int main( int argc, char* argv[]) 422 { 423 // Collect command line parameters 424 if( argc != 5) { 425 fprintf( stderr, 426 "Usage: example_rtmp_server <swf_file> <scene_file> <mdl_path> <port>\n"); 427 keep_console_open(); 428 return EXIT_FAILURE; 429 } 430 const char* swf_file = argv[1]; 431 const char* scene_file = argv[2]; 432 const char* mdl_path = argv[3]; 433 const char* port = argv[4]; 434 435 // Access the neuray library 436 mi::base::Handle<mi::neuraylib::INeuray> neuray( load_and_get_ineuray()); 437 check_success( neuray.is_valid_interface()); 438 439 // Configure the neuray library 440 configuration( neuray, mdl_path); 441 442 // Start the neuray library 443 
mi::Sint32 result = neuray->start(); 444 check_start_success( result); 445 446 // Set up the scene 447 prepare_rendering( neuray, scene_file); 448 449 // Serve video stream via RTMP server 450 run_http_and_rtmp_server( neuray, port, swf_file); 451 452 // Shut down the neuray library 453 check_success( neuray->shutdown() == 0); 454 neuray = 0; 455 456 // Unload the neuray library 457 check_success( unload()); 458 459 keep_console_open(); 460 return EXIT_SUCCESS; 461 }
example_rtmp_server.mxml
<?xml version="1.0" encoding="utf-8"?>
<!--
/******************************************************************************
 * © 1986, 2016 NVIDIA Corporation. All rights reserved.
 *****************************************************************************/
-->

<mx:Application
    xmlns:mx="http://www.adobe.com/2006/mxml"
    layout="horizontal"
    initialize="init()" xmlns:local="*">

    <mx:Script>
        <![CDATA[
            import mx.core.Application;

            // Connect the embedded video player to the RTMP server running on
            // the same host this application was loaded from.
            public function init():void {
                vidplayer.makeConnection("rtmp://" + getHost());
            }

            // Extracts the host name from the application's own URL
            // (scheme://host:port/...). Falls back to "localhost" when the URL
            // has no host/port component.
            public function getHost():String {
                var location:String = Application.application.url;
                var components:Array = location.split("/");
                if (components.length < 3)
                    return "localhost";
                var host_port:Array = components[2].split(":");
                if (host_port.length <= 1)
                    return "localhost";
                return host_port[0];
            }
        ]]>
    </mx:Script>

    <!-- refer to the actionscript object -->
    <!-- NOTE(review): height="786" looks like a typo for the common 768; kept
         as-is to preserve behavior - confirm against the original example. -->
    <local:example_rtmp_server_actionscript includeInLayout="true" id="vidplayer" width="1024" height="786" />
</mx:Application>
example_rtmp_server_actionscript.as
/******************************************************************************
 * © 1986, 2016 NVIDIA Corporation. All rights reserved.
 *****************************************************************************/

package {
    import flash.events.MouseEvent;
    import flash.events.NetStatusEvent;
    import flash.events.SecurityErrorEvent;
    import flash.media.Video;
    import flash.net.NetConnection;
    import flash.net.NetStream;
    import mx.core.Application;
    import mx.core.UIComponent;

    // Flash-side client of the example: plays the RTMP video stream and turns
    // mouse drags into "moveCamera" remote procedure calls on the server.
    public class example_rtmp_server_actionscript extends UIComponent {
        private var streamName:String = "example_rtmp_server";
        public var connection:NetConnection = null;
        private var video:Video = null;
        private var mystream:NetStream = null;
        private var client:Object = null;
        private var mouseButton:Boolean = false; // true while a drag is active
        private var mousePosX:int = 0;           // last drag position, x
        private var mousePosY:int = 0;           // last drag position, y

        public function example_rtmp_server_actionscript() {
            super();
            this.addEventListener(MouseEvent.MOUSE_DOWN, this.onMouseDown);
        }

        // Opens (or re-opens) the NetConnection to the given RTMP URL,
        // passing the desired stream resolution as connect arguments.
        public function makeConnection(url:String):void {
            if (connection != null) {
                mystream = null;
                connection.close();
            } else {
                connection = new NetConnection();
            }
            // NOTE(review): on a reconnect these listeners are registered on
            // the same NetConnection again, duplicating them - verify whether
            // reconnection is an intended use before changing this.
            connection.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
            connection.addEventListener(SecurityErrorEvent.SECURITY_ERROR, securityErrorHandler);
            var args:Object = new Object();
            args["resolution_x"] = floor16(this.width).toString();
            args["resolution_y"] = floor16(this.height).toString();
            connection.connect(url, args);
        }

        // Rounds down to a multiple of 16 (presumably a restriction of the
        // video codec - confirm against the server-side codec).
        private function floor16(val:int):int {
            return int(val / 16) * 16;
        }

        public function closeConnection():void {
            if (connection != null) {
                mystream = null;
                connection.close();
            }
        }

        private function netStatusHandler(event:NetStatusEvent):void {
            switch (event.info.code) {
                case "NetConnection.Connect.Success":
                    connectStream();
                    break;
                case "NetStream.Play.StreamNotFound":
                    trace("Stream not found: " + streamName);
                    break;
            }
        }

        private function securityErrorHandler(event:SecurityErrorEvent):void {
            trace("securityErrorHandler: " + event);
        }

        // Creates the NetStream, attaches it to the (lazily created) Video
        // display object, and starts playback of the server's stream.
        private function connectStream():void {
            mystream = new NetStream(connection);
            mystream.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
            if (video == null) {
                video = new Video(this.width, this.height);
                video.smoothing = true;
            }
            video.attachNetStream(mystream);
            addChild(video);
            mystream.play(streamName);
        }

        // Starts a drag: remember the position and listen for move/up on the
        // whole application so the drag keeps working outside this component.
        public function onMouseDown(event:MouseEvent):void {
            var x:int = event.stageX - (event.target as UIComponent).parent.x;
            var y:int = event.stageY - (event.target as UIComponent).parent.y;
            mousePosX = x;
            mousePosY = y;
            Application.application.addEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
            Application.application.addEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
            mouseButton = true;
        }

        public function onMouseUp(event:MouseEvent):void {
            if (mouseButton) {
                mouseButton = false;
                Application.application.removeEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
                Application.application.removeEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
            }
        }

        // Sends the drag delta to the server as a "moveCamera" RPC.
        public function onMouseMove(event:MouseEvent):void {
            var x:int = event.stageX - (event.target as UIComponent).parent.x;
            var y:int = event.stageY - (event.target as UIComponent).parent.y;
            if (mouseButton && connection && connection.connected && mystream) {
                var diff_x:int = x - mousePosX;
                var diff_y:int = y - mousePosY;
                var args:Object = new Object();
                if (diff_x != 0) args["pan_x"] = diff_x;
                if (diff_y != 0) args["pan_y"] = -diff_y;
                if (diff_x || diff_y) {
                    // For demonstration purposes also send a double..
                    // (fixed: the original statement lacked its semicolon)
                    args["pan_xd"] = (diff_x < 0) ? diff_x - 0.1 : diff_x + 0.1;
                    // ..and some bool (simplified from "diff_x > 0 ? true : false")
                    args["going_right"] = diff_x > 0;
                    connection.call("moveCamera", null, args);
                }
                mousePosX = x;
                mousePosY = y;
            }
        }
    }
}