1 /*
2 * This file is part of bino, a 3D video player.
3 *
4 * Copyright (C) 2010, 2011, 2012, 2013, 2014, 2015
5 * Martin Lambers <marlam@marlam.de>
6 * Stefan Eilemann <eile@eyescale.ch>
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 3 of the License, or
11 * (at your option) any later version.
12 *
13 * This program is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with this program. If not, see <http://www.gnu.org/licenses/>.
20 */
21
#include "config.h"

#include <sstream>
#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <cstring>  // for std::memcpy()
#include <unistd.h> // for usleep()
29
30 #include <eq/eq.h>
31
32 #include "base/dbg.h"
33 #include "base/msg.h"
34 #include "base/ser.h"
35
36 #include "base/gettext.h"
37 #define _(string) gettext(string)
38
39 #include "dispatch.h"
40 #include "media_input.h"
41 #include "video_output.h"
42 #include "player_equalizer.h"
43
44 /*
45 * Every eq::Node has a special player: player_eq_node.
46 * These node players do not control the video playing themselves. Instead,
47 * they are told what to do via the frame data.
48 *
49 * eq::Config has the master player object. This master also plays the sound,
50 * to be able to synchronize the video to it.
51 *
52 * The application node reuses the player of eq::Config, so that it does not
53 * need to run two players.
54 *
55 * Each eq::Window has a special video_output: video_output_eq_window.
56 * It manages the video textures.
57 *
 * Each eq::Channel then calls the window's display function to render its subset of
59 * the video.
60 */
61
62 extern dispatch* global_dispatch;
63 static player_equalizer* global_player_equalizer = NULL;
64
65 /*
66 * player_eq_node
67 *
68 * Implementation of player for eq_node.
69 * The player_eq_node instance of the application node lives in eq_config instead of eq_node.
70 */
71
72 class player_eq_node : public player
73 {
74 private:
75 class media_input _media_input;
76
77 public:
player_eq_node()78 player_eq_node() : player()
79 {
80 }
81
init(const open_input_data & input)82 bool init(const open_input_data& input)
83 {
84 try {
85 _media_input.open(input.urls, input.dev_request);
86 if (_media_input.video_streams() == 0)
87 throw exc(_("No video streams found."));
88 if (input.params.stereo_layout_is_set() || input.params.stereo_layout_swap_is_set())
89 _media_input.set_stereo_layout(input.params.stereo_layout(), input.params.stereo_layout_swap());
90 _media_input.select_video_stream(input.params.video_stream());
91 player::open();
92 }
93 catch (std::exception& e) {
94 msg::err("%s", e.what());
95 return false;
96 }
97 return true;
98 }
99
seek(int64_t pos)100 void seek(int64_t pos)
101 {
102 _media_input.seek(pos);
103 // The master player read a video frame; do the same to keep sync
104 start_frame_read();
105 }
106
start_frame_read()107 void start_frame_read()
108 {
109 // Only called on slave nodes
110 _media_input.start_video_frame_read();
111 }
112
finish_frame_read()113 void finish_frame_read()
114 {
115 // Only called on slave nodes
116 _video_frame = _media_input.finish_video_frame_read();
117 if (!_video_frame.is_valid())
118 {
119 msg::err(_("Reading input frame failed."));
120 std::exit(1);
121 }
122 }
123 };
124
125 /*
126 * video_output_eq_channel
127 *
128 * Implementation of video_output for eq_channel.
129 *
130 * Much of the video_output interface is not relevant for Equalizer, and thus
131 * is implemented with simple stub functions.
132 */
133
134 class video_output_eq_channel : public video_output
135 {
136 private:
137 eq::Channel *_channel;
138 const float _canvas_width;
139 const float _canvas_height;
140 float _canvas_video_area_w;
141 float _canvas_video_area_h;
142
143 protected:
video_display_width() const144 int video_display_width() const { return screen_width() * _canvas_video_area_w; }
video_display_height() const145 int video_display_height() const { return screen_height() * _canvas_video_area_h; }
screen_width() const146 int screen_width() const { return _channel->getPixelViewport().w / _channel->getViewport().w; }
screen_height() const147 int screen_height() const { return _channel->getPixelViewport().h / _channel->getViewport().h; }
screen_pixel_aspect_ratio() const148 float screen_pixel_aspect_ratio() const
149 {
150 float pixels_per_unit_x = _channel->getPixelViewport().w / (_canvas_width * _channel->getViewport().w);
151 float pixels_per_unit_y = _channel->getPixelViewport().h / (_canvas_height * _channel->getViewport().h);
152 return pixels_per_unit_y / pixels_per_unit_x;
153 }
width() const154 int width() const { return _channel->getPixelViewport().w; }
height() const155 int height() const { return _channel->getPixelViewport().h; }
pos_x() const156 int pos_x() const { return 0; }
pos_y() const157 int pos_y() const { return 0; }
glewGetContext() const158 GLEWContext* glewGetContext() const { return const_cast<GLEWContext*>(_channel->getWindow()->glewGetContext()); }
context_is_stereo() const159 bool context_is_stereo() const { return false; }
recreate_context(bool)160 void recreate_context(bool) { }
trigger_resize(int,int)161 void trigger_resize(int, int) { }
162
163 public:
wait_for_subtitle_renderer()164 int64_t wait_for_subtitle_renderer()
165 {
166 if (!_subtitle_renderer.is_initialized())
167 {
168 msg::wrn(_("Waiting for subtitle renderer initialization..."));
169 try
170 {
171 while (!_subtitle_renderer.is_initialized())
172 {
173 usleep(10000);
174 }
175 }
176 catch (std::exception &e)
177 {
178 msg::err("%s", e.what());
179 std::exit(1);
180 }
181 }
182 return 0;
183 }
supports_stereo() const184 bool supports_stereo() const { return false; }
center()185 void center() { }
enter_fullscreen()186 void enter_fullscreen() { }
exit_fullscreen()187 void exit_fullscreen() { }
process_events()188 void process_events() { }
189
190 public:
video_output_eq_channel(eq::Channel * channel,float canvas_width,float canvas_height)191 video_output_eq_channel(eq::Channel *channel,
192 float canvas_width, float canvas_height) :
193 video_output(),
194 _channel(channel),
195 _canvas_width(canvas_width),
196 _canvas_height(canvas_height)
197 {
198 }
199
set_canvas_size(float canvas_video_area_w,float canvas_video_area_h)200 void set_canvas_size(float canvas_video_area_w, float canvas_video_area_h)
201 {
202 _canvas_video_area_w = canvas_video_area_w;
203 _canvas_video_area_h = canvas_video_area_h;
204 }
205
display_current_frame(bool mono_right_instead_of_left,float x,float y,float w,float h,const GLint viewport[4],const float tex_coords[4][2])206 void display_current_frame(bool mono_right_instead_of_left,
207 float x, float y, float w, float h,
208 const GLint viewport[4], const float tex_coords[4][2])
209 {
210 float my_tex_coords[2][4][2] =
211 {
212 {
213 { tex_coords[0][0], tex_coords[0][1] },
214 { tex_coords[1][0], tex_coords[1][1] },
215 { tex_coords[2][0], tex_coords[2][1] },
216 { tex_coords[3][0], tex_coords[3][1] },
217 },
218 {
219 { tex_coords[0][0], tex_coords[0][1] },
220 { tex_coords[1][0], tex_coords[1][1] },
221 { tex_coords[2][0], tex_coords[2][1] },
222 { tex_coords[3][0], tex_coords[3][1] },
223 },
224 };
225 GLint vp[2][4];
226 std::memcpy(vp[0], viewport, 4 * sizeof(int));
227 std::memcpy(vp[1], viewport, 4 * sizeof(int));
228 video_output::display_current_frame(0, true, mono_right_instead_of_left,
229 x, y, w, h, vp, my_tex_coords, 0, 0, parameters::mode_mono_left);
230 }
231 };
232
233 /*
234 * eq_init_data
235 */
236
237 class eq_init_data : public co::Object
238 {
239 public:
240 eq::uint128_t frame_data_id;
241 open_input_data input;
242 parameters params;
243 bool flat_screen;
244 float canvas_width;
245 float canvas_height;
246
eq_init_data()247 eq_init_data()
248 {
249 flat_screen = true;
250 }
251
~eq_init_data()252 virtual ~eq_init_data()
253 {
254 }
255
256 protected:
getChangeType() const257 virtual ChangeType getChangeType() const
258 {
259 return co::Object::STATIC;
260 }
261
getInstanceData(co::DataOStream & os)262 virtual void getInstanceData(co::DataOStream &os)
263 {
264 std::ostringstream oss;
265 s11n::save(oss, frame_data_id.high());
266 s11n::save(oss, frame_data_id.low());
267 s11n::save(oss, input);
268 s11n::save(oss, params);
269 s11n::save(oss, flat_screen);
270 s11n::save(oss, canvas_width);
271 s11n::save(oss, canvas_height);
272 os << oss.str();
273 }
274
applyInstanceData(co::DataIStream & is)275 virtual void applyInstanceData(co::DataIStream &is)
276 {
277 std::string s;
278 is >> s;
279 std::istringstream iss(s);
280 s11n::load(iss, frame_data_id.high());
281 s11n::load(iss, frame_data_id.low());
282 s11n::load(iss, input);
283 s11n::load(iss, params);
284 s11n::load(iss, flat_screen);
285 s11n::load(iss, canvas_width);
286 s11n::load(iss, canvas_height);
287 }
288 };
289
290 /*
291 * eq_frame_data
292 */
293
294 class eq_frame_data : public co::Object
295 {
296 public:
297 std::string dispatch_state;
298 subtitle_box subtitle;
299 int64_t seek_to;
300 bool prep_frame;
301 bool drop_frame;
302 bool display_frame;
303 bool display_statistics;
304 struct { float x, y, w, h, d; } canvas_video_area;
305 float tex_coords[4][2];
306
307 public:
eq_frame_data()308 eq_frame_data() :
309 seek_to(0),
310 prep_frame(false),
311 drop_frame(false),
312 display_frame(false),
313 display_statistics(false)
314 {
315 }
316
317 protected:
getChangeType() const318 virtual ChangeType getChangeType() const
319 {
320 return co::Object::INSTANCE;
321 }
322
getInstanceData(co::DataOStream & os)323 virtual void getInstanceData(co::DataOStream &os)
324 {
325 std::ostringstream oss;
326 s11n::save(oss, dispatch_state);
327 s11n::save(oss, subtitle);
328 s11n::save(oss, seek_to);
329 s11n::save(oss, prep_frame);
330 s11n::save(oss, drop_frame);
331 s11n::save(oss, display_frame);
332 s11n::save(oss, display_statistics);
333 s11n::save(oss, &canvas_video_area, sizeof(canvas_video_area));
334 s11n::save(oss, tex_coords, sizeof(tex_coords));
335 os << oss.str();
336 }
337
applyInstanceData(co::DataIStream & is)338 virtual void applyInstanceData(co::DataIStream &is)
339 {
340 std::string s;
341 is >> s;
342 std::istringstream iss(s);
343 s11n::load(iss, dispatch_state);
344 s11n::load(iss, subtitle);
345 s11n::load(iss, seek_to);
346 s11n::load(iss, prep_frame);
347 s11n::load(iss, drop_frame);
348 s11n::load(iss, display_frame);
349 s11n::load(iss, display_statistics);
350 s11n::load(iss, &canvas_video_area, sizeof(canvas_video_area));
351 s11n::load(iss, tex_coords, sizeof(tex_coords));
352 }
353 };
354
355 /*
356 * eq_config
357 */
358
359 class eq_config : public eq::Config
360 {
361 private:
362 eq_init_data _eq_init_data; // Master eq_init_data instance
363 eq_frame_data _eq_frame_data; // Master eq_frame_data instance
364
365 public:
eq_config(eq::ServerPtr parent)366 eq_config(eq::ServerPtr parent) : eq::Config(parent)
367 {
368 }
369
init(const open_input_data & input,bool flat_screen)370 bool init(const open_input_data& input, bool flat_screen)
371 {
372 msg::dbg(HERE);
373 setLatency( 0 );
374
375 // Initialize master init/frame data instances
376 _eq_init_data.input = input;
377 _eq_init_data.params = dispatch::parameters();
378 _eq_init_data.flat_screen = flat_screen;
379 // Find canvas
380 if (getCanvases().size() < 1)
381 {
382 msg::err(_("No canvas in Equalizer configuration."));
383 return false;
384 }
385 float canvas_w = getCanvases()[0]->getWall().getWidth();
386 float canvas_h = getCanvases()[0]->getWall().getHeight();
387 _eq_init_data.canvas_width = canvas_w;
388 _eq_init_data.canvas_height = canvas_h;
389 msg::inf(_("Equalizer canvas: %gx%g, aspect ratio %g:1"), canvas_w, canvas_h, canvas_w / canvas_h);
390 // Register master instances
391 registerObject(&_eq_frame_data);
392 _eq_init_data.frame_data_id = _eq_frame_data.getID();
393 registerObject(&_eq_init_data);
394 msg::dbg(HERE);
395 return eq::Config::init(_eq_init_data.getID());
396 }
397
exit()398 virtual bool exit()
399 {
400 msg::dbg(HERE);
401 bool ret = eq::Config::exit();
402 // Deregister master instances
403 deregisterObject(&_eq_init_data);
404 deregisterObject(&_eq_frame_data);
405 msg::dbg(HERE);
406 return ret;
407 }
408
startFrame()409 virtual uint32_t startFrame()
410 {
411 // Run player steps until we are told to do something
412 bool more_steps;
413 do {
414 global_player_equalizer->step(&more_steps, &_eq_frame_data.seek_to,
415 &_eq_frame_data.prep_frame, &_eq_frame_data.drop_frame, &_eq_frame_data.display_frame);
416 dispatch::process_all_events();
417 if (!dispatch::playing())
418 more_steps = false;
419 }
420 while (more_steps
421 && _eq_frame_data.seek_to == -1
422 && !_eq_frame_data.prep_frame
423 && !_eq_frame_data.drop_frame
424 && !_eq_frame_data.display_frame
425 && !dispatch::pausing());
426 if (!more_steps) {
427 this->exit();
428 return 0;
429 }
430 // Update the video state for all (it might have changed via handleEvent())
431 _eq_frame_data.subtitle = global_player_equalizer->get_subtitle_box();
432 _eq_frame_data.dispatch_state = global_dispatch->save_state();
433 // Find region of canvas to use, depending on the video aspect ratio and zoom level
434 float aspect_ratio = dispatch::media_input()->video_frame_template().aspect_ratio;
435 float crop_aspect_ratio = dispatch::parameters().crop_aspect_ratio();
436 float canvas_aspect_ratio = _eq_init_data.canvas_width / _eq_init_data.canvas_height;
437 float zoom = dispatch::parameters().zoom();
438 if (_eq_init_data.flat_screen) {
439 _eq_frame_data.tex_coords[0][0] = 0.0f;
440 _eq_frame_data.tex_coords[0][1] = 0.0f;
441 _eq_frame_data.tex_coords[1][0] = 1.0f;
442 _eq_frame_data.tex_coords[1][1] = 0.0f;
443 _eq_frame_data.tex_coords[2][0] = 1.0f;
444 _eq_frame_data.tex_coords[2][1] = 1.0f;
445 _eq_frame_data.tex_coords[3][0] = 0.0f;
446 _eq_frame_data.tex_coords[3][1] = 1.0f;
447 if (crop_aspect_ratio > 0.0f) {
448 if (aspect_ratio >= crop_aspect_ratio) {
449 float cutoff = (1.0f - crop_aspect_ratio / aspect_ratio) / 2.0f;
450 _eq_frame_data.tex_coords[0][0] += cutoff;
451 _eq_frame_data.tex_coords[1][0] -= cutoff;
452 _eq_frame_data.tex_coords[2][0] -= cutoff;
453 _eq_frame_data.tex_coords[3][0] += cutoff;
454 } else {
455 float cutoff = (1.0f - aspect_ratio / crop_aspect_ratio) / 2.0f;
456 _eq_frame_data.tex_coords[0][1] += cutoff;
457 _eq_frame_data.tex_coords[1][1] += cutoff;
458 _eq_frame_data.tex_coords[2][1] -= cutoff;
459 _eq_frame_data.tex_coords[3][1] -= cutoff;
460 }
461 aspect_ratio = crop_aspect_ratio;
462 }
463 if (aspect_ratio >= canvas_aspect_ratio) {
464 // need black borders top and bottom
465 float zoom_aspect_ratio = zoom * canvas_aspect_ratio + (1.0f - zoom) * aspect_ratio;
466 _eq_frame_data.canvas_video_area.w = 1.0f;
467 _eq_frame_data.canvas_video_area.h = canvas_aspect_ratio / zoom_aspect_ratio;
468 _eq_frame_data.canvas_video_area.x = (1.0f - _eq_frame_data.canvas_video_area.w) / 2.0f;
469 _eq_frame_data.canvas_video_area.y = (1.0f - _eq_frame_data.canvas_video_area.h) / 2.0f;
470 float cutoff = (1.0f - zoom_aspect_ratio / aspect_ratio) / 2.0f;
471 _eq_frame_data.tex_coords[0][0] += cutoff;
472 _eq_frame_data.tex_coords[1][0] -= cutoff;
473 _eq_frame_data.tex_coords[2][0] -= cutoff;
474 _eq_frame_data.tex_coords[3][0] += cutoff;
475 } else {
476 // need black borders left and right
477 _eq_frame_data.canvas_video_area.w = aspect_ratio / canvas_aspect_ratio;
478 _eq_frame_data.canvas_video_area.h = 1.0f;
479 _eq_frame_data.canvas_video_area.x = (1.0f - _eq_frame_data.canvas_video_area.w) / 2.0f;
480 _eq_frame_data.canvas_video_area.y = (1.0f - _eq_frame_data.canvas_video_area.h) / 2.0f;
481 }
482 }
483 else
484 {
485 compute_3d_canvas(&_eq_frame_data.canvas_video_area.h, &_eq_frame_data.canvas_video_area.d);
486 // compute width and offset for 1m high 'screen' quad in 3D space
487 _eq_frame_data.canvas_video_area.w = _eq_frame_data.canvas_video_area.h * aspect_ratio;
488 _eq_frame_data.canvas_video_area.x = -0.5f * _eq_frame_data.canvas_video_area.w;
489 _eq_frame_data.canvas_video_area.y = -0.5f * _eq_frame_data.canvas_video_area.h;
490 _eq_frame_data.tex_coords[0][0] = 0.0f;
491 _eq_frame_data.tex_coords[0][1] = 0.0f;
492 _eq_frame_data.tex_coords[1][0] = 1.0f;
493 _eq_frame_data.tex_coords[1][1] = 0.0f;
494 _eq_frame_data.tex_coords[2][0] = 1.0f;
495 _eq_frame_data.tex_coords[2][1] = 1.0f;
496 _eq_frame_data.tex_coords[3][0] = 0.0f;
497 _eq_frame_data.tex_coords[3][1] = 1.0f;
498 }
499 // Commit the updated frame data
500 const eq::uint128_t version = _eq_frame_data.commit();
501 // Start this frame with the committed frame data
502 return eq::Config::startFrame(version);
503 }
504
handleEvent(const eq::ConfigEvent * event)505 virtual bool handleEvent(const eq::ConfigEvent *event)
506 {
507 if (eq::Config::handleEvent(event))
508 {
509 return true;
510 }
511 if (event->data.type == eq::Event::KEY_PRESS)
512 {
513 switch (event->data.keyPress.key)
514 {
515 case 'S':
516 _eq_frame_data.display_statistics = !_eq_frame_data.display_statistics;
517 break;
518 case 'q':
519 controller::send_cmd(command::toggle_play);
520 break;
521 case 'e':
522 case eq::KC_F7:
523 controller::send_cmd(command::toggle_stereo_mode_swap);
524 break;
525 case 'f':
526 /* fullscreen toggling not supported with Equalizer */
527 break;
528 case 'c':
529 /* window centering not supported with Equalizer */
530 break;
531 case ' ':
532 case 'p':
533 controller::send_cmd(command::toggle_pause);
534 break;
535 case '.':
536 controller::send_cmd(command::step);
537 break;
538 case 'v':
539 /* TODO: cycling video streams is currently not supported with Equalizer.
540 * We would have to cycle the streams in all node players, and thus communicate
541 * the change via frame data. */
542 //controller::send_cmd(command::cycle_video_stream);
543 break;
544 case 'a':
545 /* TODO: cycling audio streams is currently not supported with Equalizer.
546 * We would have to cycle the streams in all node players, and thus communicate
547 * the change via frame data. */
548 //controller::send_cmd(command::cycle_audio_stream);
549 break;
550 case 's':
551 /* TODO: cycling subtitle streams is currently not supported with Equalizer.
552 * We would have to cycle the streams in all node players, and thus communicate
553 * the change via frame data. */
554 //controller::send_cmd(command::cycle_subtitle_stream);
555 break;
556 case '1':
557 controller::send_cmd(command::adjust_contrast, -0.05f);
558 break;
559 case '2':
560 controller::send_cmd(command::adjust_contrast, +0.05f);
561 break;
562 case '3':
563 controller::send_cmd(command::adjust_brightness, -0.05f);
564 break;
565 case '4':
566 controller::send_cmd(command::adjust_brightness, +0.05f);
567 break;
568 case '5':
569 controller::send_cmd(command::adjust_hue, -0.05f);
570 break;
571 case '6':
572 controller::send_cmd(command::adjust_hue, +0.05f);
573 break;
574 case '7':
575 controller::send_cmd(command::adjust_saturation, -0.05f);
576 break;
577 case '8':
578 controller::send_cmd(command::adjust_saturation, +0.05f);
579 break;
580 case '[':
581 controller::send_cmd(command::adjust_parallax, -0.01f);
582 break;
583 case ']':
584 controller::send_cmd(command::adjust_parallax, +0.01f);
585 break;
586 case '(':
587 controller::send_cmd(command::adjust_ghostbust, -0.01f);
588 break;
589 case ')':
590 controller::send_cmd(command::adjust_ghostbust, +0.01f);
591 break;
592 case '<':
593 controller::send_cmd(command::adjust_zoom, -0.1f);
594 break;
595 case '>':
596 controller::send_cmd(command::adjust_zoom, +0.1f);
597 break;
598 case eq::KC_LEFT:
599 controller::send_cmd(command::seek, -10.0f);
600 break;
601 case eq::KC_RIGHT:
602 controller::send_cmd(command::seek, +10.0f);
603 break;
604 case eq::KC_DOWN:
605 controller::send_cmd(command::seek, -60.0f);
606 break;
607 case eq::KC_UP:
608 controller::send_cmd(command::seek, +60.0f);
609 break;
610 case eq::KC_PAGE_DOWN:
611 controller::send_cmd(command::seek, -600.0f);
612 break;
613 case eq::KC_PAGE_UP:
614 controller::send_cmd(command::seek, +600.0f);
615 break;
616 }
617 }
618 else if (event->data.type == eq::Event::CHANNEL_POINTER_BUTTON_RELEASE)
619 {
620 float event_px = event->data.pointerButtonRelease.x; // Event position in pixels
621 float channel_pw = event->data.context.pvp.w; // Channel width in pixels
622 float event_x = event_px / channel_pw; // Event position relative to channel
623 // Event position relative to destination view (which seems to be the same as the canvas?)
624 float dest = event->data.context.vp.x + event_x * event->data.context.vp.w;
625 dest = std::min(std::max(dest, 0.0f), 1.0f); // Clamp to [0,1] - just to be sure
626 controller::send_cmd(command::set_pos, dest); // Seek to this position
627 }
628 return true;
629 }
630
631 private:
compute_3d_canvas(float * height,float * distance)632 void compute_3d_canvas(float *height, float *distance)
633 {
634 float angle = -1.0f;
635 *height = 0.0f;
636 *distance = 0.0f;
637
638 const eq::Canvases &canvases = getCanvases();
639 for (eq::Canvases::const_iterator i = canvases.begin(); i != canvases.end(); i++)
640 {
641 const eq::Segments &segments = (*i)->getSegments();
642 for (eq::Segments::const_iterator j = segments.begin(); j != segments.end(); j++)
643 {
644 const eq::Segment *segment = *j;
645 eq::Wall wall = segment->getWall();
646 #if 0 // Hack to compute rotated walls for Equalizer configuration. See doc/multi-display.txt.
647 eq::Matrix4f matrix(eq::Matrix4f::IDENTITY);
648 matrix.rotate(1.3f, eq::Vector3f::FORWARD);
649 wall.bottomLeft = matrix * wall.bottomLeft;
650 wall.bottomRight = matrix * wall.bottomRight;
651 wall.topLeft = matrix * wall.topLeft;
652 std::cout << wall << std::endl;
653 #endif
654 const eq::Vector3f u = wall.bottomRight - wall.bottomLeft;
655 const eq::Vector3f v = wall.topLeft - wall.bottomLeft;
656 eq::Vector3f w = u.cross(v);
657 w.normalize();
658
659 const eq::Vector3f dot(w.dot(eq::Vector3f::FORWARD));
660 const float val = dot.squared_length();
661 if (val < angle) // facing more away then previous segment
662 {
663 continue;
664 }
665
666 // transform wall to full canvas
667 eq::Viewport vp = eq::Viewport::FULL;
668 vp.transform(segment->getViewport());
669 wall.apply(vp);
670
671 const eq::Vector3f topRight = wall.topLeft + wall.bottomRight - wall.bottomLeft;
672 float yMin = std::min(wall.bottomLeft.y(), wall.bottomRight.y());
673 float yMax = std::max(wall.bottomLeft.y(), wall.bottomRight.y());
674 yMin = std::min(yMin, wall.topLeft.y());
675 yMax = std::max(yMax, wall.topLeft.y());
676 yMin = std::min(yMin, topRight.y());
677 yMax = std::max(yMax, topRight.y());
678
679 const float h = yMax - yMin;
680 const eq::Vector3f center = (wall.bottomRight + wall.topLeft) * 0.5f;
681 const float d = -center.z();
682
683 // 'same' orientation and distance
684 if (std::fabs(angle - val) < 0.0001f && std::fabs(d - *distance) < 0.0001f)
685 {
686 if (h > *height)
687 {
688 *height = h;
689 }
690 }
691 else
692 {
693 *height = h;
694 *distance = d;
695 angle = val;
696 }
697 }
698 }
699 }
700 };
701
702 /*
703 * eq_node
704 */
705
706 class eq_node : public eq::Node
707 {
708 private:
709 dispatch* _dispatch;
710 player_eq_node _player;
711
712 public:
713 eq_init_data init_data;
714 eq_frame_data frame_data;
715
eq_node(eq::Config * parent)716 eq_node(eq::Config *parent) : eq::Node(parent), _dispatch(NULL)
717 {
718 }
719
~eq_node()720 ~eq_node()
721 {
722 delete _dispatch;
723 }
724
725 protected:
configInit(const eq::uint128_t & init_id)726 virtual bool configInit(const eq::uint128_t &init_id)
727 {
728 if (!eq::Node::configInit(init_id))
729 {
730 return false;
731 }
732 // Map our InitData instance to the master instance
733 eq_config *config = static_cast<eq_config *>(getConfig());
734 if (!config->mapObject(&init_data, init_id))
735 {
736 msg::err(_("Init data mapping failed."));
737 return false;
738 }
739 // Map our FrameData instance to the master instance
740 if (!config->mapObject(&frame_data, init_data.frame_data_id))
741 {
742 msg::err(_("Frame data mapping failed."));
743 return false;
744 }
745
746 msg::dbg(HERE);
747 // Create decoders and input
748 if (!isApplicationNode( ))
749 {
750 _dispatch = new dispatch(NULL, NULL, true, init_data.flat_screen, true,
751 false, false, init_data.params.log_level(), init_data.params.benchmark(),
752 init_data.params.swap_interval());
753 if (!_player.init(init_data.input))
754 {
755 msg::err(_("Video player initialization failed."));
756 return false;
757 }
758 }
759 msg::dbg(HERE);
760 return true;
761 }
762
configExit()763 virtual bool configExit()
764 {
765 msg::dbg(HERE);
766 eq::Config *config = getConfig();
767 // Unmap our FrameData instance
768 config->unmapObject(&frame_data);
769 // Unmap our InitData instance
770 config->unmapObject(&init_data);
771 // Cleanup
772 if (!isApplicationNode( ))
773 _player.close();
774 msg::dbg(HERE);
775 return eq::Node::configExit();
776 }
777
frameStart(const eq::uint128_t & frame_id,const uint32_t frame_number)778 virtual void frameStart(const eq::uint128_t &frame_id, const uint32_t frame_number)
779 {
780 // Update our frame data
781 frame_data.sync(frame_id);
782 // Do as we're told
783 if (isApplicationNode( ))
784 {
785 // Nothing to do since the config's master player already did it
786 }
787 else
788 {
789 _dispatch->load_state(frame_data.dispatch_state);
790 if (frame_data.seek_to >= 0)
791 {
792 _player.seek(frame_data.seek_to);
793 }
794 if (frame_data.prep_frame)
795 {
796 _player.finish_frame_read();
797 }
798 if (frame_data.drop_frame)
799 {
800 _player.finish_frame_read();
801 _player.start_frame_read();
802 }
803 }
804 startFrame(frame_number);
805 }
806
frameFinish(const eq::uint128_t &,const uint32_t frame_number)807 virtual void frameFinish(const eq::uint128_t &, const uint32_t frame_number)
808 {
809 if (isApplicationNode( ))
810 {
811 // Nothing to do since the config's master player already did it
812 }
813 else
814 {
815 if (frame_data.prep_frame)
816 {
817 // The frame was uploaded to texture memory.
818 // Start reading the next one asynchronously.
819 _player.start_frame_read();
820 }
821 }
822 releaseFrame(frame_number);
823 }
824
825 public:
get_video_frame()826 const video_frame &get_video_frame()
827 {
828 if (isApplicationNode( ))
829 return global_player_equalizer->get_video_frame();
830 else
831 return _player.get_video_frame();
832 }
833 };
834
835 /*
836 * eq_pipe
837 */
838
839 class eq_pipe : public eq::Pipe
840 {
841 public:
eq_pipe(eq::Node * parent)842 eq_pipe(eq::Node *parent) : eq::Pipe(parent)
843 {
844 }
845 };
846
847 /*
848 * eq_window
849 */
850
851 class eq_window : public eq::Window
852 {
853 public:
eq_window(eq::Pipe * parent)854 eq_window(eq::Pipe *parent) : eq::Window(parent)
855 {
856 }
857
858 protected:
859
configInitGL(const eq::uint128_t & init_id)860 virtual bool configInitGL(const eq::uint128_t &init_id)
861 {
862 msg::dbg(HERE);
863 if (!eq::Window::configInitGL(init_id))
864 {
865 return false;
866 }
867 if (!glewContextIsSupported(const_cast<GLEWContext *>(glewGetContext()),
868 "GL_VERSION_2_1 GL_EXT_framebuffer_object"))
869 {
870 msg::err(_("This OpenGL implementation does not support OpenGL 2.1 and framebuffer objects."));
871 return false;
872 }
873
874 // Disable some things that Equalizer seems to enable for some reason.
875 glDisable(GL_LIGHTING);
876
877 msg::dbg(HERE);
878 return true;
879 }
880
configExitGL()881 virtual bool configExitGL()
882 {
883 msg::dbg(HERE);
884 return eq::Window::configExitGL();
885 }
886
swapBuffers()887 virtual void swapBuffers()
888 {
889 eq_node *node = static_cast<eq_node *>(getNode());
890 if (node->frame_data.display_frame)
891 eq::Window::swapBuffers();
892 }
893 };
894
895 /*
896 * eq_channel
897 */
898
899 class eq_channel : public eq::Channel
900 {
901 private:
902 video_output_eq_channel _video_output;
903
904 public:
eq_channel(eq::Window * parent)905 eq_channel(eq::Window *parent) :
906 eq::Channel(parent),
907 _video_output(this,
908 static_cast<eq_node *>(getNode())->init_data.canvas_width,
909 static_cast<eq_node *>(getNode())->init_data.canvas_height)
910 {
911 }
912
913 protected:
914
configExit()915 virtual bool configExit()
916 {
917 msg::dbg(HERE);
918 getWindow()->makeCurrent();
919 _video_output.deinit();
920 msg::dbg(HERE);
921 return eq::Channel::configExit();
922 }
923
frameDraw(const eq::uint128_t & frame_id)924 virtual void frameDraw(const eq::uint128_t &frame_id)
925 {
926 // Let Equalizer initialize some stuff
927 eq::Channel::frameDraw(frame_id);
928
929 // Get the canvas video area and the canvas channel area
930 eq_node *node = static_cast<eq_node *>(getNode());
931 const struct { float x, y, w, h, d; } canvas_video_area =
932 {
933 node->frame_data.canvas_video_area.x,
934 node->frame_data.canvas_video_area.y,
935 node->frame_data.canvas_video_area.w,
936 node->frame_data.canvas_video_area.h,
937 node->frame_data.canvas_video_area.d
938 };
939 _video_output.set_canvas_size(canvas_video_area.w, canvas_video_area.h);
940 const eq::Viewport &canvas_channel_area = getViewport();
941 // Determine the video quad to render
942 float quad_x = canvas_video_area.x;
943 float quad_y = canvas_video_area.y;
944 float quad_w = canvas_video_area.w;
945 float quad_h = canvas_video_area.h;
946 if (node->init_data.flat_screen)
947 {
948 quad_x = ((quad_x - canvas_channel_area.x) / canvas_channel_area.w - 0.5f) * 2.0f;
949 quad_y = ((quad_y - canvas_channel_area.y) / canvas_channel_area.h - 0.5f) * 2.0f;
950 quad_w = 2.0f * quad_w / canvas_channel_area.w;
951 quad_h = 2.0f * quad_h / canvas_channel_area.h;
952 glMatrixMode(GL_PROJECTION);
953 glLoadIdentity();
954 glMatrixMode(GL_MODELVIEW);
955 glLoadIdentity();
956 }
957 else
958 {
959 glTranslatef(0.0f, 0.0f, -canvas_video_area.d);
960 }
961
962 // Display
963 glEnable(GL_TEXTURE_2D);
964 glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
965 bool mono_right_instead_of_left = (getEye() == eq::EYE_RIGHT);
966 GLint viewport[4];
967 glGetIntegerv(GL_VIEWPORT, viewport);
968 _video_output.display_current_frame(mono_right_instead_of_left, quad_x, quad_y, quad_w, quad_h,
969 viewport, node->frame_data.tex_coords);
970 }
971
frameStart(const eq::uint128_t &,const uint32_t frame_number)972 virtual void frameStart(const eq::uint128_t &, const uint32_t frame_number)
973 {
974 // Get frame data via from the node
975 eq_node *node = static_cast<eq_node *>(getNode());
976 // Do as we're told
977 if (node->frame_data.prep_frame)
978 {
979 getWindow()->makeCurrent();
980 if (node->frame_data.subtitle.is_valid())
981 {
982 _video_output.wait_for_subtitle_renderer();
983 }
984 _video_output.prepare_next_frame(node->get_video_frame(), node->frame_data.subtitle);
985 }
986 if (node->frame_data.display_frame)
987 {
988 _video_output.activate_next_frame();
989 }
990 startFrame(frame_number);
991 }
992
frameViewFinish(const eq::uint128_t & id)993 void frameViewFinish( const eq::uint128_t& id )
994 {
995 eq_node *node = static_cast<eq_node *>(getNode());
996 if (node->frame_data.display_statistics)
997 drawStatistics();
998 eq::Channel::frameViewFinish( id );
999 }
1000 };
1001
1002 /*
1003 * eq_node_factory
1004 */
1005
1006 class eq_node_factory : public eq::NodeFactory
1007 {
1008 public:
createConfig(eq::ServerPtr parent)1009 virtual eq::Config *createConfig(eq::ServerPtr parent)
1010 {
1011 return new eq_config(parent);
1012 }
1013
createNode(eq::Config * parent)1014 virtual eq::Node *createNode(eq::Config *parent)
1015 {
1016 return new eq_node(parent);
1017 }
1018
createPipe(eq::Node * parent)1019 virtual eq::Pipe *createPipe(eq::Node *parent)
1020 {
1021 return new eq_pipe(parent);
1022 }
1023
createWindow(eq::Pipe * parent)1024 virtual eq::Window *createWindow(eq::Pipe *parent)
1025 {
1026 return new eq_window(parent);
1027 }
1028
createChannel(eq::Window * parent)1029 virtual eq::Channel *createChannel(eq::Window *parent)
1030 {
1031 return new eq_channel(parent);
1032 }
1033 };
1034
1035 /*
1036 * player_equalizer
1037 */
1038
player_equalizer(int * argc,char * argv[],bool flat_screen)1039 player_equalizer::player_equalizer(int *argc, char *argv[], bool flat_screen) :
1040 player(), _flat_screen(flat_screen)
1041 {
1042 assert(!global_player_equalizer);
1043 global_player_equalizer = this;
1044 /* Initialize Equalizer */
1045 _node_factory = new eq_node_factory;
1046 if (!eq::init(*argc, argv, _node_factory))
1047 {
1048 throw exc(_("Equalizer initialization failed."));
1049 }
1050 /* Get a configuration */
1051 _config = static_cast<eq_config *>(eq::getConfig(*argc, argv));
1052 // The following code is only executed on the application node because
1053 // eq::getConfig() does not return on other nodes.
1054 if (!_config)
1055 {
1056 throw exc(_("Cannot get equalizer configuration."));
1057 }
1058 }
1059
~player_equalizer()1060 player_equalizer::~player_equalizer()
1061 {
1062 eq::releaseConfig(_config);
1063 eq::exit();
1064 delete _node_factory;
1065 global_player_equalizer = NULL;
1066 }
1067
open()1068 void player_equalizer::open()
1069 {
1070 if (!_config->init(*(global_dispatch->get_input_data()), _flat_screen))
1071 {
1072 throw exc(_("Equalizer configuration initialization failed."));
1073 }
1074 }
1075
1076 static bool global_quit_request;
1077
1078 class eq_quit_controller : public controller
1079 {
receive_notification(const notification & note)1080 virtual void receive_notification(const notification& note)
1081 {
1082 if (note.type == notification::quit)
1083 global_quit_request = true;
1084 }
1085 };
1086
mainloop()1087 void player_equalizer::mainloop()
1088 {
1089 global_quit_request = false;
1090 eq_quit_controller qc;
1091 for (;;) {
1092 if (!global_player_equalizer) {
1093 dispatch::step();
1094 dispatch::process_all_events();
1095 if (global_quit_request)
1096 return;
1097 }
1098 if (global_player_equalizer) {
1099 eq_config* config = global_player_equalizer->_config;
1100 while (config->isRunning()) {
1101 config->startFrame();
1102 config->finishFrame();
1103 }
1104 global_dispatch->stop_eq_player();
1105 delete global_player_equalizer;
1106 }
1107 }
1108 }
1109