/******************************************************************************
    QtAV:  Multimedia framework based on Qt and FFmpeg
    Copyright (C) 2012-2017 Wang Bin <wbsecg1@gmail.com>

*   This file is part of QtAV

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation; either
    version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
******************************************************************************/

#include "QtAV/VideoRenderer.h"
#include "QtAV/private/VideoRenderer_p.h"
#include "QtAV/Filter.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QEvent>
#include "QtAV/Statistics.h"
#include "QtAV/private/factory.h"
#include "QtAV/private/mkid.h"
#include "utils/Logger.h"

namespace QtAV {
FACTORY_DEFINE(VideoRenderer)
VideoRendererId VideoRendererId_OpenGLWindow = mkid::id32base36_6<'Q', 'O', 'G', 'L', 'W', 'w'>::value;

VideoRenderer::VideoRenderer()
    :AVOutput(*new VideoRendererPrivate)
{
    // can not do 'if (widget()) connect to update()' because widget() is virtual
}

VideoRenderer::VideoRenderer(VideoRendererPrivate &d)
    :AVOutput(d)
{
}

VideoRenderer::~VideoRenderer()
{
}

bool VideoRenderer::receive(const VideoFrame &frame)
{
    DPTR_D(VideoRenderer);
    const qreal dar_old = d.source_aspect_ratio;
    d.source_aspect_ratio = frame.displayAspectRatio();
    if (dar_old != d.source_aspect_ratio)
        sourceAspectRatioChanged(d.source_aspect_ratio);
    setInSize(frame.width(), frame.height());
    QMutexLocker locker(&d.img_mutex);
    Q_UNUSED(locker); //TODO: double buffer for display/dec frame to avoid mutex
    return receiveFrame(frame);
}

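/* Call flow sketch for receive(): AVOutput delivers decoded frames here (normally from
 * the video thread); the source aspect ratio and input size are updated, then the frame
 * is handed to the subclass hook receiveFrame() under img_mutex. Pushing a frame by hand
 * (assuming the static create() generated by FACTORY_DEFINE above, and a valid frame):
 *
 *   VideoRenderer *vo = VideoRenderer::create(VideoRendererId_OpenGLWindow);
 *   VideoFrame frame = ...; // e.g. obtained from a VideoDecoder
 *   if (vo && frame.isValid())
 *       vo->receive(frame);
 */
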
bool VideoRenderer::setPreferredPixelFormat(VideoFormat::PixelFormat pixfmt)
{
    DPTR_D(VideoRenderer);
    if (d.preferred_format == pixfmt)
        return false;
    if (!isSupported(pixfmt)) {
        qWarning("pixel format '%s' is not supported", VideoFormat(pixfmt).name().toUtf8().constData());
        return false;
    }
    VideoFormat::PixelFormat old = d.preferred_format;
    d.preferred_format = pixfmt;
    if (!onSetPreferredPixelFormat(pixfmt)) {
        qWarning("onSetPreferredPixelFormat failed");
        d.preferred_format = old;
        return false;
    }
    return true;
}

bool VideoRenderer::onSetPreferredPixelFormat(VideoFormat::PixelFormat pixfmt)
{
    Q_UNUSED(pixfmt);
    return true;
}

VideoFormat::PixelFormat VideoRenderer::preferredPixelFormat() const
{
    return d_func().preferred_format;
}

void VideoRenderer::forcePreferredPixelFormat(bool force)
{
    DPTR_D(VideoRenderer);
    if (d.force_preferred == force)
        return;
    bool old = d.force_preferred;
    d.force_preferred = force;
    if (!onForcePreferredPixelFormat(force)) {
        qWarning("onForcePreferredPixelFormat failed");
        d.force_preferred = old;
    }
}

bool VideoRenderer::onForcePreferredPixelFormat(bool force)
{
    Q_UNUSED(force);
    return true;
}

bool VideoRenderer::isPreferredPixelFormatForced() const
{
    return d_func().force_preferred;
}

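/* Usage sketch (assumes a renderer instance 'vo'): prefer a pixel format the renderer
 * reports as supported, and optionally force it so incoming frames are converted to
 * that format before display.
 *
 *   if (vo->isSupported(VideoFormat::Format_RGB32)) {
 *       vo->setPreferredPixelFormat(VideoFormat::Format_RGB32);
 *       vo->forcePreferredPixelFormat(true); // convert every frame to the preferred format
 *   }
 */
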
qreal VideoRenderer::sourceAspectRatio() const
{
    return d_func().source_aspect_ratio;
}

void VideoRenderer::setOutAspectRatioMode(OutAspectRatioMode mode)
{
    DPTR_D(VideoRenderer);
    if (mode == d.out_aspect_ratio_mode)
        return;
    d.aspect_ratio_changed = true;
    d.out_aspect_ratio_mode = mode;
    if (mode == RendererAspectRatio) {
        QRect out_rect0(d.out_rect);
        //compute out_rect
        d.out_rect = QRect(0, 0, d.renderer_width, d.renderer_height); //remove? already in computeOutParameters()
        setOutAspectRatio(qreal(d.renderer_width)/qreal(d.renderer_height));
        if (out_rect0 != d.out_rect) {
            Q_EMIT videoRectChanged();
            Q_EMIT contentRectChanged();
        }
        //is that thread safe?
    } else if (mode == VideoAspectRatio) {
        setOutAspectRatio(d.source_aspect_ratio);
    }
    onSetOutAspectRatioMode(mode);
    Q_EMIT outAspectRatioModeChanged();
}

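/* OutAspectRatioMode behavior in brief (the exact geometry is computed by the private
 * computeOutParameters()):
 *   - RendererAspectRatio: the video fills the whole renderer rect.
 *   - VideoAspectRatio:    the source display aspect ratio is kept, so the video is
 *                          letterboxed/pillarboxed inside the renderer.
 *   - CustomAspectRation:  the ratio passed to setOutAspectRatio() is used.
 *
 *   vo->setOutAspectRatioMode(VideoRenderer::VideoAspectRatio); // keep the source DAR
 */
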
void VideoRenderer::onSetOutAspectRatioMode(OutAspectRatioMode mode)
{
    Q_UNUSED(mode);
}

VideoRenderer::OutAspectRatioMode VideoRenderer::outAspectRatioMode() const
{
    return d_func().out_aspect_ratio_mode;
}

void VideoRenderer::setOutAspectRatio(qreal ratio)
{
    DPTR_D(VideoRenderer);
    bool ratio_changed = d.out_aspect_ratio != ratio;
    d.out_aspect_ratio = ratio;
    // aspect_ratio_changed is only set when we are called internally (e.g. from
    // setOutAspectRatioMode or setInSize). If it is not set, the call comes from the
    // user, so switch to the custom aspect ratio mode.
    if (!d.aspect_ratio_changed) {
        if (d.out_aspect_ratio_mode != CustomAspectRation) {
            d.out_aspect_ratio_mode = CustomAspectRation;
            Q_EMIT outAspectRatioModeChanged();
        }
    }
    d.aspect_ratio_changed = false; //TODO: when is it false?
    if (d.out_aspect_ratio_mode != RendererAspectRatio) {
        d.update_background = true; //cannot fill the whole renderer with video
    }
    //compute the new out_rect
    if (d.computeOutParameters(ratio)) {
        Q_EMIT videoRectChanged();
        Q_EMIT contentRectChanged();
    }
    if (ratio_changed) {
        onSetOutAspectRatio(ratio);
        Q_EMIT outAspectRatioChanged();
    }
    updateUi();
}

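/* Worked example (illustrative; the final rect comes from the private
 * computeOutParameters(), which is expected to center the video): a 16:9 ratio in an
 * 800x600 renderer gives
 *   out width  = 800
 *   out height = 800 / (16.0/9.0) = 450
 *   out_rect   ~ QRect(0, (600 - 450) / 2, 800, 450) = QRect(0, 75, 800, 450)
 */
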
void VideoRenderer::onSetOutAspectRatio(qreal ratio)
{
    Q_UNUSED(ratio);
}

qreal VideoRenderer::outAspectRatio() const
{
    return d_func().out_aspect_ratio;
}

void VideoRenderer::setQuality(Quality q)
{
    DPTR_D(VideoRenderer);
    if (d.quality == q)
        return;
    Quality old = quality();
    d.quality = q;
    if (!onSetQuality(q)) {
        d.quality = old;
    } else {
        updateUi();
    }
}

bool VideoRenderer::onSetQuality(Quality q)
{
    Q_UNUSED(q);
    return true;
}

VideoRenderer::Quality VideoRenderer::quality() const
{
    return d_func().quality;
}

void VideoRenderer::setInSize(const QSize& s)
{
    setInSize(s.width(), s.height());
}

void VideoRenderer::setInSize(int width, int height)
{
    DPTR_D(VideoRenderer);
    if (d.src_width != width || d.src_height != height) {
        d.aspect_ratio_changed = true; //?? for VideoAspectRatio mode
        d.src_width = width;
        d.src_height = height;
        Q_EMIT videoFrameSizeChanged();
    }
    if (!d.aspect_ratio_changed)// && (d.src_width == width && d.src_height == height))
        return;
    //d.source_aspect_ratio = qreal(d.src_width)/qreal(d.src_height);
    qDebug("%s => calculating aspect ratio from converted input data(%f)", __FUNCTION__, d.source_aspect_ratio);
    //see setOutAspectRatioMode
    if (d.out_aspect_ratio_mode == VideoAspectRatio) {
        //source_aspect_ratio equals the original video aspect ratio here, and also equals the out ratio
        setOutAspectRatio(d.source_aspect_ratio);
    }
    d.aspect_ratio_changed = false; //TODO: why does GraphicsItemRenderer need this? otherwise aspect_ratio_changed is always true
}

void VideoRenderer::resizeRenderer(const QSize &size)
{
    resizeRenderer(size.width(), size.height());
}

void VideoRenderer::resizeRenderer(int width, int height)
{
    DPTR_D(VideoRenderer);
    if (width == 0 || height == 0 || (d.renderer_width == width && d.renderer_height == height))
        return;
    d.renderer_width = width;
    d.renderer_height = height;
    if (d.out_aspect_ratio_mode == RendererAspectRatio)
        Q_EMIT outAspectRatioChanged();
    if (d.computeOutParameters(d.out_aspect_ratio)) {
        Q_EMIT videoRectChanged();
        Q_EMIT contentRectChanged();
    }
    onResizeRenderer(width, height); //TODO: resize widget
}

void VideoRenderer::onResizeRenderer(int width, int height)
{
    Q_UNUSED(width);
    Q_UNUSED(height);
}

QSize VideoRenderer::rendererSize() const
{
    DPTR_D(const VideoRenderer);
    return QSize(d.renderer_width, d.renderer_height);
}

int VideoRenderer::rendererWidth() const
{
    return d_func().renderer_width;
}

int VideoRenderer::rendererHeight() const
{
    return d_func().renderer_height;
}

void VideoRenderer::setOrientation(int value)
{
    DPTR_D(VideoRenderer);
    // currently only supports a multiple of 90
    value = (value + 360) % 360;
    if (value % 90)
        return;
    if (d.orientation == value)
        return;
    int old = orientation();
    d.orientation = value;
    if (!onSetOrientation(value)) {
        d.orientation = old;
    } else {
        orientationChanged();
        if (d.computeOutParameters(d.out_aspect_ratio)) {
            Q_EMIT videoRectChanged();
            Q_EMIT contentRectChanged();
        }
        onSetOutAspectRatio(outAspectRatio());
        updateUi();
    }
}

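/* Orientation sketch: the value is normalized into [0, 360) and must be a multiple of
 * 90, otherwise the call is ignored. Whether it takes effect depends on the subclass's
 * onSetOrientation() (the base implementation below returns false).
 *
 *   vo->setOrientation(90);   // rotate by 90 degrees
 *   vo->setOrientation(-90);  // normalized to 270
 *   vo->setOrientation(45);   // rejected: not a multiple of 90
 */
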
int VideoRenderer::orientation() const
{
    DPTR_D(const VideoRenderer);
    return d.orientation;
}

// only QPainter and OpenGL based renderers support orientation.
bool VideoRenderer::onSetOrientation(int value)
{
    Q_UNUSED(value);
    return false;
}

QSize VideoRenderer::videoFrameSize() const
{
    DPTR_D(const VideoRenderer);
    return QSize(d.src_width, d.src_height);
}

QRect VideoRenderer::videoRect() const
{
    return d_func().out_rect;
}

QRectF VideoRenderer::regionOfInterest() const
{
    return d_func().roi;
}

void VideoRenderer::setRegionOfInterest(qreal x, qreal y, qreal width, qreal height)
{
    setRegionOfInterest(QRectF(x, y, width, height));
}

void VideoRenderer::setRegionOfInterest(const QRectF &roi)
{
    DPTR_D(VideoRenderer);
    if (d.roi == roi)
        return;
    QRectF old = regionOfInterest();
    d.roi = roi;
    if (!onSetRegionOfInterest(roi)) {
        d.roi = old;
    } else {
        Q_EMIT regionOfInterestChanged();
        updateUi();
    }
    // TODO: how to fill video? what's out_rect now?
}

bool VideoRenderer::onSetRegionOfInterest(const QRectF &roi)
{
    Q_UNUSED(roi);
    return true;
}

QRect VideoRenderer::realROI() const
{
    DPTR_D(const VideoRenderer);
    if (!d.roi.isValid()) {
        return QRect(QPoint(), d.video_frame.size());
    }
    QRect r = d.roi.toRect();
    // normalized x, y: |value| < 1 means a fraction of the source size
    bool normalized = false;
    if (qAbs(d.roi.x()) < 1) {
        normalized = true;
        r.setX(d.roi.x()*qreal(d.src_width)); //TODO: why not video_frame.size()? roi not correct
    }
    if (qAbs(d.roi.y()) < 1) {
        normalized = true;
        r.setY(d.roi.y()*qreal(d.src_height));
    }
    // a null size (width or height == 0) means the whole frame
    // normalized width/height <= 1; a value of exactly 1 is treated as normalized only if x or y is normalized (|x| < 1 or |y| < 1)
    if (qAbs(d.roi.width()) < 1)
        r.setWidth(d.roi.width()*qreal(d.src_width));
    if (qAbs(d.roi.height()) < 1)
        r.setHeight(d.roi.height()*qreal(d.src_height));
    if (d.roi.width() == 1.0 && normalized) {
        r.setWidth(d.src_width);
    }
    if (d.roi.height() == 1.0 && normalized) {
        r.setHeight(d.src_height);
    }
    //TODO: intersect with source rect?
    return r;
}

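/* Worked example for realROI(): with a 1920x1080 source and a normalized ROI of
 * QRectF(0.25, 0.25, 0.5, 0.5) the result is
 *   QRect(0.25*1920, 0.25*1080, 0.5*1920, 0.5*1080) = QRect(480, 270, 960, 540),
 * i.e. the centered quarter of the frame. A pixel ROI such as QRectF(100, 50, 640, 360)
 * is returned unchanged as QRect(100, 50, 640, 360).
 */
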
QRectF VideoRenderer::normalizedROI() const
{
    DPTR_D(const VideoRenderer);
    if (!d.roi.isValid()) {
        return QRectF(0, 0, 1, 1);
    }
    QRectF r = d.roi;
    bool normalized = false;
    if (qAbs(r.x()) >= 1)
        r.setX(r.x()/qreal(d.src_width));
    else
        normalized = true;
    if (qAbs(r.y()) >= 1)
        r.setY(r.y()/qreal(d.src_height));
    else
        normalized = true;
    if (r.width() > 1 || (!normalized && r.width() == 1))
        r.setWidth(r.width()/qreal(d.src_width));
    if (r.height() > 1 || (!normalized && r.height() == 1)) {
        r.setHeight(r.height()/qreal(d.src_height));
    }
    return r;
}

QPointF VideoRenderer::mapToFrame(const QPointF &p) const
{
    return onMapToFrame(p);
}

// TODO: orientation
QPointF VideoRenderer::onMapToFrame(const QPointF &p) const
{
    QRectF roi = realROI();
    // zoom = qMax(roi.width()/rendererWidth(), roi.height()/rendererHeight())
    qreal zoom = qMax(roi.width()/rendererWidth(), roi.height()/rendererHeight());
    QPointF delta = p - QPointF(rendererWidth()/2, rendererHeight()/2);
    return roi.center() + delta * zoom;
}

QPointF VideoRenderer::mapFromFrame(const QPointF &p) const
{
    return onMapFromFrame(p);
}

QPointF VideoRenderer::onMapFromFrame(const QPointF &p) const
{
    QRectF roi = realROI();
    // zoom = qMax(roi.width()/rendererWidth(), roi.height()/rendererHeight())
    qreal zoom = qMax(roi.width()/rendererWidth(), roi.height()/rendererHeight());
    // map the frame point relative to the ROI center back to renderer coordinates: (p - roi.center())/zoom + renderer center
    QPointF delta = p - roi.center();
    return QPointF(rendererWidth()/2, rendererHeight()/2) + delta / zoom;
}

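/* Mapping example: with a full-frame ROI of 1920x1080 and a 960x540 renderer,
 * zoom = qMax(1920/960.0, 1080/540.0) = 2, so
 *   mapToFrame(QPointF(480, 270))   -> QPointF(960, 540) // renderer center -> frame center
 *   mapToFrame(QPointF(0, 0))       -> QPointF(0, 0)     // corners coincide when aspect ratios match
 *   mapFromFrame(QPointF(960, 540)) -> QPointF(480, 270) // inverse of the above
 */
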
QRegion VideoRenderer::backgroundRegion() const
{
    return QRegion(0, 0, rendererWidth(), rendererHeight()) - QRegion(d_func().out_rect);
}

void VideoRenderer::drawBackground()
{
}

void VideoRenderer::handlePaintEvent()
{
    DPTR_D(VideoRenderer);
    d.setupQuality();
    //begin paint. how about QPainter::beginNativePainting()?
    {
        //lock is required only when drawing the frame
        QMutexLocker locker(&d.img_mutex);
        Q_UNUSED(locker);
        // do not apply filters if d.video_frame is already filtered, e.g. when rendering an image and the window is resized and repainted
        if (!d.video_frame.metaData(QStringLiteral("gpu_filtered")).toBool() && !d.filters.isEmpty() && d.statistics) {
            // vo filters will not modify the video frame, so no lock is required
            foreach(Filter* filter, d.filters) {
                VideoFilter *vf = static_cast<VideoFilter*>(filter);
                if (!vf) {
                    qWarning("a null filter!");
                    //d.filters.removeOne(filter);
                    continue;
                }
                if (!vf->isEnabled())
                    continue;
                // QPainter on a video frame always runs on the video thread. QPainter on the renderer's paint device can work on the rendering thread.
                // Here filters are applied to the frame on the video thread, e.g. GPU filters.

                //vf->prepareContext(d.filter_context, d.statistics, 0);
                //if (!vf->context() || vf->context()->type() != VideoFilterContext::OpenGL)
                if (!vf->isSupported(VideoFilterContext::OpenGL))
                    continue;
                vf->apply(d.statistics, &d.video_frame); //painter and paint device are ready, passing the video frame is ok
                d.video_frame.setMetaData(QStringLiteral("gpu_filtered"), true);
            }
        }
        /* begin paint. how about QPainter::beginNativePainting()?
         * fill the background color when necessary, e.g. the renderer was resized or the image is null.
         * if we access d.data, which is modified in AVThread, the following must be
         * protected by the mutex. otherwise (e.g. QPainterRenderer) the lock is not
         * required if drawing on the shared data is safe
         */
        drawBackground();
        /*
         * NOTE: if data is not copied in receiveFrame(), you should always call drawFrame()
         */
        if (d.video_frame.isValid()) {
            drawFrame();
            //qDebug("render elapsed: %lld", et.elapsed());
            if (d.statistics) {
                d.statistics->video_only.frameDisplayed(d.video_frame.timestamp());
                d.statistics->video.current_time = QTime(0, 0, 0).addMSecs(int(d.video_frame.timestamp() * 1000.0));
            }
        }
    }
    hanlePendingTasks();
    //TODO: move to AVOutput::applyFilters() //protected?
    if (!d.filters.isEmpty() && d.filter_context && d.statistics) {
        // vo filters will not modify the video frame, so no lock is required
        foreach(Filter* filter, d.filters) {
            VideoFilter *vf = static_cast<VideoFilter*>(filter);
            if (!vf) {
                qWarning("a null filter!");
                //d.filters.removeOne(filter);
                continue;
            }
            if (!vf->isEnabled())
                continue;
            // QPainter rendering on the renderer's paint device. only supported by a non-null paint engine
            if (!vf->context() || vf->context()->type() == VideoFilterContext::OpenGL)
                continue;
            if (vf->prepareContext(d.filter_context, d.statistics, 0)) {
                if (!vf->isSupported(d.filter_context->type()))
                    continue;
                vf->apply(d.statistics, &d.video_frame); //painter and paint device are ready, passing the video frame is ok
            }
        }
    }
    //end paint. how about QPainter::endNativePainting()?
}

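/* Paint flow summary (sketch): handlePaintEvent() is meant to be called from the
 * concrete renderer's paint handler. It performs two filter passes:
 *   1. under img_mutex, filters supporting VideoFilterContext::OpenGL are applied
 *      directly to the frame (GPU filters), then drawBackground()/drawFrame() run;
 *   2. after hanlePendingTasks(), the remaining filters are applied through
 *      d.filter_context on the renderer's paint device (e.g. QPainter-based overlays).
 */
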
qreal VideoRenderer::brightness() const
{
    return d_func().brightness;
}

bool VideoRenderer::setBrightness(qreal brightness)
{
    DPTR_D(VideoRenderer);
    if (d.brightness == brightness)
        return true;
    if (!onSetBrightness(brightness))
        return false;
    d.brightness = brightness;
    Q_EMIT brightnessChanged(brightness);
    updateUi();
    return true;
}

qreal VideoRenderer::contrast() const
{
    return d_func().contrast;
}

bool VideoRenderer::setContrast(qreal contrast)
{
    DPTR_D(VideoRenderer);
    if (d.contrast == contrast)
        return true;
    if (!onSetContrast(contrast))
        return false;
    d.contrast = contrast;
    Q_EMIT contrastChanged(contrast);
    updateUi();
    return true;
}

qreal VideoRenderer::hue() const
{
    return d_func().hue;
}

bool VideoRenderer::setHue(qreal hue)
{
    DPTR_D(VideoRenderer);
    if (d.hue == hue)
        return true;
    if (!onSetHue(hue))
        return false;
    d.hue = hue;
    Q_EMIT hueChanged(hue);
    updateUi();
    return true;
}

qreal VideoRenderer::saturation() const
{
    return d_func().saturation;
}

bool VideoRenderer::setSaturation(qreal saturation)
{
    DPTR_D(VideoRenderer);
    if (d.saturation == saturation)
        return true;
    if (!onSetSaturation(saturation))
        return false;
    d.saturation = saturation;
    Q_EMIT saturationChanged(saturation);
    updateUi();
    return true;
}

bool VideoRenderer::onSetBrightness(qreal b)
{
    Q_UNUSED(b);
    return false;
}

bool VideoRenderer::onSetContrast(qreal c)
{
    Q_UNUSED(c);
    return false;
}

bool VideoRenderer::onSetHue(qreal h)
{
    Q_UNUSED(h);
    return false;
}

bool VideoRenderer::onSetSaturation(qreal s)
{
    Q_UNUSED(s);
    return false;
}

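/* The color setters return false unless the concrete renderer overrides the matching
 * onSet*() hook (the base implementations above return false, so nothing changes for
 * renderers without color-adjustment support). Values are conventionally small signed
 * numbers around 0 (typically in [-1, 1], with 0 meaning "no adjustment").
 *
 *   if (!vo->setBrightness(0.2))
 *       qWarning("this renderer does not support brightness adjustment");
 */
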
QColor VideoRenderer::backgroundColor() const
{
    return d_func().bg_color;
}

void VideoRenderer::onSetBackgroundColor(const QColor &color)
{
    Q_UNUSED(color);
}

void VideoRenderer::setBackgroundColor(const QColor &c)
{
    DPTR_D(VideoRenderer);
    if (d.bg_color == c)
        return;
    onSetBackgroundColor(c);
    d.bg_color = c;
    Q_EMIT backgroundColorChanged();
    updateUi();
}

void VideoRenderer::updateUi()
{
    QObject *obj = (QObject*)widget();
    if (obj) {
        // UpdateRequest only syncs the backing store but does not schedule repainting. UpdateLater does.
        // Copied from qwidget_p.h. QWidget::event() will convert UpdateLater to QUpdateLaterEvent and read its region()
        class QUpdateLaterEvent : public QEvent
        {
        public:
            explicit QUpdateLaterEvent(const QRegion& paintRegion)
                : QEvent(UpdateLater), m_region(paintRegion)
            {}
            ~QUpdateLaterEvent() {}
            inline const QRegion &region() const { return m_region; }
        protected:
            QRegion m_region;
        };
        QCoreApplication::instance()->postEvent(obj, new QUpdateLaterEvent(QRegion(0, 0, rendererWidth(), rendererHeight())));
    } else {
        obj = (QObject*)qwindow();
        if (obj)
            QCoreApplication::instance()->postEvent(obj, new QEvent(QEvent::UpdateRequest));
    }
}
} //namespace QtAV