[mythtv] [PATCH] Video timebase

steve at nexusuk.org steve at nexusuk.org
Mon Dec 22 13:11:55 EST 2003


This patch adds a "Use video as timebase" option, which displays the video 
at a constant rate and dynamically warps the audio speed to correct drift.

For systems synchronising with the vertical blanking interval of the 
graphics card, in situations where the VBI is occurring at the same 
frequency as the video's frame rate, adjusting the video timing involves 
keeping a frame on screen for twice as long as it should be, or dropping a 
frame entirely.  By adjusting the audio instead, this patch guarantees 
exactly one frame per frame period (as much as can be guaranteed), which 
produces smoother video.

For systems not synchronising with the VBI, the benefits are debatable. If 
there is no vsync support available, /dev/rtc will be used as the 
timebase, and if that isn't available then usleep() will be used as a last 
resort.

The amount of audio warping needed to keep the audio and video in sync is 
averaged over a 10 minute period and recorded in the settings. Next time a 
video is played the audio warping is initialised to that value. The hope 
is that on the same system, the A/V drift rate will always be similar so 
the audio can be warped right from the start to keep it synchronised.

Bugs: The audio warper for void NuppelVideoPlayer::AddAudioData(char 
*buffer, int len, long long timecode) makes a complete mess of the audio. 
This basically affects PCM audio and probably MPEG2 playback - MP3 audio 
uses void NuppelVideoPlayer::AddAudioData(short int *lbuffer, short int 
*rbuffer, int samples, long long timecode) which works fine.

MythTV patches available at:  http://www.nexusuk.org/projects/pvr/patches/

-- 

 - Steve                                             http://www.nexusuk.org/

     Servatis a periculum, servatis a maleficum - Whisper, Evanescence
-------------- next part --------------
diff -urN mythtv-0.13.vanilla/libs/libmythtv/NuppelVideoPlayer.cpp mythtv-0.13.videotimebase/libs/libmythtv/NuppelVideoPlayer.cpp
--- mythtv-0.13.vanilla/libs/libmythtv/NuppelVideoPlayer.cpp	2003-12-06 00:01:45.000000000 +0000
+++ mythtv-0.13.videotimebase/libs/libmythtv/NuppelVideoPlayer.cpp	2003-12-22 16:53:41.000000000 +0000
@@ -10,6 +10,8 @@
 #include <qstringlist.h>
 #include <qsqldatabase.h>
 #include <qmap.h>
+#include <linux/rtc.h>
+#include <sys/ioctl.h>
 
 #include <iostream>
 using namespace std;
@@ -876,6 +878,162 @@
     }
 }
 
+#define MAXWARPDIFF     0.0005                      // Maximum amount the warpfactor can change in 1 frame
+#define WARPMULTIPLIER  1000000000                  // How much do we multiply the warp by when storing it in an integer
+#define WARPAVLEN       (video_frame_rate * 600)    // How long to average the warp over
+#define RTCRATE         1024                        // RTC frequency if we have no vsync
+float NuppelVideoPlayer::WarpFactor(void) {
+    // Calculate a new warp factor
+    float   divergence;
+    float   rate;
+    float   newwarp = 1;
+    float   warpdiff;
+
+    divergence = (float)avsync_avg / (float)frame_interval;             // Number of frames the audio is out by
+    rate = (float)(avsync_avg - avsync_oldavg) / (float)frame_interval; // Number of frames divergence is changing by per frame
+    avsync_oldavg = avsync_avg;
+    newwarp = warpfactor_avg * (1 + ((divergence + rate) / 125));
+
+    // Clip the amount changed so we don't get big frequency variations
+    warpdiff = newwarp / warpfactor;
+    if (warpdiff > (1 + MAXWARPDIFF)) {
+//        cerr << "Clipped.  Warpdiff: " << warpdiff << "  warp: " << newwarp;
+        newwarp = warpfactor * (1 + MAXWARPDIFF);
+//        cerr << "  clipped to: " << newwarp << endl;
+    } else if (warpdiff < (1 - MAXWARPDIFF)) {
+//        cerr << "Clipped.  Warpdiff: " << warpdiff << "  warp: " << newwarp;
+        newwarp = warpfactor * (1 - MAXWARPDIFF);
+//        cerr << "  clipped to: " << newwarp << endl;
+    };
+    
+    warpfactor = newwarp;
+
+    // Clip final warp factor
+    if (warpfactor < 0.5) warpfactor = 0.5;
+    else if (warpfactor > 2) warpfactor = 2;
+    warpfactor_avg = (warpfactor + (warpfactor_avg * (WARPAVLEN - 1))) / WARPAVLEN;   // Keep a 10 minute average
+//    cerr << "Divergence: " << divergence << "  Rate: " << rate << "  Warpfactor: " << warpfactor << "  warpfactor_avg: " << warpfactor_avg << endl;
+    return divergence;
+}
+
+void NuppelVideoPlayer::InitVTAVSync(void) {
+    QString timing_type = "next trigger";
+
+    //warpfactor_avg = 1;
+    warpfactor_avg = gContext->GetNumSetting("WarpFactor", 0);
+    if (warpfactor_avg) warpfactor_avg /= WARPMULTIPLIER;
+    else warpfactor_avg = 1;
+    // Reset the warpfactor if it's obviously bogus
+    if (warpfactor_avg < 0.5) warpfactor_avg = 1;
+    if (warpfactor_avg > 2) warpfactor_avg = 1;
+    warpfactor = warpfactor_avg;
+    avsync_oldavg = 0;
+    rtcfd = -1;
+    if (!disablevideo) {
+        int ret = vsync_init();
+
+        refreshrate = videoOutput->GetRefreshRate();
+        if (refreshrate <= 0) refreshrate = frame_interval;
+        if (ret > 0) {
+            hasvsync = true;
+
+            if ( ret == 1 ) timing_type = "nVidia polling";
+            vsynctol = refreshrate / 2; // How far out can the vsync be for us to use it?
+        } else {
+            rtcfd = open("/dev/rtc", O_RDONLY);
+            if (rtcfd >= 0) {
+                if ((ioctl(rtcfd, RTC_IRQP_SET, RTCRATE) < 0) || (ioctl(rtcfd, RTC_PIE_ON, 0) < 0)) {
+                    close(rtcfd);
+                    rtcfd = -1;
+                } else {
+                    vsynctol = 1000000 / RTCRATE; // How far out can the interrupt be for us to use it?
+                    timing_type = "/dev/rtc";
+                };
+            };
+        }
+
+        nice(-19);
+
+        QString msg = QString("Video timing method: %1").arg(timing_type);
+        VERBOSE(VB_PLAYBACK, msg);
+        msg = QString("Refresh rate: %1, frame interval: %2") .arg(refreshrate).arg(frame_interval);
+        VERBOSE(VB_PLAYBACK, msg);
+    }
+}
+
+void NuppelVideoPlayer::VTAVSync(void)
+{
+    float           diverge;
+    unsigned long   rtcdata;
+    
+    VideoFrame *buffer = videoOutput->GetLastShownFrame();
+    if (!buffer) {
+        cerr << "No video buffer, error error\n";
+        return;
+    }
+
+    diverge = WarpFactor();
+    /*if (diverge < -5) cerr << "Dropping frame to keep audio in sync :(" << endl;
+    else*/ if (disablevideo) {
+        delay = UpdateDelay(&nexttrigger);
+        if (delay > 0) usleep(delay);
+    } else {
+        // if we get here, we're actually going to do video output
+        delay = UpdateDelay(&nexttrigger);
+        if (buffer) videoOutput->PrepareFrame(buffer);
+        if ((hasvsync) || (rtcfd >= 0)) {
+            delay = UpdateDelay(&nexttrigger);
+            if (delay < (0 - vsynctol)) cerr << "Late frame!  Delay: " << delay << endl;
+            if (hasvsync) vsync_wait_for_retrace();
+            else read(rtcfd,&rtcdata,sizeof(rtcdata));
+            delay = UpdateDelay(&nexttrigger);
+            while (delay > vsynctol) {
+                if (hasvsync) vsync_wait_for_retrace(); else read(rtcfd,&rtcdata,sizeof(rtcdata)); // use whichever timebase we actually have
+                delay = UpdateDelay(&nexttrigger);
+            };
+        } else {    // No vsync _or_ RTC, fall back to usleep() (yuck)
+            // (the RTC case is already handled by the branch above, so the
+            // previous "else if (rtcfd >= 0)" arm here was unreachable)
+            delay = UpdateDelay(&nexttrigger);
+            if (delay > 0) usleep(delay);   // usleep() takes an unsigned value; never pass a negative delay
+        };
+        
+        // Reset the frame timer to current time since we're trusting the video timing
+        gettimeofday(&nexttrigger, NULL);
+        
+        // Display the frame
+        videoOutput->Show();
+    };
+    
+    if (output_jmeter && output_jmeter->RecordCycleTime()) cout << "avsync_avg: " << avsync_avg / 1000 << ", avsync_oldavg: " << avsync_oldavg / 1000 << ", warpfactor: " << warpfactor << ", warpfactor_avg: " << warpfactor_avg << endl;
+
+    // Schedule next frame
+    nexttrigger.tv_usec += frame_interval;
+    NormalizeTimeval(&nexttrigger);
+    
+    if (audioOutput && normal_speed) {
+        lastaudiotime = audioOutput->GetAudiotime(); // ms, same scale as timecodes
+        if (lastaudiotime != 0) { // lastaudiotime = 0 after a seek
+            // The time at the start of this frame (ie, now) is given by last->timecode
+            avsync_delay = (buffer->timecode - lastaudiotime) * 1000; // uSecs
+            avsync_avg = (avsync_delay + (avsync_avg * 3)) / 4;
+        } else {
+            avsync_avg = 0;
+            avsync_oldavg = 0;
+        };
+    };
+}
+
+void NuppelVideoPlayer::ShutdownVTAVSync(void) {
+    if (hasvsync) vsync_shutdown();
+    if (hasvgasync) vgasync_cleanup();
+    gContext->SaveSetting("WarpFactor", (int)(warpfactor_avg * WARPMULTIPLIER));
+    if (rtcfd >= 0) {
+        close(rtcfd);
+        rtcfd = -1;
+    };
+}
+
 void NuppelVideoPlayer::InitExAVSync(void)
 {
     QString timing_type = "next trigger";
@@ -1191,6 +1349,7 @@
 
     reducejitter = gContext->GetNumSetting("ReduceJitter", 0);
     experimentalsync = gContext->GetNumSetting("ExperimentalAVSync", 0);
+    usevideotimebase = gContext->GetNumSetting("UseVideoTimebase", 0);
 
     if ((print_verbose_messages & VB_PLAYBACK) != 0)
         output_jmeter = new Jitterometer("video_output", 100);
@@ -1201,7 +1360,8 @@
 
     gettimeofday(&nexttrigger, NULL);
 
-    if (experimentalsync)
+    if (usevideotimebase) InitVTAVSync();
+    else if (experimentalsync)
         InitExAVSync();
 
     while (!eof && !killvideo)
@@ -1262,7 +1422,8 @@
 
         videoOutput->ProcessFrame(frame, osd, videoFilters, pipplayer);
 
-        if (experimentalsync)
+        if (usevideotimebase) VTAVSync();
+        else if (experimentalsync)
             ExAVSync();
         else 
             OldAVSync();
@@ -1277,7 +1438,8 @@
     delete videoOutput;
     videoOutput = NULL;
 
-    if (experimentalsync)
+    if (usevideotimebase) ShutdownVTAVSync();
+    else if (experimentalsync)
         ShutdownExAVSync();
 }
 
@@ -1607,9 +1769,25 @@
 
 void NuppelVideoPlayer::AddAudioData(char *buffer, int len, long long timecode)
 {
-    if (audioOutput)
-        audioOutput->AddSamples(buffer, len / (audio_channels * audio_bits / 8),
-                                timecode);
+    if (audioOutput) {
+        if (usevideotimebase) {
+            int         samples;
+            char *      newbuffer;
+            float       incount = 0;
+            int         outcount;
+            int         samplesize;
+
+            samplesize = audio_channels * audio_bits / 8;
+            samples = len / samplesize;
+            newbuffer = (char *) malloc(len * 2);
+            for (incount = 0, outcount = 0; (incount < samples) && (outcount < (samples * 2)); outcount++, incount += warpfactor) {
+                memcpy(newbuffer + (outcount * samplesize), buffer + ((int)incount * samplesize), samplesize);
+            };
+            samples = outcount;
+            audioOutput->AddSamples(newbuffer, samples, timecode);
+            free(newbuffer);
+        } else audioOutput->AddSamples(buffer, len / (audio_channels * audio_bits / 8), timecode);
+    }
 }
 
 void NuppelVideoPlayer::AddAudioData(short int *lbuffer, short int *rbuffer,
@@ -1618,7 +1796,25 @@
     if (audioOutput)
     {
         char *buffers[] = {(char *)lbuffer, (char *)rbuffer};
-        audioOutput->AddSamples(buffers, samples, timecode);
+        if (usevideotimebase) {
+            short int * newlbuffer;
+            short int * newrbuffer;
+            float       incount = 0;
+            int         outcount;
+
+            newlbuffer = (short int *) malloc(sizeof(short int) * samples * 2);
+            newrbuffer = (short int *) malloc(sizeof(short int) * samples * 2);
+            buffers[0] = (char *)newlbuffer;
+            buffers[1] = (char *)newrbuffer;
+            for (incount = 0, outcount = 0; (incount < samples) && (outcount < (samples * 2)); outcount++, incount += warpfactor) {
+                newlbuffer[outcount] = lbuffer[(int) incount];
+                newrbuffer[outcount] = rbuffer[(int) incount];
+            };
+            samples = outcount;
+            audioOutput->AddSamples(buffers, samples, timecode);
+            free(newlbuffer);
+            free(newrbuffer);
+        } else audioOutput->AddSamples(buffers, samples, timecode);
     }
 }
 
diff -urN mythtv-0.13.vanilla/libs/libmythtv/NuppelVideoPlayer.h mythtv-0.13.videotimebase/libs/libmythtv/NuppelVideoPlayer.h
--- mythtv-0.13.vanilla/libs/libmythtv/NuppelVideoPlayer.h	2003-11-26 21:43:02.000000000 +0000
+++ mythtv-0.13.videotimebase/libs/libmythtv/NuppelVideoPlayer.h	2003-12-22 16:53:24.000000000 +0000
@@ -150,6 +150,7 @@
 
     void DrawSlice(VideoFrame *frame, int x, int y, int w, int h);
 
+    float WarpFactor(void);
     void AddAudioData(char *buffer, int len, long long timecode);
     void AddAudioData(short int *lbuffer, short int *rbuffer, int samples,
                       long long timecode);
@@ -384,10 +385,15 @@
     int delay;
     int avsync_delay;
     int avsync_avg;
+    int avsync_oldavg;
     int refreshrate;
     int frame_interval; // always adjusted for play_speed
     float play_speed;  
     bool normal_speed;
+    float warpfactor;
+    float warpfactor_avg;
+    int rtcfd;
+    int vsynctol;
  
     bool delay_clipping;
     struct timeval nexttrigger, now;
@@ -398,6 +404,9 @@
 
     Jitterometer *output_jmeter;
 
+    void InitVTAVSync(void);
+    void VTAVSync(void);
+    void ShutdownVTAVSync(void);
     void InitExAVSync(void);
     void OldAVSync(void);
     void ExAVSync(void);
@@ -405,6 +414,7 @@
 
     bool reducejitter;
     bool experimentalsync;
+    bool usevideotimebase;
 
     bool limitKeyRepeat;
 
diff -urN mythtv-0.13.vanilla/programs/mythfrontend/globalsettings.cpp mythtv-0.13.videotimebase/programs/mythfrontend/globalsettings.cpp
--- mythtv-0.13.vanilla/programs/mythfrontend/globalsettings.cpp	2003-12-10 21:35:46.000000000 +0000
+++ mythtv-0.13.videotimebase/programs/mythfrontend/globalsettings.cpp	2003-12-22 16:53:24.000000000 +0000
@@ -597,6 +597,17 @@
     };
 };
 
+class UseVideoTimebase: public CheckBoxSetting, public GlobalSetting {
+public:
+    UseVideoTimebase():
+        GlobalSetting("UseVideoTimebase") {
+        setLabel(QObject::tr("Use video as timebase"));
+        setValue(false);
+        setHelpText(QObject::tr("Use the video as the timebase and warp "
+                    "the audio to keep it in sync."));
+    };
+};
+
 class DefaultCCMode: public CheckBoxSetting, public GlobalSetting {
 public:
     DefaultCCMode():
@@ -1403,6 +1414,7 @@
     general->addChild(new CustomFilters());
     general->addChild(new ReduceJitter());
     general->addChild(new ExperimentalSync());
+    general->addChild(new UseVideoTimebase());
     general->addChild(new DecodeExtraAudio());
     general->addChild(new PIPLocation());
     addChild(general);


More information about the mythtv-dev mailing list