It's not true that the relevant source is in SDL - clock.tick does call
SDL_Delay, but all SDL_Delay takes a number of ticks to sleep, and then just
sleeps that long. It's pygame source that is deciding how long to
sleep/wait.

... also no need to guess at the source, you can get at it from the web,
even:
http://www.seul.org/viewcvs/viewcvs.cgi/trunk/src/time.c?rev=1240&root=PyGame&view=auto

Basically, I think this could be fixed in pygame source without an SDL
change, just by putting a max on how long to wait in clock tick - and that
delay could be maxed out at the framerate (surely you never would wait
longer than 100ms to regulate a 10fps timer, right?)

here's the relevant source (accurate_delay is passed in as 0, and the
framerate arg is what was passed to tick):
----------------------------

// to be called by the other tick functions.
static PyObject*
clock_tick_base(PyObject* self, PyObject* arg, int use_accurate_delay)
{
    PyClockObject* _clock = (PyClockObject*) self;
    float framerate = 0.0f;
    int nowtime;

    if (!PyArg_ParseTuple (arg, "|f", &framerate))
        return NULL;

    if (framerate)
    {
        int delay, endtime = (int) ((1.0f / framerate) * 1000.0f);
        _clock->rawpassed = SDL_GetTicks () - _clock->last_tick;
        delay = endtime - _clock->rawpassed;

        /*just doublecheck that timer is initialized*/
        if (!SDL_WasInit (SDL_INIT_TIMER))
        {
            if (SDL_InitSubSystem (SDL_INIT_TIMER))
            {
                RAISE (PyExc_SDLError, SDL_GetError ());
                return NULL;
            }
        }

        if (use_accurate_delay)
            delay = accurate_delay (delay);
        else
        {
            // this uses sdls delay, which can be inaccurate.
            if (delay < 0)
                delay = 0;

            Py_BEGIN_ALLOW_THREADS;
            SDL_Delay ((Uint32) delay);
            Py_END_ALLOW_THREADS;
        }

        if (delay == -1)
            return NULL;
    }

    nowtime = SDL_GetTicks ();
    _clock->timepassed = nowtime - _clock->last_tick;
    _clock->fps_count += 1;
    _clock->last_tick = nowtime;
    if (!framerate)
        _clock->rawpassed = _clock->timepassed;

    if (!_clock->fps_tick)
    {
        _clock->fps_count = 0;
        _clock->fps_tick = nowtime;
    }
    else if (_clock->fps_count >= 10)
    {
        _clock->fps = _clock->fps_count /
            ((nowtime - _clock->fps_tick) / 1000.0f);
        _clock->fps_count = 0;
        _clock->fps_tick = nowtime;
        Py_XDECREF (_clock->rendered);
    }
    return PyInt_FromLong (_clock->timepassed);
}

Reply via email to