How to use the pyperf.perf_counter function in pyperf

To help you get started, we’ve selected a few pyperf examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from github.com/matrix-org/synapse — synmark/suites/lrucache_evict.py (view on GitHub):
async def main(reactor, loops):
    """
    Benchmark inserting `loops` keys into an LruCache sized to hold only
    half of them, so the second half of the insertions each evict an
    earlier entry.  Returns the elapsed wall-clock time in seconds.
    """
    lru = LruCache(loops // 2)

    started = perf_counter()
    for key in range(loops):
        lru[key] = True
    return perf_counter() - started
Example from github.com/gevent/gevent — benchmarks/bench_spawn.py (view on GitHub):
def _test(spawn, sleep, options):
    """Time the spawn / sleep / join phases of creating N greenlets.

    ``spawn`` and ``sleep`` are the concurrency primitives under test;
    ``options`` carries the benchmark configuration (kwargs, sleep, join).
    """
    # Reset the module-level counter that incr() bumps when it runs.
    global counter
    counter = 0
    before_spawn = perf_counter()
    # xrange: this benchmark supports Python 2 (or aliases xrange elsewhere).
    for _ in xrange(N):
        spawn(incr, **options.kwargs)
    spawn_duration = perf_counter() - before_spawn


    if options.sleep:
        # Nothing has been scheduled yet, so incr() must not have run.
        assert counter == 0, counter
        before_sleep = perf_counter()
        sleep(0)  # yield so all spawned tasks get a chance to run
        sleep_duration = perf_counter() - before_sleep
        assert counter == N, (counter, N)
    else:
        sleep_duration = -1  # sentinel: sleep phase skipped


    if options.join:
        before_join = perf_counter()
        options.join()
        join_duration = perf_counter() - before_join
    # NOTE(review): snippet is truncated here — the original function
    # continues (a fuller copy of this file appears later on this page).
Example from github.com/python/pyperformance — pyperformance/benchmarks/bm_hexiom.py (view on GitHub):
# NOTE(review): snippet starts mid-function; the enclosing def header (and
# the first line's indentation) were lost in extraction.
strategy = Done.FIRST_STRATEGY
    stream = io.StringIO()

    board = board.strip()
    expected = solution.rstrip()

    range_it = range(loops)
    t0 = pyperf.perf_counter()

    # Timed region: solve the board `loops` times; only the last solver
    # output is kept for the validation below.
    for _ in range_it:
        stream = io.StringIO()
        solve_file(board, strategy, order, stream)
        output = stream.getvalue()
        stream = None

    dt = pyperf.perf_counter() - t0

    # Strip trailing whitespace per line before comparing with `expected`.
    output = '\n'.join(line.rstrip() for line in output.splitlines())
    if output != expected:
        raise AssertionError("got a wrong answer:\n%s\nexpected: %s"
                             % (output, expected))

    return dt
Example from github.com/python/pyperformance — pyperformance/benchmarks/bm_spectral_norm.py (view on GitHub):
def bench_spectral_norm(loops):
    """Run the spectral-norm power-iteration workload `loops` times.

    Each iteration performs ten rounds of A^t·A multiplication starting
    from the all-ones vector, then accumulates the u·v and v·v dot
    products as part of the workload.  Returns the elapsed time in
    seconds.
    """
    started = pyperf.perf_counter()

    for _ in range(loops):
        u = [1] * DEFAULT_N

        for _round in range(10):
            v = eval_AtA_times_u(u)
            u = eval_AtA_times_u(v)

        # Dot products u·v and v·v; their values are not returned — they
        # exist to keep the workload identical to the reference benchmark.
        vBv = 0
        vv = 0
        for ue, ve in zip(u, v):
            vBv += ue * ve
            vv += ve * ve

    return pyperf.perf_counter() - started
Example from github.com/cropsinsilico/yggdrasil — yggdrasil/timing.py (view on GitHub):
Returns:
        float: Time (in seconds) required to perform the test the required
            number of times.

    """
    ttot = 0  # accumulated run time over all loop iterations (seconds)
    range_it = range(loops)
    for i in range_it:
        run_uuid = timer.before_run(nmsg, msg_size)
        flag = False
        nerrors = 0
        # Retry a failed run until it succeeds or max_errors is reached.
        while not flag:
            try:
                t0 = pyperf.perf_counter()
                timer.run(run_uuid, timer=pyperf.perf_counter)
                t1 = pyperf.perf_counter()
                tdif = t1 - t0
                timer.after_run(run_uuid, tdif)
                ttot += tdif
                flag = True
            except AssertionError as e:  # pragma: debug
                nerrors += 1
                if nerrors >= max_errors:
                    raise  # give up: too many consecutive failures
                else:
                    warnings.warn("Error %d/%d. Trying again. (error = '%s')"
                                  % (nerrors, max_errors, e), RuntimeWarning)
    return ttot
Example from github.com/python/pyperformance — pyperformance/benchmarks/bm_mdp.py (view on GitHub):
def bench_mdp(loops):
    """Evaluate the MDP ``Battle`` model `loops` times and return the
    elapsed time, verifying the final result against a known-good value.
    """
    known_good = 0.89873589887
    tolerance = 1e-6

    started = pyperf.perf_counter()
    for _ in range(loops):
        result = Battle().evaluate(0.192)
    elapsed = pyperf.perf_counter() - started

    # Guard against a silently-wrong benchmark workload.
    if abs(result - known_good) > tolerance:
        raise Exception("invalid result: got %s, expected %s "
                        "(diff: %s, max diff: %s)"
                        % (result, known_good, result - known_good, tolerance))
    return elapsed
Example from github.com/python/pyperformance — pyperformance/benchmarks/bm_unpack_sequence.py (view on GitHub):
# NOTE(review): snippet starts mid-function; the def header, the loop over
# iterations, and the first line's indentation were lost in extraction.
# `to_unpack` and `t0` are defined earlier in the original file.
a, b, c, d, e, f, g, h, i, j = to_unpack
        # The same 10-way tuple unpack is repeated (manually unrolled) so
        # loop overhead does not dominate what is being measured.
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack
        a, b, c, d, e, f, g, h, i, j = to_unpack

    return pyperf.perf_counter() - t0
Example from github.com/gevent/gevent — benchmarks/bench_spawn.py (view on GitHub):
# NOTE(review): snippet starts mid-function (inside the spawn loop of
# `_test`); the def header, globals setup, and loop line were lost in
# extraction, along with the first line's indentation.
spawn(incr, **options.kwargs)
    spawn_duration = perf_counter() - before_spawn


    if options.sleep:
        # Nothing has been scheduled yet, so incr() must not have run.
        assert counter == 0, counter
        before_sleep = perf_counter()
        sleep(0)  # yield so all spawned tasks get a chance to run
        sleep_duration = perf_counter() - before_sleep
        assert counter == N, (counter, N)
    else:
        sleep_duration = -1  # sentinel: sleep phase skipped


    if options.join:
        before_join = perf_counter()
        options.join()
        join_duration = perf_counter() - before_join
    else:
        join_duration = -1  # sentinel: join phase skipped

    # Per-phase durations are returned together as a Times record.
    return Times(spawn_duration,
                 sleep_duration,
                 join_duration)
Example from github.com/python/pyperformance — pyperformance/benchmarks/bm_sqlite_synth.py (view on GitHub):
def bench_sqlite(loops):
    """Benchmark sqlite3: row inserts plus user-defined SQL functions and
    aggregates on an in-memory database.

    NOTE(review): this snippet is truncated — the original presumably
    returns the elapsed time measured from ``t0``; verify against the
    upstream file.
    """
    t0 = pyperf.perf_counter()

    conn = sqlite3.connect(":memory:")
    conn.execute('create table cos (x, y, z);')
    # Populate `loops` rows: the integer, its cosine, and its string form.
    for i in range(loops):
        cos_i = math.cos(i)
        conn.execute('insert into cos values (?, ?, ?)',
                     [i, cos_i, str(i)])

    # Register math.cos as a SQL scalar function and check it agrees with
    # the precomputed `y` column on every row.
    conn.create_function("cos", 1, math.cos)
    for x, cosx1, cosx2 in conn.execute("select x, cos(x), y from cos"):
        assert math.cos(x) == cosx1 == cosx2

    # Exercise a user-defined aggregate over the text column.
    conn.create_aggregate("avglength", 1, AvgLength)
    cursor = conn.execute("select avglength(z) from cos;")
    cursor.fetchone()[0]