~oubiwann/+junk/gevent-get-page-examples

« back to all changes in this revision

Viewing changes to 04-naive-benchmarks.py

  • Committer: Duncan McGreggor
  • Date: 2012-03-08 05:00:06 UTC
  • Revision ID: duncan@dreamhost.com-20120308050006-odek41fo5m1s83by
Added some benchmarking/stats-gathering code for gevent under different loads
and iteration counts.

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
 
 
2
#!/usr/bin/python
 
3
# Copyright (c) 2012 New Dream Network, LLC (DreamHost)
 
4
"""
 
5
Use the Greenlet class to run multiple "jobs" and wait for them to complete.
 
6
"""
 
7
import gevent
 
8
from gevent import monkey
 
9
 
 
10
monkey.patch_all()
 
11
 
 
12
import timeit
 
13
import urllib2
 
14
 
 
15
 
 
16
 
 
17
 
 
18
def get_page(url, content_offset=100):
    """Fetch *url* and return the response body as a string.

    Never raises: on any failure the exception object itself is returned
    in place of the data, so callers can inspect what went wrong.
    ``content_offset`` is accepted for API compatibility but is currently
    unused inside this function -- TODO confirm intended use.
    """
    try:
        result = urllib2.urlopen(url).read()
    except Exception as err:  # deliberate best-effort: hand the error back
        result = err
    return result
 
24
 
 
25
 
 
26
def check_content(greenlet_exit):
    """link_value callback: size-check the result of a finished fetch job.

    ``greenlet_exit`` is the completed greenlet; its ``.value`` holds either
    the page body (a string) or the exception object that get_page returned
    on failure.

    Returns the length of the data in bytes, or 0 when the value is not a
    string (i.e. the fetch failed).  The original computed this count into a
    local that shadowed the builtin ``bytes`` and then discarded it; returning
    it fixes the dead computation while staying backward compatible, since
    gevent ignores a link callback's return value.
    """
    data = greenlet_exit.value
    num_bytes = 0
    # basestring (py2) covers both str and unicode page bodies; exception
    # objects fall through and keep the count at 0.
    if isinstance(data, basestring):
        num_bytes = len(data)
    return num_bytes
 
31
 
 
32
def create_greenlets(urls):
    """Spawn one Greenlet per URL and return the list of started jobs.

    Each job runs get_page and has check_content linked as its
    value-callback, so successful results are size-checked on completion.
    """
    spawned = []
    for target in urls:
        job = gevent.Greenlet(get_page, target, content_offset=20)
        job.link_value(check_content)
        job.start()
        spawned.append(job)
    return spawned
 
40
 
 
41
 
 
42
def complex(url_count=1):
    """Fetch the local test page *url_count* times via explicit Greenlet
    objects (create_greenlets) and block until every job has finished.

    NOTE(review): the name shadows the builtin ``complex``; it is kept
    because run_it/run_it_2 dispatch on it by string.
    """
    jobs = create_greenlets(["http://127.0.0.1"] * url_count)
    gevent.joinall(jobs)
 
46
 
 
47
 
 
48
def simple(url_count=1):
    """Fetch the local test page *url_count* times with bare gevent.spawn
    (no link_value callback) and wait for all fetches to complete."""
    pending = []
    for target in ["http://127.0.0.1"] * url_count:
        pending.append(gevent.spawn(get_page, target))
    gevent.joinall(pending)
 
52
 
 
53
 
 
54
def run_it(runs, requests):
    """Benchmark both fetch strategies with a fixed request count.

    For each strategy name ('complex', 'simple') and each repeat count in
    *runs*, time ``strategy(url_count=requests)`` via timeit and print one
    CSV-style line: command, repeat count, request count, elapsed seconds.
    """
    for name in ('complex', 'simple'):
        setup = 'from __main__ import %s' % name
        stmt = '%s(url_count=%s)' % (name, requests)
        for number in runs:
            elapsed = timeit.timeit(stmt, setup, number=number)
            print("%s, %s, %s, %s" % (name, number, requests, elapsed))
 
61
 
 
62
def run_it_2(runs, requests):
    """Like run_it, but varies the load as well as the repeat count.

    *runs* and *requests* are zipped in lockstep, so each repeat count is
    timed against its matching request count.  Output format matches
    run_it: command, repeat count, request count, elapsed seconds.
    """
    for name in ('complex', 'simple'):
        setup = 'from __main__ import %s' % name
        for number, count in zip(runs, requests):
            stmt = '%s(url_count=%s)' % (name, count)
            elapsed = timeit.timeit(stmt, setup, number=number)
            print("%s, %s, %s, %s" % (name, number, count, elapsed))
 
69
 
 
70
 
 
71
# Benchmark progressions: each pairs a list of timeit repeat counts with a
# request (URL) count.  Progressions 1-6 were one-off runs through run_it
# with a fixed request count and are kept here for reference; progression 7
# is the active one.
#   1: runs 1..10,20..50,100..10000      requests 1       (run_it)
#   2: runs 1..10,20..50,100..1000       requests 10      (run_it)
#   3: runs 1..10,20..50,100             requests 100     (run_it)
#   4: runs 1..10                        requests 1000    (run_it)
#   5: runs 1,2,3                        requests 10000   (run_it)
#   6: runs 1,2,3                        requests 100000  (run_it)
#   7: runs/requests zipped in lockstep                   (run_it_2, below)
runs = [100000, 10000, 1000, 100, 10, 1]
requests = [1, 10, 100, 1000, 10000, 100000]

if __name__ == '__main__':
    # Guarded so importing this module does not kick off the benchmark as a
    # side effect.  timeit's "from __main__ import ..." setup strings still
    # resolve, because __main__ is this script when it is run directly.
    run_it_2(runs, requests)