forked from NarrativeContentGroup/ELB-Replayer
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathreplayer.py
executable file
·136 lines (122 loc) · 3.96 KB
/
replayer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
#!/usr/bin/env python
import argparse
import sys
import dateutil.parser
import requests
import itertools
from urlparse import urlparse
from twisted.internet import reactor
SCRIPT_DESCRIPTION = 'ELB Log Replayer (ELR)'

# Tally of replayed requests whose response status matched ('successful') or
# differed from ('failed') the status recorded in the original ELB log line.
TOTALS = {'successful': 0, 'failed': 0}

# Frames for the progress spinner drawn in non-verbose mode.
SPINNER = itertools.cycle(['-', '\\', '|', '/'])

PARSER = argparse.ArgumentParser(description=SCRIPT_DESCRIPTION)
PARSER.add_argument('logfile', help='the logfile to replay')
PARSER.add_argument(
    '--host',
    help='host to send requests',
    default='localhost',
)
# NOTE(review): action='store_false' makes the default True, so the Host
# header is rewritten UNLESS this flag is passed -- the inverse of what the
# flag name implies.  Left as-is because existing invocations may depend on
# the current default; confirm intent before flipping to 'store_true'.
PARSER.add_argument(
    '--replace-host',
    action='store_false',
    help='use host parameter for request Host header'
)
PARSER.add_argument(
    '--verbose',
    action='store_true',
    help='always print requests and responses',
)
PARSER.add_argument(
    '--dry-run',
    action='store_true',
    help='don\'t actually hit the `host`',
)
# Help text fixed: --paced replays with the ORIGINAL log timing (main()
# schedules each request at its log-timestamp offset via reactor.callLater);
# the previous text described the opposite behaviour.
PARSER.add_argument(
    '--paced',
    action='store_true',
    help='replay requests with the same relative timing as the original '
         'log, instead of as quickly as possible'
)
PARSER.add_argument(
    '--limit',
    action='store',
    help='replay only first n requests'
)
PARSER.add_argument(
    '--dummy',
    action='store_true',
    help='treat 404=>200 transitions as successes (for when spdj_gatekeeper_dummy_response is true)'
)
PARSER.add_argument(
    '--output',
    action='store',
    help='store information about failed requests in output file'
)
SCRIPT_ARGS = PARSER.parse_args()
def replay_request(url, host, orig_resp):
    """Replay one GET request against SCRIPT_ARGS.host and compare the
    response status with the one recorded in the original log line.

    url       -- urlparse() result for the original request URL
    host      -- original host (netloc without port) from the log line
    orig_resp -- backend status code from the log line (string)

    Updates TOTALS and, on a mismatch, appends a line to SCRIPT_ARGS.output
    when that option is set.
    """
    if not SCRIPT_ARGS.verbose:
        # Animate the spinner: erase the previous frame, draw the next one.
        sys.stdout.write('\b')
        sys.stdout.write(next(SPINNER))  # next() is valid on both py2 and py3
        sys.stdout.flush()
    if SCRIPT_ARGS.dry_run:
        # Dry run: just show what would have been requested.
        sys.stdout.write('{}\n'.format(url))
        return
    session = requests.Session()
    req = requests.Request('GET', 'http://{}{}?{}'.format(SCRIPT_ARGS.host, url.path, url.query))
    prepped = req.prepare()
    if SCRIPT_ARGS.replace_host:
        prepped.headers['Host'] = host
    resp = session.send(prepped)
    if SCRIPT_ARGS.dummy and int(orig_resp) in (404, 403) and int(resp.status_code) == 200:
        # Gatekeeper dummy mode returns 200 where the original saw 404/403;
        # treat that as a match.  Bug fix: this previously always forced the
        # code to 404, so an original 403 was still counted as a failure.
        resp.status_code = int(orig_resp)
    if str(resp.status_code) == str(orig_resp):
        warning = ''
        TOTALS['successful'] += 1
    else:
        warning = 'WARNING'
        TOTALS['failed'] += 1
        if SCRIPT_ARGS.output:
            with open(SCRIPT_ARGS.output, 'a') as output:
                output.write('{}=>{} {} {}?{}\n'.format(orig_resp, resp.status_code, resp.reason, url.path, url.query))
    if SCRIPT_ARGS.verbose or warning == 'WARNING':
        print('{}=>{} {} {}?{}'.format(orig_resp, resp.status_code, warning, url.path, url.query))
def main():
    """Read the ELB log, schedule one replay per GET line on the twisted
    reactor (timed to the log in --paced mode, threaded otherwise), run the
    reactor, then print the success/failure totals."""
    starting = None     # timestamp of the first log line (paced mode only)
    last_offset = 0.0   # seconds from `starting` of the last scheduled request
    if SCRIPT_ARGS.limit:
        countdown = int(SCRIPT_ARGS.limit)
    else:
        countdown = None  # no limit: never decremented, never hits 0
    # Close the log file deterministically instead of leaking the handle.
    with open(SCRIPT_ARGS.logfile) as logfile:
        for line in logfile:
            bits = line.split()
            if SCRIPT_ARGS.paced:
                timestamp = dateutil.parser.parse(bits[0])
                if starting is None:
                    starting = timestamp
                offset = timestamp - starting
                if offset.total_seconds() < 0:
                    # ignore requests that predate the first line
                    continue
                last_offset = offset.total_seconds()
            method = bits[11].lstrip('"')
            if method != 'GET':
                continue
            url = urlparse(bits[12])
            orig_host = url.netloc.split(':')[0]  # drop any :port suffix
            orig_resp = bits[8]                   # backend status code field
            if countdown == 0:
                break
            if SCRIPT_ARGS.limit:
                countdown -= 1
            if SCRIPT_ARGS.paced:
                reactor.callLater(last_offset, replay_request, url, orig_host, orig_resp)
            else:
                reactor.callInThread(replay_request, url, orig_host, orig_resp)
    if SCRIPT_ARGS.paced:
        # Stop shortly after the last scheduled request.  Bug fix: this
        # previously read the loop variable `offset`, which is unbound (a
        # NameError) when the log file yields no paced iterations.
        reactor.callLater(last_offset + 2, reactor.stop)
    else:
        reactor.callFromThread(reactor.stop)
    reactor.run()
    print(TOTALS)
if __name__ == "__main__":
    # Echo the parsed arguments so each run is self-documenting.
    print(SCRIPT_ARGS)
    if SCRIPT_ARGS.dry_run:
        # A dry run only prints URLs, so honouring the original timing would
        # just add pointless delays -- force fast (unpaced) replay.
        SCRIPT_ARGS.paced = False
    main()