server.py · 209 lines (188 loc) · 7.91 KB
(Repository archived by the owner on Jul 15, 2024; now read-only.)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: leeyoshinari
import os
import re
import time
import json
import threading
import traceback
import configparser
import logging.handlers

import redis
import requests

cfg = configparser.ConfigParser()
cfg.read('config.conf', encoding='utf-8')


def get_config(key):
    return cfg.get('server', key, fallback=None)
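

# A minimal config.conf sketch (illustrative only: the keys are inferred from the
# get_config() calls in this file, the values are placeholders, not defaults):
#
#   [server]
#   level = INFO
#   backupCount = 7
#   logPath = logs
#   host =
#   port = 8080
#   address = 127.0.0.1:9999
#   nginxAccessLogPath =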

LEVEL = get_config('level')
backupcount = int(get_config('backupCount'))
log_path = get_config('logPath')
if not os.path.exists(log_path):
    os.mkdir(log_path)

log_level = {
    'DEBUG': logging.DEBUG,
    'INFO': logging.INFO,
    'WARNING': logging.WARNING,
    'ERROR': logging.ERROR,
    'CRITICAL': logging.CRITICAL
}

logger = logging.getLogger()
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s[line:%(lineno)d] - %(message)s')
logger.setLevel(level=log_level.get(LEVEL))

# Rotate the log file at midnight and keep `backupcount` archives.
file_handler = logging.handlers.TimedRotatingFileHandler(
    os.path.join(log_path, 'monitor.log'), when='midnight', interval=1, backupCount=backupcount)
file_handler.suffix = '%Y-%m-%d.log'
# file_handler = logging.StreamHandler()
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)


def get_ip():
    """
    Get the server's IP address.
    :return: IP address
    """
    try:
        if get_config('host'):
            IP = get_config('host')
        else:
            # No host configured: take the first address reported by `hostname -I`.
            with os.popen("hostname -I |awk '{print $1}'") as p:
                result = p.readlines()
            logger.debug(result)
            if result:
                IP = result[0].strip()
                logger.info(f'The IP address is: {IP}')
            else:
                logger.warning('Server IP address not found!')
                IP = '127.0.0.1'
    except Exception:
        IP = '127.0.0.1'

    return IP


class Task(object):
    def __init__(self):
        self.IP = get_ip()
        self.group_key = None
        self.prefix = ''
        self.influx_stream = 'influx_stream'    # Redis stream name
        self.redis_host = '127.0.0.1'
        self.redis_port = 6379
        self.redis_password = '123456'
        self.redis_db = 0
        # Expected nginx log_format:
        # log_format main '$remote_addr - $remote_user [$time_iso8601] $request_method $request_uri $server_protocol $status $body_bytes_sent $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"';
        self.compiler = re.compile(r'(?P<ip>.*?)- - \[(?P<time>.*?)\] (?P<method>.*?) (?P<path>.*?) (?P<protocol>.*?) (?P<status>.*?) (?P<bytes>.*?) (?P<rt>.*?) "(?P<referer>.*?)" "(?P<ua>.*?)"')
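        # Illustrative example of one access-log line in that format (values invented)
        # and the groups the pattern extracts from it:
        #   192.168.0.10 - - [2024-01-01T12:00:00+08:00] GET /api/query?id=1 HTTP/1.1 200 512 0.023 "-" "Mozilla/5.0" "-"
        #   ip=192.168.0.10, time=2024-01-01T12:00:00+08:00, method=GET, path=/api/query?id=1,
        #   protocol=HTTP/1.1, status=200, bytes=512, rt=0.023, referer=-, ua=Mozilla/5.0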
        self.access_log = get_config('nginxAccessLogPath')
        self.get_configure_from_server()
        if not self.access_log:
            self.find_nginx_log()

        self.redis_client = redis.StrictRedis(host=self.redis_host, port=self.redis_port, password=self.redis_password,
                                              db=self.redis_db, decode_responses=True)

        # Tail the access log in a background thread.
        t = threading.Thread(target=self.parse_log, args=())
        t.start()

    def get_configure_from_server(self):
        """Register with the monitor server and fetch the Redis connection settings."""
        url = f'http://{get_config("address")}/register'
        post_data = {
            'type': 'nginx-agent',
            'host': self.IP,
            'port': get_config('port'),
        }
        while True:
            try:
                res = request_post(url, post_data)
                logger.debug(f"The result of registration is {res}")
                self.redis_host = res['redis']['host']
                self.redis_port = res['redis']['port']
                self.redis_password = res['redis']['password']
                self.redis_db = res['redis']['db']
                self.group_key = 'nginx_' + res['groupKey']
                self.prefix = res['prefix']
                time.sleep(1)
                break
            except Exception:
                # Registration failed (server unreachable or bad response); retry every second.
                logger.error(traceback.format_exc())
                time.sleep(1)
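
    # The registration response is expected to look roughly like this (illustrative values;
    # the structure is inferred from the fields read above and in request_post()):
    #   {"code": 0, "msg": "success", "data": {"redis": {"host": "127.0.0.1", "port": 6379,
    #    "password": "123456", "db": 0}, "groupKey": "default", "prefix": "/api"}}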

    def find_nginx_log(self):
        """Locate nginx's access.log from the master process's working directory."""
        with os.popen("ps -ef|grep nginx |grep -v grep |grep master|awk '{print $2}'") as p:
            nginx_pid = p.read().strip()
        logger.info(f'nginx pid is: {nginx_pid}')
        if nginx_pid:
            with os.popen(f'pwdx {nginx_pid}') as p:
                res = p.read()
            nginx_path = res.strip().split(' ')[-1].strip()
            self.access_log = os.path.join(os.path.dirname(nginx_path), 'logs', 'access.log')
            if not os.path.exists(self.access_log):
                raise Exception(f'Not found nginx log: {self.access_log}')
        else:
            logger.error('Nginx is not found ~')
            raise Exception('Nginx is not found ~')

    def parse_log(self):
        """Follow the access log (tail -f style) and parse newly appended lines."""
        logger.info(self.access_log)
        if not os.path.exists(self.access_log):
            logger.error(f'Not found nginx log: {self.access_log}')
            raise Exception(f'Not found nginx log: {self.access_log}')

        position = 0
        with open(self.access_log, mode='r', encoding='utf-8') as f1:
            f1.readlines()      # jump to the current newest line; ignore old lines
            while True:
                try:
                    lines = f1.readlines()
                    cur_position = f1.tell()
                    if cur_position == position:
                        # Nothing new was appended; poll again shortly.
                        time.sleep(0.1)
                        continue
                    else:
                        position = cur_position
                        self.parse_line(lines)
                except Exception:
                    logger.error(traceback.format_exc())

    def parse_line(self, lines):
        """Parse each new log line and push the extracted metrics to Redis."""
        for line in lines:
            if self.prefix in line:
                if 'static' in line and '.' in line:
                    continue    # skip static resources
                else:
                    logger.debug(f'Nginx - access.log -- {line}')
                    match = self.compiler.match(line)
                    if not match:
                        logger.error(f'parse error: {line}')
                        continue
                    res = match.groups()
                    logger.debug(res)
                    path = res[3].split('?')[0].strip()
                    if 'PerformanceTest' in res[9]:
                        source = 'PerformanceTest'
                    else:
                        source = 'Normal'
                    c_time = res[1].split('+')[0].replace('T', ' ').strip()
                    try:
                        # $upstream_response_time may hold several comma-separated values; take the last one.
                        rt = float(res[7].split(',')[-1].strip()) if ',' in res[7] else float(res[7].strip())
                    except ValueError:
                        logger.error(f'parse error: {line}')
                        rt = 0.0
                    error = 0 if int(res[5]) < 400 else 1
                    self.write_redis({'measurement': self.group_key, 'tags': {'source': source, 'path': path},
                                      'fields': {'c_time': c_time, 'client': res[0].strip(), 'status': int(res[5]),
                                                 'size': int(res[6]), 'rt': rt, 'error': error}})

    def write_redis(self, data):
        try:
            # Append one data point to the stream; maxlen=100 keeps the stream trimmed.
            self.redis_client.xadd(self.influx_stream, {'data': json.dumps([data])}, maxlen=100)
        except Exception:
            logger.error(traceback.format_exc())
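
    # A single entry appended by write_redis() carries the JSON-encoded point, e.g.
    # (illustrative values):
    #   data = '[{"measurement": "nginx_default", "tags": {"source": "Normal", "path": "/api/query"},
    #             "fields": {"c_time": "2024-01-01 12:00:00", "client": "192.168.0.10",
    #                        "status": 200, "size": 512, "rt": 0.023, "error": 0}}]'
    # A consumer could read it back with, for example, redis_client.xread({'influx_stream': '0'}).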


def request_post(url, post_data):
    header = {
        "Accept": "application/json, text/plain, */*",
        "Accept-Encoding": "gzip, deflate",
        "Content-Type": "application/json; charset=UTF-8"}
    try:
        res = requests.post(url=url, json=post_data, headers=header)
        logger.info(f"The result of request is {res.content.decode('unicode_escape')}")
        if res.status_code == 200:
            response_data = json.loads(res.content.decode('unicode_escape'))
            if response_data['code'] == 0:
                return response_data['data']
            else:
                logger.error(response_data['msg'])
                raise Exception(response_data['msg'])
        else:
            raise Exception(f'Registration request failed with HTTP {res.status_code}')
    except Exception:
        logger.error(traceback.format_exc())
        raise


if __name__ == '__main__':
    task = Task()
    time.sleep(2)
    PID = os.getpid()
    with open('pid', 'w', encoding='utf-8') as f:
        f.write(str(PID))
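
# Typical usage (an assumption, not documented in this file): run `python server.py`
# from the directory containing config.conf. The agent registers with the monitor
# server, tails the nginx access log, and writes its process id to ./pid so it can
# later be stopped with `kill $(cat pid)`.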