server.py
import copy
import logging
import os
import shutil
import zipfile
from typing import List, Tuple, Union
from urllib.parse import urlencode

import numpy as np
import requests
from flask import Flask, request, jsonify
from scipy.special import softmax
from tqdm import tqdm

from data_processing.tokenization import tokenize_text, sentenize_text
from data_processing.postprocessing import decode_entity
from neural_network.ner import load_ner


ner_logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO)
app = Flask(__name__)

def download_ner() -> bool:
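    """Download the trained NER model from Yandex Disk and unpack it.

    The ZIP archive is fetched through the Yandex Disk public API, saved
    into the module-level `model_path` directory, extracted there, and
    removed. Returns True on success and False when the downloaded size
    does not match the declared content length.
    """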
    base_url = 'https://cloud-api.yandex.net/v1/disk/public/resources/download?'
    public_key = 'https://yadi.sk/d/7CQPhR2SAu6mxw'
    final_url = base_url + urlencode(dict(public_key=public_key))
    pk_request = requests.get(final_url)
    direct_link = pk_request.json().get('href')
    response = requests.get(direct_link, stream=True)
    total_size_in_bytes = int(response.headers.get('content-length', 0))
    ner_logger.info(f'Total size of NER is {total_size_in_bytes} bytes.')
    block_size = 1024
    progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
    zip_archive_name = os.path.join(model_path, 'dp_rubert_from_siamese.zip')
    with open(zip_archive_name, 'wb') as file:
        for data in response.iter_content(block_size):
            progress_bar.update(len(data))
            file.write(data)
    progress_bar.close()
    if (total_size_in_bytes != 0) and (progress_bar.n != total_size_in_bytes):
        return False
    with zipfile.ZipFile(zip_archive_name) as archive:
        archive.extractall(model_path)
    os.remove(zip_archive_name)
    return True


model_path = os.path.join(os.path.dirname(__file__), 'models')
if not os.path.isdir(model_path):
    raise ValueError(f'The directory "{model_path}" does not exist!')
trained_ner_path = os.path.join(model_path, 'dp_rubert_from_siamese')
if not os.path.isdir(trained_ner_path):
    ner_exists = False
else:
    if not os.path.isfile(os.path.join(trained_ner_path, 'ner.h5')):
        ner_exists = False
    elif not os.path.isfile(os.path.join(trained_ner_path, 'ner.json')):
        ner_exists = False
    else:
        ner_exists = True
if not ner_exists:
    if os.path.isdir(trained_ner_path):
        shutil.rmtree(trained_ner_path, ignore_errors=True)
    if not download_ner():
        raise ValueError('The NER cannot be downloaded from Yandex Disk!')
    if not os.path.isdir(trained_ner_path):
        raise ValueError(f'The directory "{trained_ner_path}" does not exist!')
ner_model, ner_tokenizer, max_sent_len, ne_list = load_ner(trained_ner_path)

def check_input_data(data: List[Union[str, dict]]) -> str:
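    """Validate that the input is a non-empty, homogeneous list of texts.

    Every sample must be either a string or a dictionary with a string
    `text` field; the two kinds cannot be mixed in one list. Returns
    'str' or 'dict' depending on the kind of samples, and raises
    ValueError for empty or malformed input.
    """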
    res = ''
    err_msg = ''
    for idx, cur in enumerate(data):
        if isinstance(cur, str):
            if len(res) == 0:
                res = 'str'
            elif res != 'str':
                err_msg = f'Data type of sample {idx} of input data is ' \
                          f'unexpected! Expected {res}, got {type(cur)}.'
                break
        elif isinstance(cur, dict):
            if 'text' in cur:
                if isinstance(cur['text'], str):
                    if len(res) == 0:
                        res = 'dict'
                    elif res != 'dict':
                        err_msg = f'Data type of sample {idx} of input ' \
                                  f'data is unexpected! ' \
                                  f'Expected {res}, got {type(cur)}.'
                        break
                else:
                    err_msg = f'Data type of sample {idx}["text"] of input ' \
                              f'data is wrong! Expected str, ' \
                              f'got {type(cur["text"])}.'
                    break
            else:
                err_msg = f'Sample {idx} describes unknown data! ' \
                          f'The `text` is not found in the key list ' \
                          f'{sorted(list(cur.keys()))}.'
                break
        else:
            err_msg = f'Data type of sample {idx} of input data is wrong! ' \
                      f'Expected str or dict, got {type(cur)}.'
            break
    if len(err_msg) > 0:
        raise ValueError(err_msg)
    if len(res) == 0:
        raise ValueError('The input data are empty!')
    return res

def extract_texts(data: List[Union[str, dict]]) -> List[str]:
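    """Return the list of texts from the input, whichever kind it is."""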
    data_type = check_input_data(data)
    if data_type == 'str':
        prepared_data = data
    else:
        prepared_data = [cur['text'] for cur in data]
    return prepared_data

def recognize_single_text(cur_text: str) -> List[Tuple[int, int, str]]:
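    """Recognize named entities in a single text.

    The text is split into sentences; each sentence is tokenized into
    subtokens and fed to the neural network in chunks of `max_sent_len`
    subtokens. Returns a list of (start, end, entity_class) tuples, where
    the bounds are character positions in the whole source text.
    """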
    global ner_model, ner_tokenizer, max_sent_len, ne_list
    recognized_entities = []
    if len(cur_text.strip()) > 0:
        for sent_start, sent_end in sentenize_text(cur_text):
            words, subtokens, subtoken_bounds = tokenize_text(
                s=cur_text[sent_start:sent_end],
                tokenizer=ner_tokenizer
            )
            # Pad the subtoken sequence to a multiple of the network input
            # size, then split it into chunks of `max_sent_len` subtokens.
            while (len(subtokens) % max_sent_len) != 0:
                subtokens.append(ner_tokenizer.pad_token)
                subtoken_bounds.append(None)
            x = []
            start_pos = 0
            for _ in range(len(subtokens) // max_sent_len):
                end_pos = start_pos + max_sent_len
                subtoken_indices = ner_tokenizer.convert_tokens_to_ids(
                    subtokens[start_pos:end_pos]
                )
                x.append(
                    np.array(
                        subtoken_indices,
                        dtype=np.int32
                    ).reshape((1, max_sent_len))
                )
                start_pos = end_pos
            predicted = ner_model.predict(np.vstack(x), batch_size=1)
            if len(predicted) != len(ne_list):
                err_msg = f'Number of neural network heads does not ' \
                          f'correspond to number of named entities! ' \
                          f'{len(predicted)} != {len(ne_list)}'
                raise ValueError(err_msg)
            del x
            # Re-assemble the per-chunk predictions of each head into one
            # probability matrix for the whole sentence.
            probability_matrices = [
                np.vstack([
                    cur[sample_idx]
                    for sample_idx in range(len(subtokens) // max_sent_len)
                ])
                for cur in predicted
            ]
            del predicted
            # Decode the bounds of each entity class and map subtoken bounds
            # back to character positions in the source text.
            for ne_idx in range(len(ne_list)):
                entity_bounds = decode_entity(
                    softmax(probability_matrices[ne_idx], axis=1),
                    words
                )
                if len(entity_bounds) > 0:
                    for start_subtoken, end_subtoken in entity_bounds:
                        entity_start = subtoken_bounds[start_subtoken][0]
                        entity_end = subtoken_bounds[end_subtoken - 1][1]
                        recognized_entities.append((
                            sent_start + entity_start,
                            sent_start + entity_end,
                            ne_list[ne_idx]
                        ))
                del entity_bounds
            del words, subtokens, subtoken_bounds
    return recognized_entities

def enrich_data_with_recognition_results(
        data: List[Union[str, dict]],
        recognition_results: List[List[Tuple[int, int, str]]]
) -> List[Union[str, dict]]:
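    """Attach recognition results to the source data.

    Each string sample becomes a dictionary {'text': ..., 'ners': ...},
    and each dictionary sample gets a new `ners` key. Raises ValueError
    if the data and the results have different lengths.
    """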
    data_type = check_input_data(data)
    if len(data) != len(recognition_results):
        err_msg = f'Source data do not correspond to recognition results! ' \
                  f'{len(data)} != {len(recognition_results)}'
        raise ValueError(err_msg)
    enriched_data = []
    if data_type == 'str':
        for text, res in zip(data, recognition_results):
            new_res = {'text': text, 'ners': res}
            enriched_data.append(new_res)
    else:
        for cur_sample, cur_res in zip(data, recognition_results):
            new_res = copy.deepcopy(cur_sample)
            new_res['ners'] = cur_res
            enriched_data.append(new_res)
    return enriched_data

@app.route('/ready')
def ready():
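    """Health check: report that the service is ready."""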
    return 'OK'

@app.route('/recognize', methods=['POST'])
def recognize():
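    """Recognize named entities in the text(s) of a JSON POST request.

    The request body is either a single string or a list of samples
    (strings, or dictionaries with a `text` field). On success the input
    is returned enriched with the recognized entities; on any error a
    JSON message with the 400 status code is returned instead.
    """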
    request_data = request.get_json()
    if (not isinstance(request_data, str)) and \
            (not isinstance(request_data, list)):
        err_msg = f'{type(request_data)} is unknown data type for ' \
                  f'the named entity recognizer!'
        resp = jsonify({'message': err_msg})
        resp.status_code = 400
        ner_logger.error(err_msg)
    else:
        if isinstance(request_data, str):
            try:
                res = recognize_single_text(request_data)
                err_msg = ''
            except Exception as e:
                err_msg = str(e)
                res = None
            if res is None:
                resp = jsonify({'message': err_msg})
                resp.status_code = 400
                ner_logger.error(err_msg)
            else:
                resp = jsonify({
                    'text': request_data,
                    'ners': res
                })
                resp.status_code = 200
        else:
            if len(request_data) == 0:
                err_msg = 'The input data are empty!'
                resp = jsonify({'message': err_msg})
                resp.status_code = 400
                ner_logger.error(err_msg)
            else:
                try:
                    texts_for_recognition = extract_texts(request_data)
                except Exception as e:
                    err_msg = str(e)
                    texts_for_recognition = None
                if texts_for_recognition is None:
                    resp = jsonify({'message': err_msg})
                    resp.status_code = 400
                    ner_logger.error(err_msg)
                else:
                    try:
                        res = [recognize_single_text(s)
                               for s in texts_for_recognition]
                        err_msg = ''
                    except Exception as e:
                        err_msg = str(e)
                        res = None
                    if res is None:
                        resp = jsonify({'message': err_msg})
                        resp.status_code = 400
                        ner_logger.error(err_msg)
                    else:
                        try:
                            res = enrich_data_with_recognition_results(
                                request_data, res)
                            err_msg = ''
                        except Exception as e:
                            err_msg = str(e)
                            res = None
                        if res is None:
                            resp = jsonify({'message': err_msg})
                            resp.status_code = 400
                            ner_logger.error(err_msg)
                        else:
                            resp = jsonify(res)
                            resp.status_code = 200
    return resp

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8010)
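

# A minimal usage sketch (assuming the server has been started on the default
# host and port above; the exact entity labels depend on the downloaded model):
#
#     import requests
#
#     # Single text: the response is {'text': ..., 'ners': [[start, end, label], ...]}.
#     resp = requests.post('http://localhost:8010/recognize', json='Some text.')
#     print(resp.json())
#
#     # List of samples: each sample is returned enriched with a 'ners' field.
#     resp = requests.post('http://localhost:8010/recognize',
#                          json=['First text.', {'text': 'Second text.'}])
#     print(resp.json())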