ext.py
import os
from time import ctime
from pickle import load, dump, HIGHEST_PROTOCOL
from multiprocessing import Pool, cpu_count
from multiprocessing.pool import ThreadPool as Pool2
##
def cls():
    # Clear the terminal on both POSIX ('clear') and Windows ('cls').
    os.system('clear' if os.name != 'nt' else 'cls')
##
def now():
    # Return the HH:MM:SS portion of ctime(), e.g. '13:37:05'.
    return [e for e in ctime().split(' ') if ':' in e][-1]
##
def pickle_save(obj, file_path, buffered=False):
    with open(file_path, 'wb+') as f:
        if buffered:
            # Wrap the file so writes larger than 2 GiB are chunked.
            return dump(obj, BufferedFile(f), protocol=HIGHEST_PROTOCOL)
        else:
            return dump(obj, f)

def pickle_load(file_path, buffered=False):
    try:
        with open(file_path, 'rb') as f:
            if buffered:
                # Wrap the file so reads larger than 2 GiB are chunked.
                return load(BufferedFile(f))
            else:
                return load(f)
    except Exception:
        # Missing or unreadable file: signal failure with None.
        return None
##
class BufferedFile(object):
    """Wrap a file object and split reads/writes of 2 GiB or more into
    chunks, working around the per-call size limit of read()/write()."""

    def __init__(self, f):
        self.f = f

    def __getattr__(self, item):
        # Delegate every other attribute/method to the wrapped file object.
        return getattr(self.f, item)

    def read(self, n):
        if n >= (1 << 31):
            buffer = bytearray(n)
            i = 0
            while i < n:
                # At most (1 << 31) - 1 bytes per call (just under 2 GiB).
                batch_size = min(n - i, (1 << 31) - 1)
                buffer[i:i + batch_size] = self.f.read(batch_size)
                i += batch_size
            return buffer
        return self.f.read(n)

    def write(self, buffer):
        n = len(buffer)
        i = 0
        while i < n:
            batch_size = min(n - i, (1 << 31) - 1)
            self.f.write(buffer[i:i + batch_size])
            i += batch_size
##
def parallel(fn, lst, chunksize=None, backend='proc', hm_workers=None):
    # Map fn over lst with a process pool ('proc') or a thread pool,
    # defaulting to one worker per CPU core.
    if not hm_workers:
        hm_workers = cpu_count()
    with (Pool if backend == 'proc' else Pool2)(hm_workers) as p:
        res = p.map_async(fn, lst, chunksize)
        p.close()
        p.join()
        return res.get()
##
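## Usage sketch (not part of the original module): exercises parallel(),
## pickle_save() and pickle_load(). `_demo_square` and '_demo.pkl' are
## hypothetical names chosen for illustration; the worker function must be
## defined at module level so the process pool can pickle it.
def _demo_square(x):
    return x * x

if __name__ == '__main__':
    results = parallel(_demo_square, list(range(8)), backend='proc')
    pickle_save(results, '_demo.pkl')
    print(now(), pickle_load('_demo.pkl'))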