-
Notifications
You must be signed in to change notification settings - Fork 8
/
Copy pathgarble.py
167 lines (141 loc) · 5.31 KB
/
garble.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
#!/usr/bin/env python3
import argparse
import glob
import json
import os
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from zipfile import ZipFile
from definitions import TIMESTAMP_FMT, TIMESTAMP_LEN
from derive_subkey import derive_subkey
def parse_arguments():
    """Build and validate the command-line arguments for the garbling tool.

    Positional arguments: an optional PII source csv, the linkage schema
    directory, and the de-identification secret file.  Exits via
    ``parser.error`` (usage message + status 2) when a supplied path does
    not exist, so bad paths fail fast instead of crashing mid-run.

    Returns:
        argparse.Namespace with sourcefile, schemadir, secretfile,
        outputzip (default "garbled.zip") and outputdir (default "output").
    """
    parser = argparse.ArgumentParser(
        description="Tool for garbling PII for PPRL purposes in the CODI project"
    )
    parser.add_argument(
        "sourcefile", default=None, nargs="?", help="Source pii-TIMESTAMP.csv file"
    )
    parser.add_argument("schemadir", help="Directory of linkage schema")
    parser.add_argument("secretfile", help="Location of de-identification secret file")
    parser.add_argument(
        "-z",
        "--outputzip",
        dest="outputzip",
        default="garbled.zip",
        help="Specify a name for the .zip file. Default is garbled.zip",
    )
    parser.add_argument(
        "-o",
        "--outputdir",
        dest="outputdir",
        default="output",
        help="Specify an output directory. Default is output/",
    )
    args = parser.parse_args()
    # New check: an explicitly supplied sourcefile must exist; previously a
    # missing file surfaced much later as an obscure FileNotFoundError.
    if args.sourcefile and not Path(args.sourcefile).exists():
        parser.error("Unable to find source file: " + args.sourcefile)
    if not Path(args.schemadir).exists():
        parser.error("Unable to find directory: " + args.schemadir)
    if not Path(args.secretfile).exists():
        parser.error("Unable to find secret file: " + args.secretfile)
    return args
def validate_secret_file(secret_file):
    """Read the de-identification secret and ensure it is usable.

    The file content is stripped of surrounding whitespace (in case the
    salt text was copy & pasted instead of the file itself, e.g. with a
    trailing newline), then required to be hexadecimal and at least
    32 hex characters (128 bits).  Exits the program with an error
    message on failure; otherwise returns the secret string.
    """
    with open(secret_file, "r") as fh:
        secret = fh.read().strip()
    try:
        int(secret, 16)
    except ValueError:
        sys.exit("Secret must be in hexadecimal format")
    if len(secret) < 32:
        sys.exit("Secret smaller than minimum security level")
    return secret
def validate_clks(clk_files, metadata_file):
    """Check each generated CLK file against the expected record count.

    Reads ``number_of_records`` from the metadata JSON and compares it to
    the number of hashed entries (``"clks"``) in every file of
    ``clk_files`` (Path objects).  Exits with an error message on any
    mismatch.

    Fix: the original used ``assert``, which is silently stripped under
    ``python -O``; validation now uses an explicit check + ``sys.exit``,
    matching the error style used elsewhere in this file.
    """
    with open(metadata_file, "r") as meta_fp:
        metadata = json.load(meta_fp)
    n_lines_expected = metadata["number_of_records"]
    for clk_file in clk_files:
        with open(clk_file, "r") as clk_fp:
            data = json.load(clk_fp)
        n_lines_actual = len(data["clks"])
        if n_lines_expected != n_lines_actual:
            sys.exit(
                f"Expected {n_lines_expected} in {clk_file.name},"
                f" found {n_lines_actual}"
            )
def garble_pii(args):
    """Hash ("garble") the PII source file against each linkage schema.

    Locates the PII csv (explicit ``args.sourcefile``, or the newest
    ``pii-TIMESTAMP.csv`` in ``temp-data/``), verifies its companion
    metadata file carries a matching timestamp, stamps the metadata with
    the garble time, then runs ``anonlink hash`` once per schema file in
    ``args.schemadir``.

    Returns the list of generated CLK files plus the copied metadata
    file, all located in ``args.outputdir``.

    Fix: the metadata copy, the directory creation, and the returned
    metadata path were hard-coded to ``"output"`` while the CLK files
    honored ``-o/--outputdir``, splitting outputs across two directories
    whenever a non-default output dir was given.  ``args.outputdir``
    (default "output") is now used consistently, so default behavior is
    unchanged.
    """
    secret_file = Path(args.secretfile)
    if args.sourcefile:
        source_file = Path(args.sourcefile)
    else:
        # No explicit source: pick the newest pii-TIMESTAMP.csv in temp-data/
        filenames = list(
            filter(
                lambda x: "pii" in x and len(x) == 8 + TIMESTAMP_LEN,
                os.listdir("temp-data"),
            )
        )
        timestamps = [
            datetime.strptime(filename[4:-4], TIMESTAMP_FMT) for filename in filenames
        ]
        newest_name = filenames[timestamps.index(max(timestamps))]
        source_file = Path("temp-data") / newest_name
    print(f"PII Source: {str(source_file)}")
    os.makedirs(args.outputdir, exist_ok=True)
    source_file_name = os.path.basename(source_file)
    source_dir_name = os.path.dirname(source_file)
    source_timestamp = os.path.splitext(source_file_name.replace("pii-", ""))[0]
    metadata_file_name = source_file_name.replace("pii", "metadata").replace(
        ".csv", ".json"
    )
    metadata_file = Path(source_dir_name) / metadata_file_name
    with open(metadata_file, "r") as fp:
        metadata = json.load(fp)
    # creation_date is an ISO timestamp; drop the "-"/":" separators and the
    # trailing 7 chars (".ffffff" microseconds) to compare with the filename
    # stamp — assumes the metadata writer always includes microseconds.
    meta_timestamp = metadata["creation_date"].replace("-", "").replace(":", "")[:-7]
    # Explicit check rather than assert (asserts are stripped under -O)
    if source_timestamp != meta_timestamp:
        sys.exit("Metadata creation date does not match pii file timestamp")
    garble_time = datetime.now()
    metadata["garble_time"] = garble_time.isoformat()
    output_metadata_file = Path(args.outputdir) / metadata_file_name
    with open(output_metadata_file, "w+") as fp:
        json.dump(metadata, fp, indent=2)
    # Also record the garble time back into the source metadata file
    with open(metadata_file, "w") as fp:
        json.dump(metadata, fp, indent=2)
    secret = validate_secret_file(secret_file)
    individuals_secret = derive_subkey(secret, "individuals")
    clk_files = []
    schema = glob.glob(args.schemadir + "/*.json")
    for s in schema:
        with open(s, "r") as schema_file:
            file_contents = schema_file.read()
            # doubleHash is vulnerable to preimage attacks; refuse to use it
            if "doubleHash" in file_contents:
                sys.exit(
                    "The following schema uses doubleHash, which is insecure: " + str(s)
                )
        output_file = Path(args.outputdir) / os.path.basename(s)
        subprocess.run(
            [
                "anonlink",
                "hash",
                source_file,
                individuals_secret,
                str(s),
                str(output_file),
            ],
            check=True,
        )
        clk_files.append(output_file)
    validate_clks(clk_files, metadata_file)
    return clk_files + [output_metadata_file]
def create_output_zip(clk_files, args):
    """Bundle the garbled output files into a single zip archive.

    Writes every file in ``clk_files`` (Path objects) into
    ``<outputdir>/<outputzip>``.  The metadata copy is deleted from disk
    once archived — it only needs to live inside the zip — while the CLK
    files are left in place.
    """
    zip_path = Path(args.outputdir) / args.outputzip
    with ZipFile(zip_path, "w") as archive:
        for entry in clk_files:
            archive.write(entry)
            if "metadata" in entry.name:
                os.remove(entry)
    print("Zip file created at: " + str(zip_path))
def main():
    """CLI entry point: parse arguments, garble the PII, zip the results."""
    cli_args = parse_arguments()
    garbled_files = garble_pii(cli_args)
    create_output_zip(garbled_files, cli_args)


if __name__ == "__main__":
    main()