 # system imports
 import os
-import subprocess
-import shutil
-import hashlib
-import json
-import time
 import logging
 
 # web imports
-from http import HTTPStatus
-from flask import Flask, request, jsonify, make_response
-from flask_sqlalchemy import SQLAlchemy
+from flask import Flask
 from flask_executor import Executor
-from werkzeug.utils import secure_filename
+from flask_shell2http import Shell2HTTP
 
 # Globals
 app = Flask(__name__)
 executor = Executor(app)
+shell2http = Shell2HTTP(app, executor)
+
+# with this, we can make http calls to the endpoint: /peframe
+shell2http.register_command(endpoint="peframe", command_name="peframe")
 
 # Config
 CONFIG = {
-    "SECRET_KEY": os.environ.get("FLASK_SECRET_KEY")
-    or __import__("secrets").token_hex(16),
-    "UPLOAD_PATH": os.environ.get("UPLOAD_PATH") or "uploads/",
-    "SQLALCHEMY_DATABASE_URI": os.environ.get("DATABASE_URL") or "sqlite:///site.db",
-    "SQLALCHEMY_TRACK_MODIFICATIONS": False,
-    "DEBUG": os.environ.get("FLASK_DEBUG") or False,
+    "SECRET_KEY": __import__("secrets").token_hex(16),
 }
 app.config.update(CONFIG)
 
-# SQLAlchemy Models
-db = SQLAlchemy(app)
-
-
-class Result(db.Model):
-    md5 = db.Column(db.String(128), primary_key=True, unique=True)
-    timestamp = db.Column(db.Float(), default=time.time())
-    report = db.Column(db.JSON(), nullable=True, default=None)
-    error = db.Column(db.TEXT(), nullable=True, default=None)
-    status = db.Column(db.String(20), nullable=True, default="failed")
-
-
-# Utility functions
-def call_peframe(f_loc, f_hash):
-    """
-    This function is called by the executor to run peframe
-    using a subprocess asynchronously.
-    """
-    try:
-        cmd = f"peframe -j {f_loc}"
-        proc = subprocess.Popen(
-            cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        )
-        stdout, stderr = proc.communicate()
-        stderr = stderr.decode("ascii")
-        err = None if ("SyntaxWarning" in stderr) else stderr
-        if err and stdout:
-            status = "reported_with_fails"
-        elif stdout and not err:
-            status = "success"
-        else:
-            status = "failed"
-
-        # This gets stored as Future.result
-        job_result = {
-            "file_location": f_loc,
-            "md5": f_hash,
-            "stdout": json.dumps(json.loads(stdout)),
-            "stderr": err,
-            "status": status,
-        }
-        app.logger.info(f"job_{f_hash} was successful.")
-        return job_result
-
-    except Exception as e:
-        job_key = f"job_{f_hash}"
-        app.logger.exception(f"Caught exception:{e}")
-        executor.futures._futures.get(job_key).cancel()
-        app.logger.error(f"{job_key} was cancelled")
-        job_result = {
-            "file_location": f_loc,
-            "md5": f_hash,
-            "stdout": None,
-            "stderr": str(e),
-            "status": "failed",
-        }
-        return job_result
-
-
-def add_result_to_db(future):
-    """
-    Default callable fn for Future object.
-    """
-    # get job result from future
-    job_res = future.result()
-    app.logger.debug(job_res)
-    # get and update corresponding db row object
-    result = Result.query.get(job_res.get("md5"))
-    result.status = job_res.get("status")
-    result.report = job_res.get("stdout")
-    result.error = job_res.get("stderr")
-
-    # delete file
-    os.remove(job_res.get("file_location"))
-    # finally commit changes to DB
-    db.session.commit()
-
-
-executor.add_default_done_callback(add_result_to_db)
-
-
-# API routes/endpoints
-@app.before_first_request
-def before_first_request():
-    try:
-        db.drop_all()
-        db.create_all()
-        app.logger.debug("Dropped current DB and created new instance")
-    except Exception as e:
-        app.logger.exception(f"Caught Exception:{e}")
-        db.create_all()
-        app.logger.debug("Created new DB instance")
-
-    _upload_path = app.config.get("UPLOAD_PATH")
-    try:
-        os.mkdir(_upload_path)
-    except FileExistsError:
-        app.logger.debug(f"Emptying upload_path:{_upload_path} folder.")
-        shutil.rmtree(_upload_path, ignore_errors=True)
-        os.mkdir(_upload_path)
-
-
-@app.route("/run_analysis", methods=["POST"])
-def run_analysis():
-    try:
-        # Check if file part exists
-        if "file" not in request.files:
-            app.logger.error("No file part in request")
-            return make_response(jsonify(error="No File part"), HTTPStatus.NOT_FOUND)
-
-        # get file and save it
-        req_file = request.files["file"]
-        f_name = secure_filename(req_file.filename)
-        f_loc = os.path.join(app.config.get("UPLOAD_PATH"), f_name)
-        req_file.save(f_loc)
-
-        # Calc file hash
-        with open(f_loc, "rb") as rf:
-            f_hash = hashlib.md5(rf.read()).hexdigest()
-
-        # Check if hash already in DB, and return directly if yes
-        res = Result.query.get(f_hash)
-        if res:
-            app.logger.info(f"Report already exists for md5:{f_hash}")
-            return make_response(
-                jsonify(info="Analysis already exists", status=res.status, md5=res.md5),
-                200,
-            )
-
-        app.logger.info(f"Analysis requested for md5:{f_hash}")
-
-        # add to DB
-        result = Result(md5=f_hash, status="running")
-        db.session.add(result)
-        db.session.commit()
-
-        # run executor job in background
-        job_key = f"job_{f_hash}"
-        executor.submit_stored(
-            future_key=job_key, fn=call_peframe, f_loc=f_loc, f_hash=f_hash
-        )
-        app.logger.info(f"Job created with key:{job_key}.")
-
-        return make_response(jsonify(status="running", md5=f_hash), 200)
-
-    except Exception as e:
-        app.logger.exception(f"unexpected error {e}")
-        return make_response(jsonify(error=str(e)), HTTPStatus.INTERNAL_SERVER_ERROR)
-
-
-@app.route("/get_report/<md5_to_get>", methods=["GET"])
-def ask_report(md5_to_get):
-    try:
-        app.logger.info(f"Report requested for md5:{md5_to_get}")
-        # check if job has been finished
-        future = executor.futures._futures.get(f"job_{md5_to_get}", None)
-        if future:
-            if future.done:
-                # pop future object since it has been finished
-                executor.futures.pop(f"job_{md5_to_get}")
-            else:
-                return make_response(jsonify(status="running", md5=md5_to_get), 200)
-        # if yes, get result from DB
-        res = Result.query.get(md5_to_get)
-        if not res:
-            raise Exception(f"Report does not exist for md5:{md5_to_get}")
-
-        return make_response(
-            jsonify(
-                status=res.status,
-                md5=res.md5,
-                report=json.loads(res.report),
-                error=res.error,
-                timestamp=res.timestamp,
-            ),
-            200,
-        )
-
-    except Exception as e:
-        app.logger.exception(f"Caught Exception:{e}")
-        return make_response(jsonify(error=str(e)), HTTPStatus.NOT_FOUND)
-
 
 # Application Runner
 if __name__ == "__main__":
     app.run(port=4000)
 else:
-    # set logger
+    # get flask-shell2http logger instance
+    logger = logging.getLogger("flask_shell2http")
+    # logger config
     formatter = logging.Formatter(
-        "%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s"
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
     )
     log_level = os.getenv("LOG_LEVEL", logging.INFO)
     log_path = "/var/log/intel_owl"
+    # create new file handlers
    fh = logging.FileHandler(f"{log_path}/peframe.log")
     fh.setFormatter(formatter)
     fh.setLevel(log_level)
-    app.logger.addHandler(fh)
     fh_err = logging.FileHandler(f"{log_path}/peframe_errors.log")
     fh_err.setFormatter(formatter)
     fh_err.setLevel(logging.ERROR)
-    app.logger.addHandler(fh_err)
+    # set the logger
+    logger.addHandler(fh)
+    logger.addHandler(fh_err)
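
For reference, here is a minimal client-side sketch of how the newly registered /peframe endpoint could be exercised. The args payload, the key returned on submit, and the report/error/status fields follow flask-shell2http's usual request/response pattern, but treat the exact schema, host, port, and sample path as assumptions for illustration, not as part of this diff:

import time

import requests

BASE_URL = "http://localhost:4000"  # matches app.run(port=4000) above

# Submit a job: the server runs `peframe <args...>` in the background.
resp = requests.post(
    f"{BASE_URL}/peframe",
    json={"args": ["-j", "/path/to/sample.exe"]},  # illustrative sample path
)
data = resp.json()

# Poll with the returned key until the flask-executor job finishes.
# Field names are assumptions based on flask-shell2http's documented schema.
while True:
    result = requests.get(f"{BASE_URL}/peframe", params={"key": data["key"]}).json()
    if result.get("status") != "running":
        break
    time.sleep(1)

print(result.get("report"))  # stdout of the peframe process
print(result.get("error"))   # stderr, if any

This submit-then-poll flow is what the removed /run_analysis and /get_report routes implemented by hand; flask-shell2http now takes care of spawning the subprocess, tracking the future through flask-executor, and serving the report.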