Skip to content

Commit

Permalink
threading, better logging
Browse files Browse the repository at this point in the history
  • Loading branch information
TheTechromancer committed Apr 5, 2023
1 parent 4a12157 commit 698b8a7
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 19 deletions.
33 changes: 28 additions & 5 deletions cloud_providers.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,11 @@
"96.16.0.0/15",
"96.6.0.0/15"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cdn"
},
"Amazon": {
Expand Down Expand Up @@ -1024,13 +1028,9 @@
"13.34.94.0/27",
"13.34.94.128/27",
"13.34.94.160/27",
"13.34.94.192/27",
"13.34.94.224/27",
"13.34.94.32/27",
"13.34.94.64/27",
"13.34.94.96/27",
"13.34.95.0/27",
"13.34.95.32/27",
"13.35.0.0/16",
"13.36.0.0/14",
"13.36.155.0/24",
Expand Down Expand Up @@ -3857,7 +3857,6 @@
"51.112.0.0/16",
"51.118.0.0/15",
"51.16.0.0/15",
"51.16.103.128/25",
"51.16.104.0/23",
"51.20.0.0/14",
"51.24.0.0/13",
Expand Down Expand Up @@ -5762,7 +5761,11 @@
"99.87.4.0/22",
"99.87.8.0/21"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cloud"
},
"Azure": {
Expand Down Expand Up @@ -9311,7 +9314,11 @@
"94.245.123.176/28",
"94.245.88.0/21"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cloud"
},
"Cloudflare": {
Expand Down Expand Up @@ -9339,7 +9346,11 @@
"2a06:98c0::/29",
"2c0f:f248::/32"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cdn"
},
"DigitalOcean": {
Expand Down Expand Up @@ -11017,7 +11028,11 @@
"95.85.8.0/24",
"95.85.9.0/24"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cloud"
},
"Google": {
Expand Down Expand Up @@ -11621,7 +11636,11 @@
"8.34.220.0/22",
"8.35.192.0/21"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cloud"
},
"Oracle": {
Expand Down Expand Up @@ -12205,7 +12224,11 @@
"81.208.160.0/20",
"81.208.188.0/22"
],
"last_updated": "2023-04-05T17:49:34.653897",
"provider_type": "cloud"
}
}
21 changes: 18 additions & 3 deletions cloudcheck/cloudcheck.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,17 @@
import sys
import json
import traceback
from threading import Lock
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor

from .providers import *
from .helpers import ip_network_parents

import logging

log = logging.getLogger("cloudcheck")


json_path = Path(__file__).parent.parent / "cloud_providers.json"

Expand All @@ -19,22 +25,31 @@ def __init__(self, *args, **kwargs):
except Exception:
self.json = {}
provider_classes = CloudProvider.__subclasses__()
now = datetime.now().isoformat()
for p in provider_classes:
self.now = datetime.now().isoformat()
with ThreadPoolExecutor(max_workers=len(provider_classes)) as e:
for p in provider_classes:
e.submit(self._get_provider, p, *args, **kwargs)

def _get_provider(self, p, *args, **kwargs):
try:
provider = p(*args, **kwargs)
self.providers[provider.name] = provider
# if we successfully got CIDR ranges, then update the JSON
if not provider.name in self.json:
self.json[provider.name] = {}
json_ranges = self.json[provider.name].get("cidrs", [])
if provider.ranges.cidrs:
self.json[provider.name]["last_updated"] = now
self.json[provider.name]["last_updated"] = self.now
self.json[provider.name]["provider_type"] = provider.provider_type
self.json[provider.name]["cidrs"] = sorted(
str(r) for r in provider.ranges
)
else:
provider.ranges = CidrRanges(json_ranges)
except Exception as e:
log.warning(
f"Error getting provider {p.name}: {e}: {traceback.format_exc()}"
)

def check(self, ip):
for net in ip_network_parents(ip):
Expand Down
21 changes: 10 additions & 11 deletions cloudcheck/providers.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import io
import csv
import logging
import zipfile
import requests
import traceback
Expand All @@ -9,6 +10,8 @@

from .cidr import CidrRanges

log = logging.getLogger("cloudcheck.providers")

db_path = Path.home() / ".cache" / "cloudcheck" / "requests-cache.sqlite"
backend = SQLiteCache(db_path=db_path)
sessions = {}
Expand All @@ -22,8 +25,7 @@ class CloudProvider:
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.5112.79 Safari/537.36"
}

def __init__(self, quiet=False, cache_for=None):
self.quiet = quiet
def __init__(self, cache_for=None):
if cache_for is None:
# default = cache IP lists for 7 days
cache_for = 60 * 60 * 24 * 7
Expand All @@ -43,21 +45,18 @@ def get_ranges(self):
try:
return self.parse_response(response)
except Exception:
self.print(f"Error parsing response: {traceback.format_exc()}")
except requests.RequestException:
self.print(f"Error retrieving {self.main_url}")
log.warning(f"Error parsing response: {traceback.format_exc()}")
except requests.RequestException as e:
log.warning(f"Error retrieving {self.main_url}: {e}")
return []

def parse_response(self, response):
pass

@classmethod
@property
def name(self):
return self.__class__.__name__

def print(self, s):
if not self.quiet:
print(f"[{self.name}] {s}")
def name(cls):
return cls.__name__

def __str__(self):
return self.name
Expand Down

0 comments on commit 698b8a7

Please sign in to comment.