Skip to content

Commit

Permalink
Fix equality bug issue maldevel#55
Browse files Browse the repository at this point in the history
  • Loading branch information
Sam Bradbury authored and Sam Bradbury committed Jan 23, 2025
1 parent 934d412 commit 5b789b6
Showing 1 changed file with 45 additions and 45 deletions.
90 changes: 45 additions & 45 deletions EmailHarvester.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
This file is part of EmailHarvester
Copyright (C) 2016 @maldevel
https://github.com/maldevel/EmailHarvester
EmailHarvester - A tool to retrieve Domain email addresses from Search Engines.
This program is free software: you can redistribute it and/or modify
Expand All @@ -20,7 +20,7 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
For more see the file 'LICENSE' for copying permission.
"""

Expand Down Expand Up @@ -57,10 +57,10 @@
colorama.init()

class myparser:

def __init__(self):
    """Start with an empty buffer for raw e-mail regex matches."""
    # Filled by extract()/emails(), consumed by unique().
    self.temp = list()

def extract(self, results, word):
self.results = results
self.word = word
Expand All @@ -71,7 +71,7 @@ def genericClean(self):
self.results = self.results.replace(e, '')
for e in '%2f %3a %3A %3C %3D & / : ; < = > \\'.split():
self.results = self.results.replace(e, ' ')

def emails(self):
self.genericClean()
reg_emails = re.compile(
Expand All @@ -82,15 +82,15 @@ def emails(self):
self.temp = reg_emails.findall(self.results)
emails = self.unique()
return emails

def unique(self):
    """Return the harvested addresses with duplicates removed.

    Also caches the de-duplicated list on ``self.new`` for later reuse.
    """
    # A set collapses duplicates; callers expect a list back.
    deduped = set(self.temp)
    self.new = list(deduped)
    return self.new

###################################################################

class EmailHarvester(object):

def __init__(self, userAgent, proxy):
self.plugins = {}
self.proxy = proxy
Expand All @@ -99,23 +99,23 @@ def __init__(self, userAgent, proxy):
self.activeEngine = "None"
path = os.path.dirname(os.path.abspath(__file__)) + "/plugins/"
plugins = {}

sys.path.insert(0, path)
for f in os.listdir(path):
fname, ext = os.path.splitext(f)
if ext == '.py':
mod = __import__(fname, fromlist=[''])
plugins[fname] = mod.Plugin(self, {'useragent':userAgent, 'proxy':proxy})

def register_plugin(self, search_method, functions):
    """Register a plugin's callable table under *search_method*.

    Called by each plugin module at load time so the harvester can
    dispatch searches by engine name.
    """
    # Equivalent to item assignment on the plugin registry.
    self.plugins.update({search_method: functions})

def get_plugins(self):
    """Return the registry of loaded search-engine plugins."""
    registry = self.plugins
    return registry

def show_message(self, msg):
    """Print *msg* to the terminal, colourised in green."""
    colored = green(msg)  # green() is the module-level colorama helper
    print(colored)

def init_search(self, url, word, limit, counterInit, counterStep, engineName):
self.results = ""
self.totalresults = ""
Expand All @@ -125,7 +125,7 @@ def init_search(self, url, word, limit, counterInit, counterStep, engineName):
self.step = int(counterStep)
self.word = word
self.activeEngine = engineName

def do_search(self):
try:
urly = self.url.format(counter=str(self.counter), word=self.word)
Expand All @@ -135,7 +135,7 @@ def do_search(self):
r=requests.get(urly, headers=headers, proxies=proxies)
else:
r=requests.get(urly, headers=headers)

except Exception as e:
print(e)
sys.exit(4)
Expand All @@ -145,18 +145,18 @@ def do_search(self):

self.results = r.content.decode(r.encoding)
self.totalresults += self.results

def process(self):
    """Page through the active search engine until the result limit is hit.

    Repeatedly fetches the next results page (advancing ``self.counter``
    by ``self.step``) and finally reports how far the search got.
    """
    while self.counter < self.limit:
        self.do_search()
        # Pause between page requests so we do not hammer the engine.
        time.sleep(1)
        self.counter += self.step
    print(green("[+] Searching in {}:".format(self.activeEngine)) + cyan(" {} results".format(str(self.counter))))

def get_emails(self):
    """Extract and return the e-mail addresses found across all fetched pages."""
    extractor = self.parser
    extractor.extract(self.totalresults, self.word)
    return extractor.emails()

###################################################################

def yellow(text):
Expand Down Expand Up @@ -200,45 +200,45 @@ def checkDomain(value):

parser = argparse.ArgumentParser(description="""
_____ _ _ _ _ _
| ___| (_)| | | | | | | |
| |__ _ __ ___ __ _ _ | | | |_| | __ _ _ __ __ __ ___ ___ | |_ ___ _ __
_____ _ _ _ _ _
| ___| (_)| | | | | | | |
| |__ _ __ ___ __ _ _ | | | |_| | __ _ _ __ __ __ ___ ___ | |_ ___ _ __
| __|| '_ ` _ \ / _` || || | | _ | / _` || '__|\ \ / // _ \/ __|| __|/ _ \| '__|
| |___| | | | | || (_| || || | | | | || (_| || | \ V /| __/\__ \| |_| __/| |
\____/|_| |_| |_| \__,_||_||_| \_| |_/ \__,_||_| \_/ \___||___/ \__|\___||_|
| |___| | | | | || (_| || || | | | | || (_| || | \ V /| __/\__ \| |_| __/| |
\____/|_| |_| |_| \__,_||_||_| \_| |_/ \__,_||_| \_/ \___||___/ \__|\___||_|
A tool to retrieve Domain email addresses from Search Engines | @maldevel
{}: {}
""".format(red('Version'), yellow(__version__)),
""".format(red('Version'), yellow(__version__)),
formatter_class=RawTextHelpFormatter)
parser.add_argument("-d", '--domain', action="store", metavar='DOMAIN', dest='domain',

parser.add_argument("-d", '--domain', action="store", metavar='DOMAIN', dest='domain',
default=None, type=checkDomain, help="Domain to search.")
parser.add_argument("-s", '--save', action="store", metavar='FILE', dest='filename',
parser.add_argument("-s", '--save', action="store", metavar='FILE', dest='filename',
default=None, type=str, help="Save the results into a TXT and XML file (both).")
parser.add_argument("-e", '--engine', action="store", metavar='ENGINE', dest='engine',

parser.add_argument("-e", '--engine', action="store", metavar='ENGINE', dest='engine',
default="all", type=str, help="Select search engine plugin(eg. '-e google').")
parser.add_argument("-l", '--limit', action="store", metavar='LIMIT', dest='limit',

parser.add_argument("-l", '--limit', action="store", metavar='LIMIT', dest='limit',
type=limit_type, default=100, help="Limit the number of results.")
parser.add_argument('-u', '--user-agent', action="store", metavar='USER-AGENT', dest='uagent',
parser.add_argument('-u', '--user-agent', action="store", metavar='USER-AGENT', dest='uagent',
type=str, help="Set the User-Agent request header.")
parser.add_argument('-x', '--proxy', action="store", metavar='PROXY', dest='proxy',
parser.add_argument('-x', '--proxy', action="store", metavar='PROXY', dest='proxy',
default=None, type=checkProxyUrl, help="Setup proxy server (eg. '-x http://127.0.0.1:8080')")
parser.add_argument('--noprint', action='store_true', default=False,
parser.add_argument('--noprint', action='store_true', default=False,
help='EmailHarvester will print discovered emails to terminal. It is possible to tell EmailHarvester not to print results to terminal with this option.')
parser.add_argument('-r', '--exclude', action="store", metavar='EXCLUDED_PLUGINS', dest="exclude",
type=str, default=None, help="Plugins to exclude when you choose 'all' for search engine (eg. '-r google,twitter')")
parser.add_argument('-p', '--list-plugins', action='store_true', dest='listplugins',
parser.add_argument('-p', '--list-plugins', action='store_true', dest='listplugins',
default=False, help='List all available plugins.')
if len(sys.argv) is 1:

if len(sys.argv) == 1:
parser.print_help()
sys.exit()

args = parser.parse_args()

if args.listplugins:
path = "plugins/"
print(green("[+] Available plugins"))
Expand All @@ -248,22 +248,22 @@ def checkDomain(value):
if ext == '.py':
print(green("[+] Plugin: ") + cyan(fname))
sys.exit(1)

if not args.domain:
print(red("[-] Please specify a domain name to search."))
sys.exit(2)
domain = args.domain

userAgent = (args.uagent or
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1")

print(green("[+] User-Agent in use: ") + cyan(userAgent))

if args.proxy:
print(green("[+] Proxy server in use: ") + cyan(args.proxy.scheme + "://" + args.proxy.netloc))

filename = args.filename or ""
limit = args.limit
limit = args.limit
engine = args.engine
app = EmailHarvester(userAgent, args.proxy)
plugins = app.get_plugins()
Expand All @@ -283,7 +283,7 @@ def checkDomain(value):
else:
all_emails = plugins[engine]['search'](domain, limit)
all_emails = unique(all_emails)

if not all_emails:
print(red("[-] No emails found"))
sys.exit(4)
Expand All @@ -293,7 +293,7 @@ def checkDomain(value):
if not args.noprint:
for emails in all_emails:
print(emails)

if filename:
try:
print(green("[+] Saving results to files"))
Expand All @@ -305,7 +305,7 @@ def checkDomain(value):
print(red("[-] Exception: " + email))
except Exception as e:
print(red("[-] Error saving TXT file: " + e))

try:
filename = filename.split(".")[0] + ".xml"
with open(filename, 'w') as out_file:
Expand Down

0 comments on commit 5b789b6

Please sign in to comment.