
Commit 3d12836
Merge branch 'dev' into master
2 parents 0747c67 + 2225762

12 files changed: +137 -45 lines

.gitignore (+2 -1)

@@ -10,4 +10,5 @@ old_config.json
 db/
 outdated
 runtime.log
-config.json.dev
+config.json.dev
+Build Release.bat

Build Release.bat (+1 -1)

@@ -1 +1 @@
-".\.env\Scripts\activate" && pyinstaller --onefile --icon "icon.ico" --console --name "NN-Downloader" --upx-dir "Z:\Projects\Python\### UPX ###" --add-data="./.env/Lib/site-packages/grapheme/data/*;grapheme/data/" main.py && rmdir /s /q .\build && rmdir /s /q .\__pycache__ && del ".\NN-Downloader.spec"
+conda activate pyinstaller && pyinstaller --paths Z:\Projects\Python\NN-Downloader\.env\Lib\site-packages --hidden-import requests --hidden-import inquirer --hidden-import alive_progress --hidden-import termcolor --hidden-import xmltodict --add-data="Z:\Projects\Python\NN-Downloader\.env\Lib\site-packages\grapheme\data\*;grapheme/data/" --onefile --icon "icon.ico" --console --name "NN-Downloader" --upx-dir "Z:\Projects\Python\### UPX ###" main.py
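Each --hidden-import in the new command maps to a hiddenimports entry in the spec file PyInstaller generates (the old command deleted that spec after every build). A minimal illustrative excerpt, assuming nothing beyond the flags shown above; no spec file is part of this commit:

# NN-Downloader.spec (illustrative excerpt; PyInstaller generates the real
# file, none is committed here; paths shortened from the command above)
a = Analysis(
    ['main.py'],
    pathex=['.env/Lib/site-packages'],
    hiddenimports=['requests', 'inquirer', 'alive_progress',
                   'termcolor', 'xmltodict'],
    datas=[('.env/Lib/site-packages/grapheme/data/*', 'grapheme/data/')],
)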

main.py (+11 -36)

@@ -6,7 +6,7 @@
 import sys
 import inquirer
 
-version = "1.5.0"
+version = "1.6.0"
 
 if os.name == 'nt':
     from ctypes import windll

@@ -15,7 +15,7 @@
 proxy_list = []
 header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}
 needed_folders = ["db", "media"]
-database_list = ["e621", "furbooru", "rule34", "e6ai"]
+database_list = ["e621", "furbooru", "rule34", "e6ai", "e926"]
 unsafe_chars = ["/", "\\", ":", "*", "?", "\"", "<", ">", "|", "\0", "$", "#", "@", "&", "%", "!", "`", "^", "(", ")", "{", "}", "[", "]", "=", "+", "~", ",", ";"]
 
 if sys.gettrace() is not None:

@@ -69,6 +69,7 @@ def clear_screen():
         oneTimeDownload = config["oneTimeDownload"]
         use_proxies = config["proxies"]
         checkForUpdates = config["checkForUpdates"]
+        ai_training = config["ai_training"]
     else:
         config = Config_Manager.creator()
         print(colored("New Config file generated. Please configure it for your use case and add API keys for needed services.", "green"))

@@ -106,10 +107,9 @@ def clear_screen():
 
     site = answers.get("selection").lower()
 
-    if site in ["multporn", "yiffer", "luscious"]:
-        pass
-    else:
-        print(colored("Please enter the tags you want to use", "green"))
+    if site in ["e621", "e6ai", "e926"]:
+
+        print(colored("Please enter the tags you want to use.", "green"))
         user_tags = input(">> ").lower()
         while user_tags == "":
             print(colored("Please enter the tags you want.", "red"))

@@ -121,42 +121,17 @@ def clear_screen():
         max_sites = input(">> ").lower()
         print("")
 
-    if site == "e621":
-        apiUser = config["user_credentials"]["e621"]["apiUser"]
-        apiKey = config["user_credentials"]["e621"]["apiKey"]
-        if oneTimeDownload == True:
-            with open("db/e621.db", "r") as db_reader:
-                database = db_reader.read().splitlines()
-        if apiKey == "" or apiUser == "":
-            print(colored("Please add your Api Key into the config.json", "red"))
-            sleep(5)
-        else:
-            output = E621.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
-
-    if site == "e6ai":
-        apiUser = config["user_credentials"]["e6ai"]["apiUser"]
-        apiKey = config["user_credentials"]["e6ai"]["apiKey"]
-        if oneTimeDownload == True:
-            with open("db/e6ai.db", "r") as db_reader:
-                database = db_reader.read().splitlines()
-        if apiKey == "" or apiUser == "":
-            print(colored("Please add your Api Key into the config.json", "red"))
-            sleep(5)
-        else:
-            output = E6AI.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
-
-    elif site == "e926":
-        apiUser = config["user_credentials"]["e926"]["apiUser"]
-        apiKey = config["user_credentials"]["e926"]["apiKey"]
+        apiUser = config["user_credentials"][site]["apiUser"]
+        apiKey = config["user_credentials"][site]["apiKey"]
         if oneTimeDownload == True:
-            with open("db/e621.db", "r") as db_reader:
+            with open(f"db/{site}.db", "r") as db_reader:
                 database = db_reader.read().splitlines()
         if apiKey == "" or apiUser == "":
             print(colored("Please add your Api Key into the config.json", "red"))
             sleep(5)
         else:
-            output = E926.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
-
+            output = E6System.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database, site=site, ai_training=ai_training)
+
     elif site == "rule34":
         if oneTimeDownload == True:
             with open("db/rule34.db", "r") as db_reader:
modules/__init__.py (+1 -3)

@@ -6,9 +6,7 @@
 
 
 # Here are all modules for the sites that are supported
-from .e621 import E621
-from .e6ai import E6AI
-from .e926 import E926
+from .e6systems import E6System
 from .rule34 import RULE34
 from .furbooru import FURBOORU
 from .multporn import Multporn
modules/configManager.py (+2 -1)

@@ -3,7 +3,7 @@
 from termcolor import colored
 import os
 
-def_config_version = 1.5
+def_config_version = 1.6
 
 class Config_Manager():
 

@@ -14,6 +14,7 @@ def creator():
         "checkForUpdates": True,
         "oneTimeDownload": True,
         "advancedMode": False,
+        "ai_training": False,
         "user_credentials": {
             "e621": {
                 "apiUser": "",
3 files renamed without changes.

modules/e6systems.py (+120)

@@ -0,0 +1,120 @@
+from requests.auth import HTTPBasicAuth
+import requests
+import random
+from termcolor import colored
+from alive_progress import alive_bar
+from time import sleep
+from datetime import datetime
+import os
+import json
+
+from main import unsafe_chars
+now = datetime.now()
+dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
+
+class E6System():
+    def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiUser, apiKey, header, db, site, ai_training):
+        try:
+            approved_list = []
+            page = 1
+            while True:
+                URL = f"https://{site}.net/posts.json?tags={user_tags}&limit=320&page={page}"
+                if user_proxies == True:
+                    proxy = random.choice(proxy_list)
+                    raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey))
+                else:
+                    raw_req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey))
+
+                req = raw_req.json()
+
+                try:
+                    if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
+                        print(colored(req["message"] + " (API limit)", "red"))
+                        sleep(5)
+                        break
+                except:
+                    pass
+
+                if req["posts"] == []:
+                    print(colored("No images found or all downloaded! Try different tags.", "yellow"))
+                    sleep(5)
+                    break
+
+                elif page == max_sites:
+                    print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
+                    sleep(5)
+                    break
+
+                else:
+                    for item in req["posts"]:
+                        image_id = item["id"]
+                        image_address = item["file"]["url"]
+                        post_tags1 = item["tags"]["general"]
+                        post_tags2 = item["tags"]["species"]
+                        post_tags3 = item["tags"]["character"]
+                        if site == "e6ai":
+                            post_tags4 = item["tags"]["director"]
+                            post_tags5 = item["tags"]["meta"]
+                        else:
+                            post_tags4 = item["tags"]["copyright"]
+                            post_tags5 = item["tags"]["artist"]
+
+                        if ai_training == True:
+                            meta_tags = item["tags"]
+
+                        post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
+                        image_format = item["file"]["ext"]
+                        user_blacklist_lenght = len(user_blacklist)
+                        passed = 0
+
+                        for blacklisted_tag in user_blacklist:
+                            if blacklisted_tag in post_tags:
+                                break
+                            else:
+                                passed += 1
+                        if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
+                            image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id, "meta_tags": meta_tags}
+                            approved_list.append(image_data)
+                        else:
+                            pass
+
+                    # Download Each file
+                    with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
+                        for data in approved_list:
+                            image_address = data["image_address"]
+                            image_format = data["image_format"]
+                            image_id = data["image_id"]
+                            meta_tags = data["meta_tags"]
+                            bar.text = f'-> Downloading: {image_id}, please wait...'
+                            if user_proxies == True:
+                                proxy = random.choice(proxy_list)
+                                img_data = requests.get(image_address, proxies=proxy).content
+                            else:
+                                sleep(1)
+                                img_data = requests.get(image_address).content
+
+                            safe_user_tags = user_tags.replace(" ", "_")
+                            for char in unsafe_chars:
+                                safe_user_tags = safe_user_tags.replace(char, "")
+
+                            if not os.path.exists(f"media/{dt_now}_{safe_user_tags}"):
+                                os.mkdir(f"media/{dt_now}_{safe_user_tags}")
+                            if not os.path.exists(f"media/{dt_now}_{safe_user_tags}/meta") and ai_training == True:
+                                os.mkdir(f"media/{dt_now}_{safe_user_tags}/meta")
+                            with open(f"media/{dt_now}_{safe_user_tags}/{str(image_id)}.{image_format}", 'wb') as handler:
+                                handler.write(img_data)
+                            with open(f"media/{dt_now}_{safe_user_tags}/meta/{str(image_id)}.json", 'w') as handler:
+                                json.dump(meta_tags, handler, indent=6)
+                            with open(f"db/{site}.db", "a") as db_writer:
+                                db_writer.write(f"{str(image_id)}\n")
+                            bar()
+
+                print(colored(f"Page {page} Completed", "green"))
+                approved_list.clear()
+                page += 1
+                sleep(5)
+
+            return {"status": "ok"}
+
+        except Exception as e:
+            return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}

modules/nhentai.py (-1)
This file was deleted.

modules/pixiv.py (-1)
This file was deleted.

modules/yiffgallery.py (-1)
This file was deleted.
