Refactored to make fetching and downloading posts separate

Anon 2022-08-29 15:15:09 -07:00
parent 8fb4da20ad
commit bfaaa50117


@@ -24,24 +24,33 @@ class downloader:
username = None
password = None
tmp = None
banned = None
unallowed_extensions = (".zip",)
def __init__(self, banned = tuple(), username=None, password=None, tmp="/tmp"):
def __init__(self, username=None, password=None, tmp="/tmp"):
self.username = username
self.password = password
self.tmp = tmp
self.banned = banned
def is_banned(self, tag_list):
for tag in self.banned:
if tag in tag_list:
return True
return False
def download(self, profile):
# Search ratings: s=safe, e=explicit
def download_post(self, post):
file_url = post["file_url"]
full_path = post["full_path"]
remote_image = requests.get(file_url)
if remote_image.status_code != 200:
print("Remote image request returned:", remote_image.status_code)
return None
for d in full_path:
with open(d, "wb") as f:
f.write(remote_image.content)
return post
def fetch_post(self, profile):
# Search ratings: s=safe, e=nsfw
# base_url = "https://danbooru.donmai.us/posts.json?random=true&tags={}&rating=e&limit=1"
tags = profile["tags"]
search_url = "https://danbooru.donmai.us/posts.json?random=true&limit=1"
@@ -50,64 +59,49 @@ class downloader:
search_url = "{}&tags={}".format(search_url, search_tags)
search_request = None
while True:
if self.username and self.password:
search_request = requests.get(search_url,
auth=(self.username, self.password)
)
else:
search_request = requests.get(search_url)
if self.username and self.password:
search_request = requests.get(search_url,
auth=(self.username, self.password)
)
else:
search_request = requests.get(search_url)
if search_request.status_code != 200:
print("Search request returned:", search_request.status_code)
continue
elif "large_file_url" not in search_request.json()[0]:
continue
elif "tag_string" not in search_request.json()[0]:
continue
elif "tag_string_general" not in search_request.json()[0]:
continue
elif self.is_banned(search_request.json()[0]["tag_string"]):
print("Banned Tag1:", search_request.json()[0]["tag_string"])
continue
elif self.is_banned(search_request.json()[0]["tag_string_general"]):
print("Banned Tag2",search_request.json()[0]["tag_string_general"])
continue
break
import pprint
pprint.pprint(search_request.json()[0]["tag_string"])
large_file_url = search_request.json()[0]["file_url"]
explicit = search_request.json()[0]["rating"]
explicit = get_most_sever_rating(explicit)
remote_image = requests.get(large_file_url)
if remote_image.status_code != 200:
print("Remote image request returned:", remote_image.status_code)
if search_request.status_code != 200:
print("Search request returned:", search_request.status_code)
return None
response = search_request.json()[0]
tag_response = []
if "file_url" not in response:
print("file_url is not in response")
return None
# Aggregate Tags
for tag_type in "tag_string", "tag_string_general":
if tag_type in response:
tag_response.append(response[tag_type].strip())
basename = large_file_url.rsplit("/", 1)[1]
nsfw = search_request.json()[0]["rating"]
nsfw = get_most_sever_rating(nsfw)
file_url = response["file_url"]
basename = file_url.rsplit("/", 1)[1]
full_path = os.path.join(self.tmp, basename)
with open(full_path, "wb") as f:
f.write(remote_image.content)
r = {
# Add profile to dictionary
"name": profile["name"],
"backend": profile["backend"],
"tags": profile["tags"],
"message": profile["message"],
"message_nsfw": profile["message_nsfw"],
# Query results
"search_url": search_url,
"large_file_url": large_file_url,
"file_url": file_url,
"full_path": [full_path],
"explicit": explicit,
"tag_response": " ".join(tag_response),
"nsfw": nsfw
}
return r
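
A minimal sketch of how the refactored two-step flow might be used, assuming the module is importable as `downloader` and that the profile keys mirror the ones read in fetch_post (the import path, profile values, and variable names below are illustrative assumptions, not part of this commit): fetch_post queries Danbooru and builds the post dictionary, and download_post then fetches file_url and writes the image to each path in full_path.

from downloader import downloader  # assumed import path, not shown in this commit

profile = {
    "name": "example",       # profile fields mirrored from the dictionary built in fetch_post()
    "backend": "danbooru",
    "tags": "landscape",     # exact tag format expected here is not shown in this diff
    "message": "",
    "message_nsfw": "",
}

d = downloader(username=None, password=None, tmp="/tmp")
post = d.fetch_post(profile)        # search only; nothing is written to disk yet
if post is not None:
    saved = d.download_post(post)   # downloads post["file_url"] and writes each path in post["full_path"]
    if saved is None:
        print("Download failed")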