Remove backwards compatibility for configs | Formatting
Removed backward compatibility for configs created before the v7 release.

Replaced the old "auto" keys with a new naming scheme for consistency and readability (auto_choice → auto_media_choice, auto_scrape_names → auto_model_choice).
UltimaHoarder committed May 13, 2021
1 parent ba78541 commit 35640c7
Showing 10 changed files with 88 additions and 1,139 deletions.
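The rename itself amounts to a small key migration on each site's settings block. A standalone sketch of the idea (the helper and constant below are illustrative only; the committed implementation is update_site_settings in classes/make_settings.py):

# Illustrative sketch of the key migration this commit performs; the committed
# implementation is update_site_settings in classes/make_settings.py.
RENAMED_KEYS = {
    "auto_scrape_names": "auto_model_choice",  # which models to scrape
    "auto_choice": "auto_media_choice",        # which media types to scrape
}

def migrate_site_settings(options: dict) -> dict:
    new_options = dict(options)  # shallow copy; original dict left untouched
    for old_key, new_key in RENAMED_KEYS.items():
        if old_key in new_options:
            new_options[new_key] = new_options.pop(old_key)
    return new_options

# Example: {"auto_scrape_names": True} -> {"auto_model_choice": True}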
4 changes: 2 additions & 2 deletions README.md
@@ -206,7 +206,7 @@ Usage: You can automatically choose which site you want to scrape.

OnlyFans = "onlyfans"

-### auto_choice:
+### auto_media_choice:

Usage: You can automatically choose which media type you want to scrape.
Default = ""
@@ -218,7 +218,7 @@ Default = ""

You can automatically choose which type of media you want to scrape.

-### auto_scrape_names:
+### auto_model_choice:

Default = false

6 changes: 3 additions & 3 deletions apis/onlyfans/onlyfans.py
@@ -251,7 +251,7 @@ def __init__(self, option={}) -> None:
self.link = option.get("link")
self.links = content_types()
self.scraped = content_types()
-        self.auth_id:Optional[int] = None
+        self.auth_id: Optional[int] = None
self.auth_count = None
self.session_manager: api_helper.session_manager = option.get(
"session_manager")
@@ -480,8 +480,8 @@ def __init__(self, custom_request=callable) -> None:
self.links = links
self.session_manager = api_helper.session_manager

-    def set_auth_details(self, option={}):
-        if not option.get("active"):
+    def set_auth_details(self, option={}, only_active=False):
+        if only_active and not option.get("active"):
return
auth = create_auth()
auth.auth_details = auth_details(option)
162 changes: 38 additions & 124 deletions classes/make_settings.py
@@ -1,3 +1,5 @@
+import copy
+from typing import List, Union
from apis.onlyfans.onlyfans import auth_details
import os

@@ -16,123 +18,19 @@ def export_json(path, metadata):


def fix(config={}):
-    added = []
-    changed = []
-    fix = []
-    settings = {}
-    global_user_agent = ""
-    for key, value in config.items():
-        if key == "settings":
-            settings = value
-            auto_profile_choice = settings.pop("auto_profile_choice", None)
-            socks5_proxies = settings.pop(
-                "socks5_proxy", None)
-            if socks5_proxies:
-                fixed_socks5_proxies = []
-                for socks5_proxy in socks5_proxies:
-                    fixed_socks5_proxy = f"socks5h://{socks5_proxy}"
-                    fixed_socks5_proxies.append(fixed_socks5_proxy)
-                settings["proxies"] = fixed_socks5_proxies
-            global_user_agent = settings.pop(
-                "global_user_agent", None)
-            if isinstance(settings.get(
-                    "webhooks", {}), list):
-                webhook = settings["webhooks"]
-                settings["webhooks"] = {}
-                settings["webhooks"]["global_webhooks"] = webhook
-        if key == "supported":
-            for key2, value2 in value.items():
-                temp_auth = value2.pop("auth", None)
-                if temp_auth:
-                    q = os.path.abspath(".settings")
-                    backup_config_filepath = os.path.join(
-                        q, "config_backup.json")
-                    print(
-                        f"LEGACY CONFIG FOUND, BACKING IT UP AND CREATING A NEW ONE. ({backup_config_filepath})")
-                    export_json(backup_config_filepath, config)
-                    print
-                    temp_auth["user_agent"] = global_user_agent
-                    auth = {}
-                    temp_auth = auth_details(temp_auth).__dict__
-                    auth["auth"] = temp_auth
-                    if "profile_directories" in settings:
-                        dpd = settings["profile_directories"][0]
-                        default_profile_directory = os.path.join(
-                            os.path.abspath(dpd), key2, "default")
-                        os.makedirs(default_profile_directory, exist_ok=True)
-                        profile_auth_filepath = os.path.join(
-                            default_profile_directory, "auth.json")
-                        export_json(profile_auth_filepath, auth)
-                        print(
-                            f"{profile_auth_filepath} HAS BEEN CREATED, CHECK IF IT'S CORRECT.")
-                    print
-                for key3, settings in value2.items():
-                    if key3 == "settings":
-                        settings["text_length"] = int(settings["text_length"])
-                        re = settings.pop("download_paths", None)
-                        if re:
-                            settings["download_directories"] = re
-                            string = f"download_paths to download_directories in {key2}"
-                            changed.append(string)
-                        re = settings.get("metadata_directory_format", None)
-                        if not re:
-                            settings["metadata_directory_format"] = "{site_name}/{username}/Metadata"
-                            string = f"metadata_directory_format in {key2}"
-                            added.append(string)
-                        delete_legacy_metadata = settings.get(
-                            "delete_legacy_metadata", None)
-                        if delete_legacy_metadata == None:
-                            message_string = f"{key2} - IN THIS COMMIT I CHANGED HOW STORING METADATA WORKS. 'METADATA_DIRECTORIES' (config.json) NOW CONTROLS WHERE METADATA IS STORED SO MAKE SURE IT'S THE CORRECT DIRECTORY TO AVOID DOWNLOADING DUPES.\nPRESS ENTER TO CONTINUE"
-                            print(message_string)
-                        filename_format = settings.pop(
-                            "file_name_format", None)
-                        if filename_format:
-                            settings["filename_format"] = filename_format
-                        reformats = {k: v for k,
-                                     v in settings.items() if "_format" in k}
-                        bl = ["date_format"]
-                        reformats = {k: v for k,
-                                     v in reformats.items() if k not in bl}
-                        for re_name, re_value in reformats.items():
-                            top = ["{id}", "{file_name}"]
-                            bottom = ["{media_id}", "{filename}"]
-                            z = list(zip(top, bottom))
-                            for x in z:
-                                if x[0] in re_value:
-                                    settings[re_name] = settings[re_name].replace(
-                                        x[0], x[1])
-                                    reformats[re_name] = settings[re_name]
-                            x = format_types(reformats)
-                            q = x.check_rules()
-                            if not q[1]:
-                                fix.append(f"{key2} - {q[0]}")
-                            c = x.check_unique()
-                            if not c["bool_status"]:
-                                s = f"{key2} - {c['string']}"
-                                s_list = s.split("\n")
-                                fix.extend(s_list)
-                print
-            value.pop("fourchan", None)
-            value.pop("bbwchan", None)
-    added = "\n".join([f"Added {x}" for x in added if x])
-    changed = "\n".join([f"Changed {x}" for x in changed if x])
-    fix = "\n".join([f"Fix: {x}" for x in fix if x])
-    seperator = "\n"*2
-    changed2 = seperator.join([added, changed, fix])
-    if not all(x for x in changed2.split("\n") if not x):
-        changed2 = changed2.strip()
-    if changed2:
-        print(f"\n{changed2}")
-    if fix:
-        string = "\nFix the problems above and then restart the script."
-        print(string.upper())
-        input()
-        exit(0)
-    return config, changed2
+    info = config.get("info")
+    if not info:
+        print("If you're not using >= v7 release, please download said release so the script can properly update your config. \nIf you're using >= v7 release or you don't care about your current config settings, press enter to continue.")
+        input()
+    return config


class config(object):
-    def __init__(self, settings={}, supported={}):
+    def __init__(self, info={}, settings={}, supported={}):
+        class Info(object):
+            def __init__(self) -> None:
+                self.version = 7.1

class Settings(object):
def __init__(self, auto_site_choice="", profile_directories=[".profiles"], export_type="json", max_threads=-1, min_drive_space=0, helpers={}, webhooks={}, exit_on_completion=False, infinite_loop=True, loop_timeout="0", proxies=[], cert="", random_string=""):
class webhooks_settings:
@@ -191,6 +89,15 @@ def __init__(self, option={}) -> None:
self.cert = cert
self.random_string = random_string if random_string else uuid.uuid1().hex

+        def update_site_settings(options) -> dict:
+            new_options = copy.copy(options)
+            for key, value in options.items():
+                if "auto_scrape_names" == key:
+                    new_options["auto_model_choice"] = value
+                elif "auto_choice" == key:
+                    new_options["auto_media_choice"] = value
+            return new_options

class Supported(object):
def __init__(self, onlyfans={}, patreon={}, starsavn={}):
self.onlyfans = self.OnlyFans(onlyfans)
@@ -202,6 +109,8 @@ def __init__(self, module):

class Settings():
def __init__(self, option={}):
+                            option = update_site_settings(option)
+
class jobs:
def __init__(self, option={}) -> None:
self.scrape_names = option.get(
@@ -220,13 +129,14 @@ def __init__(self, option={}) -> None:
'posts', True)
self.comments = option.get(
'comments', True)
-                            self.auto_profile_choice = option.get(
-                                'auto_profile_choice', "")
-                            self.auto_scrape_names = option.get(
-                                'auto_scrape_names', False)
-                            self.auto_choice = option.get('auto_choice', "")
-                            self.auto_scrape_apis = option.get(
-                                'auto_scrape_apis', True)
+                            self.auto_profile_choice: List = option.get(
+                                'auto_profile_choice', [])
+                            self.auto_model_choice = option.get(
+                                'auto_model_choice', False)
+                            self.auto_media_choice = option.get(
+                                'auto_media_choice', "")
+                            self.auto_api_choice = option.get(
+                                'auto_api_choice', True)
self.browser = browser(option.get(
'browser', {}))
self.jobs = jobs(option.get(
@@ -272,6 +182,8 @@ def __init__(self, option={}):

class Settings():
def __init__(self, option={}):
+                            option = update_site_settings(option)
+
class jobs:
def __init__(self, option={}) -> None:
self.scrape_names = option.get(
@@ -285,9 +197,10 @@ def __init__(self, option={}) -> None:
'auth', True)
self.auto_profile_choice = option.get(
'auto_profile_choice', "")
-                            self.auto_scrape_names = option.get(
-                                'auto_scrape_names', False)
-                            self.auto_choice = option.get('auto_choice', "")
+                            self.auto_model_choice = option.get(
+                                'auto_model_choice', False)
+                            self.auto_media_choice = option.get(
+                                'auto_media_choice', "")
self.auto_scrape_apis = option.get(
'auto_scrape_apis', True)
self.browser = browser(option.get(
@@ -322,5 +235,6 @@ def __init__(self, option={}) -> None:
'blacklist_name', "")
self.webhook = option.get(
'webhook', True)
+        self.info = Info()
self.settings = Settings(**settings)
self.supported = Supported(**supported)
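A short usage sketch of the new shim (shown as if update_site_settings were callable standalone; in the commit it is nested inside config.__init__, and the input dict here is a hypothetical pre-v7 site-settings block):

# Hypothetical pre-v7 site settings:
old_settings = {"auto_scrape_names": "username1,username2", "auto_choice": "images"}
new_settings = update_site_settings(old_settings)
# The old values are now readable under the new keys that Settings.__init__ expects:
# new_settings["auto_model_choice"] == "username1,username2"
# new_settings["auto_media_choice"] == "images"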
15 changes: 6 additions & 9 deletions datascraper/main_datascraper.py
@@ -6,12 +6,9 @@

import helpers.main_helper as main_helper
from helpers.main_helper import choose_option
-import modules.bbwchan as m_bbwchan
-import modules.fourchan as m_fourchan
import modules.onlyfans as m_onlyfans
from apis.onlyfans import onlyfans as OnlyFans
from apis.starsavn import starsavn as StarsAVN
-import modules.patreon as m_patreon
import modules.starsavn as m_starsavn
import time
import requests
@@ -37,9 +34,9 @@ def start_datascraper(json_config: dict, site_name_lower: str, api: Optional[Onl

json_site_settings = json_sites[site_name_lower]["settings"]

-    auto_scrape_names = json_site_settings["auto_scrape_names"]
-    if isinstance(auto_scrape_names, str):
-        temp_identifiers = auto_scrape_names.split(",")
+    auto_model_choice = json_site_settings["auto_model_choice"]
+    if isinstance(auto_model_choice, str):
+        temp_identifiers = auto_model_choice.split(",")
identifiers = [x for x in temp_identifiers if x]
else:
identifiers = []
@@ -70,7 +67,7 @@ def start_datascraper(json_config: dict, site_name_lower: str, api: Optional[Onl
if not auto_profile_choice:
print("Choose Profile")
auths = choose_option(
-            subscription_list, auto_profile_choice)
+            subscription_list, auto_profile_choice, True)
api.auths = [x.pop(0) for x in auths]
for auth in api.auths:
if not auth.auth_details:
@@ -105,7 +102,7 @@ def start_datascraper(json_config: dict, site_name_lower: str, api: Optional[Onl
if jobs["scrape_names"]:
print("Scraping Subscriptions")
names = main_helper.process_names(
-            module, subscription_list, auto_scrape_names, api, json_config, site_name_lower, site_name)
+            module, subscription_list, auto_model_choice, api, json_config, site_name_lower, site_name)
x = main_helper.process_downloads(api, module)
if webhooks:
x = main_helper.process_webhooks(
@@ -151,7 +148,7 @@ def start_datascraper(json_config: dict, site_name_lower: str, api: Optional[Onl
# if jobs["scrape_names"]:
# print("Scraping Subscriptions")
# names = main_helper.process_names(
-    #             module, subscription_list, auto_scrape_names, apis, json_config, site_name_lower, site_name)
+    #             module, subscription_list, auto_model_choice, apis, json_config, site_name_lower, site_name)
# x = main_helper.process_downloads(apis, module)
stop_time = str(
int(timeit.default_timer() - archive_time) / 60)[:4]
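As the first hunk in this file shows, auto_model_choice may be either a bool or a comma-separated string of identifiers; a quick sketch of the string case (the value itself is made up):

auto_model_choice = "username1,username2,"  # hypothetical config value
temp_identifiers = auto_model_choice.split(",")
identifiers = [x for x in temp_identifiers if x]  # drops empty entries
# identifiers == ["username1", "username2"]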
23 changes: 14 additions & 9 deletions helpers/main_helper.py
@@ -493,7 +493,7 @@ def get_config(config_path):
else:
json_config = {}
json_config2 = copy.deepcopy(json_config)
-    json_config, string = make_settings.fix(json_config)
+    json_config = make_settings.fix(json_config)
file_name = os.path.basename(config_path)
json_config = ujson.loads(json.dumps(make_settings.config(
**json_config), default=lambda o: o.__dict__))
@@ -543,25 +543,30 @@ def choose_auth(array):
return names


-def choose_option(subscription_list, auto_scrape: Union[str, bool]):
+def choose_option(subscription_list, auto_scrape: Union[str, bool], use_default_message=False):
    names = subscription_list[0]
+    default_message = ""
+    seperator = " | "
+    if use_default_message:
+        default_message = f"Names: Username = username {seperator}"
    new_names = []
    if names:
-        seperator = " | "
if isinstance(auto_scrape, bool):
if auto_scrape:
values = [x[1] for x in names]
else:
print(
f"Names: Username = username {seperator} {subscription_list[1]}")
f"{default_message}{subscription_list[1]}")
values = input().strip().split(",")
else:
if not auto_scrape:
print(
f"Names: Username = username {seperator} {subscription_list[1]}")
f"{default_message}{subscription_list[1]}")
values = input().strip().split(",")
else:
-                values = auto_scrape.split(",")
+                values = auto_scrape
+                if isinstance(auto_scrape, str):
+                    values = auto_scrape.split(",")
for value in values:
if value.isdigit():
if value == "0":
@@ -616,7 +621,7 @@ def process_profiles(json_settings, original_sessions, site_name, api: Union[Onl

def process_names(module, subscription_list, auto_scrape, api, json_config, site_name_lower, site_name) -> list:
names = choose_option(
-        subscription_list, auto_scrape)
+        subscription_list, auto_scrape, True)
if not names:
print("There's nothing to scrape.")
for name in names:
@@ -679,9 +684,9 @@ def export_data(metadata: Union[list, dict], path: str, encoding: Optional[str]
ujson.dump(metadata, outfile, indent=2, escape_forward_slashes=False)


-def grouper(n, iterable, fillvalue:Optional[Union[str,int]]=None):
+def grouper(n, iterable, fillvalue: Optional[Union[str, int]] = None):
args = [iter(iterable)] * n
    grouped = list(zip_longest(fillvalue=fillvalue, *args))
if not fillvalue:
grouped = [x for x in grouped if x]
return grouped
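For choose_option, the new use_default_message flag only controls whether the "Names: Username = username | " prefix is printed before the menu, and string auto_scrape values are now split inside the function. A hedged call-site sketch (the subscription_list value is invented to match the [names, menu_string] shape the function reads):

# Assumed shape: subscription_list[0] is a list of pairs whose second element
# is the selectable value; subscription_list[1] is the printable menu string.
subscription_list = [
    [(0, "All"), (1, "username1"), (2, "username2")],
    "0 = All | 1 = username1 | 2 = username2",
]
# auto_scrape=True selects every value automatically; False prompts the user.
names = choose_option(subscription_list, True, use_default_message=True)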