Added {profile_username} to path format
UltimaHoarder committed Jun 16, 2021
1 parent 4d69ff4 commit 7091421
Showing 6 changed files with 150 additions and 71 deletions.
12 changes: 7 additions & 5 deletions README.md
@@ -122,7 +122,9 @@ Overview for [file_directory_format](#file_directory_format), [filename_format](

{media_id} = The media's ID.

{username} = The account's username.
{profile_username} = Your account's username.

{model_username} = The model's username.

{api_type} = Posts, Messages, etc.

@@ -148,10 +150,10 @@ The list below are unique identifiers that you must include.

You can choose one or more.

Default = "{site_name}/{username}/{api_type}/{value}/{media_type}"
Default = "{site_name}/{model_username}/{api_type}/{value}/{media_type}"
Default Translated = "OnlyFans/belledelphine/Posts/Free/Images"

{username} = belledelphine
{model_username} = belledelphine

### filename_format:

@@ -175,10 +177,10 @@ The list below are unique identifiers that you must include.

You must choose one or more.

Default = "{site_name}/{username}/Metadata"
Default = "{site_name}/{model_username}/Metadata"
Default Translated = "OnlyFans/belledelphine/Metadata"

{username} = belledelphine
{model_username} = belledelphine
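
For illustration, a minimal sketch of how the renamed placeholders could expand, assuming plain `str.format`-style substitution (the script's own `reformat` helper may differ in details such as date handling and length limits); all values below, including `profile_username`, are made up:

```python
# Hypothetical expansion of the documented default formats.
file_directory_format = "{site_name}/{model_username}/{api_type}/{value}/{media_type}"
metadata_directory_format = "{site_name}/{model_username}/Metadata"

example_values = {
    "site_name": "OnlyFans",
    "model_username": "belledelphine",   # the model being scraped
    "profile_username": "my_profile",    # hypothetical: the account you log in with
    "api_type": "Posts",
    "value": "Free",
    "media_type": "Images",
}

print(file_directory_format.format(**example_values))
# OnlyFans/belledelphine/Posts/Free/Images
print(metadata_directory_format.format(**example_values))
# OnlyFans/belledelphine/Metadata
```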

### text_length:

20 changes: 14 additions & 6 deletions classes/make_settings.py
@@ -5,20 +5,22 @@
import uuid as uuid

from yarl import URL

current_version = None
def fix(config={}):
global current_version
info = config.get("info")
if not info:
print("If you're not using >= v7 release, please download said release so the script can properly update your config. \nIf you're using >= v7 release or you don't care about your current config settings, press enter to continue. If script crashes, delete config.")
input()
current_version = info["version"]
return config


class config(object):
def __init__(self, info={}, settings={}, supported={}):
class Info(object):
def __init__(self) -> None:
self.version = 7.1
self.version = 7.2

class Settings(object):
def __init__(self, auto_site_choice="", profile_directories=[".profiles"], export_type="json", max_threads=-1, min_drive_space=0, helpers={}, webhooks={}, exit_on_completion=False, infinite_loop=True, loop_timeout="0", dynamic_rules_link="https://raw.githubusercontent.com/DATAHOARDERS/dynamic-rules/main/onlyfans.json", proxies=[], cert="", random_string=""):
@@ -92,6 +94,12 @@ def update_site_settings(options) -> dict:
new_options["auto_model_choice"] = value
elif "auto_scrape_apis" == key:
new_options["auto_api_choice"] = value
if "file_directory_format" == key:
new_options["file_directory_format"] = value.replace("{username}","{model_username}")
if "filename_format" == key:
new_options["filename_format"] = value.replace("{username}","{model_username}")
if "metadata_directory_format" == key:
new_options["metadata_directory_format"] = value.replace("{username}","{model_username}")
return new_options
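
The effect of the three new `replace` branches can be sketched in isolation as follows; `migrate_format_keys` is a hypothetical helper written for this example, not a function in the repository:

```python
# Legacy configs used {username}; the updated settings expect {model_username}.
def migrate_format_keys(site_settings: dict) -> dict:
    migrated = dict(site_settings)
    for key in ("file_directory_format", "filename_format", "metadata_directory_format"):
        value = migrated.get(key)
        if isinstance(value, str):
            migrated[key] = value.replace("{username}", "{model_username}")
    return migrated

legacy = {"file_directory_format": "{site_name}/{username}/{api_type}/{value}/{media_type}"}
print(migrate_format_keys(legacy)["file_directory_format"])
# {site_name}/{model_username}/{api_type}/{value}/{media_type}
```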

class Supported(object):
@@ -141,13 +149,13 @@ def __init__(self, option={}) -> None:
'download_directories', [".sites"])
normpath = os.path.normpath
self.file_directory_format = normpath(option.get(
'file_directory_format', "{site_name}/{username}/{api_type}/{value}/{media_type}"))
'file_directory_format', "{site_name}/{model_username}/{api_type}/{value}/{media_type}"))
self.filename_format = normpath(option.get(
'filename_format', "{filename}.{ext}"))
self.metadata_directories = option.get(
'metadata_directories', [".sites"])
self.metadata_directory_format = normpath(option.get(
'metadata_directory_format', "{site_name}/{username}/Metadata"))
'metadata_directory_format', "{site_name}/{model_username}/Metadata"))
self.delete_legacy_metadata = option.get(
'delete_legacy_metadata', False)
self.text_length = option.get('text_length', 255)
@@ -207,13 +215,13 @@ def __init__(self, option={}) -> None:
'download_directories', [".sites"])
normpath = os.path.normpath
self.file_directory_format = normpath(option.get(
'file_directory_format', "{site_name}/{username}/{api_type}/{value}/{media_type}"))
'file_directory_format', "{site_name}/{model_username}/{api_type}/{value}/{media_type}"))
self.filename_format = normpath(option.get(
'filename_format', "{filename}.{ext}"))
self.metadata_directories = option.get(
'metadata_directories', [".sites"])
self.metadata_directory_format = normpath(option.get(
'metadata_directory_format', "{site_name}/{username}/Metadata"))
'metadata_directory_format', "{site_name}/{model_username}/Metadata"))
self.delete_legacy_metadata = option.get(
'delete_legacy_metadata', False)
self.text_length = option.get('text_length', 255)
20 changes: 15 additions & 5 deletions classes/prepare_metadata.py
@@ -252,7 +252,8 @@ def __init__(self):
self.first_letter = "{first_letter}"
self.post_id = "{post_id}"
self.media_id = "{media_id}"
self.username = "{username}"
self.profile_username = "{profile_username}"
self.model_username = "{model_username}"
self.api_type = "{api_type}"
self.media_type = "{media_type}"
self.filename = "{filename}"
@@ -303,7 +304,13 @@ def check_rules(self):
if b in self.filename_format:
invalid_list.append(b)
if key == "metadata_directory_format":
wl = ["{site_name}", "{first_letter}", "{model_id}", "{username}"]
wl = [
"{site_name}",
"{first_letter}",
"{model_id}",
"{profile_username}",
"{model_username}",
]
bl = format_variables().whitelist(wl)
invalid_list = []
for b in bl:
@@ -328,7 +335,7 @@ def check_unique(self, return_unique=True):
f = format_variables()
for key, value in self:
if key == "file_directory_format":
unique = ["{media_id}", "{username}"]
unique = ["{media_id}", "{model_username}"]
value = os.path.normpath(value)
values = value.split(os.sep)
option["unique"].file_directory_format = unique
@@ -341,7 +348,7 @@ def check_unique(self, return_unique=True):
values.append(value2)
option["unique"].filename_format = unique
elif key == "metadata_directory_format":
unique = ["{username}"]
unique = ["{model_username}"]
value = os.path.normpath(value)
values = value.split(os.sep)
option["unique"].metadata_directory_format = unique
@@ -369,7 +376,10 @@ def __init__(self, option, keep_vars=False):
self.site_name = option.get("site_name", format_variables2.site_name)
self.post_id = option.get("post_id", format_variables2.post_id)
self.media_id = option.get("media_id", format_variables2.media_id)
self.username = option.get("username", format_variables2.username)
self.profile_username = option.get(
"profile_username", format_variables2.profile_username
)
self.model_username = option.get("model_username", format_variables2.model_username)
self.api_type = option.get("api_type", format_variables2.api_type)
self.media_type = option.get("media_type", format_variables2.media_type)
self.filename = option.get("filename", format_variables2.filename)
100 changes: 71 additions & 29 deletions extras/OFRenamer/start.py
@@ -1,21 +1,36 @@
#!/usr/bin/env python3
from sqlalchemy.orm.scoping import scoped_session
from database.models.api_table import api_table
from database.models.media_table import media_table
import urllib.parse as urlparse
import os
import shutil
import traceback
import urllib.parse as urlparse
from datetime import datetime
import os
from itertools import chain, product
import traceback

from apis.onlyfans.classes.create_user import create_user
from database.models.api_table import api_table
from database.models.media_table import media_table
from sqlalchemy.orm.scoping import scoped_session

def fix_directories(api,posts, all_files, database_session: scoped_session, folder, site_name, parent_type, api_type, username, base_directory, json_settings):

def fix_directories(
posts,
api,
subscription: create_user,
all_files,
database_session: scoped_session,
folder,
site_name,
api_type,
base_directory,
json_settings,
):
new_directories = []

def fix_directories2(post: api_table, media_db: list[media_table]):
delete_rows = []
final_api_type = os.path.join("Archived",api_type) if post.archived else api_type
final_api_type = (
os.path.join("Archived", api_type) if post.archived else api_type
)
post_id = post.post_id
media_db = [x for x in media_db if x.post_id == post_id]
for media in media_db:
@@ -36,7 +51,8 @@ def fix_directories2(post: api_table, media_db: list[media_table]):
option["site_name"] = site_name
option["post_id"] = post_id
option["media_id"] = media_id
option["username"] = username
option["profile_username"] = subscription.subscriber.username
option["model_username"] = subscription.username
option["api_type"] = final_api_type
option["media_type"] = media.media_type
option["filename"] = original_filename
@@ -51,23 +67,25 @@ def fix_directories2(post: api_table, media_db: list[media_table]):
option["archived"] = post.archived
prepared_format = prepare_reformat(option)
file_directory = main_helper.reformat(
prepared_format, file_directory_format)
prepared_format, file_directory_format
)
prepared_format.directory = file_directory
old_filepath = ""
if media.linked:
filename_format = f"linked_{filename_format}"
old_filepaths = [
x for x in all_files if original_filename in os.path.basename(x)]
x for x in all_files if original_filename in os.path.basename(x)
]
if not old_filepaths:
old_filepaths = [
x for x in all_files if str(media_id) in os.path.basename(x)]
x for x in all_files if str(media_id) in os.path.basename(x)
]
print
if not media.linked:
old_filepaths = [x for x in old_filepaths if "linked_" not in x]
if old_filepaths:
old_filepath = old_filepaths[0]
new_filepath = main_helper.reformat(
prepared_format, filename_format)
new_filepath = main_helper.reformat(prepared_format, filename_format)
if old_filepath and old_filepath != new_filepath:
if os.path.exists(new_filepath):
os.remove(new_filepath)
@@ -78,12 +96,16 @@ def fix_directories2(post: api_table, media_db: list[media_table]):
if media.size:
media.downloaded = True
found_dupes = [
x for x in media_db if x.filename == new_filename and x.id != media.id]
x
for x in media_db
if x.filename == new_filename and x.id != media.id
]
delete_rows.extend(found_dupes)
os.makedirs(os.path.dirname(
new_filepath), exist_ok=True)
os.makedirs(os.path.dirname(new_filepath), exist_ok=True)
if media.linked:
if os.path.dirname(old_filepath) == os.path.dirname(new_filepath):
if os.path.dirname(old_filepath) == os.path.dirname(
new_filepath
):
moved = shutil.move(old_filepath, new_filepath)
else:
moved = shutil.copy(old_filepath, new_filepath)
@@ -105,21 +127,32 @@ def fix_directories2(post: api_table, media_db: list[media_table]):
media.filename = os.path.basename(new_filepath)
new_directories.append(os.path.dirname(new_filepath))
return delete_rows

result = database_session.query(folder.media_table)
media_db = result.all()
pool = api.pool
delete_rows = pool.starmap(fix_directories2, product(
posts, [media_db]))
delete_rows = pool.starmap(fix_directories2, product(posts, [media_db]))
delete_rows = list(chain(*delete_rows))
for delete_row in delete_rows:
database_session.query(folder.media_table).filter(
folder.media_table.id == delete_row.id).delete()
folder.media_table.id == delete_row.id
).delete()
database_session.commit()
new_directories = list(set(new_directories))
return posts, new_directories
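
With the subscription object threaded through, both usernames are available when the renamer builds its format options; a toy illustration with stand-in objects (a real run passes the API's `create_user` subscription, and `my_profile` is a made-up value):

```python
# Stand-ins showing where the two usernames now come from.
from types import SimpleNamespace

subscription = SimpleNamespace(
    username="belledelphine",                          # the model being scraped
    subscriber=SimpleNamespace(username="my_profile"), # hypothetical authed profile
)

option = {
    "profile_username": subscription.subscriber.username,
    "model_username": subscription.username,
}
print(option)  # {'profile_username': 'my_profile', 'model_username': 'belledelphine'}
```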


def start(api,Session, parent_type, api_type, api_path, site_name, subscription, folder, json_settings):
def start(
api,
Session,
parent_type,
api_type,
api_path,
site_name,
subscription: create_user,
folder,
json_settings,
):
api_table = folder.api_table
database_session = Session()
result = database_session.query(api_table).all()
@@ -132,23 +165,23 @@ def start(api,Session, parent_type, api_type, api_path, site_name, subscription,
reformats["metadata_directory_format"] = json_settings["metadata_directory_format"]
reformats["file_directory_format"] = json_settings["file_directory_format"]
reformats["filename_format"] = json_settings["filename_format"]
username = subscription.username
model_username = subscription.username
option = {}
option["site_name"] = site_name
option["api_type"] = api_type
option["username"] = username
option["profile_username"] = subscription.subscriber.username
option["model_username"] = model_username
option["date_format"] = date_format
option["maximum_length"] = text_length
option["directory"] = root_directory
formatted = format_types(reformats).check_unique()
unique = formatted["unique"]
for key, value in reformats.items():
key2 = getattr(unique, key)[0]
reformats[key] = value.split(key2, 1)[0]+key2
reformats[key] = value.split(key2, 1)[0] + key2
print
print
a, base_directory, c = prepare_reformat(
option, keep_vars=True).reformat(reformats)
a, base_directory, c = prepare_reformat(option, keep_vars=True).reformat(reformats)
download_info["base_directory"] = base_directory
print
all_files = []
@@ -157,7 +190,17 @@ def start(api,Session, parent_type, api_type, api_path, site_name, subscription,
all_files.extend(x)

fix_directories(
api,result, all_files, database_session, folder, site_name, parent_type, api_type, username, root_directory, json_settings)
result,
api,
subscription,
all_files,
database_session,
folder,
site_name,
api_type,
root_directory,
json_settings,
)
database_session.close()
return metadata

@@ -169,4 +212,3 @@ def start(api,Session, parent_type, api_type, api_path, site_name, subscription,
else:
import helpers.main_helper as main_helper
from classes.prepare_metadata import format_types, prepare_reformat
