Skip to content

Commit

Permalink
Paid Content and Metadata Fixes
Browse files Browse the repository at this point in the history
Script now merges metadata correctly.
  • Loading branch information
UltimaHoarder committed Dec 12, 2020
1 parent c5b8c94 commit 8e9a81a
Show file tree
Hide file tree
Showing 4 changed files with 43 additions and 11 deletions.
1 change: 0 additions & 1 deletion apis/onlyfans/onlyfans.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@ def __iter__(self):
for attr, value in self.__dict__.items():
yield attr, value


class content_types:
def __init__(self, option={}) -> None:
class archived_types(content_types):
Expand Down
23 changes: 18 additions & 5 deletions classes/prepare_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,13 @@


class create_metadata(object):
def __init__(self, api=None, metadata: dict = {}, new=False, api_type: str = "") -> None:
def __init__(self, api=None, metadata: dict = {}, standard_format=False, api_type: str = "") -> None:
self.version = global_version
fixed_metadata = self.fix_metadata(metadata, new, api_type)
fixed_metadata = self.fix_metadata(metadata, standard_format, api_type)
self.content = format_content(
api, fixed_metadata["version"], fixed_metadata["content"]).content

def fix_metadata(self, metadata, new=False, api_type: str = "") -> dict:
def fix_metadata(self, metadata, standard_format=False, api_type: str = "") -> dict:
new_format = {}
new_format["version"] = 1
new_format["content"] = {}
Expand All @@ -32,7 +32,10 @@ def fix_metadata(self, metadata, new=False, api_type: str = "") -> dict:
metadata = new_format
else:
version = metadata.get("version", None)
if not version and not new and metadata:
if any(x for x in metadata if x in media_types().__dict__.keys()):
standard_format = True
print
if not version and not standard_format and metadata:
legacy_metadata = metadata
media_type = legacy_metadata.get("type", None)
if not media_type:
Expand All @@ -49,7 +52,17 @@ def fix_metadata(self, metadata, new=False, api_type: str = "") -> dict:
new_format["content"][media_type][key] = posts
print
print
elif new:
elif standard_format:
if any(x for x in metadata if x in media_types().__dict__.keys()):
metadata.pop("directories", None)
for key, status in metadata.items():
for key2, posts in status.items():
if all(x and isinstance(x, list) for x in posts):
posts = list(chain(*posts))
metadata[key][key2] = posts
print
print
print
new_format["content"] = metadata
print
else:
Expand Down
7 changes: 6 additions & 1 deletion extras/OFRenamer/start.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
from apis.api_helper import multiprocessing
from classes.prepare_metadata import format_types, format_variables, prepare_reformat
from hashlib import new
from os.path import dirname as up
Expand All @@ -16,7 +17,8 @@

def fix_directories(posts, base_directory, site_name, api_type, media_type, username, all_files, json_settings):
new_directories = []
for post in posts:

def fix_directory(post):
new_post_dict = post.convert(keep_empty_items=True)
for media in post.medias:
if media.links:
Expand Down Expand Up @@ -61,6 +63,9 @@ def fix_directories(posts, base_directory, site_name, api_type, media_type, user
setattr(media, "old_filepath", old_filepath)
setattr(media, "new_filepath", new_filepath)
new_directories.append(os.path.dirname(new_filepath))
pool = multiprocessing()
pool.starmap(fix_directory, product(
posts))
new_directories = list(set(new_directories))
return posts, new_directories

Expand Down
23 changes: 19 additions & 4 deletions modules/onlyfans.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,13 @@ def paid_content_scraper(apis: list[start]):
if not subscription:
subscription = create_subscription(author)
authed.subscriptions.append(subscription)
if paid_content["responseType"] == "post":
if paid_content["isArchived"]:
print(f"Model: {author['username']}")
# print(
# "ERROR, PLEASE REPORT THIS AS AN ISSUE AND TELL ME WHICH MODEL YOU'RE SCRAPING, THANKS")
# input()
# exit()
api_type = paid_content["responseType"].capitalize()+"s"
api_media = getattr(subscription.scraped, api_type)
api_media.append(paid_content)
Expand All @@ -324,6 +331,8 @@ def paid_content_scraper(apis: list[start]):
media_type = format_media_types()
count += 1
for api_type, paid_content in subscription.scraped:
if api_type == "Archived":
continue
formatted_directories = format_directories(
j_directory, site_name, username, metadata_directory_format, media_type, api_type)
metadata_directory = formatted_directories["metadata_directory"]
Expand Down Expand Up @@ -500,16 +509,18 @@ def compare_message(queue_id, remote_messages):


def process_metadata(api: start, new_metadata, formatted_directories, subscription, api_type, api_path, archive_path, site_name):
print("Processing Metadata")
print("Processing metadata.")
legacy_metadata_object = legacy_metadata_fixer(
formatted_directories, api)
new_metadata_object = create_metadata(
api, new_metadata, new=True)
api, new_metadata, standard_format=True)
print("Merging new metadata with legacy metadata.")
new_metadata_object = compare_metadata(
new_metadata_object, legacy_metadata_object)
old_metadata_set = import_archive(archive_path)
old_metadata_object = create_metadata(
api, old_metadata_set)
api, old_metadata_set, api_type=api_type)
print("Merging new metadata with old metadata.")
new_metadata_object = compare_metadata(
new_metadata_object, old_metadata_object)
if not subscription.download_info:
Expand All @@ -518,10 +529,11 @@ def process_metadata(api: start, new_metadata, formatted_directories, subscripti
subscription.download_info["webhook"] = webhook
subscription.download_info["metadata_locations"][api_type] = archive_path
subscription.set_scraped(api_type, new_metadata_object)
print("Renaming files.")
new_metadata_object = ofrenamer.start(
subscription, api_type, api_path, site_name, json_settings)
subscription.set_scraped(api_type, new_metadata_object)
print("Finished Processing Metadata")
print("Finished processing metadata.")
return new_metadata_object


Expand Down Expand Up @@ -827,6 +839,9 @@ def media_scraper(results, api, formatted_directories, username, api_type, paren
media_set2["valid"] = []
media_set2["invalid"] = []
for media_api in results:
if media_api["responseType"] == "post":
if media_api["isArchived"]:
pass
if api_type == "Messages":
media_api["rawText"] = media_api["text"]
if api_type == "Mass Messages":
Expand Down

0 comments on commit 8e9a81a

Please sign in to comment.