Formatting and removed starsavn
UltimaHoarder committed Jun 16, 2021
1 parent 0854d88 commit 22cebf5
Showing 16 changed files with 51 additions and 1,221 deletions.
15 changes: 9 additions & 6 deletions apis/api_helper.py
@@ -90,16 +90,18 @@ def __init__(
         self.auth = auth
 
     def create_client_session(self):
-        proxies = self.proxies
-        proxy = self.proxies[randint(0, len(proxies) - 1)] if proxies else ""
+        proxy = self.get_proxy()
         connector = ProxyConnector.from_url(proxy) if proxy else None
 
         final_cookies = self.auth.cookies if hasattr(self.auth, "cookies") else {}
         client_session = ClientSession(
             connector=connector, cookies=final_cookies, read_timeout=None
         )
         return client_session
 
+    def get_proxy(self)->str:
+        proxies = self.proxies
+        proxy = self.proxies[randint(0, len(proxies) - 1)] if proxies else ""
+        return proxy
     def stimulate_sessions(self):
         # Some proxies switch IP addresses if no request have been made for x amount of secondss
         def do(session_manager):
@@ -257,8 +259,7 @@ async def check(download_item: media_table, response: ClientResponse):
     temp_response = [
         response
         for response in responses
-        if response
-        and URL.human_repr(response.url) == download_item.link
+        if response and str(response.url) == download_item.link
     ]
     if temp_response:
         temp_response = temp_response[0]
@@ -441,7 +442,9 @@ def restore_missing_data(master_set2, media_set, split_by):
     return new_set
 
 
-async def scrape_endpoint_links(links, session_manager: Optional[session_manager], api_type):
+async def scrape_endpoint_links(
+    links, session_manager: Optional[session_manager], api_type
+):
     media_set = []
     max_attempts = 100
     api_type = api_type.capitalize()
12 changes: 5 additions & 7 deletions apis/onlyfans/classes/create_auth.py
@@ -22,10 +22,11 @@
 from dateutil.relativedelta import relativedelta
 from user_agent import generate_user_agent
 
+
 class create_auth(create_user):
     def __init__(
         self,
-        option:dict[str,Any] = {},
+        option: dict[str, Any] = {},
         user: create_user = create_user(),
         pool: Optional[Pool] = None,
         max_threads: int = -1,
@@ -54,7 +55,7 @@ def __init__(
         self.guest = False
         self.active: bool = False
         self.errors: list[error_details] = []
-        self.extras: Dict[str, dict[str, Any]] = {}
+        self.extras: Dict[str, Any] = {}
 
     def update(self, data: Dict[str, Any]):
         for key, value in data.items():
@@ -191,7 +192,7 @@ async def get_lists(self, refresh=True, limit=100, offset=0):
         self.lists = results
         return results
 
-    async def get_user(self, identifier: Union[str, int])->Union[create_user,dict]:
+    async def get_user(self, identifier: Union[str, int]) -> Union[create_user, dict]:
         link = endpoint_links(identifier).users
         result = await self.session_manager.json_request(link)
         result["session_manager"] = self.session_manager
@@ -239,7 +240,6 @@ async def get_subscriptions(
         if not refresh:
             subscriptions = self.subscriptions
             return subscriptions
-        link = endpoint_links(global_limit=limit, global_offset=offset).subscriptions
         ceil = math.ceil(self.subscribesCount / limit)
         a = list(range(ceil))
         offset_array = []
@@ -276,8 +276,6 @@

         async def multi(item):
             link = item
-            # link = item["link"]
-            # session = item["session"]
             subscriptions = await self.session_manager.json_request(link)
             valid_subscriptions = []
             extras = {}
@@ -298,7 +296,7 @@ async def multi(item):
             tasks.append(task)
             tasks = await asyncio.gather(*tasks)
             for task in tasks:
-                subscription2:Union[create_user,dict] = task
+                subscription2: Union[create_user, dict] = task
                 for subscription in subscriptions:
                     if isinstance(subscription2, dict):
                         continue
3 changes: 1 addition & 2 deletions apis/onlyfans/classes/create_user.py
@@ -13,7 +13,6 @@
     content_types,
     endpoint_links,
     handle_refresh,
-    media_types,
 )


@@ -371,7 +370,7 @@ async def get_message_by_id(
         ).message_by_id
         results = await self.session_manager.json_request(link)
         results = [x for x in results["list"] if x["id"] == message_id]
-        result = result = results[0] if results else {}
+        result = results[0] if results else {}
         final_result = create_message(result, self)
         return final_result

13 changes: 8 additions & 5 deletions classes/make_settings.py
@@ -4,6 +4,8 @@
 import os
 import uuid as uuid
 
+from yarl import URL
+
 def fix(config={}):
     info = config.get("info")
     if not info:
@@ -72,12 +74,13 @@ def __init__(self, option={}) -> None:
         self.exit_on_completion = exit_on_completion
         self.infinite_loop = infinite_loop
         self.loop_timeout = loop_timeout
-        if "github.com" in dynamic_rules_link:
-            if "raw" not in dynamic_rules_link:
-                parsed_link = urlparse(dynamic_rules_link)
-                path = parsed_link.path.replace("blob/","")
+        dynamic_rules_link = URL(dynamic_rules_link)
+        url_host = dynamic_rules_link.host
+        if "github.com" == url_host:
+            if "raw" != url_host:
+                path = dynamic_rules_link.path.replace("blob/","")
                 dynamic_rules_link = f"https://raw.githubusercontent.com/{path}"
-        self.dynamic_rules_link = dynamic_rules_link
+        self.dynamic_rules_link = str(dynamic_rules_link)
         self.proxies = proxies
         self.cert = cert
         self.random_string = random_string if random_string else uuid.uuid1().hex
5 changes: 1 addition & 4 deletions classes/prepare_metadata.py
@@ -1,15 +1,12 @@
 import copy
 import os
-from enum import unique
 from itertools import chain, groupby
-from math import exp
-from typing import Dict, MutableMapping, Union
+from typing import MutableMapping, Union
 
 import jsonpickle
 from apis.onlyfans.classes.create_auth import create_auth
 from apis.onlyfans.classes.extras import media_types
 from helpers import main_helper
-from requests.api import get
 
 global_version = 2

2 changes: 0 additions & 2 deletions database/databases/messages/messages.py
@@ -1,6 +1,5 @@
 ### messages.py ###
 
-import sqlalchemy
 from sqlalchemy.orm import declarative_base
 from database.models.api_table import api_table
 from database.models.media_table import media_table
@@ -9,7 +8,6 @@

 class api_table(api_table,Base):
     api_table.__tablename__ = "messages"
-    pass
 
 class media_table(media_table,Base):
     pass
2 changes: 0 additions & 2 deletions database/databases/posts/posts.py
@@ -1,6 +1,5 @@
 ### posts.py ###
 
-import sqlalchemy
 from sqlalchemy.orm import declarative_base
 from database.models.api_table import api_table
 from database.models.media_table import media_table
@@ -10,7 +9,6 @@

 class api_table(api_table, Base):
     api_table.__tablename__ = "posts"
-    pass
 
 
 class media_table(media_table, Base):
2 changes: 0 additions & 2 deletions database/databases/stories/stories.py
@@ -1,6 +1,5 @@
 ### posts.py ###
 
-import sqlalchemy
 from sqlalchemy.orm import declarative_base
 from database.models.api_table import api_table
 from database.models.media_table import media_table
@@ -10,7 +9,6 @@

 class api_table(api_table, Base):
     api_table.__tablename__ = "stories"
-    pass
 
 
 class media_table(media_table, Base):
1 change: 0 additions & 1 deletion database/models/api_table.py
@@ -3,7 +3,6 @@
 from datetime import datetime
 from typing import cast
 import sqlalchemy
-from sqlalchemy.orm import declarative_base
 
 class api_table():
     __tablename__ = ""
2 changes: 1 addition & 1 deletion database/models/media_table.py
@@ -1,7 +1,7 @@
 ### api_table.py ###
 
 from datetime import datetime
-from typing import Optional, cast
+from typing import cast
 import sqlalchemy


25 changes: 10 additions & 15 deletions datascraper/main_datascraper.py
@@ -1,16 +1,11 @@
-from itertools import product
 import os
 import timeit
-from typing import Optional, Union
+from typing import Optional
 
 import helpers.main_helper as main_helper
-from helpers.main_helper import choose_option
 import modules.onlyfans as m_onlyfans
 from apis.onlyfans import onlyfans as OnlyFans
-from apis.starsavn import starsavn as StarsAVN
-import modules.starsavn as m_starsavn
-import time
-import requests
+from helpers.main_helper import choose_option
 
 api_helper = OnlyFans.api_helper

@@ -57,9 +52,7 @@ async def start_datascraper(
         if not api:
             api = OnlyFans.start(max_threads=json_settings["max_threads"])
             api.settings = json_config
-            api = main_helper.process_profiles(
-                json_settings, proxies, site_name, api
-            )
+            api = main_helper.process_profiles(json_settings, proxies, site_name, api)
         print
 
         subscription_array = []
@@ -82,7 +75,7 @@
             )
             if not setup:
                 if webhooks:
-                    x = await main_helper.process_webhooks(api, "auth_webhook", "failed")
+                    await main_helper.process_webhooks(api, "auth_webhook", "failed")
                 auth_details = {}
                 auth_details["auth"] = auth.auth_details.__dict__
                 profile_directory = auth.profile_directory
@@ -94,8 +87,10 @@
                 continue
             auth_count += 1
             subscription_array += subscriptions
-        x = await main_helper.process_webhooks(api, "auth_webhook", "succeeded")
-        subscription_list = module.format_options(subscription_array, "usernames", api.auths)
+        await main_helper.process_webhooks(api, "auth_webhook", "succeeded")
+        subscription_list = module.format_options(
+            subscription_array, "usernames", api.auths
+        )
         if jobs["scrape_paid_content"]:
             print("Scraping Paid Content")
             paid_content = await module.paid_content_scraper(api, identifiers)
@@ -110,9 +105,9 @@
                 site_name_lower,
                 site_name,
             )
-            x = await main_helper.process_downloads(api, module)
+            await main_helper.process_downloads(api, module)
             if webhooks:
-                x = await main_helper.process_webhooks(api, "download_webhook", "succeeded")
+                await main_helper.process_webhooks(api, "download_webhook", "succeeded")
     elif site_name_lower == "starsavn":
         pass
         # site_name = "StarsAVN"
9 changes: 3 additions & 6 deletions extras/OFRenamer/start.py
@@ -13,7 +13,7 @@
 def fix_directories(api,posts, all_files, database_session: scoped_session, folder, site_name, parent_type, api_type, username, base_directory, json_settings):
     new_directories = []
 
-    def fix_directories(post: api_table, media_db: list[media_table]):
+    def fix_directories2(post: api_table, media_db: list[media_table]):
         delete_rows = []
         final_api_type = os.path.join("Archived",api_type) if post.archived else api_type
         post_id = post.post_id
@@ -32,8 +32,6 @@ def fix_directories(post: api_table, media_db: list[media_table]):
         date_format = json_settings["date_format"]
         text_length = json_settings["text_length"]
         download_path = base_directory
-        today = datetime.today()
-        today = today.strftime("%d-%m-%Y %H:%M:%S")
         option = {}
         option["site_name"] = site_name
         option["post_id"] = post_id
@@ -110,7 +108,7 @@ def fix_directories(post: api_table, media_db: list[media_table]):
     result = database_session.query(folder.media_table)
     media_db = result.all()
     pool = api.pool
-    delete_rows = pool.starmap(fix_directories, product(
+    delete_rows = pool.starmap(fix_directories2, product(
         posts, [media_db]))
     delete_rows = list(chain(*delete_rows))
     for delete_row in delete_rows:
@@ -123,7 +121,6 @@ def fix_directories(post: api_table, media_db: list[media_table]):

 def start(api,Session, parent_type, api_type, api_path, site_name, subscription, folder, json_settings):
     api_table = folder.api_table
-    media_table = folder.media_table
     database_session = Session()
     result = database_session.query(api_table).all()
     metadata = getattr(subscription.temp_scraped, api_type)
@@ -159,7 +156,7 @@ def start(api,Session, parent_type, api_type, api_path, site_name, subscription,
             x = [os.path.join(root, x) for x in files]
             all_files.extend(x)
 
-    fixed, new_directories = fix_directories(
+    fix_directories(
         api,result, all_files, database_session, folder, site_name, parent_type, api_type, username, root_directory, json_settings)
     database_session.close()
     return metadata
6 changes: 3 additions & 3 deletions helpers/db_helper.py
@@ -34,8 +34,8 @@ def run_revisions(alembic_directory: str, database_path: str = ""):
     alembic_cfg = Config(ini_path)
     alembic_cfg.set_main_option('script_location', script_location)
     alembic_cfg.set_main_option('sqlalchemy.url', full_database_path)
-    x = command.upgrade(alembic_cfg, 'head')
-    x = command.revision(alembic_cfg, autogenerate=True, message="content")
+    command.upgrade(alembic_cfg, 'head')
+    command.revision(alembic_cfg, autogenerate=True, message="content")
 
 
 def run_migrations(alembic_directory: str, database_path: str) -> None:
@@ -45,7 +45,7 @@ def run_migrations(alembic_directory: str, database_path: str) -> None:
     alembic_cfg = Config(ini_path)
     alembic_cfg.set_main_option('script_location', script_location)
     alembic_cfg.set_main_option('sqlalchemy.url', full_database_path)
-    x = command.upgrade(alembic_cfg, 'head')
+    command.upgrade(alembic_cfg, 'head')
 
 
 class database_collection(object):