ZeronameExPlugin - Really nice tail #2861

Open · wants to merge 6 commits into base: py3

Changes from 1 commit:
Add nameyo support
wolfitdm committed Jan 19, 2025
commit 1c22812888629c92a97a7e0861752b21a0beecf3
plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py (6 changes: 4 additions & 2 deletions)

@@ -3,7 +3,7 @@
 import struct
 import socket

-import lib.bencode_open as bencode_open
+import bencode
 from lib.subtl.subtl import UdpTrackerClient
 import socks
 import sockshandler
@@ -133,7 +133,9 @@ def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protoc

         # Decode peers
         try:
-            peer_data = bencode_open.loads(response)[b"peers"]
+            peer_data = bencode.decode(response)["peers"]
+            if type(peer_data) is not bytes:
+                peer_data = peer_data.encode()
             response = None
             peer_count = int(len(peer_data) / 6)
             peers = []
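The import swap above is why the lookup changes too: `lib.bencode_open` decodes dictionary keys as bytes (hence the old `[b"peers"]`), while `bencode` returns str keys, and the new guard coerces a str-typed `peers` value back to bytes. For readers new to the wire format, the compact `peers` blob that the code divides by 6 packs each IPv4 peer as 4 address bytes plus a 2-byte big-endian port. A minimal standalone sketch of unpacking it (the helper name is ours, not the plugin's):

    import socket
    import struct

    def unpack_compact_peers(peer_data: bytes):
        # Each entry is 6 bytes: 4-byte IPv4 address + 2-byte big-endian port.
        peers = []
        for off in range(0, len(peer_data) // 6 * 6, 6):
            ip = socket.inet_ntoa(peer_data[off:off + 4])
            port = struct.unpack_from(">H", peer_data, off + 4)[0]
            peers.append((ip, port))
        return peers

    print(unpack_compact_peers(b"\x7f\x00\x00\x01\x1e\x61"))  # [('127.0.0.1', 7777)]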
plugins/AnnounceLocal/AnnounceLocalPlugin.py (3 changes: 2 additions & 1 deletion)

@@ -120,11 +120,12 @@ def actionSiteListResponse(self, sender, params):
 @PluginManager.registerTo("FileServer")
 class FileServerPlugin(object):
     def __init__(self, *args, **kwargs):
-        super(FileServerPlugin, self).__init__(*args, **kwargs)
+        res = super(FileServerPlugin, self).__init__(*args, **kwargs)
         if config.broadcast_port and config.tor != "always" and not config.disable_udp:
             self.local_announcer = LocalAnnouncer(self, config.broadcast_port)
         else:
             self.local_announcer = None
+        return res

     def start(self, *args, **kwargs):
         if self.local_announcer:
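A note on this hunk: `__init__` always returns None in Python, so capturing and returning `res` is behaviorally a no-op here (an explicit non-None return from `__init__` would actually raise a TypeError). The capture-and-return shape is the generic plugin-wrapper idiom, which does matter for ordinary methods. A minimal sketch of that pattern (class names reused purely for illustration):

    class FileServer:
        def start(self):
            return "listening"

    class FileServerPlugin(FileServer):
        # Plugin-mixin style: run the wrapped implementation, add behavior
        # around it, and pass its result through. Unlike __init__, a normal
        # method's return value is visible to callers, so `res` matters here.
        def start(self, *args, **kwargs):
            res = super().start(*args, **kwargs)
            return res

    assert FileServerPlugin().start() == "listening"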
plugins/AnnounceZero/AnnounceZeroPlugin.py (20 changes: 9 additions & 11 deletions)

@@ -21,15 +21,6 @@ def importHostClasses():
 # Process result got back from tracker
 def processPeerRes(tracker_address, site, peers):
     added = 0
-
-    # Onion
-    found_onion = 0
-    for packed_address in peers["onion"]:
-        found_onion += 1
-        peer_onion, peer_port = helper.unpackOnionAddress(packed_address)
-        if site.addPeer(peer_onion, peer_port, source="tracker"):
-            added += 1
-
     # Ip4
     found_ipv4 = 0
     peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", []))
@@ -38,6 +29,13 @@ def processPeerRes(tracker_address, site, peers):
         peer_ip, peer_port = helper.unpackAddress(packed_address)
         if site.addPeer(peer_ip, peer_port, source="tracker"):
             added += 1
+    # Onion
+    found_onion = 0
+    for packed_address in peers["onion"]:
+        found_onion += 1
+        peer_onion, peer_port = helper.unpackOnionAddress(packed_address)
+        if site.addPeer(peer_onion, peer_port, source="tracker"):
+            added += 1

     if added:
         site.worker_manager.onPeers()
@@ -133,8 +131,8 @@ def announceTrackerZero(self, tracker_address, mode="start", num_want=10):
             tracker_peer.remove()  # Close connection, we don't need it in next 5 minute

         self.site.log.debug(
-            "Tracker announce result: zero://%s (sites: %s, new peers: %s, add: %s, mode: %s) in %.3fs" %
-            (tracker_address, site_index, peers_added, add_types, mode, time.time() - s)
+            "Tracker announce result: zero://%s (sites: %s, new peers: %s, add: %s) in %.3fs" %
+            (tracker_address, site_index, peers_added, add_types, time.time() - s)
         )

         return True
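When reading the relocated onion block, note an asymmetry it preserves: the IPv4/IPv6 lists are fetched with `peers.get(..., [])`, while the onion list uses `peers["onion"]`, which raises KeyError when a tracker reply carries no onion entries. A small illustration of the difference (plain Python, not a proposed patch):

    peers = {"ipv4": [b"\x7f\x00\x00\x01\x1e\x61"]}  # a reply without an "onion" key

    print(peers.get("onion", []))  # [] -> a for-loop over it simply never runs
    try:
        peers["onion"]
    except KeyError:
        print("direct indexing raises when the key is absent")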
plugins/Bigfile/BigfilePlugin.py (96 changes: 22 additions & 74 deletions)

@@ -33,7 +33,6 @@ def importPluginnedClasses():
     from Content.ContentManager import VerifyError
     from Config import config

-
 if "upload_nonces" not in locals():
     upload_nonces = {}

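The `locals()` guard this hunk keeps is a reload-survival idiom: at module level, `locals()` is the module's namespace dict, and reloading re-executes the body inside that same dict, so `upload_nonces` is created on first import only and its contents outlive plugin reloads. A minimal sketch of the idiom under `importlib.reload` (the module name is ours):

    # state.py
    if "upload_nonces" not in locals():
        upload_nonces = {}  # created on first import; reloads keep the contents

    # demo.py
    import importlib
    import state

    state.upload_nonces["abc"] = 1
    importlib.reload(state)
    print(state.upload_nonces)  # {'abc': 1}: the guard kept the dict alive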
@@ -61,44 +60,13 @@ def actionBigfileUpload(self):
         })

         self.readMultipartHeaders(self.env['wsgi.input'])  # Skip http headers
-        result = self.handleBigfileUpload(upload_info, self.env['wsgi.input'].read)
-        return json.dumps(result)

-    def actionBigfileUploadWebsocket(self):
-        ws = self.env.get("wsgi.websocket")
-
-        if not ws:
-            self.start_response("400 Bad Request", [])
-            return [b"Not a websocket request!"]
-
-        nonce = self.get.get("upload_nonce")
-        if nonce not in upload_nonces:
-            return self.error403("Upload nonce error.")
-
-        upload_info = upload_nonces[nonce]
-        del upload_nonces[nonce]
-
-        ws.send("poll")
-
-        buffer = b""
-        def read(size):
-            nonlocal buffer
-            while len(buffer) < size:
-                buffer += ws.receive()
-                ws.send("poll")
-            part, buffer = buffer[:size], buffer[size:]
-            return part
-
-        result = self.handleBigfileUpload(upload_info, read)
-        ws.send(json.dumps(result))
-
-    def handleBigfileUpload(self, upload_info, read):
         site = upload_info["site"]
         inner_path = upload_info["inner_path"]

         with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
             merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
-                read, upload_info["size"], upload_info["piece_size"], out_file
+                self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file
             )

         if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
@@ -137,12 +105,12 @@ def handleBigfileUpload(self, upload_info, read):

         site.content_manager.contents.loadItem(file_info["content_inner_path"])  # reload cache

-        return {
+        return json.dumps({
             "merkle_root": merkle_root,
             "piece_num": len(piecemap_info["sha512_pieces"]),
             "piece_size": piece_size,
             "inner_path": inner_path
-        }
+        })

     def readMultipartHeaders(self, wsgi_input):
         found = False
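The deleted `read(size)` closure deserves a look even on its way out: it buffered websocket frames until at least `size` bytes were available, sent "poll" to ask the browser for the next frame, and sliced off exactly the requested amount. A dependency-free sketch of the same exact-size buffering technique (`recv` stands in for `ws.receive`; all names are ours):

    def make_reader(recv):
        # Returns read(size): accumulates variable-size frames from recv()
        # and serves exact-size chunks (short only at end of stream). The
        # original also sent "poll" after each frame to request the next one.
        buffer = b""
        def read(size):
            nonlocal buffer
            while len(buffer) < size:
                frame = recv()
                if not frame:      # frame source exhausted
                    break
                buffer += frame
            part, buffer = buffer[:size], buffer[size:]
            return part
        return read

    frames = iter([b"abc", b"defg", b""])
    read = make_reader(lambda: next(frames))
    assert read(5) == b"abcde" and read(5) == b"fg"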
Expand All @@ -169,7 +137,7 @@ def actionFile(self, file_path, *args, **kwargs):

@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
def actionBigfileUploadInit(self, to, inner_path, size):
valid_signers = self.site.content_manager.getValidSigners(inner_path)
auth_address = self.user.getAuthAddress(self.site.address)
if not self.site.settings["own"] and auth_address not in valid_signers:
Expand All @@ -193,29 +161,12 @@ def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
"piece_size": piece_size,
"piecemap": inner_path + ".piecemap.msgpack"
}

if protocol == "xhr":
return {
"url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
"piece_size": piece_size,
"inner_path": inner_path,
"file_relative_path": file_relative_path
}
elif protocol == "websocket":
server_url = self.request.getWsServerUrl()
if server_url:
proto, host = server_url.split("://")
origin = proto.replace("http", "ws") + "://" + host
else:
origin = "{origin}"
return {
"url": origin + "/ZeroNet-Internal/BigfileUploadWebsocket?upload_nonce=" + nonce,
"piece_size": piece_size,
"inner_path": inner_path,
"file_relative_path": file_relative_path
}
else:
return {"error": "Unknown protocol"}
return {
"url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
"piece_size": piece_size,
"inner_path": inner_path,
"file_relative_path": file_relative_path
}

@flag.no_multiuser
def actionSiteSetAutodownloadBigfileLimit(self, to, limit):
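Context for the surviving XHR branch: the returned `url` embeds a one-shot nonce that `actionBigfileUpload` later pops from `upload_nonces`, so each init call authorizes exactly one upload. A condensed sketch of such a registry (the helper names and random source are ours; the plugin generates its nonce elsewhere):

    import base64
    import os

    upload_nonces = {}

    def issue_upload_nonce(info):
        nonce = base64.b32encode(os.urandom(20)).decode().lower()
        upload_nonces[nonce] = info
        return "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce

    def claim_upload_nonce(nonce):
        # pop() makes the nonce single-use, mirroring `del upload_nonces[nonce]`
        return upload_nonces.pop(nonce, None)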
@@ -258,14 +209,14 @@ def getFileInfo(self, inner_path, *args, **kwargs):
         file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs)
         return file_info

-    def readFile(self, read_func, size, buff_size=1024 * 64):
+    def readFile(self, file_in, size, buff_size=1024 * 64):
         part_num = 0
         recv_left = size

         while 1:
             part_num += 1
             read_size = min(buff_size, recv_left)
-            part = read_func(read_size)
+            part = file_in.read(read_size)

             if not part:
                 break
@@ -278,7 +229,7 @@ def readFile(self, read_func, size, buff_size=1024 * 64):
             if recv_left <= 0:
                 break

-    def hashBigfile(self, read_func, size, piece_size=1024 * 1024, file_out=None):
+    def hashBigfile(self, file_in, size, piece_size=1024 * 1024, file_out=None):
         self.site.settings["has_bigfile"] = True

         recv = 0
@@ -291,7 +242,7 @@ def hashBigfile(self, read_func, size, piece_size=1024 * 1024, file_out=None):
         mt.hash_function = CryptHash.sha512t

         part = ""
-        for part in self.readFile(read_func, size):
+        for part in self.readFile(file_in, size):
             if file_out:
                 file_out.write(part)

@@ -357,7 +308,7 @@ def hashFile(self, dir_inner_path, file_relative_path, optional=False):
             return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

         self.log.info("- [HASHING] %s" % file_relative_path)
-        merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb").read, file_size)
+        merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size)
         if not hash:
             hash = merkle_root

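The signature changes above revert `readFile` and `hashBigfile` from taking a bare `read(size)` callable back to taking a file-like object; either way the core is a bounded chunk generator. A standalone sketch of that loop (the function name is ours):

    import io

    def read_chunks(file_in, size, buff_size=1024 * 64):
        # Yield chunks from file_in until `size` bytes are consumed or EOF.
        recv_left = size
        while recv_left > 0:
            part = file_in.read(min(buff_size, recv_left))
            if not part:
                break
            yield part
            recv_left -= len(part)

    data = io.BytesIO(b"x" * 200000)
    assert sum(len(p) for p in read_chunks(data, 150000)) == 150000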
@@ -389,11 +340,7 @@ def getPiecemap(self, inner_path):
         return piecemap

     def verifyPiece(self, inner_path, pos, piece):
-        try:
-            piecemap = self.getPiecemap(inner_path)
-        except Exception as err:
-            raise VerifyError("Unable to download piecemap: %s" % Debug.formatException(err))
-
+        piecemap = self.getPiecemap(inner_path)
         piece_i = int(pos / piecemap["piece_size"])
         if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
             raise VerifyError("Invalid hash")
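The simplified `verifyPiece` drops the error wrapping around the piecemap fetch but keeps the logic: map the byte offset to a piece index, then compare digests. A self-contained sketch, with plain `hashlib.sha512` standing in for the project's `CryptHash.sha512sum(..., format="digest")` (which uses a truncated sha512):

    import hashlib

    def verify_piece(piecemap, pos, piece):
        piece_i = pos // piecemap["piece_size"]  # byte offset -> piece index
        return hashlib.sha512(piece).digest() == piecemap["sha512_pieces"][piece_i]

    piecemap = {"piece_size": 4, "sha512_pieces": [hashlib.sha512(b"abcd").digest()]}
    assert verify_piece(piecemap, 0, b"abcd")
    assert not verify_piece(piecemap, 0, b"abce")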
@@ -459,7 +406,9 @@ def __init__(self, *args, **kwargs):
     def createSparseFile(self, inner_path, size, sha512=None):
         file_path = self.getPath(inner_path)

-        self.ensureDir(os.path.dirname(inner_path))
+        file_dir = os.path.dirname(file_path)
+        if not os.path.isdir(file_dir):
+            os.makedirs(file_dir)

         f = open(file_path, 'wb')
         f.truncate(min(1024 * 1024 * 5, size))  # Only pre-allocate up to 5MB
@@ -483,7 +432,9 @@ def write(self, inner_path, content):
         file_path = self.getPath(inner_path)

         # Create dir if not exist
-        self.ensureDir(os.path.dirname(inner_path))
+        file_dir = os.path.dirname(file_path)
+        if not os.path.isdir(file_dir):
+            os.makedirs(file_dir)

         if not os.path.isfile(file_path):
             file_info = self.site.content_manager.getFileInfo(inner_path)
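Both hunks replace the storage-level `ensureDir(...)` helper call with an explicit `os.makedirs` on the directory of the already-resolved `file_path`. For the sparse-file half, the interesting call is `truncate()`: it extends the file to the target length without writing any data, so filesystems with sparse-file support allocate blocks lazily. A minimal sketch combining the two (the function name and demo path are ours):

    import os

    def create_sparse_file(file_path, size):
        file_dir = os.path.dirname(file_path)
        if not os.path.isdir(file_dir):
            os.makedirs(file_dir)
        with open(file_path, "wb") as f:
            # truncate() extends without writing; sparse-capable filesystems
            # allocate the blocks lazily, so this is a cheap pre-allocation.
            f.truncate(min(1024 * 1024 * 5, size))  # cap pre-allocation at 5MB

    create_sparse_file("/tmp/bigfile_demo/part.dat", 1024 * 1024 * 100)
    print(os.path.getsize("/tmp/bigfile_demo/part.dat"))  # 5242880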
@@ -597,9 +548,6 @@ def seek(self, pos, whence=0):
             whence = 0
         return self.f.seek(pos, whence)

-    def seekable(self):
-        return self.f.seekable()
-
     def tell(self):
         return self.f.tell()

plugins/Bigfile/Test/TestBigfile.py (20 changes: 10 additions & 10 deletions)

@@ -134,7 +134,7 @@ def testRangedFileDownload(self, file_server, site, site_temp):
         peer_client = site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
         assert not bad_files
@@ -172,7 +172,7 @@ def testOpenBigfile(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Open virtual file
         assert not site_temp.storage.isFile(inner_path)
@@ -255,7 +255,7 @@ def testFileGet(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Download second block
         with site_temp.storage.openBigfile(inner_path) as f:
@@ -380,7 +380,7 @@ def testDownloadStats(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Open virtual file
         assert not site_temp.storage.isFile(inner_path)
@@ -417,7 +417,7 @@ def testPrebuffer(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Open virtual file
         assert not site_temp.storage.isFile(inner_path)
@@ -453,7 +453,7 @@ def testDownloadAllPieces(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Open virtual file
         assert not site_temp.storage.isFile(inner_path)
@@ -482,7 +482,7 @@ def testFileSize(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         # Open virtual file
         assert not site_temp.storage.isFile(inner_path)
@@ -507,7 +507,7 @@ def testFileRename(self, file_server, site, site_temp):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         with Spy.Spy(FileRequest, "route") as requests:
             site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size))
@@ -529,7 +529,7 @@ def testFileRename(self, file_server, site, site_temp):
         with Spy.Spy(FileRequest, "route") as requests:
             site.publish()
             time.sleep(0.1)
-            site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)  # Wait for download
+            site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download

         assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0

@@ -563,7 +563,7 @@ def testNullFileRead(self, file_server, site, site_temp, size):
         site_temp.addPeer(file_server.ip, 1544)

         # Download site
-        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)

         if "piecemap" in site.content_manager.getFileInfo(inner_path):  # Bigfile
             site_temp.needFile(inner_path + "|all")
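All ten test hunks make the same edit: drop `retry_bad_files=False` and shrink the join timeout from 10s to 5s. ZeroNet is built on gevent, and `join(timeout=...)` on a spawned task returns quietly when the timeout expires rather than raising, so a shorter timeout only bounds how long the assertions wait. A minimal illustration of that join semantics (assuming gevent is installed):

    import gevent

    def download():
        gevent.sleep(0.1)  # stand-in for network transfer
        return "done"

    g = gevent.spawn(download)
    g.join(timeout=5)  # returns once finished, or after 5s without raising
    print(g.value)     # "done" if it beat the timeout, else None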