Validation of existing downloaded parts
This commit is contained in:
parent
3a879b042c
commit
3846394914
|
@ -0,0 +1,6 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
|
@ -2,7 +2,10 @@
|
|||
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
import glob
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
from torrentool.api import Torrent
|
||||
|
||||
"""
|
||||
|
@ -12,15 +15,28 @@ from torrentool.api import Torrent
|
|||
|
||||
"""
|
||||
|
||||
ARG_OPTIONS = {'short': False, 'files': False}
|
||||
ARG_OPTIONS = {'short': False, 'files': False, 'existing': False, 'non-existing': False, 'validate': False}
|
||||
|
||||
|
||||
def parse_torrent_file(torrentfile, short_output=False, list_contents=True):
|
||||
def make_blob(buffer, fileinfo):
    """Append exactly ``fileinfo[1]`` bytes for one torrent entry to *buffer*.

    ``fileinfo`` is a ``(path, size)`` pair as produced by torrentool's
    ``Torrent.files``.  If the file exists on disk its contents are copied
    in; a partially downloaded (short) file is zero-padded and an over-long
    one is truncated, so every entry occupies exactly its declared size and
    the piece boundaries of the concatenated stream stay aligned.  A missing
    file becomes a zero-filled placeholder of the declared size.

    Returns the number of bytes written (always ``fileinfo[1]``).
    """
    path, size = fileinfo
    if os.path.exists(path):
        with open(path, 'rb') as datafd:
            # Read at most `size` bytes: an over-long file on disk must not
            # shift the offsets of every file that follows it in the buffer.
            data = datafd.read(size)
        # Zero-pad a short (partially downloaded) file up to its declared
        # size so the per-piece SHA-1 validation stays position-correct.
        num_written = buffer.write(data + b'\x00' * (size - len(data)))
    else:
        num_written = buffer.write(b'\x00' * size)
    return num_written
|
||||
|
||||
|
||||
def parse_torrent_file(torrentfile):
|
||||
buffer = BytesIO()
|
||||
|
||||
torrent = Torrent.from_file(torrentfile)
|
||||
print('+- Torrent : {}'.format(os.path.basename(torrentfile)))
|
||||
print('| Title : {}'.format(torrent.name))
|
||||
info_struct = torrent._struct.get('info')
|
||||
print('| Piece Len: {:,} Bytes'.format(info_struct['piece length']))
|
||||
try:
|
||||
print('| Size : {} bytes'.format(torrent.total_size))
|
||||
print('| Size : {:,} Bytes'.format(torrent.total_size))
|
||||
except:
|
||||
print('| Size : {}'.format('n/a'))
|
||||
if not ARG_OPTIONS['short']:
|
||||
|
@ -34,9 +50,44 @@ def parse_torrent_file(torrentfile, short_output=False, list_contents=True):
|
|||
if len(torrent.webseeds) > 0:
|
||||
print('| webseeds : {}'.format(torrent.webseeds))
|
||||
if ARG_OPTIONS['files']:
|
||||
# TODO: move to separate print file detail method
|
||||
num_written = 0
|
||||
for index, fileinfo in enumerate(torrent.files):
|
||||
print('| FILE[{:4}] name = {}'.format(index, fileinfo[0]))
|
||||
print('| FILE[{:4}] size = {}'.format(index, fileinfo[1]))
|
||||
if ARG_OPTIONS['validate']:
|
||||
num_written += make_blob(buffer, fileinfo)
|
||||
if ARG_OPTIONS['existing']:
|
||||
if os.path.exists(fileinfo[0]):
|
||||
print('| FOUND [{:4}] name = {} (size = {:,} Bytes)'.format(index, fileinfo[0], fileinfo[1]))
|
||||
elif ARG_OPTIONS['non-existing']:
|
||||
print('| MISSING [{:4}] name = {} (size = {:,} Bytes)'.format(index, fileinfo[0], fileinfo[1]))
|
||||
|
||||
if ARG_OPTIONS['validate']:
|
||||
pieceshash = info_struct['pieces']
|
||||
buffer.seek(0)
|
||||
print('| Validation:')
|
||||
chunk_size = info_struct['piece length']
|
||||
chunk = buffer.read(chunk_size)
|
||||
chunk_hashes = []
|
||||
while len(chunk) > 0:
|
||||
hashcalc = sha1()
|
||||
hashcalc.update(chunk)
|
||||
chunk_hashes.append(hashcalc.digest())
|
||||
chunk = buffer.read(chunk_size)
|
||||
|
||||
out_wrap = 0
|
||||
completed = True
|
||||
sys.stdout.write('| ')
|
||||
for digest in chunk_hashes:
|
||||
out_wrap += 1
|
||||
if digest in pieceshash:
|
||||
sys.stdout.write('▒')
|
||||
else:
|
||||
completed = False
|
||||
sys.stdout.write('░')
|
||||
if not(out_wrap % 64):
|
||||
sys.stdout.write('\n| ')
|
||||
sys.stdout.write('\n')
|
||||
print('| Completed: {}'.format(completed))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
Loading…
Reference in New Issue