else
commit 6e64483523 (parent 97403c7d5f)
14 changed files with 240 additions and 29 deletions
147
Bencode/bencode.py
Normal file
@@ -0,0 +1,147 @@
'''
A Python 3 translation of bcode.py
https://pypi.python.org/pypi/bcode/0.5
'''

def bencode(data):
    '''
    Encode Python types to bencode.
    '''
    if data is None:
        return None

    data_type = type(data)

    encoders = {
        bytes: encode_bytes,
        str: encode_string,
        float: encode_float,
        int: encode_int,
        dict: encode_dict,
    }

    encoder = encoders.get(data_type, None)
    if encoder is None:
        try:
            return encode_iterator(iter(data))
        except TypeError:
            raise ValueError('Invalid field type %s' % data_type)
    return encoder(data)

def encode_bytes(data):
    return '%d:%s' % (len(data), data)

def encode_dict(data):
    result = []
    keys = list(data.keys())
    keys.sort()
    for key in keys:
        result.append(bencode(key))
        result.append(bencode(data[key]))
    result = ''.join(result)
    return 'd%se' % result

def encode_float(data):
    return encode_string(str(data))

def encode_int(data):
    return 'i%de' % data

def encode_iterator(data):
    result = []
    for item in data:
        result.append(bencode(item))
    result = ''.join(result)
    return 'l%se' % result

def encode_string(data):
    return encode_bytes(data)


# =============================================================================


def bdecode(data):
    '''
    Decode bencode to Python types.

    Returns a dictionary
    {
        'result': the decoded item
        'remainder': what's left of the input text
    }
    '''
    if data is None:
        return None

    data = data.strip()
    if isinstance(data, bytes):
        data = data.decode('utf-8')

    if data[0] == 'i':
        return decode_int(data)

    if data[0].isdigit():
        return decode_string(data)

    if data[0] == 'l':
        return decode_list(data)

    if data[0] == 'd':
        return decode_dict(data)

    raise ValueError('Invalid initial delimiter "%s"' % data[0])

def decode_dict(data):
    result = {}

    # slice leading d
    remainder = data[1:]
    while remainder[0] != 'e':
        temp = bdecode(remainder)
        key = temp['result']
        remainder = temp['remainder']

        temp = bdecode(remainder)
        value = temp['result']
        remainder = temp['remainder']

        result[key] = value

    # slice ending e
    remainder = remainder[1:]
    return {'result': result, 'remainder': remainder}

def decode_int(data):
    end = data.find('e')
    if end == -1:
        raise ValueError('Missing end delimiter "e"')

    # slice leading i and closing e
    result = int(data[1:end])
    remainder = data[end+1:]
    return {'result': result, 'remainder': remainder}

def decode_list(data):
    result = []

    # slice leading l
    remainder = data[1:]
    while remainder[0] != 'e':
        item = bdecode(remainder)
        result.append(item['result'])
        remainder = item['remainder']

    # slice ending e
    remainder = remainder[1:]
    return {'result': result, 'remainder': remainder}

def decode_string(data):
    start = data.find(':') + 1
    size = int(data[:start-1])
    end = start + size
    text = data[start:end]
    if len(text) < size:
        raise ValueError('Actual length %d is less than declared length %d' % (len(text), size))
    remainder = data[end:]
    return {'result': text, 'remainder': remainder}
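For reference, a rough round-trip sketch of the module added above. The import path and the sample dictionary are assumptions for illustration; note that this translation works on str rather than bytes.

# Hypothetical usage of Bencode/bencode.py; adjust the import to your layout.
from bencode import bencode, bdecode

metadata = {'announce': 'http://example.com/announce', 'piece length': 262144}
encoded = bencode(metadata)
print(encoded)
# d8:announce27:http://example.com/announce12:piece lengthi262144ee

decoded = bdecode(encoded)
print(decoded['result'])     # the decoded dictionary
print(decoded['remainder'])  # empty string once the input is fully consumed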
@@ -100,7 +100,7 @@ def image_to_ico(filename):
    if min(image.size) > 256:
        w = image.size[0]
        h = image.size[1]
        image = image.resize((256, 256))
        image = image.resize((256, 256), resample=Image.ANTIALIAS)
    image = image.convert('RGBA')

    print('Building ico header')
@@ -189,6 +189,7 @@ SKIPPABLE_FILETYPES = [
    '.wav',
    '.webm',
    '.wma',
    '.xml',
    '.zip',
]
SKIPPABLE_FILETYPES = set(x.lower() for x in SKIPPABLE_FILETYPES)
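A small illustrative check against a lowercased extension set like the one above; the helper name and sample values are made up here.

import os

SKIPPABLE_FILETYPES = set(x.lower() for x in ['.WAV', '.webm', '.zip'])

def is_skippable(filename):
    # Lowercase the extension so the comparison matches the lowercased set.
    return os.path.splitext(filename)[1].lower() in SKIPPABLE_FILETYPES

print(is_skippable('Movie.WEBM'))  # True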
@@ -3,6 +3,9 @@ Spinal

A couple of tools for copying files and directories.

- 2016 12 06
    - Fixed bug where dry runs would still create directories

- 2016 11 27
    - Renamed the `copy_file` parameter `callback` to `callback_progress` for clarity.
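A sketch of the renamed parameter in use. The exact copy_file signature may differ; only the callback_progress name and the (destination, written_bytes, total_bytes) call shape are taken from this commit.

from voussoirkit import spinal

def callback_progress(destination, written_bytes, total_bytes):
    # Called repeatedly during the copy, and at least once even for empty files.
    percent = 100 * written_bytes / total_bytes if total_bytes else 100
    print('%s: %d / %d bytes (%.1f%%)' % (destination, written_bytes, total_bytes, percent))

spinal.copy_file('source.txt', 'backup/source.txt', callback_progress=callback_progress)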
@@ -248,6 +248,7 @@ def copy_dir(
        raise DestinationIsDirectory(destination_abspath)

    destination_location = os.path.split(destination_abspath.absolute_path)[0]
    if not dry_run:
        os.makedirs(destination_location, exist_ok=True)

    copied = copy_file(
@@ -391,7 +392,6 @@ def copy_file(
    source_bytes = source.size
    destination_location = os.path.split(destination.absolute_path)[0]
    os.makedirs(destination_location, exist_ok=True)
    written_bytes = 0

    try:
        log.debug('Opening handles.')
@@ -407,6 +407,7 @@ def copy_file(
    if validate_hash:
        hasher = HASH_CLASS()

    written_bytes = 0
    while True:
        data_chunk = source_handle.read(CHUNK_SIZE)
        data_bytes = len(data_chunk)
@@ -424,6 +425,10 @@ def copy_file(

            callback_progress(destination, written_bytes, source_bytes)

    if written_bytes == 0:
        # For zero-length files, we want to get at least one call in there.
        callback_progress(destination, written_bytes, source_bytes)

    # Fin
    log.debug('Closing source handle.')
    source_handle.close()
@@ -556,7 +561,9 @@ def walk_generator(
    exclude_directories=None,
    exclude_filenames=None,
    recurse=True,
    yield_style='flat'
    yield_directories=False,
    yield_files=True,
    yield_style='flat',
    ):
    '''
    Yield Path objects for files in the file tree, similar to os.walk.
@@ -586,11 +593,20 @@ def walk_generator(
    recurse:
        Yield from subdirectories. If False, only immediate files are returned.

    yield_directories:
        Should the generator produce directories? Has no effect in nested yield style.

    yield_files:
        Should the generator produce files? Has no effect in nested yield style.

    yield_style:
        If 'flat', yield individual files one by one in a constant stream.
        If 'nested', yield tuple(root, directories, files) like os.walk does,
        except I use Path objects with absolute paths for everything.
    '''
    if not yield_directories and not yield_files:
        raise ValueError('yield_directories and yield_files cannot both be False')

    if yield_style not in ['flat', 'nested']:
        raise ValueError('Invalid yield_style %s. Either "flat" or "nested".' % repr(yield_style))
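A rough consumption sketch for the two yield styles documented above; the paths are placeholders and the keyword names follow the signature in this commit.

from voussoirkit import spinal

# Flat style: one stream of Path objects, optionally including directories.
for path in spinal.walk_generator('.', yield_directories=True, yield_files=True):
    print(path.absolute_path)

# Nested style: (root, directories, files) tuples, similar to os.walk,
# but every element is a Path object with an absolute path.
for (root, directories, files) in spinal.walk_generator('.', yield_style='nested'):
    print(root.absolute_path, len(directories), len(files))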
@@ -607,6 +623,7 @@ def walk_generator(
    exclude_directories = {normalize(f) for f in exclude_directories}

    path = str_to_fp(path)
    path.correct_case()

    # Considering full paths
    if normalize(path.absolute_path) in exclude_directories:
@@ -631,9 +648,11 @@ def walk_generator(
    except PermissionError as exception:
        callback_permission_denied(current_location, exception)
        continue

    log.debug('received %d items', len(contents))

    if yield_style == 'flat' and yield_directories:
        yield current_location

    directories = []
    files = []
    for base_name in contents:
@@ -646,7 +665,11 @@ def walk_generator(
            callback_exclusion(absolute_name, 'directory')
            continue

        directories.append(str_to_fp(absolute_name))
        directory = str_to_fp(absolute_name)
        directories.append(directory)

    elif yield_style == 'flat' and not yield_files:
        continue

    else:
        exclude = normalize(absolute_name) in exclude_filenames
@@ -61,8 +61,8 @@ def threaded_dl(urls, thread_count, filename_format=None):
        print('%d threads remaining\r' % len(threads), end='', flush=True)
        time.sleep(0.1)

def main():
    filename = sys.argv[1]
def main(argv):
    filename = argv[0]
    if os.path.isfile(filename):
        f = open(filename, 'r')
        with f:

@@ -70,9 +70,9 @@ def main():
    else:
        urls = clipext.resolve(filename)
        urls = urls.replace('\r', '').split('\n')
    thread_count = int(listget(sys.argv, 2, 4))
    filename_format = listget(sys.argv, 3, None)
    thread_count = int(listget(argv, 1, 4))
    filename_format = listget(argv, 2, None)
    threaded_dl(urls, thread_count=thread_count, filename_format=filename_format)

if __name__ == '__main__':
    main()
    main(sys.argv[1:])
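Passing argv into main, as this change does, keeps the entry point callable without touching sys.argv. A minimal sketch of the same pattern, with made-up behavior:

import sys

def main(argv):
    # argv excludes the program name, so argv[0] is the first real argument.
    filename = argv[0] if argv else 'urls.txt'
    print('Would download the URLs listed in', filename)
    return 0

if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))

# A test or another script can now call main(['someurls.txt']) directly.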
@@ -46,7 +46,7 @@ def loop(pairs, dry=False):
        line = '{old}\n{new}\n'
        line = line.format(old=x, new=y)
        #print(line.encode('utf-8'))
        print(line)
        print(line.encode('ascii', 'replace').decode())
        has_content = True
    else:
        os.rename(x, y)
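The encode('ascii', 'replace').decode() idiom used here (and in several scripts below) avoids UnicodeEncodeError on consoles that cannot render the characters; a standalone sketch:

def safe_print(text):
    # Characters the console cannot encode become '?' instead of raising
    # UnicodeEncodeError on terminals with a narrow codepage.
    print(text.encode('ascii', 'replace').decode())

safe_print('café ☃')  # prints: caf? ?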
18
Toolbox/eval.py
Normal file

@@ -0,0 +1,18 @@
'''
Great for applying Python post-processing to the output of some other command.
Provide an input string (!i for stdin) and an eval string using `x` as the
variable name of the input.
'''
from voussoirkit import clipext
import math
import os
import random
import string
import sys
import time

x = clipext.resolve(sys.argv[1])
transformation = ' '.join(sys.argv[2:])

result = eval(transformation)
print(result)
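Hypothetical invocations of the eval.py tool above (actual output depends on clipext and your input), followed by a roughly equivalent core without clipext:

# Possible invocations (illustrative only):
#   python eval.py "hello world" "x.upper()"          -> HELLO WORLD
#   dir | python eval.py !i "len(x.splitlines())"     -> line count of stdin
# Roughly equivalent core, without clipext:
import sys
x = sys.stdin.read() if sys.argv[1] == '!i' else sys.argv[1]
print(eval(' '.join(sys.argv[2:])))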
@@ -1,4 +1,5 @@
import converter
import glob
import os
import re
import subprocess

@@ -6,7 +7,6 @@ import sys
import time

def main(filename):
    assert os.path.isfile(filename)
    ffmpeg = converter.Converter()
    probe = ffmpeg.probe(filename)
    new_name = filename

@@ -16,10 +16,12 @@ def main(filename):
    if '___' in filename:
        video_codec = probe.video.codec

        audios = [stream for stream in probe.streams if stream.type == 'audio']
        audios = [stream for stream in probe.streams if stream.type == 'audio' and stream.bitrate]
        if audios:
            audio = max(audios, key=lambda x: x.bitrate)

            audio_codec = probe.audio.codec
        else:
            audio_codec = None

        if any(not x for x in [video_codec, probe.video.bitrate, audio_codec, probe.audio.bitrate]):
            print('Could not identify media info')

@@ -40,4 +42,5 @@ def main(filename):
    os.rename(filename, new_name)

if __name__ == '__main__':
    main(sys.argv[1])
    for filename in glob.glob(sys.argv[1]):
        main(filename)
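The stream selection above filters out streams with no bitrate before taking the max; a standalone sketch of that pattern with made-up sample data:

streams = [
    {'type': 'audio', 'bitrate': 128000},
    {'type': 'audio', 'bitrate': None},      # unknown bitrate, must be excluded
    {'type': 'video', 'bitrate': 2500000},
]
audios = [s for s in streams if s['type'] == 'audio' and s['bitrate']]
if audios:
    best = max(audios, key=lambda s: s['bitrate'])
    print(best['bitrate'])  # 128000
else:
    print('No usable audio stream')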
@@ -1,14 +1,14 @@
'''
Pull all of the files in nested directories into the current directory.
'''

import argparse
import os
import sys

from voussoirkit import spinal

def main():
    files = list(spinal.walk_generator())
def filepull(pull_from='.'):
    files = list(spinal.walk_generator(pull_from))
    cwd = os.getcwd()
    files = [f for f in files if os.path.split(f.absolute_path)[0] != cwd]

@@ -36,5 +36,17 @@ def main():
        local = os.path.join('.', f.basename)
        os.rename(f.absolute_path, local)

def filepull_argparse(args):
    filepull(pull_from=args.pull_from)

def main(argv):
    parser = argparse.ArgumentParser()

    parser.add_argument('pull_from', nargs='?', default='.')
    parser.set_defaults(func=filepull_argparse)

    args = parser.parse_args(argv)
    args.func(args)

if __name__ == '__main__':
    main()
    main(sys.argv[1:])
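The parser.set_defaults(func=...) plus args.func(args) arrangement above is a standard argparse dispatch pattern; a minimal self-contained sketch:

import argparse
import sys

def greet_argparse(args):
    print('Hello, %s' % args.name)

def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('name', nargs='?', default='world')
    # Attach the handler so args.func(args) runs the right function;
    # this scales naturally to subparsers later.
    parser.set_defaults(func=greet_argparse)
    args = parser.parse_args(argv)
    args.func(args)

if __name__ == '__main__':
    main(sys.argv[1:])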
@@ -31,5 +31,5 @@ for filename in spinal.walk_generator():
        pass
    if matches:
        print(filename)
        print('\n'.join(matches))
        print('\n'.join(matches).encode('ascii', 'replace').decode())
        print()
@@ -5,14 +5,17 @@ import glob
import os
import sys


glob_patterns = sys.argv[1:]
for glob_pattern in glob_patterns:
def touch(glob_pattern):
    filenames = glob.glob(glob_pattern)
    if len(filenames) == 0:
        print(glob_pattern)
        print(glob_pattern.encode('ascii', 'replace').decode())
        open(glob_pattern, 'a').close()
    else:
        for filename in filenames:
            print(filename)
            print(filename.encode('ascii', 'replace').decode())
            os.utime(filename)

if __name__ == '__main__':
    glob_patterns = sys.argv[1:]
    for glob_pattern in glob_patterns:
        touch(glob_pattern)
@@ -11,6 +11,7 @@ PATHS = [
    'C:\\git\\else\\Ratelimiter\\ratelimiter.py',
    'C:\\git\\else\\RateMeter\\ratemeter.py',
    'C:\\git\\else\\SpinalTap\\spinal.py',
    'C:\\git\\else\\WebstreamZip\\webstreamzip.py',
]

os.makedirs(PACKAGE, exist_ok=True)

@@ -39,7 +40,7 @@ import setuptools
setuptools.setup(
    author='voussoir',
    name='{package}',
    version='0.0.2',
    version='0.0.3',
    description='',
    py_modules=[{py_modules}],
)
Binary file not shown.