linting

This commit is contained in:
parent 6f3f5371ce
commit fa6d1149d4

3 changed files with 28 additions and 27 deletions
@@ -11,7 +11,7 @@ norecursedirs = .git
 [flake8]
 exclude = .git,__pycache__,legacy,build,dist,.tox
 max-complexity = 15
-ignore = E741,W504
+ignore = E741,W504,W503
 
 [yapf]
 based_on_style = pep8
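Note: W503 ("line break before binary operator") is the mirror of the already-ignored W504 ("line break after binary operator"); silencing both lets multi-line conditions break before the operator, which is the style the hunks below adopt. An illustrative snippet (names are made up):

# Trailing-operator style, flagged by W504 when enabled:
done = (queue_empty and
        workers_idle)

# Leading-operator style, flagged by W503 when enabled:
done = (queue_empty
        and workers_idle)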
@@ -111,6 +111,7 @@ def mpi_to_int(s):
     """
     return int(binascii.hexlify(s[2:]), 16)
 
+
 def extended_gcd(a, b):
     if a == 0:
         return (b, 0, 1)
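For reference, extended_gcd(a, b) returns a triple (g, x, y) satisfying a*x + b*y == g == gcd(a, b). A quick sanity check with arbitrary values:

g, x, y = extended_gcd(240, 46)
assert g == 2
assert 240 * x + 46 * y == g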
@@ -118,19 +119,17 @@ def extended_gcd(a, b):
     g, y, x = extended_gcd(b % a, a)
     return (g, x - (b // a) * y, y)
 
+
 def modular_inverse(a, m):
-    """
-    Thank you Mart Bakhoff for this solution.
-    https://stackoverflow.com/a/9758173
-    """
     g, x, y = extended_gcd(a, m)
     if g != 1:
         raise Exception('modular inverse does not exist')
     else:
         return x % m
 
+
 def base64_url_decode(data):
-    data += '==' [(2 - len(data) * 3) % 4:]
+    data += '=='[(2 - len(data) * 3) % 4:]
     for search, replace in (('-', '+'), ('_', '/'), (',', '')):
         data = data.replace(search, replace)
     return base64.b64decode(data)
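Two quick checks of the functions touched here (values are illustrative). modular_inverse(a, m) returns the x with (a * x) % m == 1, and the '=='[...] slice re-pads URL-safe base64 to a multiple of four characters; the fixed line only drops the stray space before the subscript:

assert modular_inverse(3, 11) == 4  # (3 * 4) % 11 == 1

# An 8-byte value encodes to 11 base64 chars; (2 - 11 * 3) % 4 == 1,
# so '=='[1:] == '=' restores the single missing pad character.
assert base64_url_decode('AAAAAAAAAAA') == b'\x00' * 8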
@@ -303,9 +303,8 @@ class Mega:
         if foldername != '':
             for file in files.items():
                 if (
-                    file[1]['a'] and
-                    file[1]['t'] and
-                    file[1]['a']['n'] == foldername
+                    file[1]['a'] and file[1]['t']
+                    and file[1]['a']['n'] == foldername
                 ):
                     if parent_desc == file[1]['p']:
                         parent_desc = file[0]
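The condition reads against Mega's node listing, where files.items() yields (handle, node) pairs; as used here, node['a'] carries the decrypted attributes (name under 'n'), node['t'] the node type, and node['p'] the parent handle. A sketch of a matching entry (all values invented):

files = {
    'aBcD3fGh': {               # node handle: file[0]
        'a': {'n': 'backups'},  # attributes; 'n' is the display name
        't': 1,                 # node type (1 is a folder in Mega's scheme)
        'p': 'rOOtN0de',        # parent node handle
    },
}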
@@ -333,23 +332,20 @@ class Mega:
                 parent_dir_name, files=files
             )
             if (
-                filename and parent_node_id and
-                file[1]['a'] and file[1]['a']['n'] == filename and
-                parent_node_id == file[1]['p']
+                filename and parent_node_id and file[1]['a']
+                and file[1]['a']['n'] == filename
+                and parent_node_id == file[1]['p']
             ):
                 if (
-                    exclude_deleted and
-                    self._trash_folder_node_id == file[1]['p']
+                    exclude_deleted
+                    and self._trash_folder_node_id == file[1]['p']
                 ):
                     continue
                 return file
-            if (
-                filename and
-                file[1]['a'] and file[1]['a']['n'] == filename
-            ):
+            if (filename and file[1]['a'] and file[1]['a']['n'] == filename):
                 if (
-                    exclude_deleted and
-                    self._trash_folder_node_id == file[1]['p']
+                    exclude_deleted
+                    and self._trash_folder_node_id == file[1]['p']
                ):
                     continue
                 return file
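This lookup appears to live in the library's find() method (the method name is not visible in this hunk); if so, a caller would consume the returned (handle, node) tuple roughly like this:

result = m.find('report.pdf', exclude_deleted=True)  # m: a logged-in Mega client
if result:
    node_id, node = result  # the `file` tuple returned above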
@@ -598,13 +594,13 @@ class Mega:
 
     def _export_file(self, node):
         node_data = self._node_data(node)
-        self._api_request([
-            {
+        self._api_request(
+            [{
                 'a': 'l',
                 'n': node_data['h'],
                 'i': self.request_id
-            }
-        ])
+            }]
+        )
         return self.get_link(node)
 
     def export(self, path=None, node_id=None):
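Both forms of the call send an identical payload; only the bracket placement changes. For context, the request is a one-element command list: 'a': 'l' asks the Mega API for a public link to node 'n', tagged with the client's request id 'i'. Roughly what gets serialized (handle and id invented):

payload = [{
    'a': 'l',         # command: create a public link for the node
    'n': 'aBcD3fGh',  # node handle, from node_data['h']
    'i': 'xyz123'     # client request id
}]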
@@ -627,7 +623,9 @@ class Mega:
 
         master_key_cipher = AES.new(a32_to_str(self.master_key), AES.MODE_ECB)
         ha = base64_url_encode(
-            master_key_cipher.encrypt(node_data['h'].encode("utf8") + node_data['h'].encode("utf8"))
+            master_key_cipher.encrypt(
+                node_data['h'].encode("utf8") + node_data['h'].encode("utf8")
+            )
         )
 
         share_key = secrets.token_bytes(16)
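Why the doubled handle inside the wrapped encrypt(): AES-ECB only accepts whole 16-byte blocks, and a Mega node handle is 8 characters, so handle + handle is exactly one block. A standalone sketch with placeholder key and handle:

from Crypto.Cipher import AES

handle = b'aBcD3fGh'                          # 8-byte node handle (placeholder)
cipher = AES.new(b'\x00' * 16, AES.MODE_ECB)  # the real code keys with master_key
ha_raw = cipher.encrypt(handle + handle)      # one full 16-byte AES block
assert len(ha_raw) == 16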
@@ -745,7 +743,9 @@ class Mega:
         aes = AES.new(k_str, AES.MODE_CTR, counter=counter)
 
         mac_str = '\0' * 16
-        mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str.encode("utf8"))
+        mac_encryptor = AES.new(
+            k_str, AES.MODE_CBC, mac_str.encode("utf8")
+        )
         iv_str = a32_to_str([iv[0], iv[1], iv[0], iv[1]])
 
         for chunk_start, chunk_size in get_chunks(file_size):
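For context on the reflowed constructor: the CBC cipher here is used as a CBC-MAC, i.e. encrypt the chunk under the file key and keep only the last ciphertext block. A minimal sketch of that idea (key, iv, and data are placeholders, and the real code chains MACs across chunks):

from Crypto.Cipher import AES

def cbc_mac(key: bytes, iv: bytes, data: bytes) -> bytes:
    padded = data + b'\x00' * ((16 - len(data) % 16) % 16)  # zero-pad to a block
    return AES.new(key, AES.MODE_CBC, iv).encrypt(padded)[-16:]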
@@ -808,7 +808,9 @@ class Mega:
         completion_file_handle = None
 
         mac_str = '\0' * 16
-        mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str.encode("utf8"))
+        mac_encryptor = AES.new(
+            k_str, AES.MODE_CBC, mac_str.encode("utf8")
+        )
         iv_str = a32_to_str([ul_key[4], ul_key[5], ul_key[4], ul_key[5]])
         if file_size > 0:
             for chunk_start, chunk_size in get_chunks(file_size):
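Both the download and upload loops walk the file with get_chunks(file_size), which yields (offset, size) pairs; Mega's chunk sizes grow in 128 KiB steps up to a 1 MiB cap. A sketch of that schedule as I understand it (not necessarily the library's exact code):

def get_chunks(size):
    chunk_start = 0
    chunk_size = 0x20000               # first chunk: 128 KiB
    while chunk_start + chunk_size < size:
        yield chunk_start, chunk_size
        chunk_start += chunk_size
        if chunk_size < 0x100000:      # grow by 128 KiB until 1 MiB
            chunk_size += 0x20000
    yield chunk_start, size - chunk_start  # final (possibly short) chunk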