# megapy/src/mega/mega.py

import re
import json
from Crypto.Cipher import AES
from Crypto.PublicKey import RSA
from Crypto.Util import Counter
import os
import sys
import random
import binascii
import requests
import shutil
from .errors import ValidationError, RequestError
from .crypto import (
a32_to_base64, encrypt_key, base64_url_encode, encrypt_attr, base64_to_a32,
base64_url_decode, decrypt_attr, a32_to_str, get_chunks, str_to_a32,
decrypt_key, mpi_to_int, stringhash, prepare_key, make_id, makebyte
)
import tempfile
PYTHON2 = sys.version_info < (3, )
class Mega(object):
def __init__(self, options=None):
self.schema = 'https'
self.domain = 'mega.co.nz'
self.timeout = 160 # max secs to wait for resp from api requests
self.sid = None
self.sequence_num = random.randint(0, 0xFFFFFFFF)
self.request_id = make_id(10)
if options is None:
options = {}
self.options = options
def login(self, email=None, password=None):
if email:
self._login_user(email, password)
else:
self.login_anonymous()
return self
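    # Illustrative usage sketch (not part of the original module). It assumes
    # the package exposes this class as `from mega import Mega`; the
    # credentials below are placeholders:
    #
    #   mega = Mega({'verbose': True})
    #   m = mega.login('user@example.com', 'password')  # or mega.login() for anonymous
    #   print(m.get_user())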
def _login_user(self, email, password):
password_aes = prepare_key(str_to_a32(password))
uh = stringhash(email, password_aes)
resp = self._api_request({'a': 'us', 'user': email, 'uh': uh})
# if numeric error code response
if isinstance(resp, int):
raise RequestError(resp)
self._login_process(resp, password_aes)
def login_anonymous(self):
master_key = [random.randint(0, 0xFFFFFFFF)] * 4
password_key = [random.randint(0, 0xFFFFFFFF)] * 4
session_self_challenge = [random.randint(0, 0xFFFFFFFF)] * 4
user = self._api_request(
{
'a':
'up',
'k':
a32_to_base64(encrypt_key(master_key, password_key)),
'ts':
base64_url_encode(
a32_to_str(session_self_challenge) + a32_to_str(
encrypt_key(session_self_challenge, master_key)
)
)
}
)
resp = self._api_request({'a': 'us', 'user': user})
# if numeric error code response
if isinstance(resp, int):
raise RequestError(resp)
self._login_process(resp, password_key)
def _login_process(self, resp, password):
encrypted_master_key = base64_to_a32(resp['k'])
self.master_key = decrypt_key(encrypted_master_key, password)
if 'tsid' in resp:
tsid = base64_url_decode(resp['tsid'])
key_encrypted = a32_to_str(
encrypt_key(str_to_a32(tsid[:16]), self.master_key)
)
if key_encrypted == tsid[-16:]:
self.sid = resp['tsid']
elif 'csid' in resp:
encrypted_rsa_private_key = base64_to_a32(resp['privk'])
rsa_private_key = decrypt_key(
encrypted_rsa_private_key, self.master_key
)
private_key = a32_to_str(rsa_private_key)
self.rsa_private_key = [0, 0, 0, 0]
for i in range(4):
if PYTHON2:
l = (
(ord(private_key[0]) * 256 + ord(private_key[1]) + 7) / 8
) + 2
else:
l = int(
((private_key[0]) * 256 + (private_key[1]) + 7) / 8
) + 2
self.rsa_private_key[i] = mpi_to_int(private_key[:l])
private_key = private_key[l:]
encrypted_sid = mpi_to_int(base64_url_decode(resp['csid']))
rsa_decrypter = RSA.construct(
(
self.rsa_private_key[0] * self.rsa_private_key[1], 0,
self.rsa_private_key[2], self.rsa_private_key[0],
self.rsa_private_key[1]
)
)
sid = '%x' % rsa_decrypter.key._decrypt(encrypted_sid)
sid = binascii.unhexlify('0' + sid if len(sid) % 2 else sid)
self.sid = base64_url_encode(sid[:43])
def _api_request(self, data):
params = {'id': self.sequence_num}
self.sequence_num += 1
if self.sid:
params.update({'sid': self.sid})
# ensure input data is a list
if not isinstance(data, list):
data = [data]
url = '{0}://g.api.{1}/cs'.format(self.schema, self.domain)
req = requests.post(
url,
params=params,
data=json.dumps(data),
timeout=self.timeout,
headers={
'Origin':
'https://mega.nz',
'Referer':
'https://mega.nz/login',
'User-Agent': (
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:69.0) '
'Gecko/20100101 Firefox/69.0'
),
}
)
json_resp = json.loads(req.text)
# if numeric error code response
if isinstance(json_resp, int):
raise RequestError(json_resp)
return json_resp[0]
def _parse_url(self, url):
# parse file id and key from url
if '!' in url:
match = re.findall(r'/#!(.*)', url)
path = match[0]
return path
else:
raise RequestError('Url key missing')
def _process_file(self, file, shared_keys):
"""
Process a file
"""
if file['t'] == 0 or file['t'] == 1:
keys = dict(
keypart.split(':', 1)
for keypart in file['k'].split('/')
if ':' in keypart
)
uid = file['u']
key = None
# my objects
if uid in keys:
key = decrypt_key(base64_to_a32(keys[uid]), self.master_key)
# shared folders
elif 'su' in file and 'sk' in file and ':' in file['k']:
shared_key = decrypt_key(
base64_to_a32(file['sk']), self.master_key
)
key = decrypt_key(base64_to_a32(keys[file['h']]), shared_key)
if file['su'] not in shared_keys:
shared_keys[file['su']] = {}
shared_keys[file['su']][file['h']] = shared_key
# shared files
elif file['u'] and file['u'] in shared_keys:
for hkey in shared_keys[file['u']]:
shared_key = shared_keys[file['u']][hkey]
if hkey in keys:
key = keys[hkey]
key = decrypt_key(base64_to_a32(key), shared_key)
break
if key is not None:
# file
if file['t'] == 0:
k = (
key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6],
key[3] ^ key[7]
)
file['iv'] = key[4:6] + (0, 0)
file['meta_mac'] = key[6:8]
# folder
else:
k = key
file['key'] = key
file['k'] = k
attributes = base64_url_decode(file['a'])
attributes = decrypt_attr(attributes, k)
file['a'] = attributes
# other => wrong object
elif file['k'] == '':
file['a'] = False
elif file['t'] == 2:
self.root_id = file['h']
file['a'] = {'n': 'Cloud Drive'}
elif file['t'] == 3:
self.inbox_id = file['h']
file['a'] = {'n': 'Inbox'}
elif file['t'] == 4:
self.trashbin_id = file['h']
file['a'] = {'n': 'Rubbish Bin'}
return file
def _init_shared_keys(self, files, shared_keys):
"""
        Init shared keys not associated with a user.
Seems to happen when a folder is shared,
some files are exchanged and then the
folder is un-shared.
Keys are stored in files['s'] and files['ok']
"""
ok_dict = {}
for ok_item in files['ok']:
shared_key = decrypt_key(
base64_to_a32(ok_item['k']), self.master_key
)
ok_dict[ok_item['h']] = shared_key
for s_item in files['s']:
if s_item['u'] not in shared_keys:
shared_keys[s_item['u']] = {}
if s_item['h'] in ok_dict:
shared_keys[s_item['u']][s_item['h']] = ok_dict[s_item['h']]
##########################################################################
# GET
def find_path_descriptor(self, path):
"""
        Find the descriptor of a folder inside a path, e.g. folder1/folder2/folder3
Params:
path, string like folder1/folder2/folder3
Return:
Descriptor (str) of folder3 if exists, None otherwise
"""
paths = path.split('/')
files = self.get_files()
parent_desc = self.root_id
found = False
for foldername in paths:
if foldername != '':
for file in files.items():
if file[1]['a'] and file[1]['t'] and \
file[1]['a']['n'] == foldername:
if parent_desc == file[1]['p']:
parent_desc = file[0]
found = True
if found:
found = False
else:
return None
return parent_desc
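    # Illustrative example: resolve the handle of a nested folder; the path is
    # a placeholder and None is returned if any component is missing:
    #
    #   handle = m.find_path_descriptor('backups/2019/photos')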
def find(self, filename):
"""
Return file object from given filename
"""
files = self.get_files()
for file in list(files.items()):
if not isinstance(file[1]['a'], dict):
continue
if file[1]['a'] and file[1]['a']['n'] == filename:
return file
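    # Illustrative example: find() returns the (handle, metadata) tuple that
    # get_link(), rename() and download() expect ('report.pdf' is a placeholder):
    #
    #   file = m.find('report.pdf')
    #   if file:
    #       m.download(file, dest_path='/tmp')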
def get_files(self):
"""
Get all files in account
"""
files = self._api_request({'a': 'f', 'c': 1})
files_dict = {}
shared_keys = {}
self._init_shared_keys(files, shared_keys)
for file in files['f']:
processed_file = self._process_file(file, shared_keys)
# ensure each file has a name before returning
if processed_file['a']:
files_dict[file['h']] = processed_file
return files_dict
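    # Illustrative example: list decrypted file names; only nodes whose
    # attributes decrypted successfully are included in the dict:
    #
    #   for handle, node in m.get_files().items():
    #       print(handle, node['a']['n'])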
def get_upload_link(self, file):
"""
        Get a file's public link including the decrypted key
Requires upload() response as input
"""
if 'f' in file:
file = file['f'][0]
public_handle = self._api_request({'a': 'l', 'n': file['h']})
file_key = file['k'][file['k'].index(':') + 1:]
decrypted_key = a32_to_base64(
decrypt_key(base64_to_a32(file_key), self.master_key)
)
return '{0}://{1}/#!{2}!{3}'.format(
self.schema, self.domain, public_handle, decrypted_key
)
else:
raise ValueError(
'''Upload() response required as input,
use get_link() for regular file input'''
)
def get_link(self, file):
"""
Get a file public link from given file object
"""
file = file[1]
if 'h' in file and 'k' in file:
public_handle = self._api_request({'a': 'l', 'n': file['h']})
if public_handle == -11:
raise RequestError(
"Can't get a public link from that file "
"(is this a shared file?)"
)
decrypted_key = a32_to_base64(file['key'])
return '{0}://{1}/#!{2}!{3}'.format(
self.schema, self.domain, public_handle, decrypted_key
)
else:
raise ValidationError('File id and key must be present')
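    # Illustrative example: build a public '#!handle!key' link for a file
    # located by name ('notes.txt' is a placeholder):
    #
    #   print(m.get_link(m.find('notes.txt')))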
def get_user(self):
user_data = self._api_request({'a': 'ug'})
return user_data
def get_node_by_type(self, type):
"""
        Get a node by its numeric type id, e.g.:
        0: file
        1: dir
        2: special: root cloud drive
        3: special: inbox
        4: special: trash bin
"""
nodes = self.get_files()
for node in list(nodes.items()):
if node[1]['t'] == type:
return node
def get_files_in_node(self, target):
"""
Get all files in a given target, e.g. 4=trash
"""
if type(target) == int:
# convert special nodes (e.g. trash)
node_id = self.get_node_by_type(target)
else:
node_id = [target]
files = self._api_request({'a': 'f', 'c': 1})
files_dict = {}
shared_keys = {}
self._init_shared_keys(files, shared_keys)
for file in files['f']:
processed_file = self._process_file(file, shared_keys)
if processed_file['a'] and processed_file['p'] == node_id[0]:
files_dict[file['h']] = processed_file
return files_dict
def get_id_from_public_handle(self, public_handle):
# get node data
node_data = self._api_request({'a': 'f', 'f': 1, 'p': public_handle})
node_id = self.get_id_from_obj(node_data)
return node_id
def get_id_from_obj(self, node_data):
"""
Get node id from a file object
"""
node_id = None
for i in node_data['f']:
if i['h'] != '':
node_id = i['h']
return node_id
def get_quota(self):
"""
        Get the disk quota (maximum storage allowed) in megabytes
"""
json_resp = self._api_request(
{
'a': 'uq',
'xfer': 1,
'strg': 1,
'v': 1
}
)
        # convert bytes to megabytes
return json_resp['mstrg'] / 1048576
def get_storage_space(self, giga=False, mega=False, kilo=False):
"""
Get the current storage space.
Return a dict containing at least:
'used' : the used space on the account
'total' : the maximum space allowed with current plan
        All sizes are in bytes unless a unit prefix is requested.
"""
if sum(1 if x else 0 for x in (kilo, mega, giga)) > 1:
raise ValueError("Only one unit prefix can be specified")
unit_coef = 1
if kilo:
unit_coef = 1024
if mega:
unit_coef = 1048576
if giga:
unit_coef = 1073741824
json_resp = self._api_request({'a': 'uq', 'xfer': 1, 'strg': 1})
return {
'used': json_resp['cstrg'] / unit_coef,
'total': json_resp['mstrg'] / unit_coef,
}
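    # Illustrative example: report usage in gigabytes:
    #
    #   space = m.get_storage_space(giga=True)
    #   print('{used} GB of {total} GB used'.format(**space))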
def get_balance(self):
"""
Get account monetary balance, Pro accounts only
"""
user_data = self._api_request({"a": "uq", "pro": 1})
if 'balance' in user_data:
return user_data['balance']
##########################################################################
# DELETE
def delete(self, public_handle):
"""
Delete a file by its public handle
"""
return self.move(public_handle, 4)
def delete_url(self, url):
"""
Delete a file by its url
"""
path = self._parse_url(url).split('!')
public_handle = path[0]
file_id = self.get_id_from_public_handle(public_handle)
return self.move(file_id, 4)
def destroy(self, file_id):
"""
Destroy a file by its private id
"""
return self._api_request(
{
'a': 'd',
'n': file_id,
'i': self.request_id
}
)
def destroy_url(self, url):
"""
Destroy a file by its url
"""
path = self._parse_url(url).split('!')
public_handle = path[0]
file_id = self.get_id_from_public_handle(public_handle)
return self.destroy(file_id)
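    # Illustrative example: delete_url() moves a file to the rubbish bin while
    # destroy_url() removes it permanently; the link below is a placeholder:
    #
    #   m.delete_url('https://mega.nz/#!file_handle!file_key')
    #   m.destroy_url('https://mega.nz/#!file_handle!file_key')  # irreversible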
def empty_trash(self):
        # get list of files in the rubbish bin
files = self.get_files_in_node(4)
# make a list of json
if files != {}:
post_list = []
for file in files:
post_list.append({"a": "d", "n": file, "i": self.request_id})
return self._api_request(post_list)
##########################################################################
# DOWNLOAD
def download(self, file, dest_path=None, dest_filename=None):
"""
        Download a file by its file object
"""
self._download_file(
None,
None,
file=file[1],
dest_path=dest_path,
dest_filename=dest_filename,
is_public=False
)
def download_url(self, url, dest_path=None, dest_filename=None):
"""
        Download a file by its public url
"""
path = self._parse_url(url).split('!')
file_id = path[0]
file_key = path[1]
self._download_file(
file_id, file_key, dest_path, dest_filename, is_public=True
)
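    # Illustrative example: fetch a public file, optionally renaming it; the
    # link is a placeholder:
    #
    #   m.download_url(
    #       'https://mega.nz/#!file_handle!file_key',
    #       dest_path='.', dest_filename='downloaded.bin'
    #   )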
def _download_file(
self,
file_handle,
file_key,
dest_path=None,
dest_filename=None,
is_public=False,
file=None
):
if file is None:
if is_public:
file_key = base64_to_a32(file_key)
file_data = self._api_request(
{
'a': 'g',
'g': 1,
'p': file_handle
}
)
else:
file_data = self._api_request(
{
'a': 'g',
'g': 1,
'n': file_handle
}
)
k = (
file_key[0] ^ file_key[4], file_key[1] ^ file_key[5],
file_key[2] ^ file_key[6], file_key[3] ^ file_key[7]
)
iv = file_key[4:6] + (0, 0)
meta_mac = file_key[6:8]
else:
file_data = self._api_request({'a': 'g', 'g': 1, 'n': file['h']})
k = file['k']
iv = file['iv']
meta_mac = file['meta_mac']
        # This seems to happen sometimes: when it does, the file is also
        # inaccessible in the official web app, and strangely it can come
        # back later.
if 'g' not in file_data:
raise RequestError('File not accessible anymore')
file_url = file_data['g']
file_size = file_data['s']
attribs = base64_url_decode(file_data['at'])
attribs = decrypt_attr(attribs, k)
if dest_filename is not None:
file_name = dest_filename
else:
file_name = attribs['n']
input_file = requests.get(file_url, stream=True).raw
if dest_path is None:
dest_path = ''
else:
dest_path += '/'
temp_output_file = tempfile.NamedTemporaryFile(
mode='w+b', prefix='megapy_', delete=False
)
k_str = a32_to_str(k)
counter = Counter.new(128, initial_value=((iv[0] << 32) + iv[1]) << 64)
aes = AES.new(k_str, AES.MODE_CTR, counter=counter)
mac_str = '\0' * 16
mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str)
iv_str = a32_to_str([iv[0], iv[1], iv[0], iv[1]])
for chunk_start, chunk_size in get_chunks(file_size):
chunk = input_file.read(chunk_size)
chunk = aes.decrypt(chunk)
temp_output_file.write(chunk)
encryptor = AES.new(k_str, AES.MODE_CBC, iv_str)
            # advance the CBC-MAC state over each full 16-byte block; the
            # encrypt() output is discarded on purpose, only the encryptor's
            # internal chaining state is needed
            for i in range(0, len(chunk) - 16, 16):
                block = chunk[i:i + 16]
                encryptor.encrypt(block)
# fix for files under 16 bytes failing
if file_size > 16:
i += 16
else:
i = 0
block = chunk[i:i + 16]
if len(block) % 16:
block += '\0' * (16 - (len(block) % 16))
mac_str = mac_encryptor.encrypt(encryptor.encrypt(block))
if self.options.get('verbose') is True:
# temp file size
file_info = os.stat(temp_output_file.name)
                print('{0} of {1} downloaded'.format(
                    file_info.st_size, file_size))
file_mac = str_to_a32(mac_str)
temp_output_file.close()
# check mac integrity
if (file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3]) != meta_mac:
raise ValueError('Mismatched mac')
shutil.move(temp_output_file.name, dest_path + file_name)
##########################################################################
# UPLOAD
def upload(self, filename, dest=None, dest_filename=None):
# determine storage node
if dest is None:
# if none set, upload to cloud drive node
if not hasattr(self, 'root_id'):
self.get_files()
dest = self.root_id
# request upload url, call 'u' method
input_file = open(filename, 'rb')
file_size = os.path.getsize(filename)
ul_url = self._api_request({'a': 'u', 's': file_size})['p']
# generate random aes key (128) for file
ul_key = [random.randint(0, 0xFFFFFFFF) for _ in range(6)]
k_str = a32_to_str(ul_key[:4])
count = Counter.new(
128, initial_value=((ul_key[4] << 32) + ul_key[5]) << 64
)
aes = AES.new(k_str, AES.MODE_CTR, counter=count)
upload_progress = 0
completion_file_handle = None
mac_str = '\0' * 16
mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str)
iv_str = a32_to_str([ul_key[4], ul_key[5], ul_key[4], ul_key[5]])
if file_size > 0:
for chunk_start, chunk_size in get_chunks(file_size):
chunk = input_file.read(chunk_size)
upload_progress += len(chunk)
encryptor = AES.new(k_str, AES.MODE_CBC, iv_str)
for i in range(0, len(chunk) - 16, 16):
block = chunk[i:i + 16]
encryptor.encrypt(block)
# fix for files under 16 bytes failing
if file_size > 16:
i += 16
else:
i = 0
block = chunk[i:i + 16]
if len(block) % 16:
block += makebyte('\0' * (16 - len(block) % 16))
mac_str = mac_encryptor.encrypt(encryptor.encrypt(block))
# encrypt file and upload
chunk = aes.encrypt(chunk)
                try:
                    output_file = requests.post(
                        ul_url + "/" + str(chunk_start),
                        data=chunk,
                        timeout=self.timeout
                    )
                except requests.RequestException:
                    # retry the chunk once if the first attempt fails
                    output_file = requests.post(
                        ul_url + "/" + str(chunk_start),
                        data=chunk,
                        timeout=self.timeout
                    )
completion_file_handle = output_file.text
if self.options.get('verbose') is True:
# upload progress
                    print('{0} of {1} uploaded'.format(
                        upload_progress, file_size))
else:
output_file = requests.post(
ul_url + "/0", data='', timeout=self.timeout
)
completion_file_handle = output_file.text
file_mac = str_to_a32(mac_str)
# determine meta mac
meta_mac = (file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3])
if dest_filename is not None:
attribs = {'n': dest_filename}
else:
attribs = {'n': os.path.basename(filename)}
encrypt_attribs = base64_url_encode(encrypt_attr(attribs, ul_key[:4]))
key = [
ul_key[0] ^ ul_key[4], ul_key[1] ^ ul_key[5],
ul_key[2] ^ meta_mac[0], ul_key[3] ^ meta_mac[1], ul_key[4],
ul_key[5], meta_mac[0], meta_mac[1]
]
encrypted_key = a32_to_base64(encrypt_key(key, self.master_key))
# update attributes
data = self._api_request(
{
'a':
'p',
't':
dest,
'n': [
{
'h': completion_file_handle,
't': 0,
'a': encrypt_attribs,
'k': encrypted_key
}
]
}
)
# close input file and return API msg
input_file.close()
return data
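    # Illustrative example: upload a local file and turn the API response into
    # a shareable link ('/tmp/report.pdf' is a placeholder path):
    #
    #   resp = m.upload('/tmp/report.pdf')
    #   print(m.get_upload_link(resp))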
def create_folder(self, name, dest=None):
# determine storage node
if dest is None:
# if none set, upload to cloud drive node
if not hasattr(self, 'root_id'):
self.get_files()
dest = self.root_id
# generate random aes key (128) for folder
ul_key = [random.randint(0, 0xFFFFFFFF) for _ in range(6)]
# encrypt attribs
attribs = {'n': name}
encrypt_attribs = base64_url_encode(encrypt_attr(attribs, ul_key[:4]))
encrypted_key = a32_to_base64(encrypt_key(ul_key[:4], self.master_key))
# update attributes
data = self._api_request(
{
'a':
'p',
't':
dest,
'n': [
{
'h': 'xxxxxxxx',
't': 1,
'a': encrypt_attribs,
'k': encrypted_key
}
],
'i':
self.request_id
}
)
return data
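    # Illustrative example: create a folder in the cloud drive root and upload
    # into it ('invoices' and the local path are placeholders):
    #
    #   m.create_folder('invoices')
    #   folder = m.find('invoices')
    #   m.upload('/tmp/invoice.pdf', dest=folder[0])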
def rename(self, file, new_name):
file = file[1]
# create new attribs
attribs = {'n': new_name}
# encrypt attribs
encrypt_attribs = base64_url_encode(encrypt_attr(attribs, file['k']))
encrypted_key = a32_to_base64(
encrypt_key(file['key'], self.master_key)
)
# update attributes
data = self._api_request(
[
{
'a': 'a',
'attr': encrypt_attribs,
'key': encrypted_key,
'n': file['h'],
'i': self.request_id
}
]
)
return data
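    # Illustrative example: rename a file located by name (both names are
    # placeholders):
    #
    #   m.rename(m.find('old.txt'), 'new.txt')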
def move(self, file_id, target):
"""
Move a file to another parent node
params:
a : command
n : node we're moving
t : id of target parent node, moving to
i : request id
targets
2 : root
3 : inbox
4 : trash
or...
target's id
or...
target's structure returned by find()
"""
# determine target_node_id
if type(target) == int:
target_node_id = str(self.get_node_by_type(target)[0])
        elif isinstance(target, str):
target_node_id = target
else:
file = target[1]
target_node_id = file['h']
return self._api_request(
{
'a': 'm',
'n': file_id,
't': target_node_id,
'i': self.request_id
}
)
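    # Illustrative example: move a file either to a special node (4 = rubbish
    # bin) or to a folder returned by find():
    #
    #   file = m.find('old-draft.txt')
    #   m.move(file[0], 4)
    #   m.move(file[0], m.find('archive'))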
def add_contact(self, email):
"""
Add another user to your mega contact list
"""
return self._edit_contact(email, True)
def remove_contact(self, email):
"""
        Remove a user from your mega contact list
"""
return self._edit_contact(email, False)
def _edit_contact(self, email, add):
"""
Editing contacts
"""
if add is True:
l = '1' # add command
elif add is False:
l = '0' # remove command
else:
raise ValidationError('add parameter must be of type bool')
        if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
            raise ValidationError('add_contact requires a valid email address')
else:
return self._api_request(
{
'a': 'ur',
'u': email,
'l': l,
'i': self.request_id
}
)
def get_contacts(self):
raise NotImplementedError()
# TODO implement this
# sn param below = maxaction var with function getsc() in mega.co.nz js
        # seems to be the 'sn' attrib of the previous request's response...
        # requests go to /sc rather than
# req = requests.post(
# '{0}://g.api.{1}/sc'.format(self.schema, self.domain),
# params={'sn': 'ZMxcQ_DmHnM', 'ssl': '1'},
# data=json.dumps(None),
# timeout=self.timeout)
# json_resp = json.loads(req.text)
# print json_resp
def get_public_url_info(self, url):
"""
        Get size and name from a public url; returns a dict
"""
file_handle, file_key = self._parse_url(url).split('!')
return self.get_public_file_info(file_handle, file_key)
def import_public_url(self, url, dest_node=None, dest_name=None):
"""
Import the public url into user account
"""
file_handle, file_key = self._parse_url(url).split('!')
return self.import_public_file(
file_handle, file_key, dest_node=dest_node, dest_name=dest_name
)
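    # Illustrative example: copy a public file into the account, optionally
    # under a new name; the link is a placeholder:
    #
    #   m.import_public_url(
    #       'https://mega.nz/#!file_handle!file_key',
    #       dest_name='imported.bin'
    #   )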
def get_public_file_info(self, file_handle, file_key):
"""
Get size and name of a public file
"""
data = self._api_request({'a': 'g', 'p': file_handle, 'ssm': 1})
# if numeric error code response
if isinstance(data, int):
raise RequestError(data)
if 'at' not in data or 's' not in data:
raise ValueError("Unexpected result", data)
key = base64_to_a32(file_key)
k = (
key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6], key[3] ^ key[7]
)
size = data['s']
unencrypted_attrs = decrypt_attr(base64_url_decode(data['at']), k)
if not unencrypted_attrs:
return None
result = {'size': size, 'name': unencrypted_attrs['n']}
return result
def import_public_file(
self, file_handle, file_key, dest_node=None, dest_name=None
):
"""
Import the public file into user account
"""
        # Providing dest_node spares an API call to retrieve it.
if dest_node is None:
            # Get the '/Cloud Drive' folder if no dest_node is specified
dest_node = self.get_node_by_type(2)[1]
# Providing dest_name spares an API call to retrieve it.
if dest_name is None:
pl_info = self.get_public_file_info(file_handle, file_key)
dest_name = pl_info['name']
key = base64_to_a32(file_key)
k = (
key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6], key[3] ^ key[7]
)
encrypted_key = a32_to_base64(encrypt_key(key, self.master_key))
encrypted_name = base64_url_encode(encrypt_attr({'n': dest_name}, k))
data = self._api_request(
{
'a':
'p',
't':
dest_node['h'],
'n': [
{
'ph': file_handle,
't': 0,
'a': encrypted_name,
'k': encrypted_key
}
]
}
)
return data