2013-02-04 02:02:33 +00:00
|
|
|
import re
|
|
|
|
import json
|
|
|
|
from Crypto.Cipher import AES
|
|
|
|
from Crypto.PublicKey import RSA
|
|
|
|
from Crypto.Util import Counter
|
|
|
|
import os
|
|
|
|
import random
|
|
|
|
import binascii
|
|
|
|
import requests
|
2013-04-19 18:10:52 +00:00
|
|
|
import time
|
|
|
|
import shutil
|
2013-04-15 15:14:55 +00:00
|
|
|
from .errors import ValidationError, RequestError
|
2013-02-16 00:34:16 +00:00
|
|
|
from .crypto import *
|
2013-04-28 02:55:04 +00:00
|
|
|
import tempfile
|
2013-02-04 02:02:33 +00:00
|
|
|
|
|
|
|
|
2013-02-09 18:29:58 +00:00
|
|
|
class Mega(object):
|
2013-04-28 02:55:04 +00:00
|
|
|
def __init__(self, options=None):
    """Set up API endpoint defaults and a fresh session state."""
    self.schema = 'https'
    self.domain = 'mega.co.nz'
    self.timeout = 160  # max time (secs) to wait for resp from api requests
    self.sid = None  # session id, set after login
    # Each API request carries an incrementing sequence number.
    self.sequence_num = random.randint(0, 0xFFFFFFFF)
    self.request_id = make_id(10)
    # Avoid a shared mutable default argument.
    self.options = {} if options is None else options
|
|
|
def login(self, email, password):
    """Authenticate with email/password and return self for chaining."""
    self.login_user(email, password)
    return self
|
|
|
def login_user(self, email, password):
    """Derive the password key, hash the email, and open a session."""
    key = prepare_key(str_to_a32(password))
    user_hash = stringhash(email, key)
    resp = self.api_request({'a': 'us', 'user': email, 'uh': user_hash})
    # A bare integer reply from the API is an error code.
    if isinstance(resp, int):
        raise RequestError(resp)
    self._login_process(resp, key)
|
|
|
def _login_process(self, resp, password):
    """
    Finish the login handshake: decrypt the master key and obtain
    the session id (sid), either directly ('tsid') or by RSA ('csid').

    NOTE: this is Python 2 code (long literal 0L, integer '/' division,
    str bytes indexing with ord()).
    """
    encrypted_master_key = base64_to_a32(resp['k'])
    self.master_key = decrypt_key(encrypted_master_key, password)
    if 'tsid' in resp:
        # Temporary session: verify by re-encrypting the first 16 bytes
        # and comparing with the trailing 16 bytes of the token.
        tsid = base64_url_decode(resp['tsid'])
        key_encrypted = a32_to_str(
            encrypt_key(str_to_a32(tsid[:16]), self.master_key))
        if key_encrypted == tsid[-16:]:
            self.sid = resp['tsid']
    elif 'csid' in resp:
        # Full session: decrypt the account RSA private key, then use it
        # to decrypt the encrypted session id.
        encrypted_rsa_private_key = base64_to_a32(resp['privk'])
        rsa_private_key = decrypt_key(encrypted_rsa_private_key,
                                      self.master_key)

        private_key = a32_to_str(rsa_private_key)
        # Four MPI-encoded components: p, q, d, u (in storage order).
        self.rsa_private_key = [0, 0, 0, 0]

        for i in range(4):
            # First two bytes of an MPI are the bit length; convert to a
            # byte length (+2 for the length prefix itself).
            l = ((ord(private_key[0]) * 256 + ord(private_key[1]) + 7) / 8) + 2
            self.rsa_private_key[i] = mpi_to_int(private_key[:l])
            private_key = private_key[l:]

        encrypted_sid = mpi_to_int(base64_url_decode(resp['csid']))
        # RSA.construct takes (n, e, d, p, q); e is unused here (0L) since
        # only the private decryption operation is performed.
        rsa_decrypter = RSA.construct(
            (self.rsa_private_key[0] * self.rsa_private_key[1],
             0L, self.rsa_private_key[2], self.rsa_private_key[0],
             self.rsa_private_key[1]))

        sid = '%x' % rsa_decrypter.key._decrypt(encrypted_sid)
        # Pad to an even number of hex digits before unhexlify.
        sid = binascii.unhexlify('0' + sid if len(sid) % 2 else sid)
        # Session id is the first 43 bytes, base64-url encoded.
        self.sid = base64_url_encode(sid[:43])
|
|
|
def api_request(self, data):
    """
    Send one or more commands to the MEGA API and return the first
    entry of the JSON response. Raises RequestError on a numeric
    (error-code) reply.
    """
    params = {'id': self.sequence_num}
    self.sequence_num += 1
    if self.sid:
        params['sid'] = self.sid

    # The API always expects a JSON array of command objects.
    commands = data if isinstance(data, list) else [data]

    url = '{0}://g.api.{1}/cs'.format(self.schema, self.domain)
    req = requests.post(url,
                        params=params,
                        data=json.dumps(commands),
                        timeout=self.timeout)
    json_resp = json.loads(req.text)

    # A bare integer body is an API error code.
    if isinstance(json_resp, int):
        raise RequestError(json_resp)
    return json_resp[0]
|
|
|
def parse_url(self, url):
    """Return the 'handle!key' portion of a public MEGA url."""
    if '!' not in url:
        raise RequestError('Url key missing')
    # Everything after '/#!' is the file id plus decryption key.
    return re.findall(r'/#!(.*)', url)[0]
|
|
|
def process_file(self, file, shared_keys):
    """
    Decrypt one node ('file') from the account tree in place.

    Node types: 0=file, 1=folder, 2=cloud drive root, 3=inbox,
    4=rubbish bin. For files/folders this resolves the node key
    (own, shared-folder, or shared-file) and decrypts the attributes.
    `shared_keys` maps sharing user -> {node handle -> folder key}
    and is updated as shared folders are encountered.
    """
    if file['t'] == 0 or file['t'] == 1:
        # 'k' is a '/'-separated list of "owner:key" pairs.
        keys = dict(keypart.split(':', 1) for keypart in file['k'].split('/') if ':' in keypart)
        uid = file['u']
        key = None
        # my objects
        if uid in keys:
            key = decrypt_key(base64_to_a32(keys[uid]), self.master_key)
        # shared folders
        elif 'su' in file and 'sk' in file and ':' in file['k']:
            shared_key = decrypt_key(base64_to_a32(file['sk']), self.master_key)
            key = decrypt_key(base64_to_a32(keys[file['h']]), shared_key)
            if file['su'] not in shared_keys:
                shared_keys[file['su']] = {}
            shared_keys[file['su']][file['h']] = shared_key
        # shared files
        elif file['u'] and file['u'] in shared_keys:
            for hkey in shared_keys[file['u']]:
                shared_key = shared_keys[file['u']][hkey]
                if hkey in keys:
                    key = keys[hkey]
                    key = decrypt_key(base64_to_a32(key), shared_key)
                    break
        if key is not None:
            # file: the 8-word key packs the 4-word AES key (xor-folded),
            # the CTR iv, and the meta-MAC
            if file['t'] == 0:
                k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6],
                     key[3] ^ key[7])
                file['iv'] = key[4:6] + (0, 0)
                file['meta_mac'] = key[6:8]
            # folder: key is used as-is
            else:
                k = key
            file['key'] = key
            file['k'] = k
            attributes = base64_url_decode(file['a'])
            attributes = decrypt_attr(attributes, k)
            file['a'] = attributes
        # other => wrong object (no usable key material)
        elif file['k'] == '':
            file['a'] = False
    elif file['t'] == 2:
        self.root_id = file['h']
        file['a'] = {'n': 'Cloud Drive'}
    elif file['t'] == 3:
        self.inbox_id = file['h']
        file['a'] = {'n': 'Inbox'}
    elif file['t'] == 4:
        self.trashbin_id = file['h']
        file['a'] = {'n': 'Rubbish Bin'}
    return file
|
|
|
def init_shared_keys(self, files, shared_keys):
    """
    Init shared keys not associated with a user.

    Seems to happen when a folder is shared, some files are exchanged
    and then the folder is un-shared. Keys are stored in files['ok']
    (handle -> encrypted key) and files['s'] (user/handle pairs).
    """
    handle_to_key = {}
    for ok_item in files['ok']:
        handle_to_key[ok_item['h']] = decrypt_key(
            base64_to_a32(ok_item['k']), self.master_key)
    for s_item in files['s']:
        user_map = shared_keys.setdefault(s_item['u'], {})
        if s_item['h'] in handle_to_key:
            user_map[s_item['h']] = handle_to_key[s_item['h']]
2013-02-23 15:22:12 +00:00
|
|
|
##########################################################################
|
|
|
|
# GET
|
2013-02-23 15:06:02 +00:00
|
|
|
def find(self, filename):
    """Return the first (handle, node) pair whose name matches filename."""
    nodes = self.get_files()
    for entry in nodes.items():
        attrs = entry[1]['a']
        if attrs and attrs['n'] == filename:
            return entry
2013-02-04 02:02:33 +00:00
|
|
|
def get_files(self):
    """Fetch the whole account tree and return {handle: decrypted node}."""
    files = self.api_request({'a': 'f', 'c': 1})
    shared_keys = {}
    self.init_shared_keys(files, shared_keys)
    files_dict = {}
    for file in files['f']:
        processed_file = self.process_file(file, shared_keys)
        # Only keep nodes whose attributes decrypted to something usable.
        if processed_file['a']:
            files_dict[file['h']] = processed_file
    return files_dict
2013-02-07 23:38:05 +00:00
|
|
|
def get_upload_link(self, file):
    """
    Get a file's public link including the decrypted key.
    Requires an upload() response as input.
    """
    if 'f' in file:
        file = file['f'][0]
        public_handle = self.api_request({'a': 'l', 'n': file['h']})
        # 'k' is "owner:key"; the key is everything after the first ':'.
        file_key = file['k'].split(':', 1)[1]
        decrypted_key = a32_to_base64(decrypt_key(base64_to_a32(file_key),
                                                  self.master_key))
        return '{0}://{1}/#!{2}!{3}'.format(self.schema,
                                            self.domain,
                                            public_handle,
                                            decrypted_key)
    else:
        raise ValueError('''Upload() response required as input,
            use get_link() for regular file input''')
|
|
|
def get_link(self, file):
    """Build a public url for a (handle, node) pair as returned by find()."""
    node = file[1]
    if 'h' not in node or 'k' not in node:
        raise ValidationError('File id and key must be present')
    public_handle = self.api_request({'a': 'l', 'n': node['h']})
    if public_handle == -11:
        # -11 == EACCESS: the node cannot be exported.
        raise RequestError("Can't get a public link from that file (is this a shared file?)")
    decrypted_key = a32_to_base64(node['key'])
    return '{0}://{1}/#!{2}!{3}'.format(self.schema,
                                        self.domain,
                                        public_handle,
                                        decrypted_key)
2013-02-23 15:06:02 +00:00
|
|
|
def get_user(self):
    """Return the account's user details ('ug' API call)."""
    return self.api_request({'a': 'ug'})
|
|
|
def get_node_by_type(self, type):
    """
    Get a node by its numeric type id, e.g.:
        0: file
        1: dir
        2: special: root cloud drive
        3: special: inbox
        4: special: trash bin
    Returns the first matching (handle, node) pair, or None.
    """
    for node in self.get_files().items():
        if node[1]['t'] == type:
            return node
2013-02-23 15:06:02 +00:00
|
|
|
def get_files_in_node(self, target):
    """
    Get all files whose parent is the given target node (e.g. 4=trash).
    `target` may be a numeric special-node type or a node id string.
    """
    if type(target) == int:
        # Special nodes (e.g. trash) are resolved to a (handle, node) pair.
        node_id = self.get_node_by_type(target)
    else:
        node_id = [target]

    files = self.api_request({'a': 'f', 'c': 1})
    shared_keys = {}
    self.init_shared_keys(files, shared_keys)
    files_dict = {}
    for file in files['f']:
        processed_file = self.process_file(file, shared_keys)
        if processed_file['a'] and processed_file['p'] == node_id[0]:
            files_dict[file['h']] = processed_file
    return files_dict
2013-02-23 15:06:02 +00:00
|
|
|
def get_id_from_public_handle(self, public_handle):
    """Resolve a public handle to the owning node's private id."""
    node_data = self.api_request({'a': 'f', 'f': 1, 'p': public_handle})
    return self.get_id_from_obj(node_data)
2013-05-08 11:41:50 +00:00
|
|
|
def get_id_from_obj(self, node_data):
    """
    Get a node id from a file object.

    Returns the handle ('h') of the last entry in node_data['f'] that
    has a non-empty handle, or None if there is none.
    """
    node_id = None
    for i in node_data['f']:
        # Bugfix: the original compared with `is not u''` (identity, not
        # equality), which only worked by accident of CPython's interning
        # of the empty string. Use a value comparison instead.
        if i['h'] != u'':
            node_id = i['h']
    return node_id
2013-03-10 03:38:22 +00:00
|
|
|
def get_quota(self):
    """Return the account's total disk quota in megabytes."""
    resp = self.api_request({'a': 'uq', 'xfer': 1})
    # bytes -> megabytes
    return resp['mstrg'] / 1048576
2013-04-21 16:53:48 +00:00
|
|
|
def get_storage_space(self, giga=False, mega=False, kilo=False):
    """
    Get the current storage space.

    Returns a dict containing at least:
        'used'  : the used space on the account
        'total' : the maximum space allowed with the current plan
    Values are in bytes unless exactly one unit flag is set.
    """
    if sum(bool(flag) for flag in (kilo, mega, giga)) > 1:
        raise ValueError("Only one unit prefix can be specified")

    unit_coef = 1
    for flag, coef in ((kilo, 1024), (mega, 1048576), (giga, 1073741824)):
        if flag:
            unit_coef = coef

    json_resp = self.api_request({'a': 'uq', 'xfer': 1, 'strg': 1})
    return {
        'used': json_resp['cstrg'] / unit_coef,
        'total': json_resp['mstrg'] / unit_coef,
    }
2013-03-10 03:38:22 +00:00
|
|
|
def get_balance(self):
    """Return the account's monetary balance (Pro accounts only), else None."""
    user_data = self.api_request({"a": "uq", "pro": 1})
    return user_data.get('balance')
2013-02-23 15:06:02 +00:00
|
|
|
##########################################################################
|
|
|
|
# DELETE
|
|
|
|
def delete(self, public_handle):
    """Delete a file by moving it to the rubbish bin (node type 4)."""
    return self.move(public_handle, 4)
2013-02-04 21:15:18 +00:00
|
|
|
def delete_url(self, url):
    """Delete a file, addressed by its public url, into the rubbish bin."""
    public_handle = self.parse_url(url).split('!')[0]
    file_id = self.get_id_from_public_handle(public_handle)
    return self.move(file_id, 4)
2013-02-23 15:06:02 +00:00
|
|
|
def destroy(self, file_id):
    """Permanently remove a file by its private id."""
    command = {'a': 'd', 'n': file_id, 'i': self.request_id}
    return self.api_request(command)
2013-02-23 15:06:02 +00:00
|
|
|
def destroy_url(self, url):
    """Permanently remove a file addressed by its public url."""
    handle = self.parse_url(url).split('!')[0]
    return self.destroy(self.get_id_from_public_handle(handle))
2013-02-23 15:06:02 +00:00
|
|
|
def empty_trash(self):
    """Permanently delete everything currently in the rubbish bin."""
    files = self.get_files_in_node(4)
    # Nothing to do (and nothing to return) for an empty bin.
    if files != {}:
        post_list = [{"a": "d", "n": handle, "i": self.request_id}
                     for handle in files]
        return self.api_request(post_list)
|
|
|
##########################################################################
|
|
|
|
# DOWNLOAD
|
2013-05-08 10:41:23 +00:00
|
|
|
def download(self, file, dest_path=None, dest_filename=None):
    """Download a file given its (handle, node) pair, as returned by find()."""
    self.download_file(None, None, file=file[1], dest_path=dest_path,
                       dest_filename=dest_filename, is_public=False)
2013-05-08 10:41:23 +00:00
|
|
|
def download_url(self, url, dest_path=None, dest_filename=None):
    """Download a file given its public url."""
    parts = self.parse_url(url).split('!')
    # parts[0] is the file handle, parts[1] the base64 key.
    self.download_file(parts[0], parts[1], dest_path, dest_filename,
                       is_public=True)
2013-05-08 10:41:23 +00:00
|
|
|
def download_file(self, file_handle, file_key, dest_path=None, dest_filename=None, is_public=False, file=None):
    """
    Download and decrypt a file, writing it to dest_path/dest_filename.

    Either pass a processed node via `file` (keys already derived), or a
    handle + base64 key (`is_public` selects the public 'p' lookup).
    Streams through a temp file, verifies the CBC meta-MAC, then moves
    the result into place. Raises RequestError if the file is not
    downloadable, ValueError on MAC mismatch.
    """
    if file is None:
        if is_public:
            file_key = base64_to_a32(file_key)
            file_data = self.api_request({'a': 'g', 'g': 1, 'p': file_handle})
        else:
            file_data = self.api_request({'a': 'g', 'g': 1, 'n': file_handle})

        # Unfold the 8-word key: AES key (xor-folded), CTR iv, meta-MAC.
        k = (file_key[0] ^ file_key[4], file_key[1] ^ file_key[5],
             file_key[2] ^ file_key[6], file_key[3] ^ file_key[7])
        iv = file_key[4:6] + (0, 0)
        meta_mac = file_key[6:8]
    else:
        # Node was already processed by process_file(); reuse its keys.
        file_data = self.api_request({'a': 'g', 'g': 1, 'n': file['h']})
        k = file['k']
        iv = file['iv']
        meta_mac = file['meta_mac']

    # Seems to happen sometimes... When this occurs, files are
    # inaccessible in the official web app as well.
    # Strangely, files can come back later.
    if 'g' not in file_data:
        raise RequestError('File not accessible anymore')
    file_url = file_data['g']
    file_size = file_data['s']
    attribs = base64_url_decode(file_data['at'])
    attribs = decrypt_attr(attribs, k)

    if dest_filename is not None:
        file_name = dest_filename
    else:
        # Fall back to the name stored in the encrypted attributes.
        file_name = attribs['n']

    input_file = requests.get(file_url, stream=True).raw

    if dest_path is None:
        dest_path = ''
    else:
        dest_path += '/'

    # Download into a temp file so a failed MAC check never leaves a
    # partial file at the destination.
    temp_output_file = tempfile.NamedTemporaryFile(mode='w+b', prefix='megapy_', delete=False)

    k_str = a32_to_str(k)
    counter = Counter.new(
        128, initial_value=((iv[0] << 32) + iv[1]) << 64)
    aes = AES.new(k_str, AES.MODE_CTR, counter=counter)

    # Running CBC-MAC state (Python 2 byte strings).
    mac_str = '\0' * 16
    mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str)
    iv_str = a32_to_str([iv[0], iv[1], iv[0], iv[1]])

    for chunk_start, chunk_size in get_chunks(file_size):
        chunk = input_file.read(chunk_size)
        chunk = aes.decrypt(chunk)
        temp_output_file.write(chunk)

        # Per-chunk CBC-MAC over the plaintext; only the final block is
        # folded into the file-level MAC.
        encryptor = AES.new(k_str, AES.MODE_CBC, iv_str)
        for i in range(0, len(chunk)-16, 16):
            block = chunk[i:i + 16]
            encryptor.encrypt(block)

        #fix for files under 16 bytes failing
        if file_size > 16:
            i += 16
        else:
            i = 0

        block = chunk[i:i + 16]
        if len(block) % 16:
            # Zero-pad the trailing partial block.
            block += '\0' * (16 - (len(block) % 16))
        mac_str = mac_encryptor.encrypt(encryptor.encrypt(block))

        if self.options.get('verbose') is True:
            # temp file size as a crude progress indicator
            file_info = os.stat(temp_output_file.name)
            print('{0} of {1} downloaded'.format(file_info.st_size, file_size))

    file_mac = str_to_a32(mac_str)

    temp_output_file.close()

    # check mac integrity
    if (file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3]) != meta_mac:
        raise ValueError('Mismatched mac')

    shutil.move(temp_output_file.name, dest_path + file_name)
2013-02-23 15:06:02 +00:00
|
|
|
##########################################################################
|
|
|
|
# UPLOAD
|
2013-05-08 10:41:23 +00:00
|
|
|
def upload(self, filename, dest=None, dest_filename=None):
    """
    Encrypt and upload a local file to the account.

    dest defaults to the cloud drive root; dest_filename defaults to
    the local basename. Returns the raw API response of the final
    'p' (put-node) call.
    """
    #determine storage node
    if dest is None:
        #if none set, upload to cloud drive node
        if not hasattr(self, 'root_id'):
            # get_files() populates self.root_id as a side effect
            self.get_files()
        dest = self.root_id

    #request upload url, call 'u' method
    input_file = open(filename, 'rb')
    file_size = os.path.getsize(filename)
    ul_url = self.api_request({'a': 'u', 's': file_size})['p']

    #generate random aes key (128) for file; words 4-5 seed the CTR iv
    ul_key = [random.randint(0, 0xFFFFFFFF) for _ in range(6)]
    k_str = a32_to_str(ul_key[:4])
    count = Counter.new(128, initial_value=((ul_key[4] << 32) + ul_key[5]) << 64)
    aes = AES.new(k_str, AES.MODE_CTR, counter=count)

    upload_progress = 0
    completion_file_handle = None

    # Running CBC-MAC state (Python 2 byte strings).
    mac_str = '\0' * 16
    mac_encryptor = AES.new(k_str, AES.MODE_CBC, mac_str)
    iv_str = a32_to_str([ul_key[4], ul_key[5], ul_key[4], ul_key[5]])

    for chunk_start, chunk_size in get_chunks(file_size):
        chunk = input_file.read(chunk_size)
        upload_progress += len(chunk)

        # Per-chunk CBC-MAC over the plaintext; only the final block is
        # folded into the file-level MAC.
        encryptor = AES.new(k_str, AES.MODE_CBC, iv_str)
        for i in range(0, len(chunk)-16, 16):
            block = chunk[i:i + 16]
            encryptor.encrypt(block)

        #fix for files under 16 bytes failing
        if file_size > 16:
            i += 16
        else:
            i = 0

        block = chunk[i:i + 16]
        if len(block) % 16:
            # Zero-pad the trailing partial block.
            block += '\0' * (16 - len(block) % 16)
        mac_str = mac_encryptor.encrypt(encryptor.encrypt(block))

        #encrypt file and upload
        chunk = aes.encrypt(chunk)
        output_file = requests.post(ul_url + "/" + str(chunk_start),
                                    data=chunk, timeout=self.timeout)
        # The body of the last chunk's response is the completion handle.
        completion_file_handle = output_file.text

        if self.options.get('verbose') is True:
            # upload progress
            print('{0} of {1} uploaded'.format(upload_progress, file_size))

    file_mac = str_to_a32(mac_str)

    #determine meta mac
    meta_mac = (file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3])

    if dest_filename is not None:
        attribs = {'n': dest_filename}
    else:
        attribs = {'n': os.path.basename(filename)}

    encrypt_attribs = base64_url_encode(encrypt_attr(attribs, ul_key[:4]))
    # Fold AES key, iv seed and meta-MAC into the 8-word node key.
    key = [ul_key[0] ^ ul_key[4], ul_key[1] ^ ul_key[5],
           ul_key[2] ^ meta_mac[0], ul_key[3] ^ meta_mac[1],
           ul_key[4], ul_key[5], meta_mac[0], meta_mac[1]]
    encrypted_key = a32_to_base64(encrypt_key(key, self.master_key))
    #update attributes
    data = self.api_request({'a': 'p', 't': dest, 'n': [{
        'h': completion_file_handle,
        't': 0,
        'a': encrypt_attribs,
        'k': encrypted_key}]})
    #close input file and return API msg
    input_file.close()
    return data
2013-05-08 12:12:04 +00:00
|
|
|
##########################################################################
|
|
|
|
# OTHER OPERATIONS
|
2013-03-12 14:40:56 +00:00
|
|
|
def create_folder(self, name, dest=None):
    """
    Create a folder called `name` under `dest` (defaults to the cloud
    drive root). Returns the raw API response.
    """
    #determine storage node
    if dest is None:
        # get_files() populates self.root_id as a side effect
        if not hasattr(self, 'root_id'):
            self.get_files()
        dest = self.root_id

    # random 128-bit aes key for the folder
    ul_key = [random.randint(0, 0xFFFFFFFF) for _ in range(6)]

    folder_attribs = base64_url_encode(encrypt_attr({'n': name}, ul_key[:4]))
    folder_key = a32_to_base64(encrypt_key(ul_key[:4], self.master_key))

    # push the new node; 'h' is a placeholder handle for creation
    return self.api_request({'a': 'p',
                             't': dest,
                             'n': [{
                                 'h': 'xxxxxxxx',
                                 't': 1,
                                 'a': folder_attribs,
                                 'k': folder_key}
                             ],
                             'i': self.request_id})
|
|
|
def rename(self, file, new_name):
    """
    Rename a node; expects a (handle, node) pair as returned by find().
    Returns the raw API response.
    """
    node = file[1]
    # re-encrypt the new attributes with the node's own keys
    new_attribs = base64_url_encode(encrypt_attr({'n': new_name}, node['k']))
    new_key = a32_to_base64(encrypt_key(node['key'], self.master_key))
    return self.api_request([{
        'a': 'a',
        'attr': new_attribs,
        'key': new_key,
        'n': node['h'],
        'i': self.request_id}])
|
|
|
def move(self, file_id, target):
    """
    Move a file to another parent node.

    `target` may be:
      * an int special-node type (2: root, 3: inbox, 4: trash)
      * a node id string
      * a (handle, node) structure as returned by find()

    NOTE: Python 2 code — the string check includes `unicode`.
    """
    #determine target_node_id
    if type(target) == int:
        # resolve special nodes to their handle
        target_node_id = str(self.get_node_by_type(target)[0])
    elif type(target) in (str, unicode):
        target_node_id = target
    else:
        target_node_id = target[1]['h']
    return self.api_request({'a': 'm',
                             'n': file_id,
                             't': target_node_id,
                             'i': self.request_id})
|
|
|
def add_contact(self, email):
    """Add another user to your mega contact list."""
    return self._edit_contact(email, True)
|
|
|
def remove_contact(self, email):
    """Remove a user from your mega contact list."""
    return self._edit_contact(email, False)
|
|
|
def _edit_contact(self, email, add):
    """
    Add (add=True) or remove (add=False) a contact by email address.

    Raises ValidationError when `add` is not a bool or the email is
    malformed; otherwise returns the raw API response.
    """
    if add is True:
        l = '1'  # add command
    elif add is False:
        l = '0'  # remove command
    else:
        raise ValidationError('add parameter must be of type bool')

    if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
        # Bugfix: the original constructed this exception without
        # raising it, so invalid addresses silently returned None.
        raise ValidationError('add_contact requires a valid email address')

    return self.api_request({'a': 'ur',
                             'u': email,
                             'l': l,
                             'i': self.request_id})
|
|
def get_contacts(self):
    """Not implemented yet; see the notes below for the intended call."""
    raise NotImplementedError()
    # TODO implement this
    # sn param below = maxaction var with function getsc() in mega.co.nz js
    # seems to be the 'sn' attrib of the previous request response...
    # mega.co.nz js full source @ http://homepages.shu.ac.uk/~rjodwyer/mega-scripts-all.js
    # requests go to /sc rather than /cs:
    #
    #   req = requests.post(
    #       '{0}://g.api.{1}/sc'.format(self.schema, self.domain),
    #       params={'sn': 'ZMxcQ_DmHnM', 'ssl': '1'},
    #       data=json.dumps(None),
    #       timeout=self.timeout)
    #   json_resp = json.loads(req.text)
|
|
def get_public_url_info(self, url):
    """Return a {'size', 'name'} dict for a public url."""
    file_handle, file_key = self.parse_url(url).split('!')
    return self.get_public_file_info(file_handle, file_key)
|
|
|
def import_public_url(self, url, dest_node=None, dest_name=None):
    """Import a publicly shared url into the user's account."""
    file_handle, file_key = self.parse_url(url).split('!')
    return self.import_public_file(file_handle, file_key,
                                   dest_node=dest_node, dest_name=dest_name)
|
|
|
def get_public_file_info(self, file_handle, file_key):
    """
    Get size and name of a public file.
    Returns {'size': ..., 'name': ...}, or None if the attributes
    cannot be decrypted.
    """
    data = self.api_request({
        'a': 'g',
        'p': file_handle,
        'ssm': 1})

    # A bare integer reply is an API error code.
    if isinstance(data, int):
        raise RequestError(data)

    if 'at' not in data or 's' not in data:
        raise ValueError("Unexpected result", data)

    key = base64_to_a32(file_key)
    # xor-fold the 8-word key down to the 4-word AES key
    k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6], key[3] ^ key[7])

    unencrypted_attrs = decrypt_attr(base64_url_decode(data['at']), k)
    if not unencrypted_attrs:
        return None
    return {'size': data['s'], 'name': unencrypted_attrs['n']}
|
|
|
def import_public_file(self, file_handle, file_key, dest_node=None, dest_name=None):
    """
    Import a public file into the user's account.
    Returns the raw API response of the 'p' (put-node) call.
    """
    # Providing dest_node spares an API call to retrieve it.
    if dest_node is None:
        # Default to the '/Cloud Drive' root folder.
        dest_node = self.get_node_by_type(2)[1]

    # Providing dest_name spares an API call to retrieve it.
    if dest_name is None:
        dest_name = self.get_public_file_info(file_handle, file_key)['name']

    key = base64_to_a32(file_key)
    # xor-fold the 8-word key down to the 4-word AES key
    k = (key[0] ^ key[4], key[1] ^ key[5], key[2] ^ key[6], key[3] ^ key[7])

    encrypted_key = a32_to_base64(encrypt_key(key, self.master_key))
    encrypted_name = base64_url_encode(encrypt_attr({'n': dest_name}, k))

    return self.api_request({
        'a': 'p',
        't': dest_node['h'],
        'n': [{
            'ph': file_handle,
            't': 0,
            'a': encrypted_name,
            'k': encrypted_key}]})