Shorten lines longer than 100 characters
parent defa23eff3
commit db628d158e
6 changed files with 79 additions and 25 deletions
@@ -105,11 +105,17 @@ SQL_USER = _sql_dictify(SQL_USER_COLUMNS)
 
 # Errors and warnings
-ERROR_DATABASE_OUTOFDATE = 'Database is out-of-date. {current} should be {new}. Please use utilities\\etiquette_upgrader.py'
+ERROR_DATABASE_OUTOFDATE = '''
+Database is out-of-date. {current} should be {new}.
+Please use utilities\\etiquette_upgrader.py
+'''.strip()
 
 WARNING_MINMAX_INVALID = 'Field "{field}": "{value}" is not a valid request. Ignored.'
 WARNING_ORDERBY_INVALID = 'Invalid orderby request "{request}". Ignored.'
 WARNING_ORDERBY_BADCOL = '"{column}" is not a sorting option. Ignored.'
-WARNING_ORDERBY_BADDIRECTION = 'You can\'t order "{column}" by "{direction}". Defaulting to descending.'
+WARNING_ORDERBY_BADDIRECTION = '''
+You can\'t order "{column}" by "{direction}". Defaulting to descending.
+'''.strip()
 
 # Operational info
 TRUTHYSTRING_TRUE = {s.lower() for s in ('1', 'true', 't', 'yes', 'y', 'on')}

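For reference, the triple-quoted rewrite keeps each source line under 100 characters, and .strip() removes only the surrounding newlines, so the template still formats normally; the interior line break simply becomes part of the message. A minimal sketch (the version numbers 3 and 4 are invented placeholders):

    ERROR_DATABASE_OUTOFDATE = '''
    Database is out-of-date. {current} should be {new}.
    Please use utilities\\etiquette_upgrader.py
    '''.strip()

    # Prints a two-line message:
    # Database is out-of-date. 3 should be 4.
    # Please use utilities\etiquette_upgrader.py
    print(ERROR_DATABASE_OUTOFDATE.format(current=3, new=4))
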
@@ -246,6 +246,19 @@ def now(timestamp=True):
         return n.timestamp()
     return n
 
+def parallel_to_dict(keys, values):
+    '''
+    Given two parallel sequences, return a dictionary where the keys are
+    elements of `keys` and their values are the element of `values` with the
+    same index.
+
+    ['test', 'toast'],
+    ['hello', 'world']
+    ->
+    {'test': 'hello', 'toast': 'world'}
+    '''
+    return {keys[index]: value for (index, value) in enumerate(values)}
+
 def read_filebytes(filepath, range_min, range_max, chunk_size=2 ** 20):
     '''
     Yield chunks of bytes from the file between the endpoints.

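A quick usage sketch of the new helper; the column names and row values here are invented, not the real SQL_* column lists:

    def parallel_to_dict(keys, values):
        # Copied from the hunk above.
        return {keys[index]: value for (index, value) in enumerate(values)}

    columns = ['id', 'title', 'description']
    db_row = ('a1b2c3', 'Vacation', 'Photos from the beach')

    # {'id': 'a1b2c3', 'title': 'Vacation', 'description': 'Photos from the beach'}
    print(parallel_to_dict(columns, db_row))

    # For equal-length sequences this matches dict(zip(columns, db_row)).
    assert parallel_to_dict(columns, db_row) == dict(zip(columns, db_row))
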
@@ -204,7 +204,7 @@ class Album(ObjectBase, GroupableMixin):
     def __init__(self, photodb, db_row):
         self.photodb = photodb
         if isinstance(db_row, (list, tuple)):
-            db_row = {constants.SQL_ALBUM_COLUMNS[index]: value for (index, value) in enumerate(db_row)}
+            db_row = helpers.parallel_to_dict(constants.SQL_ALBUM_COLUMNS, db_row)
         self.id = db_row['id']
         self.title = db_row['title']
         self.description = db_row['description']

@@ -348,7 +348,7 @@ class Bookmark(ObjectBase):
     def __init__(self, photodb, db_row):
         self.photodb = photodb
         if isinstance(db_row, (list, tuple)):
-            db_row = {constants.SQL_BOOKMARK_COLUMNS[index]: value for (index, value) in enumerate(db_row)}
+            db_row = helpers.parallel_to_dict(constants.SQL_BOOKMARK_COLUMNS, db_row)
 
         self.id = db_row['id']
         self.title = db_row['title']

@@ -393,7 +393,7 @@ class Photo(ObjectBase):
     def __init__(self, photodb, db_row):
         self.photodb = photodb
         if isinstance(db_row, (list, tuple)):
-            db_row = {constants.SQL_PHOTO_COLUMNS[index]: value for (index, value) in enumerate(db_row)}
+            db_row = helpers.parallel_to_dict(constants.SQL_PHOTO_COLUMNS, db_row)
 
         self.real_filepath = helpers.normalize_filepath(db_row['filepath'], allowed=':\\/')
         self.real_path = pathclass.Path(self.real_filepath)

@@ -455,7 +455,8 @@ class Photo(ObjectBase):
         # If the new tag is more specific, remove our current one for it.
         for parent in tag.walk_parents():
             if self.has_tag(parent, check_children=False):
-                self.photodb.log.debug('Preferring new {tag:s} over {par:s}'.format(tag=tag, par=parent))
+                message = 'Preferring new {tag:s} over {par:s}'.format(tag=tag, par=parent)
+                self.photodb.log.debug(message)
                 self.remove_tag(parent)
 
         self.photodb.log.debug('Applying tag {tag:s} to photo {pho:s}'.format(tag=tag, pho=self))

@@ -818,7 +819,7 @@ class Tag(ObjectBase, GroupableMixin):
     def __init__(self, photodb, db_row):
         self.photodb = photodb
         if isinstance(db_row, (list, tuple)):
-            db_row = {constants.SQL_TAG_COLUMNS[index]: value for (index, value) in enumerate(db_row)}
+            db_row = helpers.parallel_to_dict(constants.SQL_TAG_COLUMNS, db_row)
         self.id = db_row['id']
         self.name = db_row['name']
         self.group_getter = self.photodb.get_tag

@@ -1003,7 +1004,7 @@ class User(ObjectBase):
     def __init__(self, photodb, db_row):
         self.photodb = photodb
         if isinstance(db_row, (list, tuple)):
-            db_row = {constants.SQL_USER_COLUMNS[index]: value for (index, value) in enumerate(db_row)}
+            db_row = helpers.parallel_to_dict(constants.SQL_USER_COLUMNS, db_row)
         self.id = db_row['id']
         self.username = db_row['username']
         self.created = db_row['created']

@@ -146,17 +146,32 @@ def _helper_filenamefilter(subject, terms):
     return all(term in basename for term in terms)
 
 def searchfilter_must_may_forbid(photo_tags, tag_musts, tag_mays, tag_forbids, frozen_children):
-    if tag_musts and not all(any(option in photo_tags for option in frozen_children[must]) for must in tag_musts):
-        #print('Failed musts')
-        return False
+    if tag_musts:
+        for must in tag_musts:
+            for option in frozen_children[must]:
+                if option in photo_tags:
+                    break
+            else:
+                # Fail when ANY of the tags fails to find an option.
+                return False
 
-    if tag_mays and not any(option in photo_tags for may in tag_mays for option in frozen_children[may]):
-        #print('Failed mays')
-        return False
+    if tag_mays:
+        for may in tag_mays:
+            for option in frozen_children[may]:
+                if option in photo_tags:
+                    break
+            else:
+                continue
+            break
+        else:
+            # Fail when ALL of the tags fail to find an option.
+            return False
 
-    if tag_forbids and any(option in photo_tags for forbid in tag_forbids for option in frozen_children[forbid]):
-        #print('Failed forbids')
-        return False
+    if tag_forbids:
+        for forbid in tag_forbids:
+            for option in frozen_children[forbid]:
+                if option in photo_tags:
+                    return False
 
     return True
 
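To illustrate what the rewritten filter accepts and rejects, here is a small demo. It assumes the searchfilter_must_may_forbid above is in scope, and that frozen_children maps each tag name to a set containing itself and its descendants, which is how the surrounding search code appears to use it; the tag data is invented:

    frozen_children = {
        'animal': {'animal', 'cat', 'dog'},
        'outdoors': {'outdoors', 'beach'},
        'private': {'private'},
    }
    photo_tags = {'cat', 'beach'}

    # musts: every must tag needs at least one of its options present ('cat' covers 'animal').
    # mays: when mays are given, at least one option must be present ('beach' covers 'outdoors').
    # forbids: no forbidden option may be present ('private' is absent).
    print(searchfilter_must_may_forbid(
        photo_tags=photo_tags,
        tag_musts={'animal'},
        tag_mays={'outdoors'},
        tag_forbids={'private'},
        frozen_children=frozen_children,
    ))  # True

    print(searchfilter_must_may_forbid(
        photo_tags=photo_tags,
        tag_musts={'animal'},
        tag_mays=None,
        tag_forbids={'outdoors'},
        frozen_children=frozen_children,
    ))  # False: 'beach' falls under the forbidden 'outdoors'
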
@@ -609,9 +624,14 @@ class PDBPhotoMixin:
             tag_forbids = None
             tag_expression = None
         else:
-            tag_musts = searchhelpers.normalize_tag_mmf(photodb=self, tags=tag_musts, warning_bag=warning_bag)
-            tag_mays = searchhelpers.normalize_tag_mmf(photodb=self, tags=tag_mays, warning_bag=warning_bag)
-            tag_forbids = searchhelpers.normalize_tag_mmf(photodb=self, tags=tag_forbids, warning_bag=warning_bag)
+            _helper = lambda tagset: searchhelpers.normalize_tag_mmf(
+                photodb=self,
+                tags=tagset,
+                warning_bag=warning_bag
+            )
+            tag_musts = _helper(tag_musts)
+            tag_mays = _helper(tag_mays)
+            tag_forbids = _helper(tag_forbids)
             tag_expression = searchhelpers.normalize_tag_expression(tag_expression)
 
         #print(tag_musts, tag_mays, tag_forbids)

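The local lambda simply pins the repeated keyword arguments; functools.partial would express the same idea, though call sites must then pass tags by keyword. A self-contained sketch with a stand-in function (normalize here is hypothetical, not the real searchhelpers.normalize_tag_mmf):

    import functools

    def normalize(photodb, tags, warning_bag):
        # Stand-in with the same three keyword parameters as the real helper.
        return sorted(tags) if tags else tags

    _helper = functools.partial(normalize, photodb=None, warning_bag=None)

    print(_helper(tags={'b', 'a'}))  # ['a', 'b']
    print(_helper(tags=None))        # None
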
@@ -710,7 +730,10 @@ class PDBPhotoMixin:
         if tag_expression:
             expression_tree = expressionmatch.ExpressionTree.parse(tag_expression)
             expression_tree.map(self.normalize_tagname)
-            expression_matcher = searchhelpers.tag_expression_matcher_builder(frozen_children, warning_bag)
+            expression_matcher = searchhelpers.tag_expression_matcher_builder(
+                frozen_children,
+                warning_bag=warning_bag,
+            )
             for node in expression_tree.walk_leaves():
                 if node.token in frozen_children:
                     continue

@@ -1000,10 +1023,16 @@ class PDBUserMixin:
 
     def register_user(self, username, password, commit=True):
         if len(username) < self.config['min_username_length']:
-            raise exceptions.UsernameTooShort(username=username, min_length=self.config['min_username_length'])
+            raise exceptions.UsernameTooShort(
+                username=username,
+                min_length=self.config['min_username_length']
+            )
 
         if len(username) > self.config['max_username_length']:
-            raise exceptions.UsernameTooLong(username=username, max_length=self.config['max_username_length'])
+            raise exceptions.UsernameTooLong(
+                username=username,
+                max_length=self.config['max_username_length']
+            )
 
         badchars = [c for c in username if c not in self.config['valid_username_chars']]
         if badchars:

@@ -260,7 +260,10 @@ def normalize_orderby(orderby, warning_bag=None):
             column = 'RANDOM()'
 
         if direction not in ('asc', 'desc'):
-            message = constants.WARNING_ORDERBY_BADDIRECTION.format(column=column, direction=direction)
+            message = constants.WARNING_ORDERBY_BADDIRECTION.format(
+                column=column,
+                direction=direction,
+            )
             if warning_bag:
                 warning_bag.add(message)
             else:

@@ -576,7 +576,9 @@ def get_search_html():
 @session_manager.give_token
 def get_search_json():
     search_results = get_search_core()
-    search_results['photos'] = [jsonify.photo(photo, include_albums=False) for photo in search_results['photos']]
+    search_results['photos'] = [
+        jsonify.photo(photo, include_albums=False) for photo in search_results['photos']
+    ]
     return jsonify.make_json_response(search_results)
 