import bcrypt
import collections
import copy
import json
import logging
import os
import random
import sqlite3
import string
import time
import warnings

import constants
import decorators
import exceptions
import helpers
import objects

# pip install
# https://raw.githubusercontent.com/voussoir/else/master/_voussoirkit/voussoirkit.zip
from voussoirkit import spinal


logging.basicConfig(level=logging.DEBUG)
logging.getLogger('PIL.PngImagePlugin').setLevel(logging.WARNING)


# Note: Setting user_version pragma in init sequence is safe because it only
# happens after the out-of-date check occurs, so no chance of accidentally
# overwriting it.
DATABASE_VERSION = 4
DB_INIT = '''
PRAGMA count_changes = OFF;
PRAGMA cache_size = 10000;
PRAGMA user_version = {user_version};
CREATE TABLE IF NOT EXISTS albums(
    id TEXT,
    title TEXT,
    description TEXT,
    associated_directory TEXT COLLATE NOCASE
);
CREATE TABLE IF NOT EXISTS photos(
    id TEXT,
    filepath TEXT COLLATE NOCASE,
    override_filename TEXT COLLATE NOCASE,
    extension TEXT,
    width INT,
    height INT,
    ratio REAL,
    area INT,
    duration INT,
    bytes INT,
    created INT,
    thumbnail TEXT,
    tagged_at INT,
    author_id TEXT
);
CREATE TABLE IF NOT EXISTS tags(
    id TEXT,
    name TEXT
);
CREATE TABLE IF NOT EXISTS album_photo_rel(
    albumid TEXT,
    photoid TEXT
);
CREATE TABLE IF NOT EXISTS photo_tag_rel(
    photoid TEXT,
    tagid TEXT
);
CREATE TABLE IF NOT EXISTS tag_group_rel(
    parentid TEXT,
    memberid TEXT
);
CREATE TABLE IF NOT EXISTS tag_synonyms(
    name TEXT,
    mastername TEXT
);
CREATE TABLE IF NOT EXISTS id_numbers(
    tab TEXT,
    last_id TEXT
);
CREATE TABLE IF NOT EXISTS users(
    id TEXT,
    username TEXT COLLATE NOCASE,
    password BLOB,
    created INT
);

-- Album
CREATE INDEX IF NOT EXISTS index_album_id on albums(id);
CREATE INDEX IF NOT EXISTS index_albumrel_albumid on album_photo_rel(albumid);
CREATE INDEX IF NOT EXISTS index_albumrel_photoid on album_photo_rel(photoid);

-- Photo
CREATE INDEX IF NOT EXISTS index_photo_id on photos(id);
CREATE INDEX IF NOT EXISTS index_photo_path on photos(filepath COLLATE NOCASE);
CREATE INDEX IF NOT EXISTS index_photo_fakepath on photos(override_filename COLLATE NOCASE);
CREATE INDEX IF NOT EXISTS index_photo_created on photos(created);
CREATE INDEX IF NOT EXISTS index_photo_extension on photos(extension);
CREATE INDEX IF NOT EXISTS index_photo_author on photos(author_id);

-- Tag
CREATE INDEX IF NOT EXISTS index_tag_id on tags(id);
CREATE INDEX IF NOT EXISTS index_tag_name on tags(name);

-- Photo-tag relation
CREATE INDEX IF NOT EXISTS index_tagrel_photoid on photo_tag_rel(photoid);
CREATE INDEX IF NOT EXISTS index_tagrel_tagid on photo_tag_rel(tagid);

-- Tag-synonym relation
CREATE INDEX IF NOT EXISTS index_tagsyn_name on tag_synonyms(name);

-- Tag-group relation
CREATE INDEX IF NOT EXISTS index_grouprel_parentid on tag_group_rel(parentid);
CREATE INDEX IF NOT EXISTS index_grouprel_memberid on tag_group_rel(memberid);

-- User
CREATE INDEX IF NOT EXISTS index_user_id on users(id);
CREATE INDEX IF NOT EXISTS index_user_username on users(username COLLATE NOCASE);
'''.format(user_version=DATABASE_VERSION)


def _helper_filenamefilter(subject, terms):
    basename = subject.lower()
    return all(term in basename for term in terms)

def binding_filler(column_names, values, require_all=True):
    '''
    Manually aligning question marks and bindings is annoying.
    Given the table's column names and a dictionary of {column: value},
    return the question marks and the list of bindings in the right order.
    '''
    values = values.copy()
    for column in column_names:
        if column in values:
            continue
        if require_all:
            raise ValueError('Missing column "%s"' % column)
        else:
            values.setdefault(column, None)
    qmarks = '?' * len(column_names)
    qmarks = ', '.join(qmarks)
    bindings = [values[column] for column in column_names]
    return (qmarks, bindings)
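# Illustrative sketch (not from the original source): given
# column_names=['id', 'title'] and values={'id': 'a1', 'title': 'hello'},
# binding_filler returns ('?, ?', ['a1', 'hello']), ready to be combined with
# 'INSERT INTO albums VALUES(%s)' % qmarks and executed with the bindings list.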

def operate(operand_stack, operator_stack):
    #print('before:', operand_stack, operator_stack)
    operator = operator_stack.pop()
    if operator == 'NOT':
        operand = operand_stack.pop()
        value = operand ^ 1
    else:
        right = operand_stack.pop()
        left = operand_stack.pop()
        if operator == 'OR':
            value = left | right
        elif operator == 'AND':
            value = left & right
        else:
            raise ValueError('Unknown operator: %s' % operator)
    operand_stack.append(value)
    #print('after:', operand_stack, operator_stack)
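# Illustrative note (not from the original source): operate() applies the top
# operator to the top operand(s). With operand_stack=[True, False] and
# operator_stack=['AND'], it pops both operands and the operator and pushes
# True & False, leaving operand_stack=[False].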

def raise_no_such_thing(exception_class, thing_id=None, thing_name=None, comment=''):
    if thing_id is not None:
        message = 'ID: %s. %s' % (thing_id, comment)
    elif thing_name is not None:
        message = 'Name: %s. %s' % (thing_name, comment)
    else:
        message = ''
    raise exception_class(message)

def searchfilter_expression(photo_tags, expression, frozen_children, token_normalizer, warn_bad_tags):
    photo_tags = set(tag.name for tag in photo_tags)
    operator_stack = collections.deque()
    operand_stack = collections.deque()

    expression = expression.replace('-', ' ')
    expression = expression.strip()
    if not expression:
        return False
    expression = expression.replace('(', ' ( ')
    expression = expression.replace(')', ' ) ')
    while '  ' in expression:
        expression = expression.replace('  ', ' ')
    tokens = [token for token in expression.split(' ') if token]
    has_operand = False
    can_shortcircuit = False

    for token in tokens:
        #print(token, end=' ', flush=True)
        if can_shortcircuit and token != ')':
            continue

        if token not in constants.EXPRESSION_OPERATORS:
            try:
                token = token_normalizer(token)
                value = any(option in photo_tags for option in frozen_children[token])
            except KeyError:
                if warn_bad_tags:
                    warnings.warn(constants.WARNING_NO_SUCH_TAG.format(tag=token))
                else:
                    raise exceptions.NoSuchTag(token)
                return False
            operand_stack.append(value)
            if has_operand:
                operate(operand_stack, operator_stack)
            has_operand = True
            continue

        if token == '(':
            has_operand = False

        if token == ')':
            if not can_shortcircuit:
                while operator_stack[-1] != '(':
                    operate(operand_stack, operator_stack)
                operator_stack.pop()
            has_operand = True
            continue

        can_shortcircuit = (
            has_operand and
            (
                (operand_stack[-1] == 0 and token == 'AND') or
                (operand_stack[-1] == 1 and token == 'OR')
            )
        )
        if can_shortcircuit:
            if operator_stack and operator_stack[-1] == '(':
                operator_stack.pop()
            continue

        operator_stack.append(token)
        #time.sleep(.3)
    #print()
    while len(operand_stack) > 1 or len(operator_stack) > 0:
        operate(operand_stack, operator_stack)
    #print(operand_stack)
    return operand_stack.pop()
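# Illustrative walkthrough (not from the original source; the tag names are
# hypothetical): for a photo tagged only 'beach', the expression
# 'beach AND NOT family' tokenizes to ['beach', 'AND', 'NOT', 'family'].
# Tag tokens push a boolean (does the photo carry the tag or any of its nested
# children, per frozen_children?), operators wait on operator_stack, and the
# final pop yields a truthy value here, so the photo passes the filter.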

def searchfilter_must_may_forbid(photo_tags, tag_musts, tag_mays, tag_forbids, frozen_children):
    if tag_musts and not all(any(option in photo_tags for option in frozen_children[must]) for must in tag_musts):
        #print('Failed musts')
        return False

    if tag_mays and not any(option in photo_tags for may in tag_mays for option in frozen_children[may]):
        #print('Failed mays')
        return False

    if tag_forbids and any(option in photo_tags for forbid in tag_forbids for option in frozen_children[forbid]):
        #print('Failed forbids')
        return False

    return True

def tag_export_easybake(tags, depth=0):
    lines = []
    for tag in tags:
        if not hasattr(tag, 'string'):
            tag.string = tag.name
        children = tag.children()
        synonyms = tag.synonyms()
        lines.append(tag.string)

        for synonym in synonyms:
            synonym = tag.string + '+' + synonym
            lines.append(synonym)

        for child in children:
            child.string = tag.string + '.' + child.name
        child_bake = tag_export_easybake(children, depth=depth+1)
        if child_bake != '':
            lines.append(child_bake)

    lines = '\n'.join(lines)
    return lines
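# Illustrative sketch (not from the original source): a tag tree
# family > parents > dad, where dad has the synonym "father", bakes into
#   family
#   family.parents
#   family.parents.dad
#   family.parents.dad+father
# which is the same dotted/plussed format that PhotoDB.easybake() parses.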

def tag_export_json(tags):
    def fill(tag):
        children = {child.name: fill(child) for child in tag.children()}
        return children
    result = {}
    for tag in tags:
        result[tag.name] = fill(tag)
    return result

def tag_export_qualname_map(tags):
    lines = tag_export_easybake(tags)
    lines = lines.split('\n')
    lines = [line for line in lines if line]
    qualname_map = {}
    for line in lines:
        key = line.split('.')[-1].split('+')[-1]
        value = line.split('+')[0]
        qualname_map[key] = value
    return qualname_map

def tag_export_stdout(tags, depth=0):
    for tag in tags:
        children = tag.children()
        synonyms = tag.synonyms()

        pad = ' ' * depth
        synpad = ' ' * (depth + 1)
        print(pad + str(tag))

        for synonym in synonyms:
            print(synpad + synonym)

        tag_export_stdout(children, depth=depth+1)

        if tag.parent() is None:
            print()

@decorators.time_me
def tag_export_totally_flat(tags):
    result = {}
    for tag in tags:
        for child in tag.walk_children():
            children = list(child.walk_children())
            result[child] = children
            for synonym in child.synonyms():
                result[synonym] = children
    return result
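# Illustrative note (not from the original source): tag_export_totally_flat is
# the exporter PhotoDB.search() uses to build `frozen_children`, a dict mapping
# every tag (and synonym name) to the list of all of its nested children, so
# tag membership checks during a search are in-memory lookups instead of
# repeated database queries.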


####################################################################################################
####################################################################################################


class PDBAlbumMixin:
    def get_album(self, id):
        return self.get_thing_by_id('album', id)

    def get_album_by_path(self, filepath):
        '''
        Return the album with the `associated_directory` of this value, NOT case-sensitive.
        '''
        filepath = os.path.abspath(filepath)
        self.cur.execute('SELECT * FROM albums WHERE associated_directory == ?', [filepath])
        fetch = self.cur.fetchone()
        if fetch is None:
            raise exceptions.NoSuchAlbum(filepath)
        return self.get_album(fetch[constants.SQL_ALBUM['id']])

    def get_albums(self):
        yield from self.get_things(thing_type='album')

    def new_album(
        self,
        title=None,
        description=None,
        *,
        associated_directory=None,
        commit=True,
        photos=None
    ):
        '''
        Create a new album. Photos can be added now or later.
        '''
        # Albums share the tag table's ID counter
        albumid = self.generate_id('tags')
        title = title or ''
        description = description or ''
        if associated_directory is not None:
            associated_directory = os.path.abspath(associated_directory)

        if not isinstance(title, str):
            raise TypeError('Title must be string, not %s' % type(title))

        if not isinstance(description, str):
            raise TypeError('Description must be string, not %s' % type(description))

        data = {
            'id': albumid,
            'title': title,
            'description': description,
            'associated_directory': associated_directory,
        }

        (qmarks, bindings) = binding_filler(constants.SQL_ALBUM_COLUMNS, data)
        query = 'INSERT INTO albums VALUES(%s)' % qmarks
        self.cur.execute(query, bindings)

        album = objects.Album(self, data)
        if photos:
            for photo in photos:
                photo = self.get_photo(photo)
                album.add_photo(photo, commit=False)

        if commit:
            self.log.debug('Committing - new Album')
            self.commit()
        return album
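    # Usage sketch (illustrative, not from the original source; the photo IDs
    # are hypothetical and must already exist in the database):
    #   album = photodb.new_album(title='Vacation 2016', photos=['000001', '000002'])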


class PDBPhotoMixin:
    def get_photo(self, photoid):
        return self.get_thing_by_id('photo', photoid)

    def get_photo_by_path(self, filepath):
        filepath = os.path.abspath(filepath)
        self.cur.execute('SELECT * FROM photos WHERE filepath == ?', [filepath])
        fetch = self.cur.fetchone()
        if fetch is None:
            raise_no_such_thing(exceptions.NoSuchPhoto, thing_name=filepath)
        photo = objects.Photo(self, fetch)
        return photo

    def get_photos_by_recent(self, count=None):
        '''
        Yield photo objects in order of creation time.
        '''
        if count is not None and count <= 0:
            return
        # We're going to use a second cursor because the first one may
        # get used for something else, deactivating this query.
        temp_cur = self.sql.cursor()
        temp_cur.execute('SELECT * FROM photos ORDER BY created DESC')
        while True:
            fetch = temp_cur.fetchone()
            if fetch is None:
                break
            photo = objects.Photo(self, fetch)

            yield photo

            if count is None:
                continue
            count -= 1
            if count <= 0:
                break

    def new_photo(
        self,
        filename,
        *,
        allow_duplicates=False,
        author=None,
        commit=True,
        do_metadata=True,
        do_thumbnail=True,
        tags=None
    ):
        '''
        Given a filepath, determine its attributes and create a new Photo object in the
        database. Tags may be applied now or later.

        If `allow_duplicates` is False, we will first check the database for any files
        with the same path and raise exceptions.PhotoExists if found.

        Returns the Photo object.
        '''
        filename = os.path.abspath(filename)
        assert os.path.isfile(filename)
        if not allow_duplicates:
            try:
                existing = self.get_photo_by_path(filename)
            except exceptions.NoSuchPhoto:
                pass
            else:
                exc = exceptions.PhotoExists(filename, existing)
                exc.photo = existing
                raise exc

        # Default to no author; without this, the `data` dict below would
        # reference an undefined name when `author` is None.
        author_id = None
        if isinstance(author, objects.User):
            if author.photodb != self:
                raise ValueError('That user does not belong to this photodb')
            author_id = author.id
        elif author is not None:
            # Just to confirm
            author_id = self.get_user(id=author).id

        extension = os.path.splitext(filename)[1]
        extension = extension.replace('.', '')
        extension = self.normalize_tagname(extension)
        created = int(helpers.now())
        photoid = self.generate_id('photos')

        data = {
            'id': photoid,
            'filepath': filename,
            'override_filename': None,
            'extension': extension,
            'created': created,
            'tagged_at': None,
            'author_id': author_id,
            # These will be filled in during the metadata stage.
            'bytes': None,
            'width': None,
            'height': None,
            'area': None,
            'ratio': None,
            'duration': None,
            'thumbnail': None,
        }

        (qmarks, bindings) = binding_filler(constants.SQL_PHOTO_COLUMNS, data)
        query = 'INSERT INTO photos VALUES(%s)' % qmarks
        self.cur.execute(query, bindings)
        photo = objects.Photo(self, data)

        if do_metadata:
            photo.reload_metadata(commit=False)
        if do_thumbnail:
            photo.generate_thumbnail(commit=False)

        tags = tags or []
        tags = [self.get_tag(tag) for tag in tags]
        for tag in tags:
            photo.add_tag(tag, commit=False)

        if commit:
            self.log.debug('Committing - new_photo')
            self.commit()
        return photo
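    # Usage sketch (illustrative, not from the original source; the path is
    # hypothetical and the 'vacation' tag must already exist):
    #   photo = photodb.new_photo('D:\\pictures\\beach.jpg', tags=['vacation'])
    #   print(photo.id, photo.extension)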

    def purge_deleted_files(self):
        '''
        Remove Photo entries if their corresponding file is no longer found.
        '''
        photos = self.get_photos_by_recent()
        for photo in photos:
            if os.path.exists(photo.real_filepath):
                continue
            photo.delete()

    def purge_empty_albums(self):
        albums = self.get_albums()
        for album in albums:
            if album.children() or album.photos():
                continue
            album.delete()

    def search(
        self,
        *,
        area=None,
        width=None,
        height=None,
        ratio=None,
        bytes=None,
        duration=None,

        created=None,
        extension=None,
        extension_not=None,
        filename=None,
        has_tags=None,
        mimetype=None,
        tag_musts=None,
        tag_mays=None,
        tag_forbids=None,
        tag_expression=None,

        warn_bad_tags=False,
        limit=None,
        offset=None,
        orderby=None
    ):
        '''
        PHOTO PROPERTIES
        area, width, height, ratio, bytes, duration:
            A hyphen_range string representing min and max. Or just a number for lower bound.

        TAGS AND FILTERS
        created:
            A hyphen_range string representing min and max. Or just a number for lower bound.

        extension:
            A string or list of strings of acceptable file extensions.

        extension_not:
            A string or list of strings of unacceptable file extensions.

        filename:
            A string or list of strings which will be split into words. The file's basename
            must include every word, NOT case-sensitive.

        has_tags:
            If True, require that the Photo has >= 1 tag.
            If False, require that the Photo has no tags.
            If None, not considered.

        mimetype:
            A string or list of strings of acceptable mimetypes. 'image', 'video', ...

        tag_musts:
            A list of tag names or Tag objects.
            Photos MUST have ALL tags in this list.

        tag_mays:
            A list of tag names or Tag objects.
            Photos MUST have AT LEAST ONE tag in this list.

        tag_forbids:
            A list of tag names or Tag objects.
            Photos MUST NOT have ANY tag in the list.

        tag_expression:
            A string like 'family AND (animals OR vacation)' to filter by.
            Can NOT be used with the must, may, forbid style search.

        QUERY OPTIONS
        warn_bad_tags:
            If a tag is not found, issue a warning but continue the search.
            Otherwise, an exceptions.NoSuchTag exception would be raised.

        limit:
            The maximum number of *successful* results to yield.

        offset:
            How many *successful* results to skip before we start yielding.

        orderby:
            A list of strings like ['ratio DESC', 'created ASC'] to sort
            and subsort the results.
            Descending is assumed if not provided.
        '''
        start_time = time.time()
        maximums = {}
        minimums = {}
        helpers._minmax('area', area, minimums, maximums)
        helpers._minmax('created', created, minimums, maximums)
        helpers._minmax('width', width, minimums, maximums)
        helpers._minmax('height', height, minimums, maximums)
        helpers._minmax('ratio', ratio, minimums, maximums)
        helpers._minmax('bytes', bytes, minimums, maximums)
        helpers._minmax('duration', duration, minimums, maximums)
        orderby = orderby or []

        extension = helpers._normalize_extensions(extension)
        extension_not = helpers._normalize_extensions(extension_not)
        mimetype = helpers._normalize_extensions(mimetype)

        if filename is not None:
            if not isinstance(filename, str):
                filename = ' '.join(filename)
            filename = set(term.lower() for term in filename.strip().split(' '))

        if (tag_musts or tag_mays or tag_forbids) and tag_expression:
            raise exceptions.NotExclusive('Expression filter cannot be used with musts, mays, forbids')

        tag_musts = helpers._setify_tags(photodb=self, tags=tag_musts, warn_bad_tags=warn_bad_tags)
        tag_mays = helpers._setify_tags(photodb=self, tags=tag_mays, warn_bad_tags=warn_bad_tags)
        tag_forbids = helpers._setify_tags(photodb=self, tags=tag_forbids, warn_bad_tags=warn_bad_tags)

        query = 'SELECT * FROM photos'
        orderby = [helpers._orderby(o) for o in orderby]
        orderby = [o for o in orderby if o]
        if orderby:
            whereable_columns = [o[0] for o in orderby if o[0] != 'RANDOM()']
            whereable_columns = [column + ' IS NOT NULL' for column in whereable_columns]
            if whereable_columns:
                query += ' WHERE '
                query += ' AND '.join(whereable_columns)
            orderby = [' '.join(o) for o in orderby]
            orderby = ', '.join(orderby)
            query += ' ORDER BY %s' % orderby
        else:
            query += ' ORDER BY created DESC'
        print(query)
        generator = helpers.select_generator(self.sql, query)

        # To lighten the amount of database reading here, `frozen_children` is a dict where
        # EVERY tag in the db is a key, and the value is a list of ALL ITS NESTED CHILDREN.
        # This representation is memory inefficient, but it is faster than repeated
        # database lookups
        is_must_may_forbid = bool(tag_musts or tag_mays or tag_forbids)
        is_tagsearch = is_must_may_forbid or tag_expression
        if is_tagsearch:
            if self._cached_frozen_children:
                frozen_children = self._cached_frozen_children
            else:
                frozen_children = self.export_tags(tag_export_totally_flat)
                self._cached_frozen_children = frozen_children
        photos_received = 0

        for fetch in generator:
            photo = objects.Photo(self, fetch)

            if extension and photo.extension not in extension:
                #print('Failed extension')
                continue

            if extension_not and photo.extension in extension_not:
                #print('Failed extension_not')
                continue

            if mimetype and photo.mimetype() not in mimetype:
                #print('Failed mimetype')
                continue

            if filename and not _helper_filenamefilter(subject=photo.basename, terms=filename):
                #print('Failed filename')
                continue

            if any(
                not fetch[constants.SQL_PHOTO[key]] or
                fetch[constants.SQL_PHOTO[key]] > value for (key, value) in maximums.items()
            ):
                #print('Failed maximums')
                continue

            if any(
                not fetch[constants.SQL_PHOTO[key]] or
                fetch[constants.SQL_PHOTO[key]] < value for (key, value) in minimums.items()
            ):
                #print('Failed minimums')
                continue

            if (has_tags is not None) or is_tagsearch:
                photo_tags = photo.tags()

                if has_tags is False and len(photo_tags) > 0:
                    continue

                if has_tags is True and len(photo_tags) == 0:
                    continue

                photo_tags = set(photo_tags)

                if tag_expression:
                    success = searchfilter_expression(
                        photo_tags=photo_tags,
                        expression=tag_expression,
                        frozen_children=frozen_children,
                        token_normalizer=self.normalize_tagname,
                        warn_bad_tags=warn_bad_tags,
                    )
                    if not success:
                        continue
                elif is_must_may_forbid:
                    success = searchfilter_must_may_forbid(
                        photo_tags=photo_tags,
                        tag_musts=tag_musts,
                        tag_mays=tag_mays,
                        tag_forbids=tag_forbids,
                        frozen_children=frozen_children,
                    )
                    if not success:
                        continue

            if offset is not None and offset > 0:
                offset -= 1
                continue

            if limit is not None and photos_received >= limit:
                break

            photos_received += 1
            yield photo

        end_time = time.time()
        print(end_time - start_time)
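    # Usage sketch (illustrative, not from the original source; the tag names
    # are hypothetical and must exist in the database):
    #   results = photodb.search(
    #       tag_musts=['family'],
    #       tag_forbids=['blurry'],
    #       extension=['jpg', 'png'],
    #       orderby=['created DESC'],
    #   )
    #   for photo in results:
    #       print(photo.id, photo.basename)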


class PDBTagMixin:
    def export_tags(self, exporter=tag_export_stdout, specific_tag=None):
        '''
        Send the top-level tags (or just `specific_tag`) to function `exporter`.
        Recursion is to be handled by the exporter.
        '''
        if specific_tag is None:
            items = list(self.get_tags())
            items = [item for item in items if item.parent() is None]
            items.sort(key=lambda x: x.name)
        else:
            items = [self.get_tag(specific_tag)]
        return exporter(items)

    def get_tag(self, name=None, id=None):
        '''
        Redirect to get_tag_by_id or get_tag_by_name after xor-checking the parameters.
        '''
        if not helpers.is_xor(id, name):
            raise exceptions.NotExclusive('One and only one of `id`, `name` must be passed.')

        if id is not None:
            return self.get_tag_by_id(id)
        elif name is not None:
            return self.get_tag_by_name(name)
        else:
            raise_no_such_thing(exceptions.NoSuchTag, thing_id=id, thing_name=name)

    def get_tag_by_id(self, id):
        return self.get_thing_by_id('tag', thing_id=id)

    def get_tag_by_name(self, tagname):
        if isinstance(tagname, objects.Tag):
            tagname = tagname.name

        tagname = tagname.split('.')[-1].split('+')[0]
        tagname = self.normalize_tagname(tagname)

        while True:
            # Return if it's a toplevel, or resolve the synonym and try that.
            self.cur.execute('SELECT * FROM tags WHERE name == ?', [tagname])
            fetch = self.cur.fetchone()
            if fetch is not None:
                return objects.Tag(self, fetch)

            self.cur.execute('SELECT * FROM tag_synonyms WHERE name == ?', [tagname])
            fetch = self.cur.fetchone()
            if fetch is None:
                # was not a top tag or synonym
                raise_no_such_thing(exceptions.NoSuchTag, thing_name=tagname)
            tagname = fetch[constants.SQL_SYN['master']]

    def get_tags(self):
        yield from self.get_things(thing_type='tag')

    def new_tag(self, tagname, *, commit=True):
        '''
        Register a new tag and return the Tag object.
        '''
        tagname = self.normalize_tagname(tagname)
        try:
            self.get_tag_by_name(tagname)
        except exceptions.NoSuchTag:
            pass
        else:
            raise exceptions.TagExists(tagname)

        tagid = self.generate_id('tags')
        self._cached_frozen_children = None
        self.cur.execute('INSERT INTO tags VALUES(?, ?)', [tagid, tagname])
        if commit:
            self.log.debug('Committing - new_tag')
            self.commit()
        tag = objects.Tag(self, [tagid, tagname])
        return tag

    def normalize_tagname(self, tagname):
        '''
        Tag names can only consist of characters defined in the config.
        The given tagname is lowercased, gets its spaces and hyphens
        replaced by underscores, and is stripped of any not-whitelisted
        characters.
        '''
        tagname = tagname.lower()
        tagname = tagname.replace('-', '_')
        tagname = tagname.replace(' ', '_')
        tagname = (c for c in tagname if c in self.config['valid_tag_chars'])
        tagname = ''.join(tagname)

        if len(tagname) < self.config['min_tag_name_length']:
            raise exceptions.TagTooShort(tagname)
        if len(tagname) > self.config['max_tag_name_length']:
            raise exceptions.TagTooLong(tagname)

        return tagname
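    # Illustrative example (not from the original source; assumes the configured
    # whitelist contains lowercase letters, digits, and underscores):
    #   photodb.normalize_tagname('Mom & Dad')  ->  'mom__dad'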

class PDBUserMixin:
    def generate_user_id(self):
        '''
        User IDs are randomized instead of integers like the other objects,
        so they get their own method.
        '''
        possible = string.digits + string.ascii_uppercase
        for retry in range(20):
            user_id = [random.choice(possible) for x in range(self.config['id_length'])]
            user_id = ''.join(user_id)

            self.cur.execute('SELECT * FROM users WHERE id == ?', [user_id])
            if self.cur.fetchone() is None:
                break
        else:
            raise Exception('Failed to create user id after 20 tries.')

        return user_id

    def get_user(self, username=None, id=None):
        if not helpers.is_xor(id, username):
            raise exceptions.NotExclusive('One and only one of `id`, `username` must be passed.')

        if username is not None:
            self.cur.execute('SELECT * FROM users WHERE username == ?', [username])
        else:
            self.cur.execute('SELECT * FROM users WHERE id == ?', [id])

        fetch = self.cur.fetchone()
        if fetch is not None:
            return objects.User(self, fetch)
        else:
            raise exceptions.NoSuchUser(username)

    def login(self, user_id, password):
        self.cur.execute('SELECT * FROM users WHERE id == ?', [user_id])
        fetch = self.cur.fetchone()

        if fetch is None:
            raise exceptions.WrongLogin()

        stored_password = fetch[constants.SQL_USER['password']]

        if not isinstance(password, bytes):
            password = password.encode('utf-8')

        success = bcrypt.checkpw(password, stored_password)
        if not success:
            raise exceptions.WrongLogin()

        return objects.User(self, fetch)

    def register_user(self, username, password, commit=True):
        if len(username) < self.config['min_username_length']:
            raise exceptions.UsernameTooShort(username)

        if len(username) > self.config['max_username_length']:
            raise exceptions.UsernameTooLong(username)

        badchars = [c for c in username if c not in self.config['valid_username_chars']]
        if badchars:
            raise exceptions.InvalidUsernameChars(badchars)

        if not isinstance(password, bytes):
            password = password.encode('utf-8')

        if len(password) < self.config['min_password_length']:
            raise exceptions.PasswordTooShort

        self.cur.execute('SELECT * FROM users WHERE username == ?', [username])
        if self.cur.fetchone() is not None:
            raise exceptions.UserExists(username)

        user_id = self.generate_user_id()
        hashed_password = bcrypt.hashpw(password, bcrypt.gensalt())
        created = int(helpers.now())

        data = {
            'id': user_id,
            'username': username,
            'password': hashed_password,
            'created': created,
        }

        (qmarks, bindings) = binding_filler(constants.SQL_USER_COLUMNS, data)
        query = 'INSERT INTO users VALUES(%s)' % qmarks
        self.cur.execute(query, bindings)

        if commit:
            self.log.debug('Committing - register user')
            self.commit()

        return objects.User(self, data)
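    # Note (illustrative, not from the original source): bcrypt.hashpw() embeds
    # the salt inside the hash it returns, so login() only needs
    # bcrypt.checkpw(candidate_bytes, stored_hash); no separate salt column exists.
    #   user = photodb.register_user('alice', 'correct horse battery staple')
    #   photodb.login(user.id, 'correct horse battery staple')  # returns the User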


class PhotoDB(PDBAlbumMixin, PDBPhotoMixin, PDBTagMixin, PDBUserMixin):
    '''
    This class represents an SQLite3 database containing the following tables:

    albums:
        Rows represent albums, which group an arbitrary number of photos.
        Entries contain a unique ID, a title and description, and optionally
        an associated directory. The inclusion of a photo in an album is
        stored in the `album_photo_rel` table.

    photos:
        Rows represent image files on the local disk.
        Entries contain a unique ID, the image's filepath, and metadata
        like dimensions and filesize.

    tags:
        Rows represent labels, which can be applied to an arbitrary number of
        photos. Photos may be selected by which tags they contain.
        Entries contain a unique ID and a name.

    photo_tag_rel:
        Rows represent a Photo's ownership of a particular Tag.

    tag_synonyms:
        Rows represent relationships between two tag names, so that they both
        resolve to the same Tag object when selected. Entries contain the
        subordinate name and master name.
        The master name MUST also exist in the `tags` table.
        If a new synonym is created referring to another synonym, the master name
        will be resolved and used instead, so a synonym never points to another synonym.
        Tag objects will ALWAYS represent the master tag.

        Note that the entries in this table do not contain ID numbers.
        The rationale here is that "coco" is a synonym for "chocolate" regardless
        of the "chocolate" tag's ID, and that if a tag is renamed, its synonyms
        do not necessarily follow.
        The `rename` method of Tag objects includes a parameter
        `apply_to_synonyms` if you do want them to follow.
    '''
    def __init__(
        self,
        data_directory=None,
    ):
        if data_directory is None:
            data_directory = constants.DEFAULT_DATADIR

        # DATA DIR PREP
        data_directory = helpers.normalize_filepath(data_directory, allowed='/\\')
        self.data_directory = os.path.abspath(data_directory)
        os.makedirs(self.data_directory, exist_ok=True)

        # DATABASE
        self.database_abspath = os.path.join(self.data_directory, 'phototagger.db')
        existing_database = os.path.exists(self.database_abspath)
        self.sql = sqlite3.connect(self.database_abspath)
        self.cur = self.sql.cursor()

        if existing_database:
            self.cur.execute('PRAGMA user_version')
            existing_version = self.cur.fetchone()[0]
            if existing_version != DATABASE_VERSION:
                message = constants.ERROR_DATABASE_OUTOFDATE
                message = message.format(current=existing_version, new=DATABASE_VERSION)
                raise SystemExit(message)

        statements = DB_INIT.split(';')
        for statement in statements:
            self.cur.execute(statement)

        # CONFIG
        self.config_abspath = os.path.join(self.data_directory, 'config.json')
        self.config = copy.deepcopy(constants.DEFAULT_CONFIGURATION)
        if os.path.isfile(self.config_abspath):
            with open(self.config_abspath, 'r') as handle:
                user_config = json.load(handle)
                self.config.update(user_config)
        else:
            with open(self.config_abspath, 'w') as handle:
                handle.write(json.dumps(self.config, indent=4, sort_keys=True))
        #print(self.config)

        # THUMBNAIL DIRECTORY
        self.thumbnail_directory = os.path.join(self.data_directory, 'site_thumbnails')
        self.thumbnail_directory = os.path.abspath(self.thumbnail_directory)
        os.makedirs(self.thumbnail_directory, exist_ok=True)

        # OTHER
        self.log = logging.getLogger(__name__)
        self.on_commit_queue = []
        self._cached_frozen_children = None

    def __repr__(self):
        return 'PhotoDB(data_directory={datadir})'.format(datadir=repr(self.data_directory))

    def _uncache(self):
        self._cached_frozen_children = None

    def commit(self):
        while self.on_commit_queue:
            task = self.on_commit_queue.pop()
            print(task)
            args = task.get('args', [])
            kwargs = task.get('kwargs', {})
            task['action'](*args, **kwargs)
        self.sql.commit()
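    # Note (illustrative, not from the original source): on_commit_queue holds
    # deferred tasks as dicts, e.g.
    #   photodb.on_commit_queue.append({'action': os.remove, 'args': [some_stale_path]})
    # commit() pops each entry, calls action(*args, **kwargs), then commits the
    # SQL transaction.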

    def digest_directory(
        self,
        directory,
        *,
        exclude_directories=None,
        exclude_filenames=None,
        commit=True
    ):
        '''
        Create an album, and add the directory's contents to it recursively.

        If a Photo object already exists for a file, it will be added to the correct album.
        '''
        if not os.path.isdir(directory):
            raise ValueError('Not a directory: %s' % directory)
        if exclude_directories is None:
            exclude_directories = self.config['digest_exclude_dirs']
        if exclude_filenames is None:
            exclude_filenames = self.config['digest_exclude_files']

        directory = spinal.str_to_fp(directory)
        directory.correct_case()
        generator = spinal.walk_generator(
            directory,
            exclude_directories=exclude_directories,
            exclude_filenames=exclude_filenames,
            yield_style='nested',
        )
        try:
            album = self.get_album_by_path(directory.absolute_path)
        except exceptions.NoSuchAlbum:
            album = self.new_album(
                associated_directory=directory.absolute_path,
                commit=False,
                title=directory.basename,
            )

        albums = {directory.absolute_path: album}
        for (current_location, directories, files) in generator:
            current_album = albums.get(current_location.absolute_path, None)
            if current_album is None:
                try:
                    current_album = self.get_album_by_path(current_location.absolute_path)
                except exceptions.NoSuchAlbum:
                    current_album = self.new_album(
                        associated_directory=current_location.absolute_path,
                        commit=False,
                        title=current_location.basename,
                    )
                    print('Created %s' % current_album.title)
                albums[current_location.absolute_path] = current_album
                parent = albums[current_location.parent.absolute_path]
                try:
                    parent.add(current_album, commit=False)
                except exceptions.GroupExists:
                    pass
                #print('Added to %s' % parent.title)
            for filepath in files:
                try:
                    photo = self.new_photo(filepath.absolute_path, commit=False)
                except exceptions.PhotoExists as e:
                    photo = e.photo
                current_album.add_photo(photo, commit=False)

        if commit:
            self.log.debug('Committing - digest')
            self.commit()
        return album
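    # Usage sketch (illustrative, not from the original source; the path is
    # hypothetical):
    #   album = photodb.digest_directory('D:\\pictures\\2016')
    # One album is created (or reused) per subdirectory, the albums are nested
    # to mirror the folder tree, and every file becomes a Photo in its
    # directory's album.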

    def digest_new_files(
        self,
        directory,
        exclude_directories=None,
        exclude_filenames=None,
        recurse=False,
        commit=True
    ):
        '''
        Walk the directory and add new files as Photos.
        Does NOT create or modify any albums like `digest_directory` does.
        '''
        if not os.path.isdir(directory):
            raise ValueError('Not a directory: %s' % directory)
        if exclude_directories is None:
            exclude_directories = self.config['digest_exclude_dirs']
        if exclude_filenames is None:
            exclude_filenames = self.config['digest_exclude_files']

        directory = spinal.str_to_fp(directory)
        generator = spinal.walk_generator(
            directory,
            exclude_directories=exclude_directories,
            exclude_filenames=exclude_filenames,
            recurse=recurse,
            yield_style='flat',
        )
        for filepath in generator:
            filepath = filepath.absolute_path
            try:
                self.get_photo_by_path(filepath)
            except exceptions.NoSuchPhoto:
                # This is what we want.
                pass
            else:
                continue
            photo = self.new_photo(filepath, commit=False)
        if commit:
            self.log.debug('Committing - digest_new_files')
            self.commit()


    def easybake(self, ebstring):
        '''
        Easily create tags, groups, and synonyms with a string like
        "group1.group2.tag+synonym"
        "family.parents.dad+father"
        etc
        '''
        output_notes = []
        def create_or_get(name):
            print('cog', name)
            try:
                item = self.get_tag(name)
                note = ('existing_tag', item.qualified_name())
            except exceptions.NoSuchTag:
                item = self.new_tag(name)
                note = ('new_tag', item.qualified_name())
            output_notes.append(note)
            return item

        ebstring = ebstring.strip()
        ebstring = ebstring.strip('.+=')
        if ebstring == '':
            return

        if '=' in ebstring and '+' in ebstring:
            raise ValueError('Cannot rename and assign synonym at once')

        rename_parts = ebstring.split('=')
        if len(rename_parts) == 2:
            (ebstring, rename_to) = rename_parts
        elif len(rename_parts) == 1:
            ebstring = rename_parts[0]
            rename_to = None
        else:
            raise ValueError('Too many equals signs')

        create_parts = ebstring.split('+')
        if len(create_parts) == 2:
            (tag, synonym) = create_parts
        elif len(create_parts) == 1:
            tag = create_parts[0]
            synonym = None
        else:
            raise ValueError('Too many plus signs')

        if not tag:
            return None

        if rename_to:
            tag = self.get_tag(tag)
            note = ('rename', '%s=%s' % (tag.name, rename_to))
            tag.rename(rename_to)
            output_notes.append(note)
        else:
            tag_parts = tag.split('.')
            tags = [create_or_get(t) for t in tag_parts]
            for (higher, lower) in zip(tags, tags[1:]):
                try:
                    lower.join_group(higher)
                    note = ('join_group', '%s.%s' % (higher.name, lower.name))
                    output_notes.append(note)
                except exceptions.GroupExists:
                    pass
            tag = tags[-1]

        if synonym:
            try:
                tag.add_synonym(synonym)
                note = ('new_synonym', '%s+%s' % (tag.name, synonym))
                output_notes.append(note)
                print('New syn %s' % synonym)
            except exceptions.TagExists:
                pass
        return output_notes
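    # Usage sketch (illustrative, not from the original source):
    #   photodb.easybake('family.parents.dad+father')
    # creates any of family, parents, dad that don't exist yet, nests them as
    # family.parents.dad, adds "father" as a synonym of dad, and returns the
    # list of notes describing what was done.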

    def generate_id(self, table):
        '''
        Create a new ID number that is unique to the given table.
        Note that while this method may INSERT / UPDATE, it does not commit.
        We'll wait for that to happen in whoever is calling us, so we know the
        ID is actually used.
        '''
        table = table.lower()
        if table not in ['photos', 'tags', 'groups']:
            raise ValueError('Invalid table requested: %s.' % table)

        self.cur.execute('SELECT * FROM id_numbers WHERE tab == ?', [table])
        fetch = self.cur.fetchone()
        if fetch is None:
            # Register new value
            new_id_int = 1
            do_insert = True
        else:
            # Use database value
            new_id_int = int(fetch[constants.SQL_LASTID['last_id']]) + 1
            do_insert = False

        new_id = str(new_id_int).rjust(self.config['id_length'], '0')
        if do_insert:
            self.cur.execute('INSERT INTO id_numbers VALUES(?, ?)', [table, new_id])
        else:
            self.cur.execute('UPDATE id_numbers SET last_id = ? WHERE tab == ?', [new_id, table])
        return new_id

    def get_thing_by_id(self, thing_type, thing_id):
        thing_map = _THING_CLASSES[thing_type]

        if isinstance(thing_id, thing_map['class']):
            thing_id = thing_id.id

        query = 'SELECT * FROM %s WHERE id == ?' % thing_map['table']
        self.cur.execute(query, [thing_id])
        thing = self.cur.fetchone()
        if thing is None:
            return raise_no_such_thing(thing_map['exception'], thing_id=thing_id)
        thing = thing_map['class'](self, thing)
        return thing

    def get_things(self, thing_type, orderby=None):
        thing_map = _THING_CLASSES[thing_type]

        if orderby:
            self.cur.execute('SELECT * FROM %s ORDER BY %s' % (thing_map['table'], orderby))
        else:
            self.cur.execute('SELECT * FROM %s' % thing_map['table'])

        things = self.cur.fetchall()
        for thing in things:
            thing = thing_map['class'](self, row_tuple=thing)
            yield thing


_THING_CLASSES = {
    'album':
        {
            'class': objects.Album,
            'exception': exceptions.NoSuchAlbum,
            'table': 'albums',
        },
    'photo':
        {
            'class': objects.Photo,
            'exception': exceptions.NoSuchPhoto,
            'table': 'photos',
        },
    'tag':
        {
            'class': objects.Tag,
            'exception': exceptions.NoSuchTag,
            'table': 'tags',
        },
    'user':
        {
            'class': objects.User,
            'exception': exceptions.NoSuchUser,
            'table': 'users',
        }
}

if __name__ == '__main__':
    p = PhotoDB()
    print(p)