Use SQL generated columns for area, aspectratio, basename, bitrate.
parent d819b23263
commit 57f1b80442

9 changed files with 102 additions and 56 deletions
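
The schema changes below move four derived values (area, aspectratio, basename, bitrate) out of Python and into SQLite generated columns. As a quick reference, here is a minimal standalone sketch of the two flavors of generated column used in this commit — VIRTUAL (recomputed on read) and STORED (computed on write and kept in the row). It assumes SQLite 3.31 or newer; the table and the halfwidth column are throwaway examples, not part of etiquette's schema.

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute('''
    CREATE TABLE demo(
        width INT,
        height INT,
        -- VIRTUAL: recomputed whenever the row is read, occupies no storage
        area INT GENERATED ALWAYS AS (width * height) VIRTUAL,
        -- the 1.0 factor forces REAL division instead of SQLite's integer division
        aspectratio REAL GENERATED ALWAYS AS (1.0 * width / height) VIRTUAL,
        -- STORED: computed once on write and saved in the row
        halfwidth INT GENERATED ALWAYS AS (width / 2) STORED
    )
    ''')
    db.execute('INSERT INTO demo(width, height) VALUES (1920, 1080)')
    print(db.execute('SELECT area, aspectratio, halfwidth FROM demo').fetchone())
    # expected: (2073600, 1.7777777777777777, 960)
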
@@ -41,7 +41,7 @@ ffmpeg = _load_ffmpeg()
 
 # Database #########################################################################################
 
-DATABASE_VERSION = 21
+DATABASE_VERSION = 22
 
 DB_INIT = f'''
 CREATE TABLE IF NOT EXISTS albums(
@@ -71,15 +71,11 @@ CREATE INDEX IF NOT EXISTS index_bookmarks_author_id on bookmarks(author_id);
 CREATE TABLE IF NOT EXISTS photos(
     id INT PRIMARY KEY NOT NULL,
     filepath TEXT COLLATE NOCASE,
-    basename TEXT COLLATE NOCASE,
     override_filename TEXT COLLATE NOCASE,
-    extension TEXT COLLATE NOCASE,
     mtime INT,
     sha256 TEXT,
     width INT,
     height INT,
-    ratio REAL,
-    area INT,
     duration INT,
     bytes INT,
     created INT,
@@ -87,12 +83,26 @@ CREATE TABLE IF NOT EXISTS photos(
     tagged_at INT,
     author_id INT,
     searchhidden INT,
+    -- GENERATED COLUMNS
+    area INT GENERATED ALWAYS AS (width * height) VIRTUAL,
+    aspectratio REAL GENERATED ALWAYS AS (1.0 * width / height) VIRTUAL,
+    -- Thank you ungalcrys
+    -- https://stackoverflow.com/a/38330814/5430534
+    basename TEXT GENERATED ALWAYS AS (
+        COALESCE(
+            override_filename,
+            replace(filepath, rtrim(filepath, replace(replace(filepath, '\\', '/'), '/', '')), '')
+        )
+    ) STORED COLLATE NOCASE,
+    extension TEXT GENERATED ALWAYS AS (
+        replace(basename, rtrim(basename, replace(basename, '.', '')), '')
+    ) VIRTUAL COLLATE NOCASE,
+    bitrate REAL GENERATED ALWAYS AS ((bytes / 128) / duration) VIRTUAL,
     FOREIGN KEY(author_id) REFERENCES users(id)
 );
 CREATE INDEX IF NOT EXISTS index_photos_id on photos(id);
 CREATE INDEX IF NOT EXISTS index_photos_filepath on photos(filepath COLLATE NOCASE);
-CREATE INDEX IF NOT EXISTS index_photos_override_filename on
-    photos(override_filename COLLATE NOCASE);
+CREATE INDEX IF NOT EXISTS index_photos_basename on photos(basename COLLATE NOCASE);
 CREATE INDEX IF NOT EXISTS index_photos_created on photos(created);
 CREATE INDEX IF NOT EXISTS index_photos_extension on photos(extension);
 CREATE INDEX IF NOT EXISTS index_photos_author_id on photos(author_id);
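
The basename column is the trickiest expression here (the StackOverflow trick credited above). The inner replace(replace(filepath, '\', '/'), '/', '') deletes every path separator, leaving only the characters that can appear in the final component; rtrim(filepath, ...) then strips those characters from the right of the full path, which leaves the directory prefix up to and including the last separator; the outer replace(filepath, <prefix>, '') removes that prefix, leaving the basename. The extension column applies the same idea with '.' as the separator, yielding the text after the final dot. bitrate is (bytes / 128) / duration: bytes / 128 is the size in kibibits (bytes × 8 ÷ 1024), divided by the duration in seconds.

A minimal sketch, checking both expressions in isolation — the table and values are illustrative, not etiquette's schema, and note that SQLite performs integer division on the two INT operands of bitrate (unlike aspectratio, there is no 1.0 factor):

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute('''
    CREATE TABLE demo(
        filepath TEXT,
        override_filename TEXT,
        bytes INT,
        duration INT,
        -- strip everything up to the last path separator to get the basename
        basename TEXT GENERATED ALWAYS AS (
            COALESCE(
                override_filename,
                replace(filepath, rtrim(filepath, replace(replace(filepath, '\\', '/'), '/', '')), '')
            )
        ) STORED,
        -- bytes/128 = kibibits, divided by seconds; INT/INT truncates in SQLite
        bitrate REAL GENERATED ALWAYS AS ((bytes / 128) / duration) VIRTUAL
    )
    ''')
    db.execute(
        'INSERT INTO demo(filepath, override_filename, bytes, duration) VALUES (?, ?, ?, ?)',
        ('C:\\music\\song.mp3', None, 5_000_000, 240),
    )
    print(db.execute('SELECT basename, bitrate FROM demo').fetchone())
    # expected: ('song.mp3', 162.0)   since 5_000_000 // 128 // 240 == 162
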
@@ -194,7 +204,7 @@ ALLOWED_ORDERBY_COLUMNS = {
     'extension',
     'height',
     'random',
-    'ratio',
+    'aspectratio',
     'tagged_at',
     'width',
 }
@@ -850,11 +850,11 @@ class Photo(ObjectBase):
 
         self.real_path = db_row['filepath']
         self.real_path = pathclass.Path(self.real_path)
+        self.basename = db_row['basename']
 
         self.id = db_row['id']
         self.created_unix = db_row['created']
         self._author_id = self.normalize_author_id(db_row['author_id'])
-        self.override_filename = db_row['override_filename']
         self.extension = self.real_path.extension.no_dot
         self.mtime = db_row['mtime']
         self.sha256 = db_row['sha256']
@@ -864,12 +864,13 @@ class Photo(ObjectBase):
         else:
             self.dot_extension = '.' + self.extension
 
-        self.area = db_row['area']
         self.bytes = db_row['bytes']
         self.duration = db_row['duration']
         self.width = db_row['width']
         self.height = db_row['height']
-        self.ratio = db_row['ratio']
+        self.area = db_row['area']
+        self.aspectratio = db_row['aspectratio']
+        self.bitrate = db_row['bitrate']
 
         self.thumbnail = self.normalize_thumbnail(db_row['thumbnail'])
         self.tagged_at_unix = db_row['tagged_at']
@@ -1004,17 +1005,6 @@ class Photo(ObjectBase):
 
         return soup
 
-    @property
-    def basename(self) -> str:
-        return self.override_filename or self.real_path.basename
-
-    @property
-    def bitrate(self) -> typing.Optional[float]:
-        if self.duration and self.bytes is not None:
-            return (self.bytes / 128) / self.duration
-        else:
-            return None
-
     @property
     def bytes_string(self) -> str:
         if self.bytes is not None:
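
These two Python properties are superseded by the basename and bitrate generated columns; __init__ now reads both values straight from db_row. For side-by-side reading, a rough Python rendering of what the SQL now computes — the function names and values below are illustrative, not etiquette's API:

    def basename(override_filename, filepath):
        # SQL: COALESCE(override_filename, <strip directory prefix from filepath>)
        # Note: COALESCE only skips NULL, while the old Python `or` also skipped ''.
        if override_filename is not None:
            return override_filename
        return filepath.replace('\\', '/').rsplit('/', 1)[-1]

    def bitrate(nbytes, duration):
        # SQL: (bytes / 128) / duration  -- kibibits per second of playback
        if duration and nbytes is not None:
            return (nbytes / 128) / duration
        return None

    print(basename(None, 'C:\\music\\song.mp3'))  # song.mp3
    print(bitrate(5_000_000, 240))                # ≈162.76
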
@@ -1181,11 +1171,11 @@ class Photo(ObjectBase):
         j = {
             'type': 'photo',
             'id': self.id,
+            'aspectratio': self.aspectratio,
             'author': self.author.jsonify() if self._author_id else None,
             'extension': self.extension,
             'width': self.width,
             'height': self.height,
-            'ratio': self.ratio,
             'area': self.area,
             'bytes': self.bytes,
             'duration_string': self.duration_string,
@@ -1281,8 +1271,6 @@ class Photo(ObjectBase):
         self.bytes = None
         self.width = None
         self.height = None
-        self.area = None
-        self.ratio = None
         self.duration = None
 
         if self.real_path.is_file:
@@ -1302,10 +1290,6 @@ class Photo(ObjectBase):
         elif self.simple_mimetype == 'audio':
             self._reload_audio_metadata()
 
-        if self.width and self.height:
-            self.area = self.width * self.height
-            self.ratio = round(self.width / self.height, 2)
-
         hash_kwargs = hash_kwargs or {}
         sha256 = spinal.hash_file(self.real_path, hash_class=hashlib.sha256, **hash_kwargs)
         self.sha256 = sha256.hexdigest()
@@ -1316,8 +1300,6 @@ class Photo(ObjectBase):
             'sha256': self.sha256,
             'width': self.width,
             'height': self.height,
-            'area': self.area,
-            'ratio': self.ratio,
             'duration': self.duration,
             'bytes': self.bytes,
         }
@@ -1352,8 +1334,6 @@ class Photo(ObjectBase):
         data = {
             'id': self.id,
             'filepath': new_filepath.absolute_path,
-            'basename': new_filepath.basename,
-            'extension': new_filepath.extension.no_dot,
         }
         self.photodb.update(table=Photo, pairs=data, where_key='id')
         self.real_path = new_filepath
@@ -1456,8 +1436,6 @@ class Photo(ObjectBase):
         data = {
             'id': self.id,
             'filepath': new_path.absolute_path,
-            'basename': new_path.basename,
-            'extension': new_path.extension.no_dot,
         }
         self.photodb.update(table=Photo, pairs=data, where_key='id')
         self.real_path = new_path
@@ -1494,7 +1472,6 @@ class Photo(ObjectBase):
             'override_filename': new_filename,
         }
         self.photodb.update(table=Photo, pairs=data, where_key='id')
-        self.override_filename = new_filename
 
         self.__reinit__()
 
@@ -357,9 +357,7 @@ class PDBPhotoMixin:
         data = {
             'id': photo_id,
             'filepath': filepath.absolute_path,
-            'basename': filepath.basename,
             'override_filename': None,
-            'extension': filepath.extension.no_dot,
             'created': helpers.now().timestamp(),
             'tagged_at': None,
             'author_id': author_id,
@@ -370,14 +368,12 @@ class PDBPhotoMixin:
             'bytes': None,
             'width': None,
             'height': None,
-            'area': None,
-            'ratio': None,
             'duration': None,
             'thumbnail': None,
         }
         self.insert(table=objects.Photo, pairs=data)
 
-        photo = self.get_cached_instance(objects.Photo, data)
+        photo = self.get_photo(photo_id)
 
         if do_metadata:
             hash_kwargs = hash_kwargs or {}
@@ -417,11 +413,12 @@ class PDBPhotoMixin:
         self,
         *,
         area=None,
+        aspectratio=None,
         width=None,
         height=None,
-        ratio=None,
         bytes=None,
         duration=None,
+        bitrate=None,
 
         author=None,
         created=None,
@@ -450,7 +447,7 @@ class PDBPhotoMixin:
         ):
         '''
         PHOTO PROPERTIES
-        area, width, height, ratio, bytes, duration:
+        area, aspectratio, width, height, bytes, duration, bitrate:
             A dotdot_range string representing min and max. Or just a number
             for lower bound.
 
@@ -531,7 +528,7 @@ class PDBPhotoMixin:
             How many *successful* results to skip before we start yielding.
 
         orderby:
-            A list of strings like ['ratio DESC', 'created ASC'] to sort
+            A list of strings like ['aspectratio DESC', 'created ASC'] to sort
             and subsort the results.
             Descending is assumed if not provided.
 
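
A hypothetical call shape for the renamed parameters, inferred only from the docstring above (dotdot_range strings for min/max, a bare number for a lower bound, and an orderby list). photodb is assumed to be an etiquette PhotoDB instance; this is a sketch, not code from the project or its tests:

    def search_widescreen_videos(photodb):
        return photodb.search(
            aspectratio='1.7..1.8',                       # min..max dotdot_range
            bitrate='128',                                # bare number = lower bound
            orderby=['aspectratio DESC', 'created ASC'],
        )
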
@@ -562,9 +559,10 @@ class PDBPhotoMixin:
         searchhelpers.minmax('created', created, minimums, maximums, warning_bag=warning_bag)
         searchhelpers.minmax('width', width, minimums, maximums, warning_bag=warning_bag)
         searchhelpers.minmax('height', height, minimums, maximums, warning_bag=warning_bag)
-        searchhelpers.minmax('ratio', ratio, minimums, maximums, warning_bag=warning_bag)
+        searchhelpers.minmax('aspectratio', aspectratio, minimums, maximums, warning_bag=warning_bag)
         searchhelpers.minmax('bytes', bytes, minimums, maximums, warning_bag=warning_bag)
         searchhelpers.minmax('duration', duration, minimums, maximums, warning_bag=warning_bag)
+        searchhelpers.minmax('bitrate', bitrate, minimums, maximums, warning_bag=warning_bag)
 
         author = searchhelpers.normalize_author(author, photodb=self, warning_bag=warning_bag)
         extension = searchhelpers.normalize_extension(extension)
@@ -652,7 +650,8 @@ class PDBPhotoMixin:
             'area': area,
             'width': width,
             'height': height,
-            'ratio': ratio,
+            'aspectratio': aspectratio,
+            'bitrate': bitrate,
             'bytes': bytes,
             'duration': duration,
             'author': list(author) or None,
@@ -347,10 +347,6 @@ def normalize_orderby(orderby, warning_bag=None):
         column_friendly = column
         column_expanded = {
             'random': 'RANDOM()',
-            'area': '(width * height)',
-            'basename': 'COALESCE(override_filename, basename)',
-            'bitrate': '((bytes / 128) / duration)',
-            'ratio': '(width / height)',
         }.get(column, column)
 
         final_orderby.append( (column_friendly, column_expanded, direction) )
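
The expansion table shrinks because area, aspectratio, basename, and bitrate are now real (generated) columns, so an ORDER BY clause can name them directly instead of substituting an expression. A minimal illustration with throwaway data:

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute('''
    CREATE TABLE demo(
        width INT,
        height INT,
        aspectratio REAL GENERATED ALWAYS AS (1.0 * width / height) VIRTUAL
    )
    ''')
    db.executemany('INSERT INTO demo(width, height) VALUES (?, ?)',
                   [(1920, 1080), (1080, 1920), (800, 600)])
    for row in db.execute('SELECT width, height, aspectratio FROM demo ORDER BY aspectratio DESC'):
        print(row)
    # (1920, 1080, 1.7777777777777777)
    # (800, 600, 1.3333333333333333)
    # (1080, 1920, 0.5625)
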
@@ -129,7 +129,7 @@ def search_by_argparse(args, yield_albums=False, yield_photos=False):
         area=args.area,
         width=args.width,
         height=args.height,
-        ratio=args.ratio,
+        aspectratio=args.aspectratio,
         bytes=args.bytes,
         duration=args.duration,
         author=args.author,
@@ -1272,7 +1272,7 @@ def main(argv):
         ''',
     )
     p_search.add_argument(
-        '--ratio',
+        '--aspectratio',
         metavar='X-Y',
         default=None,
         help='''
@@ -399,10 +399,11 @@ def get_search_core():
     area = request.args.get('area')
     width = request.args.get('width')
     height = request.args.get('height')
-    ratio = request.args.get('ratio')
+    aspectratio = request.args.get('aspectratio')
     bytes = request.args.get('bytes')
     has_thumbnail = request.args.get('has_thumbnail')
     duration = request.args.get('duration')
+    bitrate = request.args.get('bitrate')
     created = request.args.get('created')
 
     # These are in a dictionary so I can pass them to the page template.
@@ -410,9 +411,10 @@ def get_search_core():
         'area': area,
         'width': width,
         'height': height,
-        'ratio': ratio,
+        'aspectratio': aspectratio,
         'bytes': bytes,
         'duration': duration,
+        'bitrate': bitrate,
 
         'author': author,
         'created': created,
@@ -59,6 +59,7 @@
 .photo_viewer_application,
 .photo_viewer_text
 {
+    display: flex;
     justify-items: center;
     align-items: center;
 }
@@ -183,7 +184,7 @@
     {% endif %}
     {% if photo.width %}
     <li title="{{photo.area}} px">Dimensions: {{photo.width}}x{{photo.height}} px</li>
-    <li>Aspect ratio: {{photo.ratio}}</li>
+    <li>Aspect ratio: {{photo.aspectratio|round(2)}}</li>
     {% endif %}
     <li>Size: {{photo.bytes|bytestring}}</li>
     {% if photo.duration %}
@@ -171,7 +171,7 @@
     <option value="area" {{"selected" if selected_column=="area" else ""}}>Area</option>
     <option value="width" {{"selected" if selected_column=="width" else ""}}>Width</option>
     <option value="height" {{"selected" if selected_column=="height" else ""}}>Height</option>
-    <option value="ratio" {{"selected" if selected_column=="ratio" else ""}}>Aspect Ratio</option>
+    <option value="aspectratio" {{"selected" if selected_column=="aspectratio" else ""}}>Aspect Ratio</option>
     <option value="bytes" {{"selected" if selected_column=="bytes" else ""}}>File size</option>
     <option value="duration" {{"selected" if selected_column=="duration" else ""}}>Duration</option>
     <option value="bitrate" {{"selected" if selected_column=="bitrate" else ""}}>Bitrate</option>
@@ -823,6 +823,67 @@ def upgrade_20_to_21(photodb):
         photodb.update(table=etiquette.objects.Photo, pairs={'id': photo.id, 'thumbnail': store_as}, where_key='id')
         photo.thumbnail = new_thumbnail
 
+def upgrade_21_to_22(photodb):
+    m = Migrator(photodb)
+
+    m.tables['photos']['create'] = '''
+    CREATE TABLE IF NOT EXISTS photos(
+        id INT PRIMARY KEY NOT NULL,
+        filepath TEXT COLLATE NOCASE,
+        override_filename TEXT COLLATE NOCASE,
+        mtime INT,
+        sha256 TEXT,
+        width INT,
+        height INT,
+        duration INT,
+        bytes INT,
+        created INT,
+        thumbnail TEXT,
+        tagged_at INT,
+        author_id INT,
+        searchhidden INT,
+        -- GENERATED COLUMNS
+        area INT GENERATED ALWAYS AS (width * height) VIRTUAL,
+        aspectratio REAL GENERATED ALWAYS AS (1.0 * width / height) VIRTUAL,
+        -- Thank you ungalcrys
+        -- https://stackoverflow.com/a/38330814/5430534
+        basename TEXT GENERATED ALWAYS AS (
+            COALESCE(
+                override_filename,
+                replace(filepath, rtrim(filepath, replace(replace(filepath, '\\', '/'), '/', '')), '')
+            )
+        ) STORED COLLATE NOCASE,
+        extension TEXT GENERATED ALWAYS AS (
+            replace(basename, rtrim(basename, replace(basename, '.', '')), '')
+        ) VIRTUAL COLLATE NOCASE,
+        bitrate REAL GENERATED ALWAYS AS ((bytes / 128) / duration) VIRTUAL,
+        FOREIGN KEY(author_id) REFERENCES users(id)
+    );
+    '''
+    m.tables['photos']['transfer'] = '''
+    INSERT INTO photos SELECT
+        id,
+        filepath,
+        override_filename,
+        mtime,
+        sha256,
+        width,
+        height,
+        duration,
+        bytes,
+        created,
+        thumbnail,
+        tagged_at,
+        author_id,
+        searchhidden
+    FROM photos_old;
+    '''
+
+    m.go()
+
+    photodb.execute('DROP INDEX index_photos_override_filename')
+    photodb.execute('CREATE INDEX IF NOT EXISTS index_photos_basename on photos(basename COLLATE NOCASE)')
+
 def upgrade_all(data_directory):
     '''
     Given the directory containing a phototagger database, apply all of the
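
SQLite can add a VIRTUAL generated column with ALTER TABLE ADD COLUMN, but not a STORED one, which is presumably why this upgrade rebuilds the photos table through the Migrator helper rather than altering it in place. A quick post-upgrade sanity check one might run against the rebuilt table — the database filename here is hypothetical, and the generated columns should come back populated for existing rows without any data migration:

    import sqlite3

    db = sqlite3.connect('phototagger.db')   # hypothetical path to the upgraded database
    rows = db.execute('''
        SELECT basename, extension, area, aspectratio, bitrate
        FROM photos
        ORDER BY created DESC
        LIMIT 5
    ''').fetchall()
    for row in rows:
        print(row)
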