Experimental atom feed for photos, albums, search.
This commit is contained in:
parent
17e0d0b6a6
commit
2562084fce
3 changed files with 105 additions and 0 deletions
|
@ -2,6 +2,7 @@
|
||||||
This file provides functions which are used in various places throughout the
|
This file provides functions which are used in various places throughout the
|
||||||
codebase but don't deserve to be methods of any class.
|
codebase but don't deserve to be methods of any class.
|
||||||
'''
|
'''
|
||||||
|
import bs4
|
||||||
import datetime
|
import datetime
|
||||||
import hashlib
|
import hashlib
|
||||||
import mimetypes
|
import mimetypes
|
||||||
|
@ -304,6 +305,28 @@ def is_xor(*args) -> bool:
|
||||||
'''
|
'''
|
||||||
return [bool(a) for a in args].count(True) == 1
|
return [bool(a) for a in args].count(True) == 1
|
||||||
|
|
||||||
|
def make_atom_feed(objects, feed_title, feed_link, feed_id) -> bs4.BeautifulSoup:
    '''
    Build an Atom feed document containing one <entry> per object.

    objects:
        An iterable of objects that provide an `atomify()` method returning
        a BeautifulSoup <entry> document (e.g. Photo, Album).
    feed_title:
        Human-readable title for the feed.
    feed_link:
        URL of the HTML page this feed mirrors.
    feed_id:
        Unique identifier string for the feed.

    Returns the BeautifulSoup document for the whole <feed>.
    '''
    soup = bs4.BeautifulSoup('', 'xml')

    feed = soup.new_tag('feed')
    # Atom documents must declare the Atom namespace (RFC 4287), and the
    # entries produced by atomify() emit <etiquette:type> elements, so the
    # etiquette prefix must be declared too or the XML is not
    # namespace-well-formed.
    feed['xmlns'] = 'http://www.w3.org/2005/Atom'
    # NOTE(review): namespace URI for the etiquette prefix is arbitrary but
    # must be stable -- confirm the preferred URI for this project.
    feed['xmlns:etiquette'] = 'https://github.com/voussoir/etiquette'
    soup.append(feed)

    title = soup.new_tag('title')
    title.string = feed_title
    feed.append(title)

    link = soup.new_tag('link')
    link['href'] = feed_link
    feed.append(link)

    id_element = soup.new_tag('id')
    id_element.string = feed_id
    feed.append(id_element)

    # RFC 4287 requires atom:feed to contain exactly one atom:updated.
    # now() is this module's "current UTC datetime" helper.
    updated = soup.new_tag('updated')
    updated.string = now().isoformat()
    feed.append(updated)

    for obj in objects:
        feed.append(obj.atomify())

    return soup
|
||||||
|
|
||||||
def now():
|
def now():
|
||||||
'''
|
'''
|
||||||
Return the current UTC datetime object.
|
Return the current UTC datetime object.
|
||||||
|
|
|
@ -4,6 +4,7 @@ but are returned by the PDB accesses.
|
||||||
'''
|
'''
|
||||||
import abc
|
import abc
|
||||||
import bcrypt
|
import bcrypt
|
||||||
|
import bs4
|
||||||
import datetime
|
import datetime
|
||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
|
@ -418,6 +419,39 @@ class Album(ObjectBase, GroupableMixin):
|
||||||
for photo in photos:
|
for photo in photos:
|
||||||
photo.add_tag(tag)
|
photo.add_tag(tag)
|
||||||
|
|
||||||
|
def atomify(self) -> bs4.BeautifulSoup:
    '''
    Render this album as an Atom <entry> document suitable for inclusion
    in a feed built by helpers.make_atom_feed.
    '''
    soup = bs4.BeautifulSoup('', 'xml')
    entry = soup.new_tag('entry')
    soup.append(entry)

    def add_child(tag_name):
        # Create an element, attach it to the entry, and hand it back for
        # further decoration.
        element = soup.new_tag(tag_name)
        entry.append(element)
        return element

    add_child('id').string = str(self.id)
    add_child('title').string = self.display_name

    link = add_child('link')
    link['rel'] = 'alternate'
    link['type'] = 'text/html'
    link['href'] = f'/album/{self.id}'

    add_child('published').string = self.created.isoformat()

    # Albums carry no inline content; the <content> element is left empty.
    # NOTE(review): Atom also expects an <updated> element per entry --
    # confirm whether that should be added here.
    add_child('content')

    add_child('etiquette:type').string = 'album'

    return soup
|
||||||
|
|
||||||
@decorators.required_feature('album.edit')
|
@decorators.required_feature('album.edit')
|
||||||
@worms.atomic
|
@worms.atomic
|
||||||
def delete(self, *, delete_children=False) -> None:
|
def delete(self, *, delete_children=False) -> None:
|
||||||
|
@ -937,6 +971,39 @@ class Photo(ObjectBase):
|
||||||
|
|
||||||
return tag
|
return tag
|
||||||
|
|
||||||
|
def atomify(self) -> bs4.BeautifulSoup:
    '''
    Render this photo as an Atom <entry> document suitable for inclusion
    in a feed built by helpers.make_atom_feed.
    '''
    soup = bs4.BeautifulSoup('', 'xml')
    entry = soup.new_tag('entry')
    soup.append(entry)

    def add_child(tag_name):
        # Create an element, attach it to the entry, and hand it back for
        # further decoration.
        element = soup.new_tag(tag_name)
        entry.append(element)
        return element

    add_child('id').string = str(self.id)
    add_child('title').string = self.basename

    link = add_child('link')
    link['rel'] = 'alternate'
    link['type'] = 'text/html'
    link['href'] = f'/photo/{self.id}'

    add_child('published').string = self.created.isoformat()

    # Inline HTML content must be wrapped in CDATA so the <img> markup
    # survives XML serialization.
    add_child('content').string = bs4.CData(f'<img src="/thumbnail/{self.id}.jpg"/>')

    add_child('etiquette:type').string = 'photo'

    return soup
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def basename(self) -> str:
|
def basename(self) -> str:
|
||||||
return self.override_filename or self.real_path.basename
|
return self.override_filename or self.real_path.basename
|
||||||
|
|
|
@ -530,6 +530,21 @@ def get_search_html():
|
||||||
)
|
)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
@site.route('/search.atom')
def get_search_atom():
    '''
    Atom-feed rendition of the search results page.

    Runs the same search as /search (via get_search_core) and serializes
    the results as an Atom feed.
    '''
    results = get_search_core()['results']

    feed = etiquette.helpers.make_atom_feed(
        results,
        feed_id=request.query_string.decode('utf-8'),
        feed_title='etiquette search',
        feed_link=request.url.replace('/search.atom', '/search'),
    )

    response = flask.Response(
        str(feed),
        headers={'Content-Type': 'application/atom+xml; charset=utf-8'},
    )
    return response
|
||||||
|
|
||||||
@site.route('/search.json')
|
@site.route('/search.json')
|
||||||
def get_search_json():
|
def get_search_json():
|
||||||
search_results = get_search_core()
|
search_results = get_search_core()
|
||||||
|
|
Loading…
Reference in a new issue