2018-06-23 06:06:29 +00:00
|
|
|
import argparse
|
|
|
|
import datetime
|
|
|
|
import gzip
|
|
|
|
import PIL.Image
|
2020-02-14 04:28:47 +00:00
|
|
|
import random
|
2018-06-23 06:06:29 +00:00
|
|
|
import requests
|
|
|
|
import sqlite3
|
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
|
2020-02-12 01:24:02 +00:00
|
|
|
from voussoirkit import threadpool
|
2021-03-23 19:15:31 +00:00
|
|
|
from voussoirkit import vlogging
|
2020-02-12 01:24:02 +00:00
|
|
|
|
2021-03-23 19:15:31 +00:00
|
|
|
log = vlogging.getLogger(__name__, 'pixelcanvasdl')
|
2021-10-25 18:54:06 +00:00
|
|
|
vlogging.getLogger('urllib3.connectionpool').setLevel(vlogging.CRITICAL)
|
2018-06-23 06:06:29 +00:00
|
|
|
|
|
|
|
# The sixteen colors of the PixelCanvas palette, as RGB tuples.
WHITE = (255, 255, 255)
LIGHTGRAY = (228, 228, 228)
DARKGRAY = (136, 136, 136)
BLACK = (34, 34, 34)
PINK = (255, 167, 209)
RED = (229, 0, 0)
ORANGE = (229, 149, 0)
BROWN = (160, 106, 66)
YELLOW = (229, 217, 0)
LIGHTGREEN = (148, 224, 68)
DARKGREEN = (2, 190, 1)
LIGHTBLUE = (0, 211, 221)
MEDIUMBLUE = (0, 131, 199)
DARKBLUE = (0, 0, 234)
LIGHTPURPLE = (207, 110, 228)
DARKPURPLE = (130, 0, 128)

# Maps each 4-bit pixel value (0-15) to its RGB color.
COLOR_MAP = {
    0: WHITE,
    1: LIGHTGRAY,
    2: DARKGRAY,
    3: BLACK,
    4: PINK,
    5: RED,
    6: ORANGE,
    7: BROWN,
    8: YELLOW,
    9: LIGHTGREEN,
    10: DARKGREEN,
    11: LIGHTBLUE,
    12: MEDIUMBLUE,
    13: DARKBLUE,
    14: LIGHTPURPLE,
    15: DARKPURPLE,
}

# The width and height of a chunk, in pixels.
CHUNK_SIZE_PIX = 64

# The number of bytes for a full chunk.
# They are 32x64 because each byte represents two 4-bit pixels.
CHUNK_SIZE_BYTES = int(CHUNK_SIZE_PIX * (CHUNK_SIZE_PIX / 2))

# The width and height of a bigchunk, in chunks.
BIGCHUNK_SIZE_CHUNKS = 15

# The width and height of a bigchunk, in pixels.
BIGCHUNK_SIZE_PIX = BIGCHUNK_SIZE_CHUNKS * CHUNK_SIZE_PIX

# The number of bytes for a full bigchunk.
BIGCHUNK_SIZE_BYTES = int(BIGCHUNK_SIZE_PIX * (BIGCHUNK_SIZE_PIX / 2))

# The chunk 0, 0 has a pixel coordinate of -448, -448 for some reason.
ORIGIN_OFFSET_X = 448
ORIGIN_OFFSET_Y = 448
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# Each download inserts a NEW row per chunk (no UPDATE), so the table keeps a
# history; readers select the newest row per (x, y) via ORDER BY updated_at.
DB_INIT = '''
BEGIN;
CREATE TABLE IF NOT EXISTS chunks (x INT, y INT, data BLOB, updated_at REAL);
CREATE INDEX IF NOT EXISTS chunks_x_y ON chunks(x, y);
COMMIT;
'''

# Module-level database connection and cursor, shared by every function below.
sql = sqlite3.connect('pixelcanvas.db')
cur = sql.cursor()
cur.executescript(DB_INIT)

# Shared HTTP session so repeated downloads can reuse connections.
session = requests.Session()
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# HELPER FUNCTIONS
|
|
|
|
################################################################################
|
|
|
|
def now():
    '''
    Return the current UTC time as a unix timestamp (float seconds).
    '''
    return datetime.datetime.now(datetime.timezone.utc).timestamp()
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# DB FUNCTIONS
|
|
|
|
################################################################################
|
2021-03-23 19:17:44 +00:00
|
|
|
def db_commit():
    '''
    Commit the module-level sqlite connection, with a debug log line.
    '''
    log.debug('Committing')
    sql.commit()
|
|
|
|
|
2020-02-11 23:38:04 +00:00
|
|
|
def get_chunk_from_db(chunk_x, chunk_y, as_of=None):
    '''
    Get the chunk from the database, and raise IndexError if it doesn't exist.

    If as_of is given, return the newest stored copy whose updated_at is at or
    before that timestamp; otherwise return the newest copy overall.
    '''
    bindings = [chunk_x, chunk_y]
    as_of_clause = ''
    if as_of is not None:
        as_of_clause = 'AND updated_at <= ?'
        bindings.append(as_of)

    # Coordinates are always bound parameters; only the optional clause is
    # interpolated, and it contains no user data.
    query = f'''
    SELECT x, y, data FROM chunks
    WHERE x == ? AND y == ?
    {as_of_clause}
    ORDER BY updated_at DESC
    LIMIT 1
    '''
    cur.execute(query, bindings)
    row = cur.fetchone()
    if row is None:
        raise IndexError(f'{chunk_x}, {chunk_y}')

    (x, y, data) = row
    # Chunk blobs are stored gzip-compressed.
    return (x, y, gzip.decompress(data))
|
|
|
|
|
2020-02-11 23:38:04 +00:00
|
|
|
def get_chunk(chunk_x, chunk_y, *args, **kwargs):
    '''
    Get the chunk from the database if it exists, or else download it.
    '''
    try:
        return get_chunk_from_db(chunk_x, chunk_y, *args, **kwargs)
    except IndexError:
        pass
    # Cache miss: download the whole bigchunk that contains this chunk,
    # store everything, then read the requested chunk back out.
    (bigchunk_x, bigchunk_y) = chunk_to_bigchunk(chunk_x, chunk_y)
    insert_chunks(download_bigchunk(bigchunk_x, bigchunk_y))
    return get_chunk_from_db(chunk_x, chunk_y, *args, **kwargs)
|
2018-06-23 06:06:29 +00:00
|
|
|
|
|
|
|
def insert_chunk(chunk_x, chunk_y, data, commit=True):
    '''
    Insert a new row for this chunk, unless the newest stored copy is
    already byte-identical to the given data.
    '''
    try:
        existing = get_chunk_from_db(chunk_x, chunk_y)
    except IndexError:
        existing = None

    # Skip the insert when nothing changed, so history rows stay meaningful.
    if existing is not None and existing[2] == data:
        return

    log.debug('Updating chunk %s %s', chunk_x, chunk_y)
    compressed = gzip.compress(data)
    cur.execute('INSERT INTO chunks VALUES(?, ?, ?, ?)', [chunk_x, chunk_y, compressed, now()])
    if commit:
        db_commit()
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2020-02-12 01:21:51 +00:00
|
|
|
def insert_chunks(chunks, commit=True):
    '''
    Insert many chunks, committing periodically so a long download run
    does not accumulate everything in one giant transaction.
    '''
    for (index, chunk) in enumerate(chunks):
        # Commit every 25000 chunks (and once up front at index 0).
        if commit and index % 25000 == 0:
            db_commit()
        insert_chunk(*chunk, commit=False)
    if commit:
        db_commit()
|
2020-02-12 01:21:51 +00:00
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# API FUNCTIONS
|
|
|
|
################################################################################
|
|
|
|
def url_for_bigchunk(bigchunk_x, bigchunk_y):
    '''
    Return the pixelcanvas.io API URL for downloading this bigchunk.
    '''
    return 'https://pixelcanvas.io/api/bigchunk/{x}.{y}.bmp'.format(x=bigchunk_x, y=bigchunk_y)
|
2020-02-09 05:08:52 +00:00
|
|
|
|
|
|
|
def request(url):
    '''
    GET the url with the shared session, raising on HTTP error statuses.
    '''
    resp = session.get(url)
    resp.raise_for_status()
    return resp
|
|
|
|
|
2018-06-23 06:06:29 +00:00
|
|
|
def download_bigchunk(bigchunk_x, bigchunk_y):
    '''
    Download a bigchunk and return the list of chunks.

    Raises ValueError if the server's payload is the wrong size, which would
    otherwise silently corrupt the chunks written to the database.
    '''
    url = url_for_bigchunk(bigchunk_x, bigchunk_y)
    log.info('Downloading %s', url)
    response = request(url)
    bigchunk_data = response.content
    if len(bigchunk_data) != BIGCHUNK_SIZE_BYTES:
        # Fixed grammar ("does not matched") and modernized %-formatting to an
        # f-string, matching the rest of the file.
        message = 'Received bigchunk does not match the expected byte size!\n'
        message += f'Got {len(bigchunk_data)} instead of {BIGCHUNK_SIZE_BYTES}'
        raise ValueError(message)
    chunks = split_bigchunk(bigchunk_x, bigchunk_y, bigchunk_data)
    return chunks
|
|
|
|
|
2020-02-14 04:28:47 +00:00
|
|
|
def download_bigchunk_range(bigchunk_xy1, bigchunk_xy2, shuffle=False, threads=1):
    '''
    Given (UPPERLEFT_X, UPPERLEFT_Y), (LOWERRIGHT_X, LOWERRIGHT_Y),
    download multiple bigchunks, and yield all of the small chunks.

    shuffle: download bigchunks in random order instead of corner to corner.
    threads: number of concurrent downloads; 1 means download in this thread.
    Raises ValueError if threads < 1.
    '''
    if threads < 1:
        raise ValueError(threads)

    log.debug('Downloading bigchunk range %s-%s', bigchunk_xy1, bigchunk_xy2)
    bigchunks = bigchunk_range_iterator(bigchunk_xy1, bigchunk_xy2)
    if shuffle:
        # Shuffling requires materializing the coordinate iterator first.
        bigchunks = list(bigchunks)
        random.shuffle(bigchunks)

    if threads == 1:
        # Sequential path: download each bigchunk in this thread.
        for (x, y) in bigchunks:
            chunks = download_bigchunk(x, y)
            yield from chunks

    else:
        # Concurrent path: queue one download job per bigchunk and yield the
        # resulting chunks as each job finishes. Exceptions from worker
        # threads are re-raised here in the caller's thread.
        pool = threadpool.ThreadPool(size=threads)
        job_gen = (
            {'function': download_bigchunk, 'args': (x, y), 'name': (x, y),}
            for (x, y) in bigchunks
        )
        pool.add_generator(job_gen)
        for job in pool.result_generator():
            job.join()
            if job.exception:
                raise job.exception
            yield from job.value
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# CHUNK FUNCTIONS
|
|
|
|
################################################################################
|
2018-06-23 06:06:29 +00:00
|
|
|
def chunk_range_iterator(chunk_xy1, chunk_xy2):
    '''
    Given (UPPERLEFT_X, UPPERLEFT_Y), (LOWERRIGHT_X, LOWERRIGHT_Y),
    yield (x, y) pairs for chunks in this range, inclusive.
    '''
    (start_x, start_y) = chunk_xy1
    (end_x, end_y) = chunk_xy2
    for x in range(start_x, end_x + 1):
        for y in range(start_y, end_y + 1):
            yield (x, y)
|
|
|
|
|
|
|
|
def bigchunk_range_iterator(bigchunk_xy1, bigchunk_xy2):
    '''
    Given (UPPERLEFT_X, UPPERLEFT_Y), (LOWERRIGHT_X, LOWERRIGHT_Y),
    yield (x, y) pairs for bigchunks in this range, inclusive.
    '''
    # Bigchunk coordinates advance in steps of 15 chunks.
    step = BIGCHUNK_SIZE_CHUNKS
    for x in range(bigchunk_xy1[0], bigchunk_xy2[0] + step, step):
        for y in range(bigchunk_xy1[1], bigchunk_xy2[1] + step, step):
            yield (x, y)
|
|
|
|
|
|
|
|
def chunk_to_bigchunk(chunk_x, chunk_y):
    '''
    Return the (x, y) of the bigchunk that contains this chunk.
    '''
    # Floor each coordinate down to the nearest multiple of 15.
    bigchunk_x = BIGCHUNK_SIZE_CHUNKS * (chunk_x // BIGCHUNK_SIZE_CHUNKS)
    bigchunk_y = BIGCHUNK_SIZE_CHUNKS * (chunk_y // BIGCHUNK_SIZE_CHUNKS)
    log.loud('Converted chunk %s, %s to bigchunk %s, %s', chunk_x, chunk_y, bigchunk_x, bigchunk_y)
    return (bigchunk_x, bigchunk_y)
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
def chunk_range_to_bigchunk_range(chunk_xy1, chunk_xy2):
    '''
    Convert a chunk coordinate range to the enclosing bigchunk range.
    '''
    return (chunk_to_bigchunk(*chunk_xy1), chunk_to_bigchunk(*chunk_xy2))
|
|
|
|
|
2018-06-23 06:06:29 +00:00
|
|
|
def chunk_to_pixel(chunk_x, chunk_y):
    '''
    Return the world pixel coordinate of this chunk's upper-left corner.
    '''
    pixel_xy = (
        chunk_x * CHUNK_SIZE_PIX - ORIGIN_OFFSET_X,
        chunk_y * CHUNK_SIZE_PIX - ORIGIN_OFFSET_Y,
    )
    log.loud('Converted chunk %s, %s to pixel %s, %s', chunk_x, chunk_y, *pixel_xy)
    return pixel_xy
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
def chunk_range_to_pixel_range(chunk_xy1, chunk_xy2):
    '''
    Convert a chunk coordinate range to the corresponding pixel range.
    '''
    return (chunk_to_pixel(*chunk_xy1), chunk_to_pixel(*chunk_xy2))
|
|
|
|
|
2018-06-23 06:06:29 +00:00
|
|
|
def pixel_to_chunk(pixel_x, pixel_y):
    '''
    Return the (x, y) of the chunk that contains this world pixel.
    '''
    chunk_xy = (
        (pixel_x + ORIGIN_OFFSET_X) // CHUNK_SIZE_PIX,
        (pixel_y + ORIGIN_OFFSET_Y) // CHUNK_SIZE_PIX,
    )
    log.loud('Converted pixel %s, %s to chunk %s, %s', pixel_x, pixel_y, *chunk_xy)
    return chunk_xy
|
|
|
|
|
|
|
|
def pixel_range_to_chunk_range(pixel_xy1, pixel_xy2):
    '''
    Convert a pixel coordinate range to the enclosing chunk range.
    '''
    chunk_range = (pixel_to_chunk(*pixel_xy1), pixel_to_chunk(*pixel_xy2))
    log.loud('Converted pixel range %s, %s to chunk range %s, %s', pixel_xy1, pixel_xy2, *chunk_range)
    return chunk_range
|
|
|
|
|
|
|
|
def pixel_to_bigchunk(pixel_x, pixel_y):
    '''
    Return the (x, y) of the bigchunk that contains this world pixel.
    '''
    bigchunk_x = BIGCHUNK_SIZE_CHUNKS * ((pixel_x + ORIGIN_OFFSET_X) // BIGCHUNK_SIZE_PIX)
    bigchunk_y = BIGCHUNK_SIZE_CHUNKS * ((pixel_y + ORIGIN_OFFSET_Y) // BIGCHUNK_SIZE_PIX)
    log.loud('Converted pixel %s, %s to bigchunk %s, %s', pixel_x, pixel_y, bigchunk_x, bigchunk_y)
    return (bigchunk_x, bigchunk_y)
|
|
|
|
|
|
|
|
def pixel_range_to_bigchunk_range(pixel_xy1, pixel_xy2):
    '''
    Convert a pixel coordinate range to the enclosing bigchunk range.
    '''
    bigchunk_range = (pixel_to_bigchunk(*pixel_xy1), pixel_to_bigchunk(*pixel_xy2))
    log.loud('Converted pixel range %s, %s to bigchunk range %s, %s', pixel_xy1, pixel_xy2, *bigchunk_range)
    return bigchunk_range
|
|
|
|
|
|
|
|
def split_bigchunk(bigchunk_x, bigchunk_y, bigchunk_data):
    '''
    Chunks are downloaded from the site as a "bigchunk" which is just 15x15
    chunks stitched together.
    The chunks are arranged left to right, top to bottom.
    For example, the byte stream:
    000011112222333344445555666677778888
    represents the bitmap:
    001122
    001122
    334455
    334455
    667788
    667788
    '''
    chunks = []
    chunk_count = len(bigchunk_data) // CHUNK_SIZE_BYTES
    for chunk_index in range(chunk_count):
        # Row-major position of this chunk within the bigchunk grid.
        (row, column) = divmod(chunk_index, BIGCHUNK_SIZE_CHUNKS)
        start = chunk_index * CHUNK_SIZE_BYTES
        chunk_data = bigchunk_data[start:start + CHUNK_SIZE_BYTES]
        chunks.append((bigchunk_x + column, bigchunk_y + row, chunk_data))
    return chunks
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# IMAGE FUNCTIONS
|
|
|
|
################################################################################
|
2018-06-23 06:06:29 +00:00
|
|
|
def chunk_to_rgb(chunk_data):
    '''
    Convert the data chunk into RGB tuples.

    PixelCanvas chunks are strings of bytes where every byte represents two
    horizontal pixels. Each pixel is 4 bits since there are 16 colors.
    Chunks are 32x64 bytes for a total of 64x64 pixels.
    '''
    # Each byte holds two pixels: the high nibble is the left pixel, the low
    # nibble is the right pixel. Because the pixels arrive in row-major order,
    # appending sequentially produces exactly the (row * width) + column
    # layout of the flat matrix.
    matrix = [None] * (len(chunk_data) * 2)
    write_index = 0
    for byte in chunk_data:
        matrix[write_index] = COLOR_MAP[byte >> 4]
        matrix[write_index + 1] = COLOR_MAP[byte & 0xf]
        write_index += 2
    return matrix
|
|
|
|
|
|
|
|
def rgb_to_image(matrix):
    '''
    Convert a flat list of RGB tuples into a chunk-sized PIL image.
    '''
    raw = bytes(channel for pixel in matrix for channel in pixel)
    image = PIL.Image.frombytes(mode='RGB', size=(CHUNK_SIZE_PIX, CHUNK_SIZE_PIX), data=raw)
    return image
|
|
|
|
|
2018-06-30 07:05:18 +00:00
|
|
|
def chunk_to_image(chunk_data, scale=1):
    '''
    Render a single chunk's raw data as a PIL image, optionally scaled.
    '''
    image = rgb_to_image(chunk_to_rgb(chunk_data))
    if scale not in (None, 1):
        new_size = (int(image.size[0] * scale), int(image.size[1] * scale))
        # NEAREST keeps the pixel-art crisp instead of interpolating.
        image = image.resize(new_size, resample=PIL.Image.NEAREST)
    return image
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2018-06-30 07:05:18 +00:00
|
|
|
def chunks_to_image(chunks, scale=1):
    '''
    Combine all of the given chunks into a single image.

    chunks: a list of (chunk_x, chunk_y, chunk_data) tuples.
    scale: passed through to chunk_to_image for each tile.
    Gaps in the range are left as the WHITE background color.
    '''
    log.debug('Creating image from %s chunks', len(chunks))
    # Bounding box of the chunk coordinates present.
    min_x = min(chunk[0] for chunk in chunks)
    max_x = max(chunk[0] for chunk in chunks)
    min_y = min(chunk[1] for chunk in chunks)
    max_y = max(chunk[1] for chunk in chunks)
    span_x = max_x - min_x + 1
    span_y = max_y - min_y + 1
    img_width = int(span_x * CHUNK_SIZE_PIX * scale)
    img_height = int(span_y * CHUNK_SIZE_PIX * scale)
    img = PIL.Image.new(mode='RGB', size=(img_width, img_height), color=WHITE)
    for (chunk_x, chunk_y, chunk_data) in chunks:
        # Paste position is relative to the bounding box's upper-left chunk.
        paste_x = int((chunk_x - min_x) * CHUNK_SIZE_PIX * scale)
        paste_y = int((chunk_y - min_y) * CHUNK_SIZE_PIX * scale)
        chunk_image = chunk_to_image(chunk_data, scale)
        img.paste(chunk_image, (paste_x, paste_y))
    return img
|
|
|
|
|
|
|
|
def crop_image(image, pixel_xy1, pixel_xy2):
    '''
    Because the images are rendered on a chunk basis, they are probably larger
    than the exact area that you want. Use this function to crop the image to
    the exact coordinates.
    pixel_xy1 and pixel_xy2 are the world coordinates that you used to get this
    image in the first place, not coordinates within this picture.
    '''
    img_width = pixel_xy2[0] - pixel_xy1[0] + 1
    img_height = pixel_xy2[1] - pixel_xy1[1] + 1
    # World coordinate of the image's upper-left corner: the upper-left pixel
    # of the chunk that contains pixel_xy1.
    basis_xy = chunk_to_pixel(*pixel_to_chunk(*pixel_xy1))

    # Translate the requested world coordinates into image-local coordinates.
    xy1 = (pixel_xy1[0] - (basis_xy[0]), pixel_xy1[1] - (basis_xy[1]))
    xy2 = (xy1[0] + img_width, xy1[1] + img_height)
    bbox = (xy1[0], xy1[1], xy2[0], xy2[1])
    log.debug('Cropping image down to %s', bbox)
    image = image.crop(bbox)
    return image
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
# COMMAND LINE
|
|
|
|
################################################################################
|
|
|
|
from voussoirkit import betterhelp
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2018-06-30 07:05:18 +00:00
|
|
|
def parse_coordinate_string(coordinates):
    '''
    Convert the given '~100.~100--100.100' to ((-100, -100), (100, 100)).
    '''
    coordinates = coordinates.strip()
    if '--' in coordinates:
        (xy1, xy2) = coordinates.split('--', 1)
    else:
        # If you are only updating a single chunk.
        xy1 = coordinates
        xy2 = coordinates

    def split_xy(xy):
        # Tilde is accepted as a minus sign (argparse-friendly), and comma is
        # accepted as an alternative x/y separator.
        xy = xy.replace('~', '-').replace(',', '.')
        (x, y) = xy.split('.')
        return (int(x), int(y))

    (xy1, xy2) = (split_xy(xy1), split_xy(xy2))
    log.debug('Parsed coordinates %s into %s %s', coordinates, xy1, xy2)
    return (xy1, xy2)
|
|
|
|
|
2020-02-09 05:08:52 +00:00
|
|
|
def overview_argparse(args):
    '''
    Draw an ascii map representing the owned chunks.
    '''
    cur.execute('SELECT x, y, updated_at FROM chunks GROUP BY x, y ORDER BY updated_at DESC')
    chunks = cur.fetchall()
    min_x = min(chunk[0] for chunk in chunks)
    max_x = max(chunk[0] for chunk in chunks)
    min_y = min(chunk[1] for chunk in chunks)
    max_y = max(chunk[1] for chunk in chunks)
    width = max_x - min_x + 1
    height = max_y - min_y + 1
    # Bugfix: the offset must map min_x/min_y to column/row 0. The previous
    # abs(min(min_x, 0)) produced out-of-range indices (IndexError) whenever
    # every stored chunk had a positive coordinate.
    x_offset = -min_x
    y_offset = -min_y

    matrix = [[' ' for x in range(width)] for y in range(height)]
    for (x, y, updated_at) in chunks:
        x += x_offset
        y += y_offset
        matrix[y][x] = '.'

    # Label chunks that lie on bigchunk boundaries with their coordinates.
    for (x, y, updated_at) in chunks:
        if (x % 15 == 0) and (y % 15 == 0):
            text = f'{x},{y}'
            x += x_offset
            y += y_offset
            for c in text:
                # Bugfix: stop instead of raising IndexError when a label
                # would run past the right edge of the map.
                if x >= width:
                    break
                matrix[y][x] = c
                x += 1

    for row in matrix:
        for column in row:
            print(column, end='')
        print()
|
|
|
|
|
2018-06-23 06:06:29 +00:00
|
|
|
def render_argparse(args):
    '''
    Render the requested area from the database to a PNG file, or display it
    on screen when --show was given.
    '''
    coordinates = parse_coordinate_string(args.coordinates)
    # Normalize so that `coordinates` is always a pixel range and
    # `chunk_range` is always a chunk range, whichever form was given.
    if args.is_chunks:
        chunk_range = coordinates
        coordinates = chunk_range_to_pixel_range(*coordinates)
    else:
        chunk_range = pixel_range_to_chunk_range(*coordinates)

    chunks = [get_chunk(*chunk_xy) for chunk_xy in chunk_range_iterator(*chunk_range)]
    image = chunks_to_image(chunks, scale=args.scale)

    # Autocrop to the exact pixel range, but only at native scale and only
    # when the user asked for pixels rather than whole chunks.
    if args.scale == 1 and not args.is_chunks:
        image = crop_image(image, *coordinates)

    if args.do_show:
        image.show()
    else:
        # Filename encodes the pixel range and, if not 1, the scale.
        ((p1x, p1y), (p2x, p2y)) = coordinates
        scale_s = f'_{args.scale}' if args.scale != 1 else ''
        filename = f'{p1x}.{p1y}--{p2x}.{p2y}{scale_s}.png'
        image.save(filename)
        log.debug('Wrote %s', filename)
|
|
|
|
|
|
|
|
def update_argparse(args):
    '''
    Download the requested area's bigchunks and store them in the database.
    '''
    coordinates = parse_coordinate_string(args.coordinates)
    if args.is_chunks:
        to_bigchunks = chunk_range_to_bigchunk_range
    else:
        to_bigchunks = pixel_range_to_bigchunk_range
    bigchunk_range = to_bigchunks(*coordinates)
    chunks = download_bigchunk_range(*bigchunk_range, shuffle=args.shuffle, threads=args.threads)
    try:
        insert_chunks(chunks)
    except KeyboardInterrupt:
        # Keep whatever was downloaded before the interrupt.
        db_commit()
|
2018-06-23 06:06:29 +00:00
|
|
|
|
2021-10-25 18:54:06 +00:00
|
|
|
@vlogging.main_decorator
def main(argv):
    '''
    Build the argument parser and dispatch to update / render / overview.
    '''
    parser = argparse.ArgumentParser(
        description='''
        This tool is run from the command line, where you provide the coordinates you
        want to download and render.

        The format for coordinates is `UPPERLEFT--LOWERRIGHT`. The format for each
        of those pieces is `X.Y`.

        Sometimes, argparse gets confused by negative coordinates because it thinks
        you're trying to provide another argument. If this happens, use a tilde `~`
        as the negative sign instead.

        Remember, because this is an image, up and left are negative; down and right
        are positive.
        ''',
    )
    subparsers = parser.add_subparsers()

    ################################################################################################

    p_update = subparsers.add_parser(
        'update',
        description='''
        Download chunks into the database.
        ''',
    )
    p_update.examples = [
        '0.0--100.100',
        '~100.~100--100.100',
        '~1200.300--~900.600',
    ]
    p_update.add_argument(
        'coordinates',
    )
    p_update.add_argument(
        '--chunks',
        dest='is_chunks',
        action='store_true',
        help='''
        The coordinates which you provided are chunk coordinates instead of
        pixel coordinates.
        ''',
    )
    p_update.add_argument(
        '--shuffle',
        action='store_true',
        help='''
        Download chunks in a random order instead of from corner to corner.
        ''',
    )
    p_update.add_argument(
        '--threads',
        type=int,
        default=1,
        help='''
        Use X threads to download bigchunks.
        ''',
    )
    p_update.set_defaults(func=update_argparse)

    ################################################################################################

    p_render = subparsers.add_parser(
        'render',
        description='''
        Export an image as PNG.
        ''',
    )
    p_render.examples = [
        '0.0--100.100',
        '~100.~100--100.100 --scale 2',
        '~1200.300--~900.600 --show',
    ]
    p_render.add_argument(
        'coordinates',
    )
    p_render.add_argument(
        '--chunks',
        dest='is_chunks',
        action='store_true',
        help='''
        The coordinates which you provided are chunk coordinates instead of
        pixel coordinates.
        ''',
    )
    # Bugfix: the help texts for --show and --scale were swapped.
    p_render.add_argument(
        '--show',
        dest='do_show',
        action='store_true',
        help='''
        Instead of saving the image, display it on the screen.
        pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.show
        ''',
    )
    p_render.add_argument(
        '--scale',
        dest='scale',
        type=float,
        default=1,
        help='''
        Render the image at a different scale.
        For best results, use powers of 2 like 0.5, 0.25, etc.
        This will disable the autocropping.
        ''',
    )
    p_render.set_defaults(func=render_argparse)

    ################################################################################################

    p_overview = subparsers.add_parser(
        'overview',
        help='''
        Draw an ascii map representing the owned chunks.
        ''',
    )
    p_overview.set_defaults(func=overview_argparse)

    return betterhelp.go(parser, argv)
|
2018-06-23 06:06:29 +00:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Forward main's return value as the process exit status.
    raise SystemExit(main(sys.argv[1:]))
|