diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d7d30c3..ad12ea63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,12 @@ -v1.5.3 (2018-01- +v1.6.0 (2018-??-??) + +- Added support for serving image tiles (`.imtiles`) +- Added support for serving image tile fragments, i.e., fragments from tile sets of filetype `imtiles`. +- Added support for serving OpenStreetMap image tile fragments, i.e., fragments from tile sets of filetype `osm-image`. +- Added support for serving "plain" 2d annotations (filetype `2dannodb`; datatype `2d-rectangle-domains`) +- Added support for serving GeoJSON (filetype `geodb`; datatype `geo-json`) + +v1.5.3 (2018-??-??) - Refactored the chromsizes code to be more modular diff --git a/fragments/utils.py b/fragments/utils.py index 96b87a00..be1e9d7b 100644 --- a/fragments/utils.py +++ b/fragments/utils.py @@ -7,8 +7,15 @@ import logging import numpy as np import pandas as pd +import sqlite3 +import requests +import math +from random import random +from io import BytesIO, StringIO +from PIL import Image from scipy.ndimage.interpolation import zoom +from geotiles.utils import get_tile_pos_from_lng_lat logger = logging.getLogger(__name__) @@ -119,7 +126,7 @@ def get_cooler(f, zoomout_level=0): return c -def get_frag_by_loc( +def get_frag_by_loc_from_cool( cooler_file, loci, dim, @@ -154,6 +161,212 @@ def get_frag_by_loc( return fragments +def get_frag_by_loc_from_imtiles( + imtiles_file, + loci, + zoom_level=0, + padding=0, + tile_size=256 +): + db = sqlite3.connect(imtiles_file) + info = db.execute('SELECT * FROM tileset_info').fetchone() + max_zoom = info[6] + max_width = info[8] + max_height = info[9] + im_type = 'JPEG' if info[10].lower() == 'jpg' else info[10] + + div = 2 ** (max_zoom - zoom_level) + width = max_width / div + height = max_height / div + + ims = [] + + for locus in loci: + start1 = round(locus[0] / div) + end1 = round(locus[1] / div) + start2 = round(locus[2] / div) + end2 = round(locus[3] / div) + + if not is_within(start1, end1, start2, end2, width, height): + ims.append(None) + continue + + # Get tile ids + tile_start1_id = start1 // tile_size + tile_end1_id = end1 // tile_size + tile_start2_id = start2 // tile_size + tile_end2_id = end2 // tile_size + + tiles_x_range = range(tile_start1_id, tile_end1_id + 1) + tiles_y_range = range(tile_start2_id, tile_end2_id + 1) + + # Extract image tiles + tiles = [] + for y in tiles_y_range: + for x in tiles_x_range: + tiles.append(Image.open(BytesIO(db.execute( + 'SELECT image FROM tiles WHERE z=? AND y=? 
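# Illustrative sketch, not part of this patch: the zoom arithmetic used by
# get_frag_by_loc_from_imtiles above, in isolation. A locus given in pixels
# at max_zoom is scaled down by 2 ** (max_zoom - zoom_level), and the ids
# of the covering tiles are the scaled coordinates floor-divided by the
# tile size. The function name is hypothetical.
def locus_to_tile_range(start, end, zoom_level, max_zoom, tile_size=256):
    div = 2 ** (max_zoom - zoom_level)  # downscaling factor at this zoom
    start_px = round(start / div)       # locus in pixels at this zoom level
    end_px = round(end / div)
    first_tile = start_px // tile_size  # ids of the tiles covering the locus
    last_tile = end_px // tile_size
    return start_px, end_px, range(first_tile, last_tile + 1)

# locus_to_tile_range(1000, 10000, 3, 5) == (250, 2500, range(0, 10))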
AND x=?', + (zoom_level, y, x) + ).fetchone()[0]))) + + im = ( + tiles[0] + if len(tiles) == 1 + else Image.new( + 'RGB', + ( + tile_size * len(tiles_x_range), + tile_size * len(tiles_y_range) + ) + ) + ) + + # Stitch them tiles together + if len(tiles) > 1: + i = 0 + for y in range(len(tiles_y_range)): + for x in range(len(tiles_x_range)): + im.paste( + tiles[i], (x * tile_size, y * tile_size) + ) + i += 1 + + # Convert starts and ends to local tile ids + start1_rel = start1 - tile_start1_id * tile_size + end1_rel = end1 - tile_start1_id * tile_size + start2_rel = start2 - tile_start2_id * tile_size + end2_rel = end2 - tile_start2_id * tile_size + + # Cut out the corresponding snippet + im_out = im.crop((start1_rel, start2_rel, end1_rel, end2_rel)) + + im_buffer = BytesIO() + im_out.save(im_buffer, format=im_type) + ims.append((im_buffer.getvalue(), 'image/{}'.format(im_type.lower()))) + + db.close() + + return ims + + +def get_frag_by_loc_from_osm( + imtiles_file, + loci, + zoom_level=0, + padding=0, + tile_size=256 +): + width = 360 + height = 180 + im_type = 'PNG' + + ims = [] + + for locus in loci: + start_lng = locus[0] + end_lng = locus[1] + start_lat = locus[2] + end_lat = locus[3] + + if not is_within( + start_lng + 180, + end_lng + 180, + end_lat + 90, + start_lat + 90, + width, + height + ): + ims.append(None) + continue + + # Get tile ids + start1, start2 = get_tile_pos_from_lng_lat( + start_lng, start_lat, zoom_level + ) + end1, end2 = get_tile_pos_from_lng_lat( + end_lng, end_lat, zoom_level + ) + + xPad = padding * (end1 - start1) + yPad = padding * (start2 - end2) + + start1 -= xPad + end1 += xPad + start2 += yPad + end2 -= yPad + + tile_start1_id = math.floor(start1) + tile_start2_id = math.floor(start2) + tile_end1_id = math.floor(end1) + tile_end2_id = math.floor(end2) + + start1 = math.floor(start1 * tile_size) + start2 = math.floor(start2 * tile_size) + end1 = math.ceil(end1 * tile_size) + end2 = math.ceil(end2 * tile_size) + + tiles_x_range = range(tile_start1_id, tile_end1_id + 1) + tiles_y_range = range(tile_start2_id, tile_end2_id + 1) + + # Extract image tiles + tiles = [] + for y in tiles_y_range: + for x in tiles_x_range: + prefixes = ['a', 'b', 'c'] + prefix_idx = math.floor(random() * len(prefixes)) + src = ( + 'http://{}.tile.openstreetmap.org/{}/{}/{}.png' + .format(prefixes[prefix_idx], zoom_level, x, y) + ) + + r = requests.get(src) + if r.status_code == 200: + tiles.append(Image.open(BytesIO(r.content))) + else: + tiles.append(None) + + im = ( + tiles[0] + if len(tiles) == 1 + else Image.new( + 'RGB', + ( + tile_size * len(tiles_x_range), + tile_size * len(tiles_y_range) + ) + ) + ) + + # Stitch them tiles together + if len(tiles) > 1: + i = 0 + for y in range(len(tiles_y_range)): + for x in range(len(tiles_x_range)): + im.paste( + tiles[i], (x * tile_size, y * tile_size) + ) + i += 1 + + # Convert starts and ends to local tile ids + start1_rel = start1 - tile_start1_id * tile_size + end1_rel = end1 - tile_start1_id * tile_size + start2_rel = start2 - tile_start2_id * tile_size + end2_rel = end2 - tile_start2_id * tile_size + + # Cut out the corresponding snippet + im_out = im.crop((start1_rel, start2_rel, end1_rel, end2_rel)) + + im_buffer = BytesIO() + im_out.save(im_buffer, format=im_type) + ims.append((im_buffer.getvalue(), 'image/{}'.format(im_type.lower()))) + + return ims + + +def is_within(start1, end1, start2, end2, width, height): + return start1 < width and end1 > 0 and start2 < height and end2 > 0 + + def calc_measure_dtd(matrix, locus): ''' 
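# Illustrative sketch, not part of this patch: the stitch-and-crop pattern
# shared by both extractors above. Tiles arrive in row-major order, get
# pasted onto a single canvas, and the requested window is cropped out.
# Function and argument names are hypothetical.
from PIL import Image

def stitch_and_crop(tiles, n_cols, n_rows, box, tile_size=256):
    canvas = Image.new('RGB', (tile_size * n_cols, tile_size * n_rows))
    for i, tile in enumerate(tiles):  # row-major: x varies fastest
        canvas.paste(tile, ((i % n_cols) * tile_size,
                            (i // n_cols) * tile_size))
    return canvas.crop(box)  # box = (left, upper, right, lower) in pixels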
Calculate the distance to the diagonal diff --git a/fragments/views.py b/fragments/views.py index 8c296143..e4b3080a 100755 --- a/fragments/views.py +++ b/fragments/views.py @@ -1,5 +1,6 @@ from __future__ import print_function +import base64 import hashlib import json import logging @@ -21,7 +22,9 @@ calc_measure_size, calc_measure_noise, calc_measure_sharpness, - get_frag_by_loc, + get_frag_by_loc_from_cool, + get_frag_by_loc_from_imtiles, + get_frag_by_loc_from_osm, get_intra_chr_loops_from_looplist, rel_loci_2_obj ) @@ -81,7 +84,7 @@ def fragments_by_loci(request): dims = 22 try: - padding = int(request.GET.get('padding', 0)) + padding = request.GET.get('padding', 0) except ValueError: padding = 0 @@ -107,48 +110,69 @@ def fragments_by_loci(request): ''' Loci list must be of type: - 0: chrom1 - 1: start1 - 2: end1 - 3: chrom2 - 4: start2 - 5: end2 + [cooler] [imtiles] + 0: chrom1 start1 + 1: start1 end1 + 2: end1 start2 + 3: chrom2 end2 + 4: start2 dataset + 5: end2 zoomLevel 6: dataset - 7: zoomOutLevel [0] + 7: zoomOutLevel ''' + tileset_idx = 6 if len(loci) and len(loci[0]) > 6 else 4 + zoom_level_idx = tileset_idx + 1 + + filetype = None + i = 0 loci_lists = {} try: for locus in loci: - cooler_file = '' + tileset_file = '' - if locus[6]: - if locus[6].endswith('.cool'): - cooler_file = path.join('data', locus[6]) + if locus[tileset_idx]: + if locus[tileset_idx].endswith('.cool'): + tileset_file = path.join('data', locus[tileset_idx]) else: try: - cooler_file = get_datapath( - Tileset.objects.get( - uuid=locus[6] - ).datafile.url + tileset = Tileset.objects.get( + uuid=locus[tileset_idx] + ) + tileset_file = get_datapath( + tileset.datafile.url ) + except AttributeError: return JsonResponse({ - 'error': 'Dataset (cooler file) not in database', - }, status=500) + 'error': 'Tileset ({}) does not exist'.format( + locus[tileset_idx] + ), + }, status=400) + except Tileset.DoesNotExist: + if locus[tileset_idx].startswith('osm'): + filetype = locus[tileset_idx] + else: + return JsonResponse({ + 'error': 'Tileset ({}) does not exist'.format( + locus[tileset_idx] + ), + }, status=400) else: return JsonResponse({ - 'error': 'Dataset (cooler file) not specified', - }, status=500) + 'error': 'Tileset not specified', + }, status=400) - if cooler_file not in loci_lists: - loci_lists[cooler_file] = {} + if tileset_file not in loci_lists: + loci_lists[tileset_file] = {} - if locus[7] not in loci_lists[cooler_file]: - loci_lists[cooler_file][locus[7]] = [] + if locus[zoom_level_idx] not in loci_lists[tileset_file]: + loci_lists[tileset_file][locus[zoom_level_idx]] = [] - loci_lists[cooler_file][locus[7]].append(locus[0:6] + [i]) + loci_lists[tileset_file][locus[zoom_level_idx]].append( + locus[0:tileset_idx] + [i] + ) i += 1 @@ -158,6 +182,12 @@ def fragments_by_loci(request): 'error_message': str(e) }, status=500) + filetype = filetype if filetype else ( + tileset.filetype + if tileset + else tileset_file[tileset_file.rfind('.') + 1:] + ) + # Get a unique string for caching dump = json.dumps(loci, sort_keys=True) + str(precision) + str(dims) uuid = hashlib.md5(dump.encode('utf-8')).hexdigest() @@ -173,29 +203,64 @@ def fragments_by_loci(request): pass matrices = [None] * i + data_types = [None] * i try: for dataset in loci_lists: for zoomout_level in loci_lists[dataset]: - raw_matrices = get_frag_by_loc( - dataset, - loci_lists[dataset][zoomout_level], - dims, - zoomout_level=zoomout_level, - balanced=not no_balance, - padding=padding, - percentile=percentile, - ignore_diags=ignore_diags, - 
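# Hypothetical request payloads matching the two loci layouts documented
# above (not part of this patch; uuids and coordinates are made up):
cooler_locus = [
    'chr1', 1000000, 2000000,  # chrom1, start1, end1
    'chr1', 1000000, 2000000,  # chrom2, start2, end2
    'my-cooler-uuid',          # dataset      (index 6)
    0,                         # zoomOutLevel (index 7)
]
imtiles_locus = [
    0, 1024, 0, 1024,          # start1, end1, start2, end2 (pixels)
    'my-imtiles-uuid',         # dataset   (index 4)
    5,                         # zoomLevel (index 5)
]
# Hence tileset_idx is 6 when a locus has more than 6 entries, 4 otherwise.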
no_normalize=no_normalize - ) + if filetype == 'cooler' or filetype == 'cool': + raw_matrices = get_frag_by_loc_from_cool( + dataset, + loci_lists[dataset][zoomout_level], + dims, + zoomout_level=zoomout_level, + balanced=not no_balance, + padding=int(padding), + percentile=percentile, + ignore_diags=ignore_diags, + no_normalize=no_normalize + ) + + if precision > 0: + raw_matrices = np.around( + raw_matrices, decimals=precision + ) - if precision > 0: - raw_matrices = np.around(raw_matrices, decimals=precision) + i = 0 + for raw_matrix in raw_matrices: + idx = loci_lists[dataset][zoomout_level][i][6] + matrices[idx] = raw_matrix.tolist() + data_types[idx] = 'matrix' + i += 1 + + if filetype == 'imtiles' or filetype == 'osm-image': + extractor = ( + get_frag_by_loc_from_imtiles + if filetype == 'imtiles' + else get_frag_by_loc_from_osm + ) + + sub_ims = extractor( + imtiles_file=dataset, + loci=loci_lists[dataset][zoomout_level], + zoom_level=zoomout_level, + padding=float(padding), + ) + + i = 0 + for im in sub_ims: + idx = loci_lists[dataset][zoomout_level][i][4] + + try: + # Store images as data URI + matrices[idx] = \ + base64.b64encode(im[0]).decode('utf-8') + except TypeError: + matrices[idx] = None + + data_types[idx] = 'dataUrl' + + i += 1 - i = 0 - for raw_matrix in raw_matrices: - matrices[loci_lists[dataset][zoomout_level][i][6]] =\ - raw_matrix.tolist() - i += 1 except Exception as ex: raise return JsonResponse({ @@ -205,7 +270,8 @@ def fragments_by_loci(request): # Create results results = { - 'fragments': matrices + 'fragments': matrices, + 'dataTypes': data_types } # Cache results for 30 minutes diff --git a/geotiles/__init__.py b/geotiles/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/geotiles/utils.py b/geotiles/utils.py new file mode 100644 index 00000000..60664ab9 --- /dev/null +++ b/geotiles/utils.py @@ -0,0 +1,181 @@ +import json +import math +import os +import sqlite3 +import collections as col + + +def get_tile_box(zoom, x, y): + """convert Google-style Mercator tile coordinate to + (minlat, maxlat, minlng, maxlng) bounding box""" + + minlng, minlat = get_lng_lat_from_tile_pos(zoom, x, y) + maxlng, maxlat = get_lng_lat_from_tile_pos(zoom, x + 1, y + 1) + + return (minlng, maxlng, minlat, maxlat) + + +def get_lng_lat_from_tile_pos(zoom, x, y): + """convert Google-style Mercator tile coordinate to + (lng, lat) of top-left corner of tile""" + + # "map-centric" latitude, in radians: + lat_rad = math.pi - 2*math.pi*y/(2**zoom) + # true latitude: + lat_rad = gudermannian(lat_rad) + lat = lat_rad * 180.0 / math.pi + + # longitude maps linearly to map, so we simply scale: + lng = -180.0 + 360.0*x/(2**zoom) + + return (lng, lat) + + +def get_tile_pos_from_lng_lat(lng, lat, zoom): + """convert lng/lat to Google-style Mercator tile coordinate (x, y) + at the given zoom level""" + + lat_rad = lat * math.pi / 180.0 + # "map-centric" latitude, in radians: + lat_rad = inv_gudermannian(lat_rad) + + x = 2**zoom * (lng + 180.0) / 360.0 + y = 2**zoom * (math.pi - lat_rad) / (2 * math.pi) + + return (x, y) + + +def gudermannian(x): + return 2*math.atan(math.exp(x)) - math.pi/2 + + +def inv_gudermannian(y): + return math.log(math.tan((y + math.pi/2) / 2)) + + +def get_tileset_info(tileset): + if not os.path.isfile(tileset): + return { + 'error': 'Tileset info is not available!' 
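# Worked example, not part of this patch: the Mercator helpers above invert
# each other, and flooring the fractional tile coordinates yields tile ids.
#
#   >>> get_tile_pos_from_lng_lat(-71.06, 42.36, 4)
#   (4.84..., 5.92...)   # Boston sits on tile (4, 5) at zoom level 4
#   >>> get_lng_lat_from_tile_pos(4, 4.84, 5.92)
#   (-71.1..., 42.3...)  # back to lng/lat, up to floating-point error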
+ } + + db = sqlite3.connect(tileset) + + res = db.execute('SELECT * FROM tileset_info').fetchone() + + o = { + 'zoom_step': res[0], + 'tile_size': res[1], + 'max_zoom': res[2], + 'min_x': res[3], + 'max_x': res[4], + 'min_y': res[5], + 'max_y': res[6], + 'max_data_length': res[1] * 2 ** res[2], + } + + return o + + +def get_tiles(db_file, zoom, x, y, width=1, height=1): + ''' + Retrieve a contiguous set of tiles from a 2D db tile file. + + Parameters + ---------- + db_file: str + The filename of the sqlite db file + zoom: int + The zoom level + x: int + The x position of the first tile + y: int + The y position of the first tile + width: int + The width of the block of tiles to retrieve + height: int + The height of the block of tiles to retrieve + + Returns + ------- + tiles: {pos: tile_value} + A set of tiles, indexed by position + ''' + conn = sqlite3.connect(db_file) + + c = conn.cursor() + + lng_from, _, lat_from, _ = get_tile_box(zoom, x, y) + _, lng_to, _, lat_to = get_tile_box(zoom, x + width - 1, y + height - 1) + + # Note the range query is being done in lng-lat. Since the coords go from: + # - Longitude: -180 to 180 + # - Latidue: 90 to -90 (NOTE THE SWAP HERE!!!) + # we are indexing min and max longitude and latitude but for querying we + # are using from and to longitude and latitude. Hence, the comparator and + # min/max for latitude are flipped (max == from lat; min == to lat) + query = ''' + SELECT + minLng, maxLng, maxLat, minLat, uid, importance, geometry, properties + FROM + intervals,position_index + WHERE + intervals.id=position_index.id AND + zoomLevel <= ? AND + rMaxLng >= ? AND + rMinLng <= ? AND + rMinLat <= ? AND + rMaxLat >= ? + ''' + + rows = c.execute( + query, + (zoom, lng_from, lng_to, lat_from, lat_to) + ).fetchall() + + new_rows = col.defaultdict(list) + + for r in rows: + try: + uid = r[4].decode('utf-8') + except AttributeError: + uid = r[4] + + x_start, y_start = get_tile_pos_from_lng_lat(r[0], r[2], zoom) + x_end, y_end = get_tile_pos_from_lng_lat(r[1], r[3], zoom) + + try: + geometry = json.loads(r[6]) + except Exception as e: + geometry = None + pass + + try: + properties = json.loads(r[7]) + except Exception as e: + properties = None + pass + + for i in range(x, x + width): + for j in range(y, y + height): + # Add annotations to each tile in which they are visible + if ( + x_start < i + 1 and + x_end >= i and + y_start < j + 1 and + y_end >= j + ): + # add the position offset to the returned values + new_rows[(i, j)] += [{ + 'xStart': r[0], + 'xEnd': r[1], + 'yStart': r[2], + 'yEnd': r[3], + 'importance': r[5], + 'uid': uid, + 'geometry': geometry, + 'properties': properties, + }] + conn.close() + + return new_rows diff --git a/imtiles/__init__.py b/imtiles/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/imtiles/utils.py b/imtiles/utils.py new file mode 100644 index 00000000..056b940b --- /dev/null +++ b/imtiles/utils.py @@ -0,0 +1,32 @@ +import os +import sqlite3 + + +def get_tileset_info(tileset): + if not os.path.isfile(tileset): + return { + 'error': 'Tileset info is not available!' 
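# Illustrative sketch, not part of this patch: the per-tile assignment test
# used by get_tiles above. An annotation spanning fractional tile coords
# [x_start, x_end] x [y_start, y_end] is attached to every tile whose unit
# square it overlaps. The function name is hypothetical.
import math

def overlapping_tiles(x_start, x_end, y_start, y_end):
    return [
        (i, j)
        for i in range(math.floor(x_start), math.floor(x_end) + 1)
        for j in range(math.floor(y_start), math.floor(y_end) + 1)
        if x_start < i + 1 and x_end >= i and y_start < j + 1 and y_end >= j
    ]

# overlapping_tiles(1.2, 2.4, 0.9, 1.1) == [(1, 0), (1, 1), (2, 0), (2, 1)]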
+ } + + db = sqlite3.connect(tileset) + + res = db.execute('SELECT * FROM tileset_info').fetchone() + + o = { + 'tile_size': res[5], + 'max_zoom': res[6], + 'max_size': res[7], + } + + try: + o['width'] = res[8] + o['height'] = res[9] + except IndexError: + pass + + try: + o['dtype'] = res[10] + except IndexError: + pass + + return o diff --git a/requirements.txt b/requirements.txt index e3cc2937..efefa9b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,6 +10,7 @@ djangorestframework==3.7.3 h5py==2.6.0 numpy==1.12.0 pandas==0.19.1 +Pillow==5.0.0 requests==2.12.3 slugid==1.0.7 bumpversion==0.5.3 diff --git a/tilesets/generate_tiles.py b/tilesets/generate_tiles.py index 85e376a9..0bcccee9 100644 --- a/tilesets/generate_tiles.py +++ b/tilesets/generate_tiles.py @@ -9,12 +9,13 @@ import numpy as np import os import shutil -import time import tempfile +import sqlite3 import tilesets.utils as tut from .tiles import make_tiles import higlass_server.settings as hss +from geotiles import utils as geotu global mats mats = {} @@ -25,6 +26,7 @@ transform_descriptions['VC'] = {'name': 'VC', 'value': 'VC'} transform_descriptions['VC_SQRT'] = {'name': 'VC_SQRT', 'value': 'VC_SQRT'} + def get_cached_datapath(relpath): ''' Check if we need to cache this file or if we have a cached copy @@ -45,32 +47,33 @@ def get_cached_datapath(relpath): return tut.get_datapath(relpath) orig_path = tut.get_datapath(relpath) - cached_path = op.join(hss.CACHE_DIR, relpath) + cached_path = os.path.join(hss.CACHE_DIR, relpath) - if op.exists(cached_path): + if os.path.exists(cached_path): # this file has already been cached print("here", cached_path) return cached_path with tempfile.TemporaryDirectory() as dirpath: - tmp = op.join(dirpath, 'cached_file') + tmp = os.path.join(dirpath, 'cached_file') shutil.copyfile(orig_path, tmp) # check to make sure the destination directory exists - dest_dir = op.dirname(cached_path) + dest_dir = os.path.dirname(cached_path) print("dest_dir:", dest_dir) - if not op.exists(dest_dir): + if not os.path.exists(dest_dir): os.makedirs(dest_dir) print("moving:", cached_path) print("stat:", os.stat(tmp)) shutil.move(tmp, cached_path) print("stat:", os.stat(cached_path)) - print('abspath:', op.abspath(cached_path)) + print('abspath:', os.path.abspath(cached_path)) return cached_path + def get_available_transforms(cooler): ''' Get the available resolutions from a single cooler file. 
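# A sketch, not part of this patch, of reading the same tileset_info row by
# column name rather than by position; the column names an .imtiles file
# actually carries are an assumption here, so the positional indexing used
# above stays authoritative.
import sqlite3

def get_imtiles_info(path):
    db = sqlite3.connect(path)
    db.row_factory = sqlite3.Row  # rows become name-addressable
    row = db.execute('SELECT * FROM tileset_info').fetchone()
    db.close()
    return dict(row)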
@@ -107,7 +110,11 @@ def make_mats(dset): if 'resolutions' in f: # this file contains raw resolutions so it'll return a different # sort of tileset info - info = {"resolutions": tuple(sorted(map(int, list(f['resolutions'].keys())))) } + info = { + "resolutions": tuple( + sorted(map(int, list(f['resolutions'].keys()))) + ) + } mats[dset] = [f, info] # see which transforms are available, a transform has to be @@ -116,18 +123,25 @@ def make_mats(dset): available_transforms_per_resolution = {} for resolution in info['resolutions']: - available_transforms_per_resolution[resolution] = get_available_transforms(f['resolutions'][str(resolution)]) + available_transforms_per_resolution[resolution] =\ + get_available_transforms(f['resolutions'][str(resolution)]) - all_available_transforms = set.intersection(*available_transforms_per_resolution.values()) + all_available_transforms = set.intersection( + *available_transforms_per_resolution.values() + ) - info['transforms'] = [transform_descriptions[t] for t in all_available_transforms] + info['transforms'] = [ + transform_descriptions[t] for t in all_available_transforms + ] # get the genome size resolution = list(f['resolutions'].keys())[0] - genome_length = int(sum(f['resolutions'][resolution]['chroms']['length'])) - + genome_length = int( + sum(f['resolutions'][resolution]['chroms']['length']) + ) + info['max_pos'] = [genome_length, genome_length] - info['min_pos'] = [1,1] + info['min_pos'] = [1, 1] return info = cch.get_info(dset) @@ -146,7 +160,7 @@ def make_mats(dset): def format_cooler_tile(tile_data_array): ''' Format raw cooler cooler data into a more structured tile - containing either float16 or float32 data along with a + containing either float16 or float32 data along with a dtype to differentiate between the two. Parameters @@ -176,10 +190,14 @@ def format_cooler_tile(tile_data_array): max_dense > min_f16 and max_dense < max_f16 and min_dense > min_f16 and min_dense < max_f16 ): - tile_data['dense'] = base64.b64encode(tile_data_array.astype('float16')).decode('latin-1') + tile_data['dense'] = base64.b64encode( + tile_data_array.astype('float16') + ).decode('latin-1') tile_data['dtype'] = 'float16' else: - tile_data['dense'] = base64.b64encode(tile_data_array.astype('float32')).decode('latin-1') + tile_data['dense'] = base64.b64encode( + tile_data_array.astype('float32') + ).decode('latin-1') tile_data['dtype'] = 'float32' return tile_data @@ -204,6 +222,7 @@ def extract_tileset_uid(tile_id): return tileset_uuid + def generate_multivec_tileset_info(filename): ''' Return some information about this tileset that will @@ -233,7 +252,7 @@ def generate_multivec_tileset_info(filename): min_pos = [0] # the "rightmost" datapoint position - max_pos = [len(f['resolutions'][str(resolutions[-1])])] + # max_pos = [len(f['resolutions'][str(resolutions[-1])])] tile_size = 1024 f.close() @@ -244,6 +263,7 @@ def generate_multivec_tileset_info(filename): 'tile_size': tile_size } + def get_single_multivec_tile(filename, tile_pos): ''' Retrieve a single multivec tile from a multires file @@ -257,11 +277,11 @@ def get_single_multivec_tile(filename, tile_pos): ''' tileset_info = generate_multivec_tileset_info(filename) f = h5py.File(filename, 'r') - + # which resolution does this zoom level correspond to? 
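# The float16-vs-float32 decision from format_cooler_tile above, stated
# once in isolation (illustrative, not part of this patch): use float16
# only when every finite value fits its range, halving the payload size.
import base64
import numpy as np

def encode_dense(arr):
    f16 = np.finfo(np.float16)
    finite = arr[np.isfinite(arr)]
    small = finite.size and f16.min < finite.min() and finite.max() < f16.max
    dtype = 'float16' if small else 'float32'
    return {
        'dense': base64.b64encode(arr.astype(dtype)).decode('latin-1'),
        'dtype': dtype,
    }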
resolution = tileset_info['resolutions'][tile_pos[0]] tile_size = tileset_info['tile_size'] - + # where in the data does the tile start and end tile_start = tile_pos[1] * tile_size tile_end = tile_start + tile_size @@ -271,6 +291,7 @@ def get_single_multivec_tile(filename, tile_pos): return dense + def generate_1d_tiles(filename, tile_ids, get_data_function): ''' Generate a set of tiles for the given tile_ids. @@ -316,13 +337,17 @@ def generate_1d_tiles(filename, tile_ids, get_data_function): min_dense > min_f16 and min_dense < max_f16 ): tile_value = { - 'dense': base64.b64encode(dense.reshape((-1,)).astype('float16')).decode('utf-8'), + 'dense': base64.b64encode( + dense.reshape((-1,)).astype('float16') + ).decode('utf-8'), 'dtype': 'float16', 'shape': dense.shape } else: tile_value = { - 'dense': base64.b64encode(dense.reshape((-1,)).astype('float32')).decode('utf-8'), + 'dense': base64.b64encode( + dense.reshape((-1,)).astype('float32') + ).decode('utf-8'), 'dtype': 'float32', 'shape': dense.shape } @@ -343,9 +368,9 @@ def generate_bigwig_tileset_info(tileset): Returns ------- - tileset_info: {'min_pos': [], - 'max_pos': [], - 'tile_size': 1024, + tileset_info: {'min_pos': [], + 'max_pos': [], + 'tile_size': 1024, 'max_zoom': 7 } ''' @@ -391,7 +416,7 @@ def generate_bigwig_tiles(tileset, tile_ids): # this doesn't combine multiple consequetive ids, which # would speed things up dense = bwt.get_bigwig_tile_by_id( - tut.get_datapath(tileset.datafile.url), + tut.get_datapath(tileset.datafile.url), zoom_level, tile_position[1]) @@ -413,12 +438,16 @@ def generate_bigwig_tiles(tileset, tile_ids): min_dense > min_f16 and min_dense < max_f16 ): tile_value = { - 'dense': base64.b64encode(dense.astype('float16')).decode('utf-8'), + 'dense': base64.b64encode( + dense.astype('float16') + ).decode('utf-8'), 'dtype': 'float16' } else: tile_value = { - 'dense': base64.b64encode(dense.astype('float32')).decode('utf-8'), + 'dense': base64.b64encode( + dense.astype('float32') + ).decode('utf-8'), 'dtype': 'float32' } @@ -426,6 +455,7 @@ def generate_bigwig_tiles(tileset, tile_ids): return generated_tiles + def generate_hitile_tiles(tileset, tile_ids): ''' Generate tiles from a hitile file. @@ -475,12 +505,16 @@ def generate_hitile_tiles(tileset, tile_ids): min_dense > min_f16 and min_dense < max_f16 ): tile_value = { - 'dense': base64.b64encode(dense.astype('float16')).decode('utf-8'), + 'dense': base64.b64encode( + dense.astype('float16') + ).decode('utf-8'), 'dtype': 'float16' } else: tile_value = { - 'dense': base64.b64encode(dense.astype('float32')).decode('utf-8'), + 'dense': base64.b64encode( + dense.astype('float32') + ).decode('utf-8'), 'dtype': 'float32' } @@ -489,7 +523,6 @@ def generate_hitile_tiles(tileset, tile_ids): return generated_tiles - def generate_beddb_tiles(tileset, tile_ids): ''' Generate tiles from a beddb file. 
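# Illustrative helper, not part of this patch: tile ids throughout this
# module look like '<tileset uuid>.<zoom>.<x>[.<y>[.<transform>]]', and the
# generators above recover the pieces with str.split('.').
def parse_tile_id(tile_id, dimension=1):
    parts = tile_id.split('.')
    uuid, zoom = parts[0], int(parts[1])
    position = [int(p) for p in parts[2:2 + dimension]]
    return uuid, zoom, position

# parse_tile_id('aBcDeF.4.13') == ('aBcDeF', 4, [13])
# parse_tile_id('aBcDeF.4.13.7', dimension=2) == ('aBcDeF', 4, [13, 7])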
@@ -508,15 +541,23 @@ def generate_beddb_tiles(tileset, tile_ids): A list of tile_id, tile_data tuples ''' tile_ids_by_zoom = bin_tiles_by_zoom(tile_ids).values() - partitioned_tile_ids = list(it.chain(*[partition_by_adjacent_tiles(t, dimension=1) - for t in tile_ids_by_zoom])) + partitioned_tile_ids = list( + it.chain( + *[ + partition_by_adjacent_tiles(t, dimension=1) + for t in tile_ids_by_zoom + ] + ) + ) generated_tiles = [] for tile_group in partitioned_tile_ids: zoom_level = int(tile_group[0].split('.')[1]) tileset_id = tile_group[0].split('.')[0] - tile_positions = [[int(x) for x in t.split('.')[2:3]] for t in tile_group] + tile_positions = [ + [int(x) for x in t.split('.')[2:3]] for t in tile_group + ] if len(tile_positions) == 0: continue @@ -524,19 +565,25 @@ def generate_beddb_tiles(tileset, tile_ids): minx = min([t[0] for t in tile_positions]) maxx = max([t[0] for t in tile_positions]) - t1 = time.time() tile_data_by_position = cdt.get_tiles( get_cached_datapath(tileset.datafile.url), zoom_level, minx, maxx - minx + 1 ) - generated_tiles += [(".".join(map(str, [tileset_id] + [zoom_level] + [position])), tile_data) - for (position, tile_data) in tile_data_by_position.items()] + generated_tiles += [ + ( + ".".join( + map(str, [tileset_id] + [zoom_level] + [position]) + ), + tile_data + ) for (position, tile_data) in tile_data_by_position.items() + ] return generated_tiles -def generate_bed2ddb_tiles(tileset, tile_ids): + +def generate_bed2ddb_tiles(tileset, tile_ids, retriever=cdt.get_2d_tiles): ''' Generate tiles from a bed2db file. @@ -556,18 +603,27 @@ def generate_bed2ddb_tiles(tileset, tile_ids): generated_tiles = [] tile_ids_by_zoom = bin_tiles_by_zoom(tile_ids).values() - partitioned_tile_ids = list(it.chain(*[partition_by_adjacent_tiles(t) - for t in tile_ids_by_zoom])) + partitioned_tile_ids = list( + it.chain( + *[partition_by_adjacent_tiles(t) for t in tile_ids_by_zoom] + ) + ) for tile_group in partitioned_tile_ids: zoom_level = int(tile_group[0].split('.')[1]) tileset_id = tile_group[0].split('.')[0] - tile_positions = [[int(x) for x in t.split('.')[2:4]] for t in tile_group] + tile_positions = [ + [int(x) for x in t.split('.')[2:4]] for t in tile_group + ] # filter for tiles that are in bounds for this zoom level - tile_positions = list(filter(lambda x: x[0] < 2 ** zoom_level, tile_positions)) - tile_positions = list(filter(lambda x: x[1] < 2 ** zoom_level, tile_positions)) + tile_positions = list( + filter(lambda x: x[0] < 2 ** zoom_level, tile_positions) + ) + tile_positions = list( + filter(lambda x: x[1] < 2 ** zoom_level, tile_positions) + ) if len(tile_positions) == 0: # no in bounds tiles @@ -580,21 +636,30 @@ def generate_bed2ddb_tiles(tileset, tile_ids): maxy = max([t[1] for t in tile_positions]) cached_datapath = get_cached_datapath(tileset.datafile.url) - tile_data_by_position = cdt.get_2d_tiles( - cached_datapath, - zoom_level, - minx, miny, - maxx - minx + 1, - maxy - miny + 1 - ) + tile_data_by_position = retriever( + cached_datapath, + zoom_level, + minx, + miny, + maxx - minx + 1, + maxy - miny + 1 + ) - tiles = [(".".join(map(str, [tileset_id] + [zoom_level] + list(position))), tile_data) - for (position, tile_data) in tile_data_by_position.items()] + tiles = [ + ( + ".".join( + map(str, [tileset_id] + [zoom_level] + list(position)) + ), + tile_data + ) + for (position, tile_data) in tile_data_by_position.items() + ] generated_tiles += tiles return generated_tiles + def generate_hibed_tiles(tileset, tile_ids): ''' Generate tiles from a hibed file. 
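# The `retriever` parameter introduced above makes generate_bed2ddb_tiles a
# generic 2D block fetcher: any callable taking (datapath, zoom, minx,
# miny, width, height) and returning {(x, y): tile_data} can back it.
# A hypothetical stand-in for illustration:
def fake_retriever(datapath, zoom, minx, miny, width, height):
    return {
        (x, y): {'source': datapath, 'zoom': zoom}
        for x in range(minx, minx + width)
        for y in range(miny, miny + height)
    }

# generate_bed2ddb_tiles(tileset, tile_ids)                   # default: cdt.get_2d_tiles
# generate_bed2ddb_tiles(tileset, tile_ids, geotu.get_tiles)  # geodb tilesets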
@@ -624,12 +689,17 @@ def generate_hibed_tiles(tileset, tile_ids): tile_position[1] ) - tile_value = {'discrete': list([list([x.decode('utf-8') for x in d]) for d in dense])} + tile_value = { + 'discrete': list( + [list([x.decode('utf-8') for x in d]) for d in dense] + ) + } generated_tiles += [(tile_id, tile_value)] return generated_tiles + def get_transform_type(tile_id): ''' Get the transform type specified in the tile id. @@ -653,6 +723,7 @@ def get_transform_type(tile_id): return transform_method + def bin_tiles_by_zoom(tile_ids): ''' Place these tiles into separate lists according to their @@ -706,11 +777,11 @@ def bin_tiles_by_zoom_level_and_transform(tile_ids): transform_method = get_transform_type(tile_id) - tile_id_lists[(zoom_level, transform_method)].add(tile_id) return tile_id_lists + def partition_by_adjacent_tiles(tile_ids, dimension=2): ''' Partition a set of tile ids into sets of adjacent tiles @@ -731,7 +802,11 @@ def partition_by_adjacent_tiles(tile_ids, dimension=2): ''' tile_id_lists = [] - for tile_id in sorted(tile_ids, key=lambda x: [int(p) for p in x.split('.')[2:2+dimension]]): + sorted_tile_ids = sorted( + tile_ids, + key=lambda x: [int(p) for p in x.split('.')[2:2+dimension]] + ) + for tile_id in sorted_tile_ids: tile_id_parts = tile_id.split('.') # exclude the zoom level in the position @@ -743,15 +818,17 @@ def partition_by_adjacent_tiles(tile_ids, dimension=2): for tile_id_list in tile_id_lists: # iterate over each group of adjacent tiles - has_close_tile = False + # has_close_tile = False for ct_tile_id in tile_id_list: ct_tile_id_parts = ct_tile_id.split('.') - ct_tile_position = list(map(int, ct_tile_id_parts[2:2+dimension])) + ct_tile_position = list( + map(int, ct_tile_id_parts[2:2+dimension]) + ) far_apart = False # iterate over each dimension and see if this tile is close - for p1,p2 in zip(tile_position, ct_tile_position): + for p1, p2 in zip(tile_position, ct_tile_position): if abs(int(p1) - int(p2)) > 1: # too far apart can't be part of the same group far_apart = True @@ -761,7 +838,7 @@ def partition_by_adjacent_tiles(tile_ids, dimension=2): tile_id_list += [tile_id] added = True break - + if added: break if not added: @@ -769,6 +846,7 @@ def partition_by_adjacent_tiles(tile_ids, dimension=2): return tile_id_lists + def generate_cooler_tiles(tileset, tile_ids): ''' Generate tiles from a cooler file. 
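# What partition_by_adjacent_tiles above accomplishes, restated compactly
# (illustrative, not part of this patch): grid positions within Chebyshev
# distance 1 of an existing group member join that group, so each group can
# be fetched with one contiguous-range query.
def partition_adjacent(positions):
    groups = []
    for pos in sorted(positions):
        for group in groups:
            if any(all(abs(a - b) <= 1 for a, b in zip(pos, other))
                   for other in group):
                group.append(pos)
                break
        else:
            groups.append([pos])
    return groups

# partition_adjacent([(0, 0), (0, 1), (1, 1), (5, 5)])
# == [[(0, 0), (0, 1), (1, 1)], [(5, 5)]]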
@@ -795,9 +873,17 @@ def generate_cooler_tiles(tileset, tile_ids): tileset_file_and_info = mats[filename] - tile_ids_by_zoom_and_transform = bin_tiles_by_zoom_level_and_transform(tile_ids).values() - partitioned_tile_ids = list(it.chain(*[partition_by_adjacent_tiles(t) - for t in tile_ids_by_zoom_and_transform])) + tile_ids_by_zoom_and_transform = bin_tiles_by_zoom_level_and_transform( + tile_ids + ).values() + partitioned_tile_ids = list( + it.chain( + *[ + partition_by_adjacent_tiles(t) + for t in tile_ids_by_zoom_and_transform + ] + ) + ) generated_tiles = [] @@ -809,7 +895,10 @@ def generate_cooler_tiles(tileset, tile_ids): tileset_file = tileset_file_and_info[0] if 'resolutions' in tileset_info: - sorted_resolutions = sorted([int(r) for r in tileset_info['resolutions']], reverse=True) + sorted_resolutions = sorted( + [int(r) for r in tileset_info['resolutions']], + reverse=True + ) if zoom_level > len(sorted_resolutions): # this tile has too high of a zoom level specified continue @@ -821,14 +910,28 @@ def generate_cooler_tiles(tileset, tile_ids): # this tile has too high of a zoom level specified continue hdf_for_resolution = tileset_file[str(zoom_level)] - resolution = (tileset_info['max_width'] / 2**zoom_level) / BINS_PER_TILE + resolution = ( + tileset_info['max_width'] / 2**zoom_level + ) / BINS_PER_TILE - tile_positions = [[int(x) for x in t.split('.')[2:4]] for t in tile_group] + tile_positions = [ + [int(x) for x in t.split('.')[2:4]] for t in tile_group + ] # filter for tiles that are in bounds for this zoom level tile_width = resolution * BINS_PER_TILE - tile_positions = list(filter(lambda x: x[0] * tile_width < tileset_info['max_pos'][0]+1, tile_positions)) - tile_positions = list(filter(lambda x: x[1] * tile_width < tileset_info['max_pos'][1]+1, tile_positions)) + tile_positions = list( + filter( + lambda x: x[0] * tile_width < tileset_info['max_pos'][0]+1, + tile_positions + ) + ) + tile_positions = list( + filter( + lambda x: x[1] * tile_width < tileset_info['max_pos'][1]+1, + tile_positions + ) + ) if len(tile_positions) == 0: # no in bounds tiles @@ -840,20 +943,88 @@ def generate_cooler_tiles(tileset, tile_ids): miny = min([t[1] for t in tile_positions]) maxy = max([t[1] for t in tile_positions]) - tile_data_by_position = make_tiles(hdf_for_resolution, - resolution, - minx, miny, - transform_type, - maxx-minx+1, maxy-miny+1) - - tiles = [(".".join(map(str, [tileset_id] + [zoom_level] + list(position) + [transform_type])), format_cooler_tile(tile_data)) - for (position, tile_data) in tile_data_by_position.items()] + tile_data_by_position = make_tiles( + hdf_for_resolution, + resolution, + minx, + miny, + transform_type, + maxx-minx + 1, + maxy-miny + 1 + ) + tiles = [ + ( + ".".join( + map( + str, + ( + [tileset_id] + + [zoom_level] + + list(position) + + [transform_type] + ) + ) + ), + format_cooler_tile(tile_data) + ) for (position, tile_data) in tile_data_by_position.items() + ] generated_tiles += tiles return generated_tiles + +def generate_image_tiles(tileset, tile_ids, raw): + ''' + Generate tiles from a imtiles file. + + Parameters + ---------- + tileset: tilesets.models.Tileset object + The tileset that the tile ids should be retrieved from + tile_ids: [str,...] + A list of tile_ids (e.g. xyx.0.0.1) identifying the tiles + to be retrieved + + Returns + ------- + generated_tiles: [(tile_id, tile_data),...] 
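# The in-bounds filter used by generate_cooler_tiles above, in isolation
# (illustrative, not part of this patch; 256 bins per tile is an assumption
# standing in for the module-level BINS_PER_TILE): a tile is kept only if
# its left/top edge still lies inside the assembly.
def in_bounds(tile_pos, resolution, max_pos, bins_per_tile=256):
    tile_width = resolution * bins_per_tile
    return (tile_pos[0] * tile_width < max_pos[0] + 1 and
            tile_pos[1] * tile_width < max_pos[1] + 1)

# in_bounds((0, 0), 1000, [3000000, 3000000])   -> True
# in_bounds((12, 0), 1000, [3000000, 3000000])  -> False (12 * 256000 > 3e6)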
+ A list of tile_id, tile_data tuples + ''' + filename = tut.get_datapath(tileset.datafile.url) + + # Connect to SQLite db + db = sqlite3.connect(filename) + + generate_tiles = [] + + generate_image = raw and len(tile_ids) + + for tile_id in tile_ids: + id = tile_id[tile_id.find('.') + 1:].split('.') + + sql = 'SELECT image FROM tiles WHERE z = :z AND y = :y AND x = :x' + param = {'z': int(id[0]), 'y': int(id[1]), 'x': int(id[2])} + res = db.execute(sql, param).fetchone() + + if res: + image_blob = res[0] + + if generate_image: + tile_data = { + 'image': image_blob, + } + else: + tile_data = { + 'dense': base64.b64encode(image_blob).decode('latin-1'), + } + + generate_tiles.append((tile_id, tile_data)) + + return generate_tiles + + def generate_tiles(tileset_tile_ids): ''' Generate a tiles for the give tile_ids. @@ -875,14 +1046,19 @@ def generate_tiles(tileset_tile_ids): tile_list: [(tile_id, tile_data),...] A list of tile_id, tile_data tuples ''' - tileset, tile_ids = tileset_tile_ids + tileset, tile_ids, raw = tileset_tile_ids if tileset.filetype == 'hitile': return generate_hitile_tiles(tileset, tile_ids) elif tileset.filetype == 'beddb': return generate_beddb_tiles(tileset, tile_ids) - elif tileset.filetype == 'bed2ddb': + elif ( + tileset.filetype == 'bed2ddb' or + tileset.filetype == '2dannodb' + ): return generate_bed2ddb_tiles(tileset, tile_ids) + elif tileset.filetype == 'geodb': + return generate_bed2ddb_tiles(tileset, tile_ids, geotu.get_tiles) elif tileset.filetype == 'hibed': return generate_hibed_tiles(tileset, tile_ids) elif tileset.filetype == 'cooler': @@ -891,10 +1067,12 @@ def generate_tiles(tileset_tile_ids): return generate_bigwig_tiles(tileset, tile_ids) elif tileset.filetype == 'multivec': return generate_1d_tiles( - tut.get_datapath(tileset.datafile.url), - tile_ids, - get_single_multivec_tile) + tut.get_datapath(tileset.datafile.url), + tile_ids, + get_single_multivec_tile + ) + elif tileset.filetype == 'imtiles': + return generate_image_tiles(tileset, tile_ids, raw) else: - return [(ti, {'error': 'Unknown tileset filetype: {}'.format(tileset.filetype)}) for ti in tile_ids] - - + err_msg = 'Unknown tileset filetype: {}'.format(tileset.filetype) + return [(ti, {'error': err_msg}) for ti in tile_ids] diff --git a/tilesets/models.py b/tilesets/models.py index ba28cbf3..b952e2b7 100644 --- a/tilesets/models.py +++ b/tilesets/models.py @@ -24,7 +24,11 @@ def __str__(self): class Tileset(models.Model): created = models.DateTimeField(auto_now_add=True) - uuid = models.CharField(max_length=100, unique=True, default=lambda: slugid.nice().decode('utf-8')) + uuid = models.CharField( + max_length=100, + unique=True, + default=lambda: slugid.nice().decode('utf-8') + ) # processed_file = models.TextField() datafile = models.FileField(upload_to='uploads') filetype = models.TextField() diff --git a/tilesets/views.py b/tilesets/views.py index f5b60129..1443d16b 100644 --- a/tilesets/views.py +++ b/tilesets/views.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import print_function -import csv import h5py import json import logging @@ -55,18 +54,24 @@ from higlass_server.utils import getRdb +from imtiles import utils as imtu +from geotiles import utils as geotu + logger = logging.getLogger(__name__) rdb = getRdb() + class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = tss.UserSerializer + class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = tss.UserSerializer + @api_view(['GET']) def 
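# How a client might consume the two imtiles encodings produced above
# (hypothetical sketch, not part of this patch): with `raw` and a single
# tile the response body is the image itself; otherwise each tile carries
# its bytes base64-encoded under 'dense'.
import base64
from io import BytesIO

from PIL import Image

def decode_tile(tile_data):
    blob = base64.b64decode(tile_data['dense'])
    return Image.open(BytesIO(blob))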
uids_by_filename(request): ''' @@ -171,13 +176,17 @@ def sizes(request): return response(err_msg, status=err_status) - # Try to load the chromosome sizes and return them as a list of + # Try to load the chromosome sizes and return them as a list of # (name, size) tuples try: if chrom_sizes.filetype == 'cooler': - data = tcs.get_cooler_chromsizes(tut.get_datapath(chrom_sizes.datafile.url)) + data = tcs.get_cooler_chromsizes( + tut.get_datapath(chrom_sizes.datafile.url) + ) else: - data = tcs.get_tsv_chromsizes(tut.get_datapath(chrom_sizes.datafile.url)) + data = tcs.get_tsv_chromsizes( + tut.get_datapath(chrom_sizes.datafile.url) + ) except Exception as ex: err_msg = str(ex) err_status = 500 @@ -299,7 +308,7 @@ def viewconfs(request): if len(existing_object) > 0: return JsonResponse({ 'error': 'Object with uid {} already exists'.format(uid) - }, status=rfs.HTTP_400_BAD_REQUEST); + }, status=rfs.HTTP_400_BAD_REQUEST) serializer = tss.ViewConfSerializer(data={'viewconf': viewconf}) @@ -330,6 +339,7 @@ def viewconfs(request): return JsonResponse(json.loads(obj.viewconf)) + def add_transform_type(tile_id): ''' Add a transform type to a cooler tile id if it's not already @@ -353,6 +363,7 @@ def add_transform_type(tile_id): new_tile_id = ".".join([tileset_uuid] + tile_position + [transform_type]) return new_tile_id + @api_view(['GET']) def tiles(request): '''Retrieve a set of tiles @@ -372,12 +383,10 @@ def tiles(request): ''' # create a set so that we don't fetch the same tile multiple times tileids_to_fetch = set(request.GET.getlist("d")) - # with ProcessPoolExecutor() as executor: - # res = executor.map(parallelize, hargs) - ''' - p = mp.Pool(4) - res = p.map(parallelize, hargs) - ''' + + # Return the raw data if only one tile is requested. This currently only + # works for `imtiles` + raw = request.GET.get('raw', False) tileids_by_tileset = col.defaultdict(set) generated_tiles = [] @@ -391,6 +400,7 @@ def tiles(request): # get the tileset object first if tileset_uuid in tilesets: + # Fritz: this condition is dead as `tilesets` haven't been set tileset = tilesets[tileset_uuid] else: tileset = tm.Tileset.objects.get(uuid=tileset_uuid) @@ -414,8 +424,8 @@ def tiles(request): # log the error and carry forward fetching the tile # from the original data logger.error(ex) - - #tile_value = None + + # tile_value = None if tile_value is not None: # we found the tile in the cache, no need to fetch it again @@ -427,11 +437,14 @@ def tiles(request): # fetch the tiles tilesets = [tilesets[tu] for tu in tileids_by_tileset] - accessible_tilesets = [(t, tileids_by_tileset[t.uuid]) for t in tilesets if ((not t.private) or request.user == t.owner)] - - #pool = mp.Pool(6) + accessible_tilesets = [ + (t, tileids_by_tileset[t.uuid], raw) + for t in tilesets if ((not t.private) or request.user == t.owner) + ] - generated_tiles += list(it.chain(*map(tgt.generate_tiles, accessible_tilesets))) + generated_tiles += list( + it.chain(*map(tgt.generate_tiles, accessible_tilesets)) + ) ''' for tileset_uuid in tileids_by_tileset: @@ -440,9 +453,14 @@ def tiles(request): # check permissions if tileset.private and request.user != tileset.owner: - generated_tiles += [(tile_id, {'error': "Forbidden"}) for tile_id in tileids_by_tileset[tileset_uuid]] + generated_tiles += [ + (tile_id, {'error': "Forbidden"}) + for tile_id in tileids_by_tileset[tileset_uuid] + ] else: - generated_tiles += generate_tiles(tileset, tileids_by_tileset[tileset_uuid]) + generated_tiles += generate_tiles( + tileset, tileids_by_tileset[tileset_uuid] + ) 
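# Note the changed contract here, shown with hypothetical values: each
# entry handed to tgt.generate_tiles is now a 3-tuple carrying the `raw`
# flag, e.g.
#
#     (tileset_a, {'uuidA.0.0.0'}, False)   # JSON-encoded tiles
#     (tileset_b, {'uuidB.5.3.7'}, True)    # raw image bytes, if imtiles
#
# so every caller of generate_tiles has to pass `raw` through.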
''' # store the tiles in redis @@ -467,8 +485,14 @@ def tiles(request): if original_tile_id in tileids_to_fetch: tiles_to_return[original_tile_id] = tile_value + if len(generated_tiles) == 1 and raw and 'image' in generated_tiles[0][1]: + return HttpResponse( + generated_tiles[0][1]['image'], content_type='image/jpeg' + ) + return JsonResponse(tiles_to_return, safe=False) + @api_view(['GET']) def tileset_info(request): ''' Get information about a tileset @@ -491,6 +515,17 @@ def tileset_info(request): for tileset_uuid in tileset_uuids: tileset_object = queryset.filter(uuid=tileset_uuid).first() + if tileset_uuid == 'osm-image': + tileset_infos[tileset_uuid] = { + 'min_x': -180, + 'max_height': 180, + 'min_y': -90, + 'max_y': 90, + 'max_zoom': 19, + 'tile_size': 256 + } + continue + if tileset_object is None: tileset_infos[tileset_uuid] = { 'error': 'No such tileset with uid: {}'.format(tileset_uuid) @@ -520,7 +555,9 @@ def tileset_info(request): "max_zoom": int(tileset_info['max_zoom']) } elif tileset_object.filetype == 'bigwig': - tileset_infos[tileset_uuid] = tgt.generate_bigwig_tileset_info(tileset_object) + tileset_infos[tileset_uuid] = tgt.generate_bigwig_tileset_info( + tileset_object + ) elif tileset_object.filetype == 'multivec': tileset_infos[tileset_uuid] = tgt.generate_multivec_tileset_info( tut.get_datapath(tileset_object.datafile.url)) @@ -536,6 +573,17 @@ def tileset_info(request): tileset_infos[tileset_uuid] = cdt.get_2d_tileset_info( tut.get_datapath(tileset_object.datafile.url) ) + elif ( + tileset_object.filetype == '2dannodb' or + tileset_object.filetype == 'imtiles' + ): + tileset_infos[tileset_uuid] = imtu.get_tileset_info( + tut.get_datapath(tileset_object.datafile.url) + ) + elif tileset_object.filetype == 'geodb': + tileset_infos[tileset_uuid] = geotu.get_tileset_info( + tut.get_datapath(tileset_object.datafile.url) + ) elif tileset_object.filetype == 'cooler': dsetname = tut.get_datapath(queryset.filter( uuid=tileset_uuid @@ -606,11 +654,15 @@ def list(self, request, *args, **kwargs): if 'o' in request.GET: if 'r' in request.GET: - queryset = queryset.order_by(dbmf.Lower(request.GET['o']).desc()) + queryset = queryset.order_by( + dbmf.Lower(request.GET['o']).desc() + ) else: - queryset = queryset.order_by(dbmf.Lower(request.GET['o']).asc()) + queryset = queryset.order_by( + dbmf.Lower(request.GET['o']).asc() + ) - #ts_serializer = tss.UserFacingTilesetSerializer(queryset, many=True) + ts_serializer = tss.UserFacingTilesetSerializer(queryset, many=True) page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True)
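# A hypothetical client call exercising the new raw-image path added above
# (host, port, route, and uuid are assumptions; the tile id is uuid.z.y.x,
# matching the parser in generate_image_tiles):
import requests

r = requests.get(
    'http://localhost:8000/api/v1/tiles/',
    params={'d': 'my-imtiles-uuid.5.3.7', 'raw': 1},
)
with open('tile.jpg', 'wb') as f:
    f.write(r.content)  # the image blob itself, not JSON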