multiscale meshes #435

Draft: wants to merge 19 commits into base: main
11 changes: 11 additions & 0 deletions pychunkedgraph/app/__init__.py
@@ -1,3 +1,5 @@
# pylint: disable=invalid-name, missing-docstring

import datetime
import json
import logging
@@ -8,6 +10,7 @@
import pandas as pd
import numpy as np
import redis
from flask import Blueprint
from flask import Flask
from flask.json.provider import DefaultJSONProvider
from flask.logging import default_handler
@@ -74,6 +77,14 @@ def create_app(test_config=None):
app.register_blueprint(segmentation_api_legacy)
app.register_blueprint(segmentation_api_v1)

auth_bp = Blueprint("auth_info", __name__, url_prefix="/")

@auth_bp.route("/auth_info")
def index():
return {"login_url": "https://globalv1.flywire-daf.com/sticky_auth"}

app.register_blueprint(auth_bp)

return app
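As a quick illustration (not part of the diff), the new route could be exercised with Flask's test client; this assumes the app can be created with its default configuration in a test environment:

import pychunkedgraph.app as pcg_app

app = pcg_app.create_app()
with app.test_client() as client:
    resp = client.get("/auth_info")
    # Expected payload from the blueprint registered above:
    # {"login_url": "https://globalv1.flywire-daf.com/sticky_auth"}
    print(resp.json)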


15 changes: 11 additions & 4 deletions pychunkedgraph/app/meshing/common.py
Expand Up @@ -16,6 +16,10 @@
from pychunkedgraph.meshing.manifest import get_highest_child_nodes_with_meshes
from pychunkedgraph.meshing.manifest import get_children_before_start_layer
from pychunkedgraph.meshing.manifest import ManifestCache
from pychunkedgraph.meshing.manifest import speculative_manifest_sharded
from pychunkedgraph.meshing.manifest.multiscale import (
get_manifest as get_multiscale_manifest,
)


__meshing_url_prefix__ = os.environ.get("MESHING_URL_PREFIX", "meshing")
@@ -54,7 +58,7 @@ def handle_valid_frags(table_id, node_id):
## MANIFEST --------------------------------------------------------------------


def handle_get_manifest(table_id, node_id):
def handle_get_manifest(table_id, node_id, multiscale=False):
current_app.request_type = "manifest"
current_app.table_id = table_id

@@ -84,6 +88,7 @@ def handle_get_manifest(table_id, node_id):
args = (
node_id,
verify,
multiscale,
return_seg_ids,
prepend_seg_ids,
start_layer,
@@ -95,11 +100,10 @@ def handle_get_manifest(table_id, node_id):


def manifest_response(cg, args):
from pychunkedgraph.meshing.manifest import speculative_manifest_sharded

(
node_id,
verify,
multiscale,
return_seg_ids,
prepend_seg_ids,
start_layer,
@@ -109,11 +113,14 @@ def manifest_response(cg, args):
) = args
resp = {}
seg_ids = []
node_id = np.uint64(node_id)
if not verify:
seg_ids, resp["fragments"] = speculative_manifest_sharded(
cg, node_id, start_layer=start_layer, bounding_box=bounding_box
)

elif multiscale is True:
seg_ids, response = get_multiscale_manifest(cg, node_id)
resp.update(response)
else:
seg_ids, resp["fragments"] = get_highest_child_nodes_with_meshes(
cg,
11 changes: 11 additions & 0 deletions pychunkedgraph/app/meshing/v1/routes.py
@@ -83,6 +83,17 @@ def handle_get_manifest(table_id, node_id):
return common.handle_get_manifest(table_id, node_id)


@bp.route("/table/<table_id>/manifest/multiscale/<node_id>", methods=["GET"])
@auth_requires_permission(
"view",
public_table_key="table_id",
public_node_key="node_id",
)
@remap_public
def handle_get_multilod_manifest(table_id, node_id):
return common.handle_get_manifest(table_id, node_id, multiscale=True)
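For illustration, the new endpoint can be requested like the existing manifest route; the host, URL prefix, table name, node ID, and token below are placeholders that depend on the deployment:

import requests

url = "https://<server>/<meshing_url_prefix>/table/<table_id>/manifest/multiscale/<node_id>"
resp = requests.get(url, headers={"Authorization": "Bearer <token>"})
manifest = resp.json()
# The multiscale response includes, among others, "fragments", "octree",
# "chunkShape", "lodScales", "chunkGridSpatialOrigin",
# "clipLowerBound" and "clipUpperBound".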


## ENQUE MESHING JOBS ----------------------------------------------------------


206 changes: 206 additions & 0 deletions pychunkedgraph/meshing/manifest/multiscale.py
@@ -0,0 +1,206 @@
# pylint: disable=invalid-name, missing-docstring, line-too-long, no-member

import functools
from collections import deque
from typing import Dict, Set, Tuple

import numpy as np
from cloudvolume import CloudVolume

from pychunkedgraph.graph import ChunkedGraph
from pychunkedgraph.graph.types import empty_1d
from pychunkedgraph.graph.utils.basetypes import NODE_ID
from .cache import ManifestCache
from .sharded import normalize_fragments
from .utils import del_none_keys
from ..meshgen_utils import get_json_info


def _get_hierarchy(cg: ChunkedGraph, node_id: np.uint64) -> Dict:
node_children = {}
layer = cg.get_chunk_layer(node_id)
if layer < 2:
return node_children
if layer == 2:
node_children[node_id] = empty_1d.copy()
return node_children

node_ids = np.array([node_id], dtype=NODE_ID)
while node_ids.size > 0:
children = cg.get_children(node_ids)
node_children.update(children)

_ids = np.concatenate(list(children.values())) if children else empty_1d.copy()
node_layers = cg.get_chunk_layers(_ids)
node_ids = _ids[node_layers > 2]

for l2id in _ids[node_layers == 2]:
node_children[l2id] = empty_1d.copy()
return node_children
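To illustrate the shape of the returned mapping (all IDs below are invented), a layer-3 node with two level-2 children would produce something like:

import numpy as np

example_hierarchy = {
    np.uint64(1000): np.array([2000, 2001], dtype=np.uint64),  # layer-3 node and its children
    np.uint64(2000): np.array([], dtype=np.uint64),            # level-2 node, no children tracked
    np.uint64(2001): np.array([], dtype=np.uint64),
}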


def _get_skipped_and_missing_leaf_nodes(
node_children: Dict, mesh_fragments: Dict
) -> Tuple[Set, Set]:
"""
    Returns nodes with only one child (skipped) and leaf nodes (level-2 IDs) without mesh fragments.
    Nodes with a single child do not have a mesh fragment of their own because it would be identical to the child's fragment.
    Leaves are used to determine the correct size of the octree.
"""
skipped = set()
leaves = set()
for node_id, children in node_children.items():
if children.size == 1:
skipped.add(node_id)
        elif children.size == 0 and node_id not in mesh_fragments:
leaves.add(node_id)
return skipped, leaves


def _get_node_coords_and_layers_map(
cg: ChunkedGraph, node_children: Dict
) -> Tuple[Dict, Dict]:
node_ids = np.fromiter(node_children.keys(), dtype=NODE_ID)
node_coords = {}
node_layers = cg.get_chunk_layers(node_ids)
for layer in set(node_layers):
layer_mask = node_layers == layer
coords = cg.get_chunk_coordinates_multiple(node_ids[layer_mask])
_node_coords = dict(zip(node_ids[layer_mask], coords))
node_coords.update(_node_coords)
return node_coords, dict(zip(node_ids, node_layers))


def sort_octree_row(cg: ChunkedGraph, children: np.ndarray):
"""
Sort children by their morton code.
"""
if children.size == 0:
return children
children_coords = []

for child in children:
children_coords.append(cg.get_chunk_coordinates(child))

    def cmp_zorder(lhs, rhs) -> int:
# https://en.wikipedia.org/wiki/Z-order_curve
# https://github.com/google/neuroglancer/issues/272
def less_msb(x: int, y: int) -> bool:
return x < y and x < (x ^ y)

msd = 2
for dim in [1, 0]:
if less_msb(lhs[msd] ^ rhs[msd], lhs[dim] ^ rhs[dim]):
msd = dim
return lhs[msd] - rhs[msd]

children, _ = zip(
*sorted(
zip(children, children_coords),
key=functools.cmp_to_key(lambda x, y: cmp_zorder(x[1], y[1])),
)
)
return children
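As a standalone sanity check of the comparator (independent of ChunkedGraph; coordinates are plain tuples here), sorting the eight unit-cube corners with the same logic yields plain Morton order with x varying fastest:

import functools
from itertools import product

def less_msb(x: int, y: int) -> bool:
    return x < y and x < (x ^ y)

def cmp_zorder(lhs, rhs) -> int:
    msd = 2
    for dim in [1, 0]:
        if less_msb(lhs[msd] ^ rhs[msd], lhs[dim] ^ rhs[dim]):
            msd = dim
    return lhs[msd] - rhs[msd]

corners = list(product(range(2), repeat=3))  # interpreted as (x, y, z)
print(sorted(corners, key=functools.cmp_to_key(cmp_zorder)))
# [(0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0),
#  (0, 0, 1), (1, 0, 1), (0, 1, 1), (1, 1, 1)]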


def build_octree(
cg: ChunkedGraph, node_id: np.uint64, node_children: Dict, mesh_fragments: Dict
):
"""
    From the neuroglancer multiscale mesh specification:
Row-major `[n, 5]` array where each row is of the form `[x, y, z, start, end_and_empty]`, where
`x`, `y`, and `z` are the chunk grid coordinates of the entry at a particular level of detail.
Row `n-1` corresponds to level of detail `lodScales.length - 1`, the root of the octree. Given
    a row corresponding to an octree node at level of detail `lod`, bits `[0,30]` of `start` specify the row
number of the first child octree node at level of detail `lod-1`, and bits `[0,30]` of
`end_and_empty` specify one past the row number of the last child octree node. Bit `31` of
`end_and_empty` is set to `1` if the mesh for the octree node is empty and should not be
requested/rendered.
"""
node_ids = np.fromiter(mesh_fragments.keys(), dtype=NODE_ID)
node_coords_d, _ = _get_node_coords_and_layers_map(cg, node_children)
skipped, leaves = _get_skipped_and_missing_leaf_nodes(node_children, mesh_fragments)

OCTREE_NODE_SIZE = 5
ROW_TOTAL = len(node_ids) + len(skipped) + len(leaves) + 1
row_counter = len(node_ids) + len(skipped) + len(leaves) + 1
octree_size = OCTREE_NODE_SIZE * ROW_TOTAL
octree = np.zeros(octree_size, dtype=np.uint32)

octree_node_ids = ROW_TOTAL * [0]
octree_fragments = ROW_TOTAL * [""]

que = deque()
rows_used = 1
que.append(node_id)

while len(que) > 0:
row_counter -= 1
current_node = que.popleft()
children = node_children[current_node]
node_coords = node_coords_d[current_node]

x, y, z = node_coords * cg.meta.resolution
offset = OCTREE_NODE_SIZE * row_counter
octree[offset + 0] = x
octree[offset + 1] = y
octree[offset + 2] = z

rows_used += children.size
start = ROW_TOTAL - rows_used
end_empty = start + children.size

octree[offset + 3] = start
octree[offset + 4] = end_empty

octree_node_ids[row_counter] = current_node
try:
if children.size == 1:
# mark node virtual
octree[offset + 3] |= 1 << 31
else:
octree_fragments[row_counter] = mesh_fragments[current_node]
except KeyError:
# no mesh, mark node empty
octree[offset + 4] |= 1 << 31

children = sort_octree_row(cg, children)
for child in children:
que.append(child)
return octree, octree_node_ids, octree_fragments
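A consumer-side sketch (not part of this PR) of decoding one row of the flat octree array, following the format described in the docstring above:

OCTREE_NODE_SIZE = 5
FLAG_BIT = 1 << 31  # bit 31: "virtual" on `start`, "empty mesh" on `end_and_empty`

def decode_octree_row(octree, row):
    """Return the position, child row range and flags encoded in one octree row."""
    x, y, z, start, end_and_empty = (
        int(v) for v in octree[row * OCTREE_NODE_SIZE : (row + 1) * OCTREE_NODE_SIZE]
    )
    return {
        "position": (x, y, z),                      # chunk coordinates scaled by resolution
        "children": range(start & ~FLAG_BIT, end_and_empty & ~FLAG_BIT),
        "virtual": bool(start & FLAG_BIT),          # single-child node without its own fragment
        "empty": bool(end_and_empty & FLAG_BIT),    # no mesh stored, should not be rendered
    }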


def get_manifest(cg: ChunkedGraph, node_id: np.uint64) -> Tuple[list, Dict]:
node_children = _get_hierarchy(cg, node_id)
node_ids = np.fromiter(node_children.keys(), dtype=NODE_ID)
manifest_cache = ManifestCache(cg.graph_id, initial=True)

cv = CloudVolume(
"graphene://https://localhost/segmentation/table/dummy",
mesh_dir=cg.meta.custom_data.get("mesh", {}).get("dir", "graphene_meshes"),
info=get_json_info(cg),
progress=False,
)

fragments_d, _not_cached, _ = manifest_cache.get_fragments(node_ids)
initial_meshes = cv.mesh.initial_exists(_not_cached, return_byte_range=True)
_fragments_d, _ = del_none_keys(initial_meshes)
manifest_cache.set_fragments(_fragments_d)
fragments_d.update(_fragments_d)

octree, node_ids, fragments = build_octree(cg, node_id, node_children, fragments_d)
max_layer = min(cg.get_chunk_layer(node_id) + 1, cg.meta.layer_count)

chunk_shape = np.array(cg.meta.graph_config.CHUNK_SIZE, dtype=np.dtype("<f4"))
chunk_shape *= cg.meta.resolution

response = {
"chunkShape": chunk_shape,
"chunkGridSpatialOrigin": np.array([0, 0, 0], dtype=np.dtype("<f4")),
"lodScales": 2 ** np.arange(max_layer, dtype=np.dtype("<f4")) * 1,
"fragments": normalize_fragments(fragments),
"octree": octree,
"clipLowerBound": np.array(cg.meta.voxel_bounds[:, 0], dtype=np.dtype("<f4")),
"clipUpperBound": np.array(cg.meta.voxel_bounds[:, 1], dtype=np.dtype("<f4")),
}
return node_ids, response
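For intuition (values are hypothetical, not from a real dataset), a graph with 512^3-voxel chunks at 8x8x40 nm resolution and max_layer = 6 would yield the following derived fields:

import numpy as np

chunk_size = np.array([512, 512, 512])
resolution = np.array([8, 8, 40])
chunk_shape = (chunk_size * resolution).astype("<f4")   # [4096., 4096., 20480.] nm
lod_scales = 2 ** np.arange(6, dtype=np.dtype("<f4"))   # [1., 2., 4., 8., 16., 32.]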
43 changes: 27 additions & 16 deletions pychunkedgraph/meshing/manifest/sharded.py
@@ -5,43 +5,54 @@
import numpy as np
from cloudvolume import CloudVolume

from .utils import get_mesh_paths
from .utils import get_children_before_start_layer
from ...graph import ChunkedGraph
from ...graph.types import empty_1d
from ...graph.utils.basetypes import NODE_ID
from ...graph.chunks import utils as chunk_utils


def _extract_fragment(val):
try:
path, offset, size = val
path = path.split("initial/")[-1]
return f"~{path}:{offset}:{size}"
except ValueError:
return val


def normalize_fragments(fragments: list) -> list:
new_fragments = []
for val in fragments:
new_fragments.append(_extract_fragment(val))
return new_fragments


def normalize_fragments_d(fragments_d: dict):
fragments = []
for val in fragments_d.values():
fragments.append(_extract_fragment(val))
return fragments
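An example of the normalization (the shard path, offset, and size are invented): a (path, byte offset, size) tuple becomes a compact "~path:offset:size" string, while already-normalized strings pass through unchanged.

fragments = [
    ("meshes/initial/123/shard-0.shard", 4096, 2048),
    "~456/shard-1.shard:0:1024",
]
print(normalize_fragments(fragments))
# ['~123/shard-0.shard:4096:2048', '~456/shard-1.shard:0:1024']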


def verified_manifest(
cg: ChunkedGraph,
node_id: np.uint64,
start_layer: int,
bounding_box=None,
):
from .utils import get_mesh_paths

bounding_box = chunk_utils.normalize_bounding_box(
cg.meta, bounding_box, bbox_is_coordinate=True
)
node_ids = get_children_before_start_layer(
cg, node_id, start_layer, bounding_box=bounding_box
)
print(f"children before start_layer {len(node_ids)}")

start = time()
result = get_mesh_paths(cg, node_ids)
node_ids = np.fromiter(result.keys(), dtype=NODE_ID)

mesh_files = []
for val in result.values():
try:
path, offset, size = val
path = path.split("initial/")[-1]
mesh_files.append(f"~{path}:{offset}:{size}")
except ValueError:
mesh_files.append(val)
print(f"shard lookups took {time() - start}")
return node_ids, mesh_files
node_ids = np.fromiter(result.keys(), dtype=NODE_ID)
result = normalize_fragments_d(result)
return node_ids, result


def speculative_manifest(