Account for transforms in layer metadata #40

Merged · 23 commits · Jun 19, 2024
14 changes: 13 additions & 1 deletion napari_svg/_tests/test_write_layer.py
@@ -4,6 +4,7 @@
from pathlib import Path
from napari.layers import Image, Points, Labels, Shapes, Vectors
from napari.utils.colormaps.colormap_utils import ensure_colormap
from napari_svg.layer_to_xml import layer_transforms_to_xml_string
from napari_svg import (
napari_write_image,
napari_write_labels,
@@ -164,6 +165,15 @@ def test_write_image_colormaps_vispy(tmpdir, layer_writer_and_data, path_ensure,
assert os.path.isfile(path)


NOOP_TRANSFORM = layer_transforms_to_xml_string({
'scale': [1.0, 1.0],
'translate': [0.0, 0.0],
'shear': [0.0],
'rotate': [[1.0, 0.0], [0.0, 1.0]],
'affine': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
})
NOOP_TRANSFORM_STR = f' transform="{NOOP_TRANSFORM}"'
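# Illustrative note (not part of this diff): with identity metadata the string
# above is fixed, so NOOP_TRANSFORM_STR comes out roughly as
#   ' transform="matrix(1.0 0.0 0.0 1.0 0.0 0.0) translate(0.0 0.0) rotate(0.0) skewY(0.0) scale(1.0 1.0)"'
# Stripping it from both the written SVG and the expected fixture below keeps
# the comparison insensitive to the newly added no-op transform attribute.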

def test_write_points_with_attributes(request, tmp_path):
data = [
[0, 0],
@@ -191,4 +201,6 @@ def test_write_points_with_attributes(request, tmp_path):
assert return_path == path

expected_path = Path(__file__).parent / f'{test_name}-expected.svg'
assert path.read_text() == expected_path.read_text()
actual_text = path.read_text().replace(NOOP_TRANSFORM_STR, '')
expected_text = expected_path.read_text().replace(NOOP_TRANSFORM_STR, '')
assert actual_text == expected_text
154 changes: 139 additions & 15 deletions napari_svg/layer_to_xml.py
@@ -44,6 +44,79 @@ def map(self, image):
}


def layer_transforms_to_xml_string(meta):
"""Get the xml representation[1]_[2]_ of the layer transforms.

Parameters
----------
meta : dict
The metadata dictionary from the layer.

Returns
-------
tf_list : str
The transformation list represented as a string.

References
----------
.. [1] https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/transform
.. [2] https://www.w3.org/TR/css-transforms-1/
"""
scale = meta.get('scale', [1, 1])[::-1]
translate = meta.get('translate', [0, 0])[::-1]
rotmat = meta.get('rotate', [[1, 0], [0, 1]])
rotate = np.degrees(np.arctan2(rotmat[0][1], rotmat[1][1]))
# 'shear' in napari specifies the skew along the y-axis in CSS/SVG, but
# the latter is in degrees.
# skew along x can be achieved by combining skewY with a rotation of the
# same amount.
# https://www.w3.org/TR/css-transforms-1/#funcdef-transform-skewy
skewy = np.degrees(np.arctan2(meta.get('shear', [0])[0], 1))
# matrix elements after converting row-column to y, x, first
# flipping the rows and then the first two columns of the matrix:
# a c e -> b d f -> d b f
# b d f -> a c e -> c a e
d, b, f, c, a, e = np.asarray(meta.get('affine', np.eye(3)))[:-1].ravel()
strs = [
f'scale({scale[0]} {scale[1]})',
f'skewY({skewy})',
f'rotate({rotate})',
f'translate({translate[0]} {translate[1]})',
f'matrix({a} {b} {c} {d} {e} {f})',
]
# Note: transforms are interpreted right-to-left in svg, so must be
# inverted here.
return ' '.join(strs[::-1])
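
# Illustrative usage sketch (not part of this diff; values are arbitrary).
# SVG applies the right-most item of a transform list to the element first, so
# the emitted string puts the affine first and the scale last, matching the
# scale -> shear -> rotate -> translate -> affine order used above.
from napari_svg.layer_to_xml import layer_transforms_to_xml_string
example_meta = {
    'scale': [2.0, 3.0],  # napari order: (row, column), i.e. (y, x)
    'translate': [10.0, 20.0],
    'shear': [0.0],
    'rotate': [[1.0, 0.0], [0.0, 1.0]],
    'affine': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
}
print(layer_transforms_to_xml_string(example_meta))
# roughly: matrix(1.0 0.0 0.0 1.0 0.0 0.0) translate(20.0 10.0) rotate(0.0) skewY(0.0) scale(3.0 2.0)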

def make_linear_matrix_and_offset(meta):
"""Make a transformation matrix from the layer metadata."""
rotate = np.array(meta.get('rotate', [[1, 0], [0, 1]]))
shear = np.array([[1, meta.get('shear', [0])[0]], [0, 1]])
scale = np.diag(meta.get('scale', [1, 1]))
translate = np.array(meta.get('translate', [0, 0]))
affine = np.array(meta.get('affine', np.eye(3)))
linear = affine[:2, :2]
affine_tr = affine[:2, 2]
matrix = linear @ (rotate @ shear @ scale)
offset = linear @ translate + affine_tr
return matrix, offset
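
# Illustrative check (not part of this diff; values are arbitrary) of the
# composition order used above: a point is scaled, sheared, rotated,
# translated, and finally passed through the affine, all in (row, column)
# coordinates.
import numpy as np
from napari_svg.layer_to_xml import make_linear_matrix_and_offset

layer_meta = {
    'scale': [2.0, 0.5],
    'translate': [5.0, -3.0],
    'rotate': [[0.0, -1.0], [1.0, 0.0]],  # 90-degree rotation
    'shear': [0.3],
    'affine': [[1.0, 0.2, 1.0], [0.0, 1.0, 2.0], [0.0, 0.0, 1.0]],
}
matrix, offset = make_linear_matrix_and_offset(layer_meta)
point = np.array([4.0, 7.0])
step = np.diag(layer_meta['scale']) @ point                          # scale
step = np.array([[1.0, layer_meta['shear'][0]], [0.0, 1.0]]) @ step  # shear
step = np.array(layer_meta['rotate']) @ step                         # rotate
step = step + np.array(layer_meta['translate'])                      # translate
affine = np.array(layer_meta['affine'])
step = affine[:2, :2] @ step + affine[:2, 2]                         # affine last
assert np.allclose(matrix @ point + offset, step)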


def extrema_coords(coords, meta):
"""Compute the extrema of a set of coordinates after transforms in meta."""
matrix, offset = make_linear_matrix_and_offset(meta)
transformed_data = coords @ matrix.T + offset
return np.array([
np.min(transformed_data, axis=0), np.max(transformed_data, axis=0)
])


def extrema_image(image, meta):
"""Compute the extrema of an image layer, accounting for transforms."""
coords = np.array([[0, 0], list(image.shape)])
return extrema_coords(coords, meta)


def image_to_xml(data, meta):
"""Generates a xml data for an image.

@@ -106,7 +179,7 @@ def image_to_xml(data, meta):
image = data

# Find extrema of data
extrema = np.array([[0, 0], [image.shape[0], image.shape[1]]])
extrema = extrema_image(image, meta)

if rgb:
mapped_image = image
@@ -133,21 +206,40 @@ def image_to_xml(data, meta):
width = str(image.shape[1])
height = str(image.shape[0])

transform = layer_transforms_to_xml_string(meta)

xml = Element(
'image', width=width, height=height, opacity=str(opacity), **props
'image',
width=width,
height=height,
opacity=str(opacity),
transform=transform,
**props,
)
xml_list = [xml]

return xml_list, extrema


def extrema_points(data, meta):
"""Compute the extrema of points, taking transformations into account."""
# TODO: account for point sizes below, not just positions
# could do so by offsetting coordinates along both axes, see for example:
# https://github.com/scikit-image/scikit-image/blob/fa2a326a734c14b05c25057b03d31c84a6c8a635/skimage/morphology/convex_hull.py#L138-L140
return extrema_coords(data, meta)
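
# Hypothetical sketch (not part of this diff) addressing the TODO above: pad
# each point to the four corners of its pre-transform bounding box before
# computing extrema, so marker sizes are covered conservatively. The helper
# name and the per-point `sizes` (diameters) argument are assumptions made
# here for illustration.
import numpy as np
from napari_svg.layer_to_xml import extrema_coords

def extrema_points_with_size(data, meta, sizes):
    half = np.asarray(sizes, dtype=float).reshape(-1, 1) / 2
    corners = np.concatenate(
        [data + half * [dy, dx] for dy in (-1, 1) for dx in (-1, 1)],
        axis=0,
    )
    return extrema_coords(corners, meta)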


def points_to_xml(data, meta):
"""Generates a xml data for points.

Only two dimensional points data is supported. Z ordering of the points
will be taken into account. Each point is represented by a circle. Support
for other symbols is not yet implemented.

Note: any shear or anisotropic scaling value will be applied to the
points, so the markers themselves will be transformed and not perfect
circles anymore.

Parameters
----------
data : array
@@ -205,7 +297,7 @@ def points_to_xml(data, meta):
points = data

# Find extrema of data
extrema = np.array([points.min(axis=0), points.max(axis=0)])
extrema = extrema_points(points, meta)

# Ensure stroke width is an array to handle older versions of
# napari (e.g. v0.4.0) where it could be a scalar.
@@ -214,6 +306,8 @@ def points_to_xml(data, meta):
if meta.get('border_width_is_relative') or meta.get('edge_width_is_relative'):
stroke_width *= size

transform = layer_transforms_to_xml_string(meta)

xml_list = []
for p, s, fc, sc, sw in zip(points, size, face_color, stroke_color, stroke_width):
cx = str(p[1])
Expand All @@ -228,13 +322,24 @@ def points_to_xml(data, meta):
'opacity': str(opacity),
}
element = Element(
'circle', cx=cx, cy=cy, r=r, stroke=stroke, fill=fill, **props
'circle',
cx=cx, cy=cy, r=r,
stroke=stroke,
fill=fill,
transform=transform,
**props,
)
xml_list.append(element)

return xml_list, extrema


def extrema_shapes(shapes_data, meta):
"""Compute the extrema of shapes, taking transformations into account."""
coords = np.concatenate(shapes_data, axis=0)
return extrema_coords(coords, meta)


def shapes_to_xml(data, meta):
"""Generates a xml data for shapes.

@@ -293,16 +398,20 @@ def shapes_to_xml(data, meta):

if len(shapes) > 0:
# Find extrema of data
mins = np.min([np.min(d, axis=0) for d in shapes], axis=0)
maxs = np.max([np.max(d, axis=0) for d in shapes], axis=0)
extrema = np.array([mins, maxs])
extrema = extrema_shapes(shapes, meta)
else:
# use nan — these will be discarded when aggregating all layers
extrema = np.full((2, 2), np.nan)

transform = layer_transforms_to_xml_string(meta)
raw_xml_list = []
zipped = zip(shapes, shape_type, face_color, edge_color, edge_width)
for s, st, fc, ec, ew in zipped:
props = {'stroke-width': str(ew), 'opacity': str(opacity)}
props = {
'stroke-width': str(ew),
'opacity': str(opacity),
'transform': transform,
}
fc_int = (255 * fc).astype(int)
props['fill'] = f'rgb{tuple(fc_int[:3])}'
ec_int = (255 * ec).astype(int)
Expand All @@ -317,6 +426,20 @@ def shapes_to_xml(data, meta):
return xml_list, extrema


def extrema_vectors(vectors, meta):
"""Compute the extrema of vectors, taking projections into account."""
length = meta.get('length', 1)
start_ends = np.empty(
(vectors.shape[0] * vectors.shape[1], vectors.shape[-1]),
dtype=vectors.dtype,
)
start_ends[:vectors.shape[0]] = vectors[:, 0, :]
start_ends[vectors.shape[0]:] = (
vectors[:, 0, :] + length * vectors[:, 1, :]
)
return extrema_coords(start_ends, meta)
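
# Quick shape illustration (not part of this diff; values are arbitrary):
# N vectors given as (origin, direction) pairs become 2N points, the origins
# followed by the projected tips origin + length * direction.
import numpy as np
from napari_svg.layer_to_xml import extrema_vectors

vecs = np.array([
    [[0.0, 0.0], [1.0, 2.0]],  # origin (0, 0), direction (1, 2)
    [[3.0, 1.0], [2.0, 0.0]],  # origin (3, 1), direction (2, 0)
])
print(extrema_vectors(vecs, {'length': 2}))
# roughly [[0. 0.], [7. 4.]] with the default identity transform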


def vectors_to_xml(data, meta):
"""Generates a xml data for vectors.

@@ -368,13 +491,14 @@ def vectors_to_xml(data, meta):
vectors = data

# Find extrema of data
full_vectors = copy(vectors)
full_vectors[:, 1, :] = vectors[:, 0, :] + length * vectors[:, 1, :]
mins = np.min(full_vectors, axis=(0, 1))
maxs = np.max(full_vectors, axis=(0, 1))
extrema = np.array([mins, maxs])

props = {'stroke-width': str(edge_width), 'opacity': str(opacity)}
extrema = extrema_vectors(vectors, meta)

transform = layer_transforms_to_xml_string(meta)
props = {
'stroke-width': str(edge_width),
'opacity': str(opacity),
'transform': transform,
}

xml_list = []
for v, ec in zip(vectors, edge_color):
Expand Down