-
Notifications
You must be signed in to change notification settings - Fork 18
Expand file tree
/
Copy pathutils.py
More file actions
2807 lines (2405 loc) · 106 KB
/
utils.py
File metadata and controls
2807 lines (2405 loc) · 106 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
from __future__ import annotations
import math
import os
import warnings
from collections import OrderedDict
from collections.abc import Iterable, Mapping, Sequence
from copy import copy
from functools import partial
from pathlib import Path
from types import MappingProxyType
from typing import Any, Literal
import dask
import datashader as ds
import matplotlib
import matplotlib.patches as mpatches
import matplotlib.path as mpath
import matplotlib.pyplot as plt
import matplotlib.ticker
import matplotlib.transforms as mtransforms
import numpy as np
import numpy.ma as ma
import numpy.typing as npt
import pandas as pd
import shapely
import spatialdata as sd
from anndata import AnnData
from cycler import Cycler, cycler
from datashader.core import Canvas
from geopandas import GeoDataFrame
from matplotlib import colors, patheffects, rcParams
from matplotlib.axes import Axes
from matplotlib.collections import PatchCollection
from matplotlib.colors import (
ColorConverter,
Colormap,
LinearSegmentedColormap,
ListedColormap,
Normalize,
to_rgba,
)
from matplotlib.figure import Figure
from matplotlib.gridspec import GridSpec
from matplotlib.transforms import CompositeGenericTransform
from matplotlib_scalebar.scalebar import ScaleBar
from numpy.ma.core import MaskedArray
from numpy.random import default_rng
from pandas.api.types import CategoricalDtype
from pandas.core.arrays.categorical import Categorical
from scanpy import settings
from scanpy.plotting._tools.scatterplots import _add_categorical_legend
from scanpy.plotting._utils import add_colors_for_categorical_sample_annotation
from scanpy.plotting.palettes import default_20, default_28, default_102
from scipy.spatial import ConvexHull
from skimage.color import label2rgb
from skimage.morphology import erosion, square
from skimage.segmentation import find_boundaries
from skimage.util import map_array
from spatialdata import (
SpatialData,
get_element_annotators,
get_extent,
get_values,
rasterize,
)
from spatialdata._core.query.relational_query import _locate_value
from spatialdata._types import ArrayLike
from spatialdata.models import Image2DModel, Labels2DModel, SpatialElement
from spatialdata.transformations.operations import get_transformation
from spatialdata.transformations.transformations import Scale
from xarray import DataArray, DataTree
from spatialdata_plot._logging import logger
from spatialdata_plot.pl.render_params import (
CmapParams,
Color,
FigParams,
ImageRenderParams,
LabelsRenderParams,
OutlineParams,
PointsRenderParams,
ScalebarParams,
ShapesRenderParams,
_FontSize,
_FontWeight,
)
# Hex serializer that keeps the alpha channel (matplotlib's `to_hex` drops it by default).
to_hex = partial(colors.to_hex, keep_alpha=True)

# replace with
# from spatialdata._types import ColorLike
# once https://github.com/scverse/spatialdata/pull/689/ is in a release
ColorLike = tuple[float, ...] | list[float] | str
def _extract_scalar_value(value: Any, default: float = 0.0) -> float:
"""
Extract a scalar float value from various data types.
Handles pandas Series, arrays, lists, and other iterables by taking the first element.
Converts non-numeric values to the default value.
Parameters
----------
value : Any
The value to extract a scalar from
default : float, default 0.0
Default value to return if conversion fails
Returns
-------
float
The extracted scalar value
"""
try:
# Handle pandas Series or similar objects with iloc
if hasattr(value, "iloc"):
if len(value) > 0:
value = value.iloc[0]
else:
return default
# Handle other array-like objects
elif hasattr(value, "__len__") and not isinstance(value, (str, bytes)):
if len(value) > 0:
value = value[0]
else:
return default
# Convert to float, handling NaN values
if pd.isna(value):
return default
return float(value)
except (TypeError, ValueError, IndexError):
return default
def _verify_plotting_tree(sdata: SpatialData) -> SpatialData:
"""Verify that the plotting tree exists, and if not, create it."""
if not hasattr(sdata, "plotting_tree"):
sdata.plotting_tree = OrderedDict()
return sdata
def _get_coordinate_system_mapping(sdata: SpatialData) -> dict[str, list[str]]:
    """Map each coordinate system name to the element keys registered in it.

    Parameters
    ----------
    sdata
        The SpatialData object to inspect.

    Returns
    -------
    dict[str, list[str]]
        For every coordinate system, the keys of all images, labels, shapes
        and points (in that order) that carry a transformation into it.

    Raises
    ------
    ValueError
        If ``sdata`` has no coordinate systems.
    """
    coordsys_keys = sdata.coordinate_systems
    if len(coordsys_keys) < 1:
        raise ValueError("SpatialData object must have at least one coordinate system to generate a mapping.")

    # Iterate the element collections in a fixed order so each per-key list
    # keeps the historical images -> labels -> shapes -> points ordering.
    element_groups = (sdata.images, sdata.labels, sdata.shapes, sdata.points)

    mapping: dict[str, list[str]] = {key: [] for key in coordsys_keys}
    for key in coordsys_keys:
        for elements in element_groups:
            if elements is None:
                continue
            for element_key, element in elements.items():
                # an element belongs to `key` iff it has a transformation into it
                if key in get_transformation(element, get_all=True):
                    mapping[key].append(element_key)
    return mapping
def _is_color_like(color: Any) -> bool:
    """Check if a value is a valid color.

    For discussion, see: https://github.com/scverse/spatialdata-plot/issues/327.

    matplotlib accepts strings in [0, 1] as grey-scale values - therefore,
    "0" and "1" are considered valid colors. However, we won't do that
    so we're filtering these out.
    """
    if isinstance(color, str):
        # Reject strings that matplotlib would read as greyscale intensities.
        try:
            if 0 <= float(color) <= 1:
                return False
        except ValueError:
            # not a numeric string, so not a greyscale value
            pass
        # Hex colors must be full-length #RRGGBB or #RRGGBBAA; unlike
        # matplotlib, we reject short forms such as #abc.
        if color.startswith("#") and len(color) not in (7, 9):
            return False
    return bool(colors.is_color_like(color))
def _prepare_params_plot(
    # this param is inferred when `pl.show`` is called
    num_panels: int,
    # this args are passed at `pl.show``
    figsize: tuple[float, float] | None = None,
    dpi: int | None = None,
    fig: Figure | None = None,
    ax: Axes | Sequence[Axes] | None = None,
    wspace: float | None = None,
    hspace: float = 0.25,
    ncols: int = 4,
    frameon: bool | None = None,
    # this args will be inferred from coordinate system
    scalebar_dx: float | Sequence[float] | None = None,
    scalebar_units: str | Sequence[str] | None = None,
) -> tuple[FigParams, ScalebarParams]:
    """Resolve figure, axes and scalebar configuration for a (possibly multi-panel) plot.

    Parameters
    ----------
    num_panels
        Number of panels the plot will contain.
    figsize, dpi
        Figure size and resolution; fall back to matplotlib's rcParams.
    fig, ax
        Optional pre-existing figure and axes. A sequence of axes requires
        ``fig`` to be given as well.
    wspace, hspace, ncols
        Grid layout parameters used when axes are created here.
    frameon
        Passed through into the resulting FigParams.
    scalebar_dx, scalebar_units
        Scalebar size/unit settings, broadcast to one value per panel.

    Returns
    -------
    tuple[FigParams, ScalebarParams]
        Bundled figure/axes settings and scalebar settings.
    """
    # handle axes and size
    wspace = 0.75 / rcParams["figure.figsize"][0] + 0.02 if wspace is None else wspace
    figsize = rcParams["figure.figsize"] if figsize is None else figsize
    dpi = rcParams["figure.dpi"] if dpi is None else dpi
    if num_panels > 1 and ax is None:
        # multi-panel plot without user axes: build a fresh grid of subplots
        fig, grid = _panel_grid(
            num_panels=num_panels,
            hspace=hspace,
            wspace=wspace,
            ncols=ncols,
            dpi=dpi,
            figsize=figsize,
        )
        axs: None | Sequence[Axes] = [plt.subplot(grid[c]) for c in range(num_panels)]
    elif num_panels > 1:
        # multi-panel plot on user-provided axes: validate type, count and figure
        if not isinstance(ax, Sequence):
            raise TypeError(f"Expected `ax` to be a `Sequence`, but got {type(ax).__name__}")
        if ax is not None and len(ax) != num_panels:
            raise ValueError(f"Len of `ax`: {len(ax)} is not equal to number of panels: {num_panels}.")
        if fig is None:
            raise ValueError(
                f"Invalid value of `fig`: {fig}. If a list of `Axes` is passed, a `Figure` must also be specified."
            )
        assert ax is None or isinstance(ax, Sequence), f"Invalid type of `ax`: {type(ax)}, expected `Sequence`."
        axs = ax
    else:
        # single panel: create a figure/axes pair unless one was supplied
        axs = None
        if ax is None:
            fig, ax = plt.subplots(figsize=figsize, dpi=dpi, constrained_layout=True)
        elif isinstance(ax, Axes):
            # needed for rasterization if user provides Axes object
            fig = ax.get_figure()
            fig.set_dpi(dpi)

    # set scalebar
    if scalebar_dx is not None:
        scalebar_dx, scalebar_units = _get_scalebar(scalebar_dx, scalebar_units, num_panels)

    fig_params = FigParams(
        fig=fig,
        ax=ax,
        axs=axs,
        num_panels=num_panels,
        frameon=frameon,
    )
    scalebar_params = ScalebarParams(scalebar_dx=scalebar_dx, scalebar_units=scalebar_units)

    return fig_params, scalebar_params
def _get_cs_contents(sdata: sd.SpatialData) -> pd.DataFrame:
    """Check which coordinate systems contain which elements and return that info.

    Parameters
    ----------
    sdata
        The SpatialData object to inspect.

    Returns
    -------
    pd.DataFrame
        One row per coordinate system with a ``cs`` column and boolean flags
        ``has_images``, ``has_labels``, ``has_points`` and ``has_shapes``.
    """
    cs_mapping = _get_coordinate_system_mapping(sdata)
    content_flags = ["has_images", "has_labels", "has_points", "has_shapes"]

    # Collect one record per coordinate system and build the frame in one go
    # instead of calling pd.concat inside the loop (which is quadratic).
    records = [
        {
            "cs": cs_name,
            "has_images": any(e in sdata.images for e in element_ids),
            "has_labels": any(e in sdata.labels for e in element_ids),
            "has_points": any(e in sdata.points for e in element_ids),
            "has_shapes": any(e in sdata.shapes for e in element_ids),
        }
        for cs_name, element_ids in cs_mapping.items()
    ]
    if not records:
        # defensive: _get_coordinate_system_mapping raises when there are no
        # coordinate systems, so this is normally unreachable
        return pd.DataFrame(columns=["cs"] + content_flags)

    # Keep the historical all-zero index produced by concatenating single-row
    # frames, in case callers rely on the repeated label 0.
    cs_contents = pd.DataFrame(records, index=[0] * len(records), columns=["cs"] + content_flags)
    for flag in content_flags:
        cs_contents[flag] = cs_contents[flag].astype("bool")
    return cs_contents
def _get_centroid_of_pathpatch(pathpatch: mpatches.PathPatch) -> tuple[float, float]:
# Extract the vertices from the PathPatch
path = pathpatch.get_path()
vertices = path.vertices
x = vertices[:, 0]
y = vertices[:, 1]
area = 0.5 * np.sum(x[:-1] * y[1:] - x[1:] * y[:-1])
# Calculate the centroid coordinates
centroid_x = np.sum((x[:-1] + x[1:]) * (x[:-1] * y[1:] - x[1:] * y[:-1])) / (6 * area)
centroid_y = np.sum((y[:-1] + y[1:]) * (x[:-1] * y[1:] - x[1:] * y[:-1])) / (6 * area)
return centroid_x, centroid_y
def _scale_pathpatch_around_centroid(pathpatch: mpatches.PathPatch, scale_factor: float) -> None:
    """Scale a PathPatch in place about its own centroid by ``scale_factor``."""
    factor = _extract_scalar_value(scale_factor, default=1.0)
    center = np.asarray(_get_centroid_of_pathpatch(pathpatch))
    path = pathpatch.get_path()
    # Vectorized affine rescale: translate to origin, scale, translate back.
    path.vertices = center + (path.vertices - center) * factor
def _get_collection_shape(
    shapes: list[GeoDataFrame],
    c: Any,
    s: float,
    norm: Any,
    render_params: ShapesRenderParams,
    fill_alpha: None | float = None,
    outline_alpha: None | float = None,
    outline_color: None | str | list[float] = "white",
    linewidth: float = 0.0,
    **kwargs: Any,
) -> PatchCollection:
    """
    Build a PatchCollection for shapes with correct handling of.

    - continuous numeric vectors with NaNs,
    - per-row RGBA arrays,
    - a single color or a list of color specs.

    Only NaNs are painted with na_color; finite values are mapped via norm+cmap.

    Parameters
    ----------
    shapes
        Geometries to render (one row per shape; geometry types handled are
        Polygon, MultiPolygon and Point).
    c
        Color specification: numeric vector, Nx3/Nx4 RGBA array, object array
        mixing numbers and color strings, or a single/list of color specs.
    s
        Scale factor applied to each geometry around its centroid.
    norm
        Normalization for numeric color values; if not a Normalize instance,
        one is derived from the finite values in ``c``.
    render_params
        Carries the colormap parameters, including the NA color.
    fill_alpha, outline_alpha
        Optional alphas for fills and outlines.
    outline_color
        Color(s) for outlines; only used when ``outline_alpha`` > 0.
    linewidth
        Outline line width.
    kwargs
        Must contain ``cmap``; remaining entries are forwarded to
        PatchCollection.
    """
    cmap = kwargs["cmap"]

    # Resolve na color once
    na_rgba = colors.to_rgba(render_params.cmap_params.na_color.get_hex_with_alpha())

    # Try to interpret c as numpy array
    c_arr = np.asarray(c)
    fill_c: np.ndarray

    def _as_rgba_array(x: Any) -> np.ndarray:
        # normalize any matplotlib-accepted color spec(s) to an (N, 4) array
        return np.asarray(ColorConverter().to_rgba_array(x))

    # Case A: per-row numeric colors given as Nx3 or Nx4 float array
    if (
        c_arr.ndim == 2
        and c_arr.shape[0] == len(shapes)
        and c_arr.shape[1] in (3, 4)
        and np.issubdtype(c_arr.dtype, np.number)
    ):
        fill_c = _as_rgba_array(c_arr)
    # Case B: continuous numeric vector len == n_shapes (possibly with NaNs)
    elif c_arr.ndim == 1 and len(c_arr) == len(shapes) and np.issubdtype(c_arr.dtype, np.number):
        finite_mask = np.isfinite(c_arr)
        # Select or build a normalization that ignores NaNs for scaling
        if isinstance(norm, Normalize):
            used_norm: Normalize = norm
        else:
            if finite_mask.any():
                vmin = float(np.nanmin(c_arr[finite_mask]))
                vmax = float(np.nanmax(c_arr[finite_mask]))
                # degenerate range (all equal or non-finite): fall back to [0, 1]
                if not np.isfinite(vmin) or not np.isfinite(vmax) or vmin == vmax:
                    vmin, vmax = 0.0, 1.0
            else:
                vmin, vmax = 0.0, 1.0
            used_norm = colors.Normalize(vmin=vmin, vmax=vmax, clip=False)
        # Map finite values through cmap(norm(.)); NaNs get na_color
        fill_c = np.empty((len(c_arr), 4), dtype=float)
        fill_c[:] = na_rgba
        if finite_mask.any():
            fill_c[finite_mask] = cmap(used_norm(c_arr[finite_mask]))
    elif c_arr.ndim == 1 and len(c_arr) == len(shapes) and c_arr.dtype == object:
        # Split into numeric vs color-like
        c_series = pd.Series(c_arr, copy=False)
        num = pd.to_numeric(c_series, errors="coerce").to_numpy()
        is_num = np.isfinite(num)
        # init with na color
        fill_c = np.empty((len(c_series), 4), dtype=float)
        fill_c[:] = na_rgba
        # numeric entries via cmap(norm)
        if is_num.any():
            if isinstance(norm, Normalize):
                used_norm = norm
            else:
                vmin = float(np.nanmin(num[is_num])) if is_num.any() else 0.0
                vmax = float(np.nanmax(num[is_num])) if is_num.any() else 1.0
                if not np.isfinite(vmin) or not np.isfinite(vmax) or vmin == vmax:
                    vmin, vmax = 0.0, 1.0
                used_norm = colors.Normalize(vmin=vmin, vmax=vmax, clip=False)
            fill_c[is_num] = cmap(used_norm(num[is_num]))
        # non-numeric entries as explicit colors
        if (~is_num).any():
            fill_c[~is_num] = ColorConverter().to_rgba_array(c_series[~is_num].tolist())
    # Case C: single color or list of color-like specs (strings or tuples)
    else:
        fill_c = _as_rgba_array(c)

    # Apply optional fill alpha without destroying existing transparency
    if fill_alpha is not None:
        nonzero_alpha = fill_c[..., -1] > 0
        fill_c[nonzero_alpha, -1] = fill_alpha

    # Outline handling
    if outline_alpha and outline_alpha > 0.0:
        outline_c_array = _as_rgba_array(outline_color)
        outline_c_array[..., -1] = outline_alpha
        outline_c = outline_c_array.tolist()
    else:
        # no outlines requested: one None per shape so edgecolor stays unset
        outline_c = [None] * fill_c.shape[0]

    # Build DataFrame of valid geometries
    shapes_df = pd.DataFrame(shapes, copy=True)
    shapes_df = shapes_df[shapes_df["geometry"].apply(lambda geom: not geom.is_empty)]
    shapes_df = shapes_df.reset_index(drop=True)

    def _assign_fill_and_outline_to_row(
        fill_colors: list[Any],
        outline_colors: list[Any],
        row: dict[str, Any],
        idx: int,
        is_multiple_shapes: bool,
    ) -> None:
        # A single color spec is broadcast across all shapes; otherwise index per row.
        if is_multiple_shapes and len(fill_colors) == 1:
            row["fill_c"] = fill_colors[0]
            row["outline_c"] = outline_colors[0]
        else:
            row["fill_c"] = fill_colors[idx]
            row["outline_c"] = outline_colors[idx]

    def _process_polygon(row: pd.Series, scale: float) -> dict[str, Any]:
        # Scale the polygon's exterior ring about its vertex mean.
        coords = np.array(row["geometry"].exterior.coords)
        centroid = np.mean(coords, axis=0)
        scale_value = _extract_scalar_value(scale, default=1.0)
        scaled = (centroid + (coords - centroid) * scale_value).tolist()
        return {**row.to_dict(), "geometry": mpatches.Polygon(scaled, closed=True)}

    def _process_multipolygon(row: pd.Series, scale: float) -> list[dict[str, Any]]:
        # One patch (and thus one output row) per sub-polygon.
        mp = _make_patch_from_multipolygon(row["geometry"])
        row_dict = row.to_dict()
        for m in mp:
            _scale_pathpatch_around_centroid(m, scale)
        return [{**row_dict, "geometry": m} for m in mp]

    def _process_point(row: pd.Series, scale: float) -> dict[str, Any]:
        # Points are drawn as circles; radius comes from the "radius" column.
        radius_value = _extract_scalar_value(row["radius"], default=0.0)
        scale_value = _extract_scalar_value(scale, default=1.0)
        radius = radius_value * scale_value
        return {
            **row.to_dict(),
            "geometry": mpatches.Circle((row["geometry"].x, row["geometry"].y), radius=radius),
        }

    def _create_patches(
        shapes_df_: GeoDataFrame, fill_colors: list[Any], outline_colors: list[Any], scale: float
    ) -> pd.DataFrame:
        rows: list[dict[str, Any]] = []
        is_multiple = len(shapes_df_) > 1
        for idx, row in shapes_df_.iterrows():
            geom_type = row["geometry"].geom_type
            processed: list[dict[str, Any]] = []
            if geom_type == "Polygon":
                processed.append(_process_polygon(row, scale))
            elif geom_type == "MultiPolygon":
                processed.extend(_process_multipolygon(row, scale))
            elif geom_type == "Point":
                processed.append(_process_point(row, scale))
            for pr in processed:
                _assign_fill_and_outline_to_row(fill_colors, outline_colors, pr, idx, is_multiple)
                rows.append(pr)
        return pd.DataFrame(rows)

    patches = _create_patches(
        shapes_df, fill_c.tolist(), outline_c.tolist() if hasattr(outline_c, "tolist") else outline_c, s
    )
    return PatchCollection(
        patches["geometry"].values.tolist(),
        snap=False,
        lw=linewidth,
        facecolor=patches["fill_c"],
        edgecolor=None if all(o is None for o in outline_c) else outline_c,
        **kwargs,
    )
def _panel_grid(
    num_panels: int,
    hspace: float,
    wspace: float,
    ncols: int,
    figsize: tuple[float, float],
    dpi: int | None = None,
) -> tuple[Figure, GridSpec]:
    """Create a figure and a GridSpec laid out for ``num_panels`` panels."""
    cols_used = min(ncols, num_panels)
    rows_used = math.ceil(num_panels / cols_used)

    # widen the figure so per-panel width stays constant despite wspace
    fig = plt.figure(
        figsize=(figsize[0] * cols_used * (1 + wspace), figsize[1] * rows_used),
        dpi=dpi,
    )

    left_margin = 0.2 / cols_used
    bottom_margin = 0.13 / rows_used
    grid = GridSpec(
        nrows=rows_used,
        ncols=cols_used,
        left=left_margin,
        right=1 - (cols_used - 1) * left_margin - 0.01 / cols_used,
        bottom=bottom_margin,
        top=1 - (rows_used - 1) * bottom_margin - 0.1 / rows_used,
        hspace=hspace,
        wspace=wspace,
    )
    return fig, grid
def _get_scalebar(
    scalebar_dx: float | Sequence[float] | None = None,
    scalebar_units: str | Sequence[str] | None = None,
    len_lib: int | None = None,
) -> tuple[Sequence[float] | None, Sequence[str] | None]:
    """Broadcast scalebar size/unit settings to per-panel lists, or (None, None) if unset."""
    # No scalebar requested at all.
    if scalebar_dx is None:
        return None, None

    dx_list = _get_list(scalebar_dx, _type=float, ref_len=len_lib, name="scalebar_dx")
    # default unit is micrometers when a size is given without units
    units = scalebar_units if scalebar_units is not None else "um"
    units_list = _get_list(units, _type=str, ref_len=len_lib, name="scalebar_units")
    return dx_list, units_list
def _prepare_cmap_norm(
    cmap: Colormap | str | None = None,
    norm: Normalize | None = None,
    na_color: Color = Color(),
) -> CmapParams:
    """Resolve colormap and normalization defaults and bundle them into a CmapParams.

    A string colormap name is looked up in the matplotlib registry; a missing
    colormap falls back to the rcParams default. The colormap is copied before
    its "bad" (NaN) color is set, so the registered instance stays untouched.
    """
    # NOTE(review): the `Color()` default is evaluated once at import time —
    # fine only if Color instances are immutable; confirm.
    # TODO: check refactoring norm out here as it gets overwritten later
    cmap_is_default = cmap is None
    if cmap is None:
        cmap = rcParams["image.cmap"]
    if isinstance(cmap, str):
        cmap = matplotlib.colormaps[cmap]
    cmap = copy(cmap)
    assert isinstance(cmap, Colormap), f"Invalid type of `cmap`: {type(cmap)}, expected `Colormap`."

    # NaN / masked entries render in the NA color
    cmap.set_bad(na_color.get_hex_with_alpha())

    return CmapParams(
        cmap=cmap,
        norm=norm if norm is not None else Normalize(vmin=None, vmax=None, clip=False),
        na_color=na_color,
        cmap_is_default=cmap_is_default,
    )
def _set_outline(
    outline_alpha: float | int | tuple[float | int, float | int] | None,
    outline_width: int | float | tuple[float | int, float | int] | None,
    outline_color: Color | tuple[Color, Color | None] | None,
    **kwargs: Any,
) -> tuple[tuple[float, float], OutlineParams]:
    """Create OutlineParams object for shapes, including possibility of double outline.

    Rules for outline rendering:
    1) outline_alpha always takes precedence if given by the user.
    In absence of outline_alpha:
    2) If outline_color is specified and implying an alpha (e.g. RGBA array or #RRGGBBAA): that alpha is used
    3) If outline_color (w/o implying an alpha) and/or outline_width is specified: alpha of outlines set to 1.0

    Returns
    -------
    tuple[tuple[float, float], OutlineParams]
        The (outer, inner) outline alphas and the outline parameters
        (outer color, outer width, inner color, inner width).
    """
    # A) User doesn't want to see outlines
    if (
        (outline_alpha and outline_alpha == 0.0)
        or (isinstance(outline_alpha, tuple) and np.all(np.array(outline_alpha) == 0.0))
        or not (outline_alpha or outline_width or outline_color)
    ):
        # zero alphas disable rendering; the widths/colors here are placeholders
        return (0.0, 0.0), OutlineParams(None, 1.5, None, 0.5)

    # B) User wants to see at least 1 outline
    # Normalize outline_width first: a 2-tuple means "double outline" and
    # forces outline_color into a matching 2-tuple (defaulting to black/white).
    if isinstance(outline_width, tuple):
        if len(outline_width) != 2:
            raise ValueError(
                f"Tuple of length {len(outline_width)} was passed for outline_width. When specifying multiple outlines,"
                " please pass a tuple of exactly length 2."
            )
        if not outline_color:
            outline_color = (Color("#000000"), Color("#ffffff"))
        elif not isinstance(outline_color, tuple):
            raise ValueError(
                "No tuple was passed for outline_color, while two outlines were specified by using the outline_width "
                "argument. Please specify the outline colors in a tuple of length two."
            )
    # Symmetric normalization: a 2-tuple of colors forces widths into a 2-tuple.
    if isinstance(outline_color, tuple):
        if len(outline_color) != 2:
            raise ValueError(
                f"Tuple of length {len(outline_color)} was passed for outline_color. When specifying multiple outlines,"
                " please pass a tuple of exactly length 2."
            )
        if not outline_width:
            outline_width = (1.5, 0.5)
        elif not isinstance(outline_width, tuple):
            raise ValueError(
                "No tuple was passed for outline_width, while two outlines were specified by using the outline_color "
                "argument. Please specify the outline widths in a tuple of length two."
            )
    # Single outline: pad with a zero second width / a None second color.
    if isinstance(outline_width, float | int):
        outline_width = (outline_width, 0.0)
    elif not outline_width:
        outline_width = (1.5, 0.0)
    if isinstance(outline_color, Color):
        outline_color = (outline_color, None)
    elif not outline_color:
        outline_color = (Color("#000000ff"), None)
    assert isinstance(outline_color, tuple), "outline_color is not a tuple"  # shut up mypy
    assert isinstance(outline_width, tuple), "outline_width is not a tuple"
    for ow in outline_width:
        if not isinstance(ow, int | float):
            raise TypeError(f"Invalid type of `outline_width`: {type(ow)}, expected `int` or `float`.")
    if outline_alpha:
        # Rule 1: explicit user alpha wins; broadcast a scalar to both outlines.
        if isinstance(outline_alpha, int | float):
            # for a single outline: second width value is 0.0
            outline_alpha = (outline_alpha, 0.0) if outline_width[1] == 0.0 else (outline_alpha, outline_alpha)
    else:
        # if alpha wasn't explicitly specified by the user
        # Rules 2/3: derive alphas from the colors (defaulting to opaque).
        outer_ol_alpha = outline_color[0].get_alpha_as_float() if isinstance(outline_color[0], Color) else 1.0
        inner_ol_alpha = outline_color[1].get_alpha_as_float() if isinstance(outline_color[1], Color) else 1.0
        outline_alpha = (outer_ol_alpha, inner_ol_alpha)
    # handle possible linewidths of 0.0 => outline won't be rendered in the first place
    if outline_width[0] == 0.0:
        outline_alpha = (0.0, outline_alpha[1])
    if outline_width[1] == 0.0:
        outline_alpha = (outline_alpha[0], 0.0)
    if outline_alpha[0] > 0.0 or outline_alpha[1] > 0.0:
        kwargs.pop("edgecolor", None)  # remove edge from kwargs if present
        kwargs.pop("alpha", None)  # remove alpha from kwargs if present
    return outline_alpha, OutlineParams(
        outline_color[0],
        outline_width[0],
        outline_color[1],
        outline_width[1],
    )
def _get_subplots(num_images: int, ncols: int = 4, width: int = 4, height: int = 3) -> plt.Figure | plt.Axes:
    """Set up the axs objects.

    Parameters
    ----------
    num_images
        Number of images to plot. Must be greater than 1.
    ncols
        Number of columns in the subplot grid, by default 4
    width
        Width of each subplot, by default 4
    height
        Height of each subplot, by default 3

    Returns
    -------
    Union[plt.Figure, plt.Axes]
        Matplotlib figure and axes object.
    """
    if num_images < ncols:
        # fewer images than columns: a single row, one column per image
        nrows, ncols = 1, num_images
    else:
        # enough full rows to hold all images, plus one for any remainder
        nrows = math.ceil(num_images / ncols)

    fig, axes = plt.subplots(nrows, ncols, figsize=(width * ncols, height * nrows))
    if not isinstance(axes, Iterable):
        # a single Axes comes back unwrapped; normalize to an array
        axes = np.array([axes])

    # get rid of the empty axes
    _ = [ax.axis("off") for ax in axes.flatten()[num_images:]]
    return fig, axes
def _get_colors_for_categorical_obs(
    categories: Sequence[str | int],
    palette: ListedColormap | str | list[str] | None = None,
    alpha: float = 1.0,
    cmap_params: CmapParams | None = None,
) -> list[str]:
    """
    Return a list of colors for a categorical observation.

    Parameters
    ----------
    categories
        Categories of the categorical observation.
    palette
        User-provided palette: a colormap, a single color string, or a list of
        colors (must match the number of categories). If None, a default
        palette is derived (see below).
    alpha
        Alpha applied when sampling colors from a colormap.
    cmap_params
        If given and carrying a non-default colormap, that colormap takes
        precedence over the built-in default palettes.

    Returns
    -------
    list[str]
        One hex color (with alpha) per category.
    """
    len_cat = len(categories)
    # check if default matplotlib palette has enough colors
    if palette is None:
        if cmap_params is not None and not cmap_params.cmap_is_default:
            palette = cmap_params.cmap
        elif len(rcParams["axes.prop_cycle"].by_key()["color"]) >= len_cat:
            # use the active matplotlib color cycle when it is large enough
            cc = rcParams["axes.prop_cycle"]()
            palette = [next(cc)["color"] for _ in range(len_cat)]
        elif len_cat <= 20:
            # fall back to scanpy's fixed-size default palettes
            palette = default_20
        elif len_cat <= 28:
            palette = default_28
        elif len_cat <= len(default_102):  # 103 colors
            palette = default_102
        else:
            palette = ["grey" for _ in range(len_cat)]
            logger.info("input has more than 103 categories. Uniform 'grey' color will be used for all categories.")
    else:
        # raise error when user didn't provide the right number of colors in palette
        if isinstance(palette, list) and len(palette) != len(categories):
            raise ValueError(
                f"The number of provided values in the palette ({len(palette)}) doesn't agree with the number of "
                f"categories that should be colored ({categories})."
            )
    # otherwise, single channels turn out grey
    color_idx = np.linspace(0, 1, len_cat) if len_cat > 1 else [0.7]
    if isinstance(palette, str):
        palette = [to_hex(palette)]
    elif isinstance(palette, list):
        palette = [to_hex(x) for x in palette]
    elif isinstance(palette, ListedColormap):
        palette = [to_hex(x) for x in palette(color_idx, alpha=alpha)]
    elif isinstance(palette, LinearSegmentedColormap):
        palette = [to_hex(palette(x, alpha=alpha)) for x in color_idx]  # type: ignore[attr-defined]
    else:
        raise TypeError(f"Palette is {type(palette)} but should be string or list.")
    return palette[:len_cat]  # type: ignore[return-value]
def _set_color_source_vec(
    sdata: sd.SpatialData,
    element: SpatialElement | None,
    value_to_plot: str | None,
    na_color: Color,
    element_name: list[str] | str | None = None,
    groups: list[str] | str | None = None,
    palette: list[str] | str | None = None,
    cmap_params: CmapParams | None = None,
    alpha: float = 1.0,
    table_name: str | None = None,
    table_layer: str | None = None,
    render_type: Literal["points"] | None = None,
) -> tuple[ArrayLike | pd.Series | None, ArrayLike, bool]:
    """Resolve the values and colors used to paint an element.

    Returns a triple ``(color_source_vector, color_vector, is_categorical)``:
    the raw values used for coloring (None for continuous data), the
    per-observation colors (or raw values for continuous data), and whether
    the data is categorical.
    """
    if value_to_plot is None and element is not None:
        # no color key requested: paint everything with the NA color
        color = np.full(len(element), na_color.get_hex_with_alpha())
        return color, color, False

    # Figure out where to get the color from
    origins = _locate_value(
        value_key=value_to_plot,
        sdata=sdata,
        element_name=element_name,
        table_name=table_name,
    )

    if len(origins) > 1:
        # ambiguous key: refuse to guess which location the user meant
        raise ValueError(
            f"Color key '{value_to_plot}' for element '{element_name}' been found in multiple locations: {origins}."
        )

    if len(origins) == 1:
        color_source_vector = get_values(
            value_key=value_to_plot,
            sdata=sdata,
            element_name=element_name,
            table_name=table_name,
            table_layer=table_layer,
        )[value_to_plot]

        # numerical case, return early
        # TODO temporary split until refactor is complete
        if color_source_vector is not None and not isinstance(color_source_vector.dtype, pd.CategoricalDtype):
            # warn when a categorical palette was supplied for continuous data
            if (
                not isinstance(element, GeoDataFrame)
                and isinstance(palette, list)
                and palette[0] is not None
                or isinstance(element, GeoDataFrame)
                and isinstance(palette, list)
            ):
                logger.warning(
                    "Ignoring categorical palette which is given for a continuous variable. "
                    "Consider using `cmap` to pass a ColorMap."
                )
            return None, color_source_vector, False

        color_source_vector = pd.Categorical(color_source_vector)  # convert, e.g., `pd.Series`

        color_mapping = _get_categorical_color_mapping(
            adata=sdata.get(table_name, None),
            cluster_key=value_to_plot,
            color_source_vector=color_source_vector,
            cmap_params=cmap_params,
            alpha=alpha,
            groups=groups,
            palette=palette,
            na_color=na_color,
            render_type=render_type,
        )
        color_source_vector = color_source_vector.set_categories(color_mapping.keys())
        # NOTE(review): this None-check comes after color_mapping.keys() was
        # already used above, so it looks unreachable — confirm and hoist.
        if color_mapping is None:
            raise ValueError("Unable to create color palette.")
        # do not rename categories, as colors need not be unique
        color_vector = color_source_vector.map(color_mapping)

        return color_source_vector, color_vector, True

    logger.warning(f"Color key '{value_to_plot}' for element '{element_name}' not been found, using default colors.")

    # key not found anywhere: fall back to NA color for every observation
    color = np.full(sdata[table_name].n_obs, na_color.get_hex_with_alpha())
    return color, color, False
def _map_color_seg(
    seg: ArrayLike,
    cell_id: ArrayLike,
    color_vector: ArrayLike | pd.Series[CategoricalDtype],
    color_source_vector: pd.Series[CategoricalDtype],
    cmap_params: CmapParams,
    na_color: Color,
    seg_erosionpx: int | None = None,
    seg_boundaries: bool = False,
) -> ArrayLike:
    """Map per-cell colors onto a segmentation mask and return an RGBA image.

    Parameters
    ----------
    seg
        Segmentation mask whose integer labels are cell ids.
    cell_id
        Cell ids present in the segmentation, aligned with ``color_vector``.
    color_vector
        Per-cell colors or values (categorical, numeric, or color strings).
    color_source_vector
        Raw categorical source values; used to blank out NaN cells.
    cmap_params
        Colormap and normalization used for numeric values.
    na_color
        Color used for missing values.
    seg_erosionpx
        If given, erode each label by this many pixels so boundaries show.
    seg_boundaries
        If True, darken label boundaries in the output.

    Returns
    -------
    ArrayLike
        RGBA image with the background transparent.
    """
    cell_id = np.array(cell_id)

    # isinstance(..., pd.CategoricalDtype) replaces the deprecated
    # pd.api.types.is_categorical_dtype and matches usage elsewhere in this module.
    if isinstance(color_vector.dtype, pd.CategoricalDtype):
        # Case A: users wants to plot a categorical column
        na_mask = color_source_vector.isna()
        if np.any(na_mask):
            # NaN cells are mapped to background (label 0)
            cell_id[na_mask] = 0
        val_im: ArrayLike = map_array(seg.copy(), cell_id, color_vector.codes + 1)
        cols = colors.to_rgba_array(color_vector.categories)
    elif pd.api.types.is_numeric_dtype(color_vector.dtype):
        # Case B: user wants to plot a continous column
        if isinstance(color_vector, pd.Series):
            color_vector = color_vector.to_numpy()
        cols = cmap_params.cmap(cmap_params.norm(color_vector))
        val_im = map_array(seg.copy(), cell_id, cell_id)
    else:
        # Case C: User didn't specify any colors
        if color_source_vector is not None and (
            set(color_vector) == set(color_source_vector)
            and len(set(color_vector)) == 1
            and set(color_vector) == {na_color.get_hex_with_alpha()}
            and not na_color.color_modified_by_user()
        ):
            val_im = map_array(seg.copy(), cell_id, cell_id)
            RNG = default_rng(42)  # fixed seed => reproducible random label colors
            cols = RNG.random((len(color_vector), 3))
        else:
            # Case D: User didn't specify a column to color by, but modified the na_color
            val_im = map_array(seg.copy(), cell_id, cell_id)
            if "#" in str(color_vector[0]):
                # we have hex colors
                assert all(_is_color_like(c) for c in color_vector), "Not all values are color-like."
                cols = colors.to_rgba_array(color_vector)
            else:
                cols = cmap_params.cmap(cmap_params.norm(color_vector))

    if seg_erosionpx is not None:
        # shrink each label by seg_erosionpx pixels so neighbors are visually separated
        val_im[val_im == erosion(val_im, square(seg_erosionpx))] = 0

    seg_im: ArrayLike = label2rgb(
        label=val_im,
        colors=cols,
        bg_label=0,
        bg_color=(1, 1, 1),  # transparency doesn't really work
        image_alpha=0,
    )

    if seg_boundaries:
        if seg.shape[0] == 1:
            seg = np.squeeze(seg, axis=0)
        seg_bound: ArrayLike = np.clip(seg_im - find_boundaries(seg)[:, :, None], 0, 1)
        return np.dstack((seg_bound, np.where(val_im > 0, 1, 0)))  # add transparency here

    if len(val_im.shape) != len(seg_im.shape):
        val_im = np.expand_dims((val_im > 0).astype(int), axis=-1)
    return np.dstack((seg_im, val_im))
def _generate_base_categorial_color_mapping(
    adata: AnnData | None,
    cluster_key: str,
    color_source_vector: ArrayLike | pd.Series[CategoricalDtype],
    na_color: Color,
    cmap_params: CmapParams | None = None,
) -> Mapping[str, str]:
    """Derive a category -> hex-color mapping, preferring colors stored in ``adata.uns``."""
    has_stored_colors = adata is not None and cluster_key in adata.uns and f"{cluster_key}_colors" in adata.uns
    if not has_stored_colors:
        # no stored colors: fall back to palette/colormap-derived defaults
        return _get_default_categorial_color_mapping(color_source_vector=color_source_vector, cmap_params=cmap_params)

    stored = adata.uns[f"{cluster_key}_colors"]
    # normalize each stored color to an opaque #RRGGBB hex string
    hex_colors = [to_hex(to_rgba(entry)[:3]) for entry in stored]
    categories = color_source_vector.categories.tolist() + ["NaN"]
    if len(categories) > len(hex_colors):
        # one color short (the "NaN" slot): pad with the NA color
        hex_colors = hex_colors + [na_color.get_hex_with_alpha()]
    return dict(zip(categories, hex_colors, strict=True))
def _modify_categorical_color_mapping(
mapping: Mapping[str, str],
groups: list[str] | str | None = None,
palette: list[str] | str | None = None,
) -> Mapping[str, str]:
if groups is None or isinstance(groups, list) and groups[0] is None:
return mapping
if palette is None or isinstance(palette, list) and palette[0] is None:
# subset base mapping to only those specified in groups
modified_mapping = {key: mapping[key] for key in mapping if key in groups or key == "NaN"}
elif len(palette) == len(groups) and isinstance(groups, list) and isinstance(palette, list):
modified_mapping = dict(zip(groups, palette, strict=True))
else:
raise ValueError(f"Expected palette to be of length `{len(groups)}`, found `{len(palette)}`.")
return modified_mapping
def _get_default_categorial_color_mapping(
color_source_vector: ArrayLike | pd.Series[CategoricalDtype],
cmap_params: CmapParams | None = None,
) -> Mapping[str, str]:
len_cat = len(color_source_vector.categories.unique())
# Try to use provided colormap first
if cmap_params is not None and cmap_params.cmap is not None and not cmap_params.cmap_is_default:
# Generate evenly spaced indices for the colormap
color_idx = np.linspace(0, 1, len_cat)
if isinstance(cmap_params.cmap, ListedColormap):
palette = [to_hex(x) for x in cmap_params.cmap(color_idx)]
elif isinstance(cmap_params.cmap, LinearSegmentedColormap):
palette = [to_hex(cmap_params.cmap(x)) for x in color_idx]