Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1760,7 +1760,12 @@ impl DocumentMessageHandler {
}

pub fn deserialize_document(serialized_content: &str) -> Result<Self, EditorError> {
let document_message_handler = serde_json::from_str::<DocumentMessageHandler>(serialized_content)
// Walk the document JSON and rewrite any `TaggedValue` variants that have been removed since being released as `"None"` so the document still deserializes.
// `migrate_node` then drops the resulting orphan node inputs so the value never reaches graph execution.
let mut json_value: serde_json::Value = serde_json::from_str(serialized_content).map_err(|e| EditorError::DocumentDeserialization(e.to_string()))?;
graph_craft::document::value::TaggedValue::scrub_removed_variants_from_json(&mut json_value);

let document_message_handler = serde_json::from_value::<DocumentMessageHandler>(json_value.clone())
.or_else(|e| {
log::warn!("Failed to directly load document with the following error: {e}. Trying old DocumentMessageHandler.");
// TODO: Eventually remove this document upgrade code
Expand Down Expand Up @@ -1799,7 +1804,7 @@ impl DocumentMessageHandler {
pub snapping_state: SnappingState,
}

serde_json::from_str::<OldDocumentMessageHandler>(serialized_content).map(|old_message_handler| DocumentMessageHandler {
serde_json::from_value::<OldDocumentMessageHandler>(json_value).map(|old_message_handler| DocumentMessageHandler {
network_interface: NodeNetworkInterface::from_old_network(old_message_handler.network),
collapsed: old_message_handler.collapsed,
commit_hash: old_message_handler.commit_hash,
Expand Down
15 changes: 13 additions & 2 deletions editor/src/messages/portfolio/document_migration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1622,16 +1622,27 @@ fn migrate_node(node_id: &NodeId, node: &DocumentNode, network_path: &[NodeId],
document.network_interface.set_input(&InputConnector::node(*node_id, 1), old_inputs[1].clone(), network_path);
}

// Old shape: [background, bounds, trace, cache]. Both "bounds" (input 1) and "cache" (input 3) are dropped, and "cache" is now stored as
// internal node state via `#[data]` on the brush node, so it is not a node input at all in the new shape.
if reference == DefinitionIdentifier::ProtoNode(graphene_std::brush::brush::brush::IDENTIFIER) && inputs_count == 4 {
let mut node_template = resolve_document_node_type(&reference)?.default_node_template();
document.network_interface.replace_implementation(node_id, network_path, &mut node_template);

let old_inputs = document.network_interface.replace_inputs(node_id, network_path, &mut node_template)?;

document.network_interface.set_input(&InputConnector::node(*node_id, 0), old_inputs[0].clone(), network_path);
// We have removed the second input ("bounds"), so we don't add index 1 and we shift the rest of the inputs down by one
document.network_interface.set_input(&InputConnector::node(*node_id, 1), old_inputs[2].clone(), network_path);
document.network_interface.set_input(&InputConnector::node(*node_id, 2), old_inputs[3].clone(), network_path);
}

// Old shape: [background, trace, cache]. The "cache" input is dropped because the brush node now stores its cache as internal `#[data]` state.
if reference == DefinitionIdentifier::ProtoNode(graphene_std::brush::brush::brush::IDENTIFIER) && inputs_count == 3 {
let mut node_template = resolve_document_node_type(&reference)?.default_node_template();
document.network_interface.replace_implementation(node_id, network_path, &mut node_template);

let old_inputs = document.network_interface.replace_inputs(node_id, network_path, &mut node_template)?;

document.network_interface.set_input(&InputConnector::node(*node_id, 0), old_inputs[0].clone(), network_path);
document.network_interface.set_input(&InputConnector::node(*node_id, 1), old_inputs[1].clone(), network_path);
}

if reference == DefinitionIdentifier::ProtoNode(ProtoNodeIdentifier::new("graphene_core::vector::RemoveHandlesNode")) {
Expand Down
67 changes: 63 additions & 4 deletions node-graph/graph-craft/src/document/value.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
use super::DocumentNode;
use crate::application_io::PlatformEditorApi;
use crate::proto::{Any as DAny, FutureAny};
use brush_nodes::brush_cache::BrushCache;
use brush_nodes::brush_stroke::BrushStroke;
use core_types::table::Table;
use core_types::transform::Footprint;
Expand Down Expand Up @@ -39,7 +38,7 @@ macro_rules! tagged_value {
$( $(#[$meta] ) *$identifier( $ty ), )*
RenderOutput(RenderOutput),
#[serde(skip)]
EditorApi(Arc<PlatformEditorApi>)
EditorApi(Arc<PlatformEditorApi>),
}

impl CacheHash for TaggedValue {
Expand Down Expand Up @@ -79,7 +78,7 @@ macro_rules! tagged_value {
Self::None => concrete!(()),
$( Self::$identifier(_) => concrete!($ty), )*
Self::RenderOutput(_) => concrete!(RenderOutput),
Self::EditorApi(_) => concrete!(&PlatformEditorApi)
Self::EditorApi(_) => concrete!(&PlatformEditorApi),
}
}
/// Attempts to downcast the dynamic type to a tagged value
Expand Down Expand Up @@ -211,7 +210,6 @@ tagged_value! {
Stroke(Stroke),
Gradient(Gradient),
Font(Font),
BrushCache(BrushCache),
DocumentNode(DocumentNode),
ContextFeatures(ContextFeatures),
Curve(Curve),
Expand Down Expand Up @@ -412,6 +410,35 @@ impl TaggedValue {
_ => panic!("Passed value is not of type u32"),
}
}

/// Walks a JSON document tree and rewrites every externally-tagged `TaggedValue` whose discriminant appears in `REMOVED_VARIANTS` into the unit variant `"None"`.
/// This lets documents saved before a variant was deleted keep deserializing; the document migration step afterwards strips any orphan node inputs this produces.
#[cfg(feature = "loading")]
pub fn scrub_removed_variants_from_json(value: &mut serde_json::Value) {
	// `TaggedValue` variants that no longer exist. Any object shaped like `{"<name>": <payload>}` with one of these names is rewritten to `"None"` on load.
	const REMOVED_VARIANTS: &[&str] = &["BrushCache"];

	match value {
		serde_json::Value::Object(map) => {
			// An externally-tagged enum variant serializes as a single-entry object keyed by the variant name.
			let is_removed_variant = map.len() == 1 && map.keys().all(|key| REMOVED_VARIANTS.contains(&key.as_str()));
			if is_removed_variant {
				// Replace the whole object with the unit variant; the stale payload is intentionally discarded.
				*value = serde_json::Value::String(String::from("None"));
			} else {
				// Not a removed variant: keep walking into every field.
				map.values_mut().for_each(Self::scrub_removed_variants_from_json);
			}
		}
		serde_json::Value::Array(array) => array.iter_mut().for_each(Self::scrub_removed_variants_from_json),
		// Scalars (strings, numbers, booleans, null) cannot contain a tagged variant.
		_ => {}
	}
}
}

impl Display for TaggedValue {
Expand Down Expand Up @@ -518,3 +545,35 @@ impl CacheHash for RenderOutput {
self.data.cache_hash(state);
}
}

#[cfg(all(test, feature = "loading"))]
mod tests {
	use super::*;

	#[test]
	fn scrub_replaces_removed_variant_with_none_unit() {
		// A node input holding the removed `BrushCache` variant must collapse to the unit variant `"None"`.
		let mut document = serde_json::json!({
			"Value": {
				"tagged_value": { "BrushCache": { "unique_id": 1, "prev_input": [] } },
				"exposed": false
			}
		});
		let expected = serde_json::json!({ "Value": { "tagged_value": "None", "exposed": false } });

		TaggedValue::scrub_removed_variants_from_json(&mut document);

		assert_eq!(document, expected);
	}

	#[test]
	fn scrub_leaves_live_variants_unchanged() {
		// Variants that still exist must pass through the scrub byte-for-byte.
		let original = serde_json::json!({ "Value": { "tagged_value": { "F64": 1.5 }, "exposed": false } });
		let mut scrubbed = original.clone();

		TaggedValue::scrub_removed_variants_from_json(&mut scrubbed);

		assert_eq!(scrubbed, original);
	}

	#[test]
	fn scrub_recurses_through_arrays_and_nested_objects() {
		// Removed variants are rewritten even when nested inside array elements.
		let mut nested = serde_json::json!([{ "BrushCache": { "any": "payload" } }, { "F32": 0.5 }]);

		TaggedValue::scrub_removed_variants_from_json(&mut nested);

		assert_eq!(nested, serde_json::json!(["None", { "F32": 0.5 }]));
	}
}
3 changes: 0 additions & 3 deletions node-graph/interpreted-executor/src/node_registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ use graph_craft::document::value::RenderOutput;
use graph_craft::proto::{NodeConstructor, TypeErasedBox};
use graphene_std::any::DynAnyNode;
use graphene_std::application_io::ImageTexture;
use graphene_std::brush::brush_cache::BrushCache;
use graphene_std::brush::brush_stroke::BrushStroke;
use graphene_std::gradient::GradientStops;
#[cfg(target_family = "wasm")]
Expand Down Expand Up @@ -158,7 +157,6 @@ fn node_registry() -> HashMap<ProtoNodeIdentifier, HashMap<NodeIOTypes, NodeCons
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => Graphic]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => graphene_std::text::Font]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => Table<BrushStroke>]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => BrushCache]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => DocumentNode]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => graphene_std::raster::curve::Curve]),
async_node!(graphene_core::memo::MonitorNode<_, _, _>, input: Context, fn_params: [Context => graphene_std::transform::Footprint]),
Expand Down Expand Up @@ -246,7 +244,6 @@ fn node_registry() -> HashMap<ProtoNodeIdentifier, HashMap<NodeIOTypes, NodeCons
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => graphene_std::vector::style::Gradient]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => graphene_std::text::Font]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => Table<BrushStroke>]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => BrushCache]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => DocumentNode]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => graphene_std::ContextFeatures]),
async_node!(graphene_core::memo::MemoizeNode<_, _>, input: Context, fn_params: [Context => graphene_std::raster::curve::Curve]),
Expand Down
3 changes: 2 additions & 1 deletion node-graph/nodes/brush/src/brush.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ async fn brush(
/// The list of brush stroke paths drawn by the Brush tool, with each including both its coordinates and styles.
trace: Table<BrushStroke>,
/// Internal cache data used to accelerate rendering of the brush content.
#[data]
cache: BrushCache,
) -> Table<Raster<CPU>> {
if background.is_empty() {
Expand Down Expand Up @@ -419,6 +420,7 @@ mod test {
async fn test_brush_output_size() {
let image = brush(
(),
&BrushCache::default(),
Table::new_from_element(Raster::new_cpu(Image::<Color>::default())),
Table::new_from_element(BrushStroke {
trace: vec![crate::brush_stroke::BrushInputSample { position: DVec2::ZERO }],
Expand All @@ -431,7 +433,6 @@ mod test {
blend_mode: BlendMode::Normal,
},
}),
BrushCache::default(),
)
.await;
assert_eq!(image.element(0).unwrap().width, 20);
Expand Down
83 changes: 2 additions & 81 deletions node-graph/nodes/brush/src/brush_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,37 +3,22 @@ use crate::brush_stroke::BrushStyle;
use core_types::ATTR_TRANSFORM;
use core_types::graphene_hash::CacheHashWrapper;
use core_types::table::TableRow;
use dyn_any::DynAny;
use raster_types::CPU;
use raster_types::Raster;
use std::collections::HashMap;
use std::hash::Hash;
use std::hash::Hasher;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, Mutex};

// TODO: This is a temporary hack, be sure to not reuse this when the brush system is replaced/rewritten.
static NEXT_BRUSH_CACHE_IMPL_ID: AtomicU64 = AtomicU64::new(0);

#[derive(Clone, Debug, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Clone, Debug, Default)]
struct BrushCacheImpl {
#[cfg_attr(feature = "serde", serde(default = "new_unique_id"))]
unique_id: u64,
// The full previous input that was cached.
#[cfg_attr(feature = "serde", serde(default))]
prev_input: Vec<BrushStroke>,

// The strokes that have been fully processed and blended into the background.
#[cfg_attr(feature = "serde", serde(default, deserialize_with = "raster_types::image::migrate_image_frame_row"))]
background: TableRow<Raster<CPU>>,
#[cfg_attr(feature = "serde", serde(default, deserialize_with = "raster_types::image::migrate_image_frame_row"))]
blended_image: TableRow<Raster<CPU>>,
#[cfg_attr(feature = "serde", serde(default, deserialize_with = "raster_types::image::migrate_image_frame_row"))]
last_stroke_texture: TableRow<Raster<CPU>>,

// A cache for brush textures.
#[cfg_attr(feature = "serde", serde(skip))]
brush_texture_cache: HashMap<CacheHashWrapper<BrushStyle>, Raster<CPU>>,
}

Expand Down Expand Up @@ -97,35 +82,6 @@ impl BrushCacheImpl {
}
}

impl Default for BrushCacheImpl {
fn default() -> Self {
Self {
unique_id: new_unique_id(),
prev_input: Vec::new(),
background: Default::default(),
blended_image: Default::default(),
last_stroke_texture: Default::default(),
brush_texture_cache: HashMap::new(),
}
}
}

impl PartialEq for BrushCacheImpl {
fn eq(&self, other: &Self) -> bool {
self.unique_id == other.unique_id
}
}

impl Hash for BrushCacheImpl {
fn hash<H: Hasher>(&self, state: &mut H) {
self.unique_id.hash(state);
}
}

fn new_unique_id() -> u64 {
NEXT_BRUSH_CACHE_IMPL_ID.fetch_add(1, Ordering::SeqCst)
}

#[derive(Clone, Debug, Default)]
pub struct BrushPlan {
pub strokes: Vec<BrushStroke>,
Expand All @@ -134,44 +90,9 @@ pub struct BrushPlan {
pub first_stroke_point_skip: usize,
}

#[derive(Debug, Default, DynAny)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, Default, Clone)]
pub struct BrushCache(Arc<Mutex<BrushCacheImpl>>);

// A bit of a cursed implementation to work around the current node system.
// The original object is a 'prototype' that when cloned gives you a independent
// new object. Any further clones however are all the same underlying cache object.
impl Clone for BrushCache {
fn clone(&self) -> Self {
Self(Arc::new(Mutex::new(self.0.lock().unwrap().clone())))
}
}

impl PartialEq for BrushCache {
fn eq(&self, other: &Self) -> bool {
if Arc::ptr_eq(&self.0, &other.0) {
return true;
}

let s = self.0.lock().unwrap();
let o = other.0.lock().unwrap();

*s == *o
}
}

impl Hash for BrushCache {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.lock().unwrap().hash(state);
}
}

impl graphene_hash::CacheHash for BrushCache {
fn cache_hash<H: core::hash::Hasher>(&self, state: &mut H) {
core::hash::Hash::hash(&self.0.lock().unwrap().unique_id, state);
}
}

impl BrushCache {
pub fn compute_brush_plan(&self, background: TableRow<Raster<CPU>>, input: &[BrushStroke]) -> BrushPlan {
let mut inner = self.0.lock().unwrap();
Expand Down
2 changes: 1 addition & 1 deletion node-graph/nodes/brush/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
pub mod brush;
pub mod brush_cache;
mod brush_cache;
pub mod brush_stroke;

pub mod migrations {
Expand Down
Loading