18 changes: 18 additions & 0 deletions docs/changes/0.54.3.rst
@@ -0,0 +1,18 @@
QCoDeS 0.54.3 (2025-11-11)
==========================

Improved:
---------

- Improved pyvisa-sim YAMLs for Lakeshore Models 335, 336, and 372.
- Updated Lakeshore tests to use the pyvisa-sim backend instead of mocked classes.
- Updated lakeshore_base.py to bypass waiting when using blocking_t in sim mode. (:pr:`7606`)
- Fixed a bug in the LinSweeper iterator that caused it to always raise StopIteration after
  completing a single sweep, which meant LinSweeper could not be used in a nested measurement
  function (a usage sketch follows this changelog excerpt). (:pr:`7607`)

Improved Drivers:
-----------------

- The Stanford SR86x drivers now statically assign attributes for more of their member
  InstrumentModules and parameters, enabling better documentation, type checking, and
  IDE integration. (:pr:`7542`)
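To make the LinSweeper fix above concrete, here is a minimal usage sketch of a nested sweep inside one measurement. It is hedged: it assumes LinSweeper is importable from qcodes.dataset and accepts LinSweep-style (param, start, stop, num_points, delay) arguments, and dac, dmm, and exp are placeholder instruments and experiment, not part of this PR. Before :pr:`7607`, inner sweeps after the first could terminate immediately with StopIteration.

from qcodes.dataset import LinSweeper, Measurement

meas = Measurement(exp=exp, name="nested_linsweeper_sketch")
meas.register_parameter(dac.ch1)
meas.register_parameter(dac.ch2)
meas.register_parameter(dmm.v1, setpoints=(dac.ch1, dac.ch2))

with meas.run() as datasaver:
    for _ in LinSweeper(dac.ch1, 0, 1, 5, 0.01):       # outer sweep
        # A fresh inner sweep is started for every outer setpoint; this is the
        # nested pattern that the StopIteration bug broke before the fix.
        for _ in LinSweeper(dac.ch2, -1, 1, 11, 0.01):  # inner sweep
            datasaver.add_result(
                (dac.ch1, dac.ch1()),
                (dac.ch2, dac.ch2()),
                (dmm.v1, dmm.v1()),
            )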
8 changes: 8 additions & 0 deletions docs/changes/0.54.4.rst
@@ -0,0 +1,8 @@
QCoDeS 0.54.4 (2025-12-12)
==========================

Improved:
---------

- The `InterDependencies_` class is now frozen while a measurement is running so that it cannot be modified.
  This enables caching of attributes on the class, significantly reducing the overhead of measurements. (:pr:`7712`)
2 changes: 2 additions & 0 deletions docs/changes/index.rst
@@ -3,6 +3,8 @@ Changelogs

.. toctree::
Unreleased <unreleased>
0.54.4 <0.54.4>
0.54.3 <0.54.3>
0.54.1 <0.54.1>
0.54.0 <0.54.0>
0.53.0 <0.53.0>
3 changes: 0 additions & 3 deletions docs/changes/newsfragments/7542.improved_driver

This file was deleted.

3 changes: 0 additions & 3 deletions docs/changes/newsfragments/7606.improved

This file was deleted.

2 changes: 0 additions & 2 deletions docs/changes/newsfragments/7607.improved

This file was deleted.

7 changes: 5 additions & 2 deletions src/qcodes/dataset/data_set.py
@@ -566,7 +566,10 @@ def toggle_debug(self) -> None:
self.conn = connect(path_to_db, self._debug)

def set_interdependencies(
self, interdeps: InterDependencies_, shapes: Shapes | None = None
self,
interdeps: InterDependencies_,
shapes: Shapes | None = None,
override: bool = False,
) -> None:
"""
Set the interdependencies object (which holds all added
@@ -579,7 +582,7 @@ def set_interdependencies(
f"Wrong input type. Expected InterDepencies_, got {type(interdeps)}"
)

if not self.pristine:
if not self.pristine and not override:
mssg = "Can not set interdependencies on a DataSet that has been started."
raise RuntimeError(mssg)
self._rundescriber = RunDescriber(interdeps, shapes=shapes)
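For orientation, here is a hedged sketch (not part of this diff) of how the new override flag is used: it lets a caller replace the interdependencies on a DataSet that has already been started, which the measurement context manager relies on further down to swap the frozen interdependencies back for a mutable copy. The variable ds is a placeholder for a started DataSet.

# `ds` is assumed to be a started DataSet whose run description currently holds
# a FrozenInterDependencies_ (as set up by Measurement.run()).
frozen = ds.description.interdeps
ds.set_interdependencies(
    interdeps=frozen.to_interdependencies(),  # mutable copy of the same graph
    shapes=ds.description.shapes,
    override=True,  # bypass the "pristine" guard that would otherwise raise RuntimeError
)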
7 changes: 5 additions & 2 deletions src/qcodes/dataset/data_set_in_memory.py
@@ -748,7 +748,10 @@ def _set_parent_dataset_links(self, links: list[Link]) -> None:
self._parent_dataset_links = links

def _set_interdependencies(
self, interdeps: InterDependencies_, shapes: Shapes | None = None
self,
interdeps: InterDependencies_,
shapes: Shapes | None = None,
override: bool = False,
) -> None:
"""
Set the interdependencies object (which holds all added
@@ -761,7 +764,7 @@ def _set_interdependencies(
f"Wrong input type. Expected InterDepencies_, got {type(interdeps)}"
)

if not self.pristine:
if not self.pristine and not override:
mssg = "Can not set interdependencies on a DataSet that has been started."
raise RuntimeError(mssg)
self._rundescriber = RunDescriber(interdeps, shapes=shapes)
184 changes: 175 additions & 9 deletions src/qcodes/dataset/descriptions/dependencies.py
@@ -428,6 +428,18 @@ def validate_paramspectree(
else:
raise ValueError(f"Invalid {interdep_type_internal}") from TypeError(cause)

def _invalid_subsets(
self, paramspecs: Sequence[ParamSpecBase]
) -> tuple[set[str], set[str]] | None:
subset_nodes = {paramspec.name for paramspec in paramspecs}
for subset_node in subset_nodes:
descendant_nodes_per_subset_node = nx.descendants(self.graph, subset_node)
if missing_nodes := descendant_nodes_per_subset_node.difference(
subset_nodes
):
return (subset_nodes, missing_nodes)
return None

def validate_subset(self, paramspecs: Sequence[ParamSpecBase]) -> None:
"""
Validate that the given parameters form a valid subset of the
@@ -442,15 +454,11 @@ def validate_subset(self, paramspecs: Sequence[ParamSpecBase]) -> None:
InterdependencyError: If a dependency or inference is missing

"""
subset_nodes = set([paramspec.name for paramspec in paramspecs])
for subset_node in subset_nodes:
descendant_nodes_per_subset_node = nx.descendants(self.graph, subset_node)
if missing_nodes := descendant_nodes_per_subset_node.difference(
subset_nodes
):
raise IncompleteSubsetError(
subset_params=subset_nodes, missing_params=missing_nodes
)
invalid_subset = self._invalid_subsets(paramspecs)
if invalid_subset is not None:
raise IncompleteSubsetError(
subset_params=invalid_subset[0], missing_params=invalid_subset[1]
)

@classmethod
def _from_graph(cls, graph: nx.DiGraph[str]) -> InterDependencies_:
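As a quick illustration of what the refactored validate_subset now reports via _invalid_subsets, here is a hedged sketch; the parameters x and y are made up for the example.

from qcodes.dataset.descriptions.dependencies import (
    IncompleteSubsetError,
    InterDependencies_,
)
from qcodes.dataset.descriptions.param_spec import ParamSpecBase

x = ParamSpecBase("x", "numeric")
y = ParamSpecBase("y", "numeric")
idps = InterDependencies_(dependencies={y: (x,)})  # y is measured against setpoint x

idps.validate_subset((y, x))  # complete subset: passes silently
try:
    idps.validate_subset((y,))  # x is a descendant of y in the graph but missing here
except IncompleteSubsetError:
    pass  # raised with subset_params={"y"} and missing_params={"x"}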
@@ -624,3 +632,161 @@ def paramspec_tree_to_param_name_tree(
return {
key.name: [item.name for item in items] for key, items in paramspec_tree.items()
}


class FrozenInterDependencies_(InterDependencies_): # noqa: PLW1641
# todo: not clear if this should implement __hash__.
"""
A frozen version of InterDependencies_ that is immutable and caches
expensive lookups. This is used exclusively while running a measurement
to minimize the overhead of dependency lookups for each data operation.

Args:
interdeps: An InterDependencies_ instance to freeze

"""

def __init__(self, interdeps: InterDependencies_):
self._graph = interdeps.graph.copy()
nx.freeze(self._graph)
self._top_level_parameters_cache: tuple[ParamSpecBase, ...] | None = None
self._dependencies_cache: ParamSpecTree | None = None
self._inferences_cache: ParamSpecTree | None = None
self._standalones_cache: frozenset[ParamSpecBase] | None = None
self._find_all_parameters_in_tree_cache: dict[
ParamSpecBase, set[ParamSpecBase]
] = {}
self._invalid_subsets_cache: dict[
tuple[ParamSpecBase, ...], tuple[set[str], set[str]] | None
] = {}
self._id_to_paramspec_cache: dict[str, ParamSpecBase] | None = None
self._paramspec_to_id_cache: dict[ParamSpecBase, str] | None = None

def add_dependencies(self, dependencies: ParamSpecTree | None) -> None:
raise TypeError("FrozenInterDependencies_ is immutable")

def add_inferences(self, inferences: ParamSpecTree | None) -> None:
raise TypeError("FrozenInterDependencies_ is immutable")

def add_standalones(self, standalones: tuple[ParamSpecBase, ...]) -> None:
raise TypeError("FrozenInterDependencies_ is immutable")

def add_paramspecs(self, paramspecs: Sequence[ParamSpecBase]) -> None:
raise TypeError("FrozenInterDependencies_ is immutable")

def remove(self, paramspec: ParamSpecBase) -> InterDependencies_:
raise TypeError("FrozenInterDependencies_ is immutable")

def extend(
self,
dependencies: ParamSpecTree | None = None,
inferences: ParamSpecTree | None = None,
standalones: tuple[ParamSpecBase, ...] = (),
) -> InterDependencies_:
"""
Create a new :class:`InterDependencies_` object
that is an extension of this instance with the provided input
"""
# We need to unfreeze the graph for the new instance
new_graph = nx.DiGraph(self.graph)
new_interdependencies = InterDependencies_._from_graph(new_graph)

new_interdependencies.add_dependencies(dependencies)
new_interdependencies.add_inferences(inferences)
new_interdependencies.add_standalones(standalones)
return new_interdependencies

@property
def top_level_parameters(self) -> tuple[ParamSpecBase, ...]:
if self._top_level_parameters_cache is None:
self._top_level_parameters_cache = super().top_level_parameters
return self._top_level_parameters_cache

@property
def dependencies(self) -> ParamSpecTree:
if self._dependencies_cache is None:
self._dependencies_cache = super().dependencies
return self._dependencies_cache.copy()

@property
def inferences(self) -> ParamSpecTree:
if self._inferences_cache is None:
self._inferences_cache = super().inferences
return self._inferences_cache.copy()

@property
def standalones(self) -> frozenset[ParamSpecBase]:
if self._standalones_cache is None:
self._standalones_cache = super().standalones
return self._standalones_cache

def find_all_parameters_in_tree(
self, initial_param: ParamSpecBase
) -> set[ParamSpecBase]:
if initial_param not in self._find_all_parameters_in_tree_cache:
self._find_all_parameters_in_tree_cache[initial_param] = (
super().find_all_parameters_in_tree(initial_param)
)
return self._find_all_parameters_in_tree_cache[initial_param].copy()

@classmethod
def _from_dict(cls, ser: InterDependencies_Dict) -> FrozenInterDependencies_:
interdeps = InterDependencies_._from_dict(ser)
return cls(interdeps)

@classmethod
def _from_graph(cls, graph: nx.DiGraph[str]) -> FrozenInterDependencies_:
interdeps = InterDependencies_._from_graph(graph)
return cls(interdeps)

def validate_subset(self, paramspecs: Sequence[ParamSpecBase]) -> None:
paramspecs_tuple = tuple(paramspecs)
if paramspecs_tuple not in self._invalid_subsets_cache:
self._invalid_subsets_cache[paramspecs_tuple] = self._invalid_subsets(
paramspecs_tuple
)
invalid_subset = self._invalid_subsets_cache[paramspecs_tuple]
if invalid_subset is not None:
raise IncompleteSubsetError(
subset_params=invalid_subset[0], missing_params=invalid_subset[1]
)

@property
def _id_to_paramspec(self) -> dict[str, ParamSpecBase]:
if self._id_to_paramspec_cache is None:
self._id_to_paramspec_cache = {
node_id: data["value"] for node_id, data in self.graph.nodes(data=True)
}
return self._id_to_paramspec_cache

@property
def _paramspec_to_id(self) -> dict[ParamSpecBase, str]:
if self._paramspec_to_id_cache is None:
self._paramspec_to_id_cache = {
data["value"]: node_id for node_id, data in self.graph.nodes(data=True)
}
return self._paramspec_to_id_cache

def __repr__(self) -> str:
rep = (
f"FrozenInterDependencies_(dependencies={self.dependencies}, "
f"inferences={self.inferences}, "
f"standalones={self.standalones})"
)
return rep

def __eq__(self, other: object) -> bool:
if not isinstance(other, FrozenInterDependencies_):
return False
return nx.utils.graphs_equal(self.graph, other.graph)

def to_interdependencies(self) -> InterDependencies_:
"""
Convert this FrozenInterDependencies_ back to a mutable InterDependencies_ instance.

Returns:
A new InterDependencies_ instance with the same data as this frozen instance.

"""
new_graph = nx.DiGraph(self.graph)
return InterDependencies_._from_graph(new_graph)
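A hedged sketch of how the class above behaves in isolation (the ParamSpecBase instances are invented for the example): properties are computed once by the parent class and then served from per-instance caches, every mutating method raises TypeError, and to_interdependencies() returns a mutable copy.

from qcodes.dataset.descriptions.dependencies import (
    FrozenInterDependencies_,
    InterDependencies_,
)
from qcodes.dataset.descriptions.param_spec import ParamSpecBase

x = ParamSpecBase("x", "numeric")
y = ParamSpecBase("y", "numeric")
frozen = FrozenInterDependencies_(InterDependencies_(dependencies={y: (x,)}))

frozen.dependencies           # computed via the parent class on first access ...
frozen.dependencies           # ... then returned as a copy of the cached tree
frozen.top_level_parameters   # likewise cached after the first lookup

try:
    frozen.add_standalones((ParamSpecBase("z", "numeric"),))
except TypeError:
    pass  # all add_*/remove methods refuse to mutate the frozen instance

mutable = frozen.to_interdependencies()  # plain InterDependencies_ with the same graph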
25 changes: 24 additions & 1 deletion src/qcodes/dataset/measurements.py
@@ -36,6 +36,7 @@
ValuesType,
)
from qcodes.dataset.descriptions.dependencies import (
FrozenInterDependencies_,
IncompleteSubsetError,
InterDependencies_,
ParamSpecTree,
@@ -759,6 +760,28 @@ def __exit__(
self._span.record_exception(exception_value)
self.ds.add_metadata("measurement_exception", exception_string)

# For now we set the interdependencies back to the
# non-frozen state so that further modifications are possible.
# This is not recommended, but we want to minimize the changes for now.

if isinstance(self.ds.description.interdeps, FrozenInterDependencies_):
interdeps = self.ds.description.interdeps.to_interdependencies()
else:
interdeps = self.ds.description.interdeps

if isinstance(self.ds, DataSet):
self.ds.set_interdependencies(
shapes=self.ds.description.shapes,
interdeps=interdeps,
override=True,
)
elif isinstance(self.ds, DataSetInMem):
self.ds._set_interdependencies(
shapes=self.ds.description.shapes,
interdeps=interdeps,
override=True,
)

# and finally mark the dataset as closed, thus
# finishing the measurement
# Note that the completion of a dataset entails waiting for the
@@ -1508,7 +1531,7 @@ def run(
self.experiment,
station=self.station,
write_period=self._write_period,
interdeps=self._interdeps,
interdeps=FrozenInterDependencies_(self._interdeps),
name=self.name,
subscribers=self.subscribers,
parent_datasets=self._parent_datasets,
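Putting the measurements.py changes together, here is a hedged sketch of the user-visible lifecycle; exp and param are placeholders and the asserts reflect a reading of this diff rather than an official example. run() hands the dataset a FrozenInterDependencies_, and __exit__ converts it back to a mutable InterDependencies_ once the measurement finishes.

from qcodes.dataset import Measurement
from qcodes.dataset.descriptions.dependencies import (
    FrozenInterDependencies_,
    InterDependencies_,
)

meas = Measurement(exp=exp)   # `exp` and `param` are placeholder objects
meas.register_parameter(param)

with meas.run() as datasaver:
    ds = datasaver.dataset
    # While the measurement runs, dependency lookups go through the frozen, caching subclass.
    assert isinstance(ds.description.interdeps, FrozenInterDependencies_)
    datasaver.add_result((param, param()))

# On exit the interdependencies are restored to a plain, mutable instance.
assert isinstance(ds.description.interdeps, InterDependencies_)
assert not isinstance(ds.description.interdeps, FrozenInterDependencies_)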
14 changes: 14 additions & 0 deletions tests/dataset/measurement/test_measurement_context_manager.py
@@ -21,6 +21,10 @@
import qcodes as qc
import qcodes.validators as vals
from qcodes.dataset.data_set import DataSet, load_by_id
from qcodes.dataset.descriptions.dependencies import (
FrozenInterDependencies_,
InterDependencies_,
)
from qcodes.dataset.experiment_container import new_experiment
from qcodes.dataset.export_config import DataExportType
from qcodes.dataset.measurements import Measurement
@@ -730,6 +734,16 @@ def test_datasaver_scalars(
with pytest.raises(ValueError):
datasaver.add_result((DMM.v1, 0))

ds = datasaver.dataset
assert isinstance(ds, DataSet)
assert isinstance(ds.description.interdeps, InterDependencies_)
assert not isinstance(ds.description.interdeps, FrozenInterDependencies_)

loaded_ds = load_by_id(ds.run_id)

assert isinstance(loaded_ds.description.interdeps, InterDependencies_)
assert not isinstance(loaded_ds.description.interdeps, FrozenInterDependencies_)

# More assertions of setpoints, labels and units in the DB!

